use super::diagnostics::SnapshotParser;
use super::pat::{CommaRecoveryMode, Expected, RecoverColon, RecoverComma};
use super::ty::{AllowPlus, RecoverQPath, RecoverReturnSign};
use super::{
    AttrWrapper, BlockMode, ClosureSpans, ForceCollect, Parser, PathStyle, Restrictions,
    SemiColonMode, SeqSep, TokenExpectType, TokenType, TrailingToken,
};
use crate::errors;
use crate::maybe_recover_from_interpolated_ty_qpath;
use ast::mut_visit::{noop_visit_expr, MutVisitor};
use ast::{Path, PathSegment};
use core::mem;
use rustc_ast::ptr::P;
use rustc_ast::token::{self, Delimiter, Token, TokenKind};
use rustc_ast::tokenstream::Spacing;
use rustc_ast::util::case::Case;
use rustc_ast::util::classify;
use rustc_ast::util::parser::{prec_let_scrutinee_needs_par, AssocOp, Fixity};
use rustc_ast::visit::Visitor;
use rustc_ast::{self as ast, AttrStyle, AttrVec, CaptureBy, ExprField, UnOp, DUMMY_NODE_ID};
use rustc_ast::{AnonConst, BinOp, BinOpKind, FnDecl, FnRetTy, MacCall, Param, Ty, TyKind};
use rustc_ast::{Arm, Async, BlockCheckMode, Expr, ExprKind, Label, Movability, RangeLimits};
use rustc_ast::{ClosureBinder, MetaItemLit, StmtKind};
use rustc_ast_pretty::pprust;
use rustc_data_structures::stack::ensure_sufficient_stack;
use rustc_errors::{
    AddToDiagnostic, Applicability, Diagnostic, DiagnosticBuilder, ErrorGuaranteed, IntoDiagnostic,
    PResult, StashKey,
};
use rustc_macros::Subdiagnostic;
use rustc_session::errors::{report_lit_error, ExprParenthesesNeeded};
use rustc_session::lint::builtin::BREAK_WITH_LABEL_AND_LOOP;
use rustc_session::lint::BuiltinLintDiagnostics;
use rustc_span::source_map::{self, Span, Spanned};
use rustc_span::symbol::kw::PathRoot;
use rustc_span::symbol::{kw, sym, Ident, Symbol};
use rustc_span::{BytePos, Pos};
use thin_vec::{thin_vec, ThinVec};

/// Possibly accepts a `token::Interpolated` expression (a pre-parsed expression
/// dropped into the token stream, which happens while parsing the result of
/// macro expansion). Placement of these is not as complex as I feared it would
/// be. The important thing is to make sure that lookahead doesn't balk at
/// `token::Interpolated` tokens.
macro_rules! maybe_whole_expr {
    ($p:expr) => {
        if let token::Interpolated(nt) = &$p.token.kind {
            match &**nt {
                token::NtExpr(e) | token::NtLiteral(e) => {
                    let e = e.clone();
                    $p.bump();
                    return Ok(e);
                }
                token::NtPath(path) => {
                    let path = (**path).clone();
                    $p.bump();
                    return Ok($p.mk_expr($p.prev_token.span, ExprKind::Path(None, path)));
                }
                token::NtBlock(block) => {
                    let block = block.clone();
                    $p.bump();
                    return Ok($p.mk_expr($p.prev_token.span, ExprKind::Block(block, None)));
                }
                _ => {}
            };
        }
    };
}

#[derive(Debug)]
pub(super) enum LhsExpr {
    NotYetParsed,
    AttributesParsed(AttrWrapper),
    AlreadyParsed { expr: P<Expr>, starts_statement: bool },
}

impl From<Option<AttrWrapper>> for LhsExpr {
    /// Converts `Some(attrs)` into `LhsExpr::AttributesParsed(attrs)`
    /// and `None` into `LhsExpr::NotYetParsed`.
    ///
    /// This conversion does not allocate.
    fn from(o: Option<AttrWrapper>) -> Self {
        if let Some(attrs) = o { LhsExpr::AttributesParsed(attrs) } else { LhsExpr::NotYetParsed }
    }
}

impl From<P<Expr>> for LhsExpr {
    /// Converts the `expr: P<Expr>` into
    /// `LhsExpr::AlreadyParsed { expr, starts_statement: false }`.
    ///
    /// This conversion does not allocate.
    fn from(expr: P<Expr>) -> Self {
        LhsExpr::AlreadyParsed { expr, starts_statement: false }
    }
}

#[derive(Debug)]
enum DestructuredFloat {
    /// 1e2
    Single(Symbol, Span),
    /// 1.
TrailingDot(Symbol, Span, Span), /// 1.2 | 1.2e3 MiddleDot(Symbol, Span, Span, Symbol, Span), /// Invalid Error, } impl<'a> Parser<'a> { /// Parses an expression. #[inline] pub fn parse_expr(&mut self) -> PResult<'a, P> { self.current_closure.take(); self.parse_expr_res(Restrictions::empty(), None) } /// Parses an expression, forcing tokens to be collected pub fn parse_expr_force_collect(&mut self) -> PResult<'a, P> { self.collect_tokens_no_attrs(|this| this.parse_expr()) } pub fn parse_expr_anon_const(&mut self) -> PResult<'a, AnonConst> { self.parse_expr().map(|value| AnonConst { id: DUMMY_NODE_ID, value }) } fn parse_expr_catch_underscore(&mut self, restrictions: Restrictions) -> PResult<'a, P> { match self.parse_expr_res(restrictions, None) { Ok(expr) => Ok(expr), Err(mut err) => match self.token.ident() { Some((Ident { name: kw::Underscore, .. }, false)) if self.may_recover() && self.look_ahead(1, |t| t == &token::Comma) => { // Special-case handling of `foo(_, _, _)` err.emit(); self.bump(); Ok(self.mk_expr(self.prev_token.span, ExprKind::Err)) } _ => Err(err), }, } } /// Parses a sequence of expressions delimited by parentheses. fn parse_expr_paren_seq(&mut self) -> PResult<'a, ThinVec>> { self.parse_paren_comma_seq(|p| p.parse_expr_catch_underscore(Restrictions::empty())) .map(|(r, _)| r) } /// Parses an expression, subject to the given restrictions. #[inline] pub(super) fn parse_expr_res( &mut self, r: Restrictions, already_parsed_attrs: Option, ) -> PResult<'a, P> { self.with_res(r, |this| this.parse_expr_assoc(already_parsed_attrs)) } /// Parses an associative expression. /// /// This parses an expression accounting for associativity and precedence of the operators in /// the expression. #[inline] fn parse_expr_assoc( &mut self, already_parsed_attrs: Option, ) -> PResult<'a, P> { self.parse_expr_assoc_with(0, already_parsed_attrs.into()) } /// Parses an associative expression with operators of at least `min_prec` precedence. pub(super) fn parse_expr_assoc_with( &mut self, min_prec: usize, lhs: LhsExpr, ) -> PResult<'a, P> { let mut starts_stmt = false; let mut lhs = if let LhsExpr::AlreadyParsed { expr, starts_statement } = lhs { starts_stmt = starts_statement; expr } else { let attrs = match lhs { LhsExpr::AttributesParsed(attrs) => Some(attrs), _ => None, }; if self.token.is_range_separator() { return self.parse_expr_prefix_range(attrs); } else { self.parse_expr_prefix(attrs)? } }; if !self.should_continue_as_assoc_expr(&lhs) { return Ok(lhs); } self.expected_tokens.push(TokenType::Operator); while let Some(op) = self.check_assoc_op() { let lhs_span = self.interpolated_or_expr_span(&lhs); let cur_op_span = self.token.span; let restrictions = if op.node.is_assign_like() { self.restrictions & Restrictions::NO_STRUCT_LITERAL } else { self.restrictions }; let prec = op.node.precedence(); if prec < min_prec { break; } // Check for deprecated `...` syntax if self.token == token::DotDotDot && op.node == AssocOp::DotDotEq { self.err_dotdotdot_syntax(self.token.span); } if self.token == token::LArrow { self.err_larrow_operator(self.token.span); } self.bump(); if op.node.is_comparison() { if let Some(expr) = self.check_no_chained_comparison(&lhs, &op)? 
{ return Ok(expr); } } // Look for JS' `===` and `!==` and recover if (op.node == AssocOp::Equal || op.node == AssocOp::NotEqual) && self.token.kind == token::Eq && self.prev_token.span.hi() == self.token.span.lo() { let sp = op.span.to(self.token.span); let sugg = match op.node { AssocOp::Equal => "==", AssocOp::NotEqual => "!=", _ => unreachable!(), } .into(); let invalid = format!("{sugg}="); self.sess.emit_err(errors::InvalidComparisonOperator { span: sp, invalid: invalid.clone(), sub: errors::InvalidComparisonOperatorSub::Correctable { span: sp, invalid, correct: sugg, }, }); self.bump(); } // Look for PHP's `<>` and recover if op.node == AssocOp::Less && self.token.kind == token::Gt && self.prev_token.span.hi() == self.token.span.lo() { let sp = op.span.to(self.token.span); self.sess.emit_err(errors::InvalidComparisonOperator { span: sp, invalid: "<>".into(), sub: errors::InvalidComparisonOperatorSub::Correctable { span: sp, invalid: "<>".into(), correct: "!=".into(), }, }); self.bump(); } // Look for C++'s `<=>` and recover if op.node == AssocOp::LessEqual && self.token.kind == token::Gt && self.prev_token.span.hi() == self.token.span.lo() { let sp = op.span.to(self.token.span); self.sess.emit_err(errors::InvalidComparisonOperator { span: sp, invalid: "<=>".into(), sub: errors::InvalidComparisonOperatorSub::Spaceship(sp), }); self.bump(); } if self.prev_token == token::BinOp(token::Plus) && self.token == token::BinOp(token::Plus) && self.prev_token.span.between(self.token.span).is_empty() { let op_span = self.prev_token.span.to(self.token.span); // Eat the second `+` self.bump(); lhs = self.recover_from_postfix_increment(lhs, op_span, starts_stmt)?; continue; } if self.prev_token == token::BinOp(token::Minus) && self.token == token::BinOp(token::Minus) && self.prev_token.span.between(self.token.span).is_empty() && !self.look_ahead(1, |tok| tok.can_begin_expr()) { let op_span = self.prev_token.span.to(self.token.span); // Eat the second `-` self.bump(); lhs = self.recover_from_postfix_decrement(lhs, op_span, starts_stmt)?; continue; } let op = op.node; // Special cases: if op == AssocOp::As { lhs = self.parse_assoc_op_cast(lhs, lhs_span, ExprKind::Cast)?; continue; } else if op == AssocOp::DotDot || op == AssocOp::DotDotEq { // If we didn't have to handle `x..`/`x..=`, it would be pretty easy to // generalise it to the Fixity::None code. lhs = self.parse_expr_range(prec, lhs, op, cur_op_span)?; break; } let fixity = op.fixity(); let prec_adjustment = match fixity { Fixity::Right => 0, Fixity::Left => 1, // We currently have no non-associative operators that are not handled above by // the special cases. The code is here only for future convenience. 
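            // For example, with `Fixity::Left` the recursive call below uses
            // `prec + 1`, so a second operator of the same precedence ends the
            // inner call and `a - b - c` groups as `(a - b) - c`; with
            // `Fixity::Right` (assignment) the call reuses `prec`, so
            // `a = b = c` groups as `a = (b = c)`.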
Fixity::None => 1, }; let rhs = self.with_res(restrictions - Restrictions::STMT_EXPR, |this| { this.parse_expr_assoc_with(prec + prec_adjustment, LhsExpr::NotYetParsed) })?; let span = self.mk_expr_sp(&lhs, lhs_span, rhs.span); lhs = match op { AssocOp::Add | AssocOp::Subtract | AssocOp::Multiply | AssocOp::Divide | AssocOp::Modulus | AssocOp::LAnd | AssocOp::LOr | AssocOp::BitXor | AssocOp::BitAnd | AssocOp::BitOr | AssocOp::ShiftLeft | AssocOp::ShiftRight | AssocOp::Equal | AssocOp::Less | AssocOp::LessEqual | AssocOp::NotEqual | AssocOp::Greater | AssocOp::GreaterEqual => { let ast_op = op.to_ast_binop().unwrap(); let binary = self.mk_binary(source_map::respan(cur_op_span, ast_op), lhs, rhs); self.mk_expr(span, binary) } AssocOp::Assign => self.mk_expr(span, ExprKind::Assign(lhs, rhs, cur_op_span)), AssocOp::AssignOp(k) => { let aop = match k { token::Plus => BinOpKind::Add, token::Minus => BinOpKind::Sub, token::Star => BinOpKind::Mul, token::Slash => BinOpKind::Div, token::Percent => BinOpKind::Rem, token::Caret => BinOpKind::BitXor, token::And => BinOpKind::BitAnd, token::Or => BinOpKind::BitOr, token::Shl => BinOpKind::Shl, token::Shr => BinOpKind::Shr, }; let aopexpr = self.mk_assign_op(source_map::respan(cur_op_span, aop), lhs, rhs); self.mk_expr(span, aopexpr) } AssocOp::As | AssocOp::DotDot | AssocOp::DotDotEq => { self.span_bug(span, "AssocOp should have been handled by special case") } }; if let Fixity::None = fixity { break; } } Ok(lhs) } fn should_continue_as_assoc_expr(&mut self, lhs: &Expr) -> bool { match (self.expr_is_complete(lhs), AssocOp::from_token(&self.token)) { // Semi-statement forms are odd: // See https://github.com/rust-lang/rust/issues/29071 (true, None) => false, (false, _) => true, // Continue parsing the expression. // An exhaustive check is done in the following block, but these are checked first // because they *are* ambiguous but also reasonable looking incorrect syntax, so we // want to keep their span info to improve diagnostics in these cases in a later stage. (true, Some(AssocOp::Multiply)) | // `{ 42 } *foo = bar;` or `{ 42 } * 3` (true, Some(AssocOp::Subtract)) | // `{ 42 } -5` (true, Some(AssocOp::Add)) | // `{ 42 } + 42` (unary plus) (true, Some(AssocOp::LAnd)) | // `{ 42 } &&x` (#61475) or `{ 42 } && if x { 1 } else { 0 }` (true, Some(AssocOp::LOr)) | // `{ 42 } || 42` ("logical or" or closure) (true, Some(AssocOp::BitOr)) // `{ 42 } | 42` or `{ 42 } |x| 42` => { // These cases are ambiguous and can't be identified in the parser alone. // // Bitwise AND is left out because guessing intent is hard. We can make // suggestions based on the assumption that double-refs are rarely intentional, // and closures are distinct enough that they don't get mixed up with their // return value. let sp = self.sess.source_map().start_point(self.token.span); self.sess.ambiguous_block_expr_parse.borrow_mut().insert(sp, lhs.span); false } (true, Some(op)) if !op.can_continue_expr_unambiguously() => false, (true, Some(_)) => { self.error_found_expr_would_be_stmt(lhs); true } } } /// We've found an expression that would be parsed as a statement, /// but the next token implies this should be parsed as an expression. /// For example: `if let Some(x) = x { x } else { 0 } / 2`. fn error_found_expr_would_be_stmt(&self, lhs: &Expr) { self.sess.emit_err(errors::FoundExprWouldBeStmt { span: self.token.span, token: self.token.clone(), suggestion: ExprParenthesesNeeded::surrounding(lhs.span), }); } /// Possibly translate the current token to an associative operator. 
/// The method does not advance the current token. /// /// Also performs recovery for `and` / `or` which are mistaken for `&&` and `||` respectively. fn check_assoc_op(&self) -> Option> { let (op, span) = match (AssocOp::from_token(&self.token), self.token.ident()) { // When parsing const expressions, stop parsing when encountering `>`. ( Some( AssocOp::ShiftRight | AssocOp::Greater | AssocOp::GreaterEqual | AssocOp::AssignOp(token::BinOpToken::Shr), ), _, ) if self.restrictions.contains(Restrictions::CONST_EXPR) => { return None; } (Some(op), _) => (op, self.token.span), (None, Some((Ident { name: sym::and, span }, false))) if self.may_recover() => { self.sess.emit_err(errors::InvalidLogicalOperator { span: self.token.span, incorrect: "and".into(), sub: errors::InvalidLogicalOperatorSub::Conjunction(self.token.span), }); (AssocOp::LAnd, span) } (None, Some((Ident { name: sym::or, span }, false))) if self.may_recover() => { self.sess.emit_err(errors::InvalidLogicalOperator { span: self.token.span, incorrect: "or".into(), sub: errors::InvalidLogicalOperatorSub::Disjunction(self.token.span), }); (AssocOp::LOr, span) } _ => return None, }; Some(source_map::respan(span, op)) } /// Checks if this expression is a successfully parsed statement. fn expr_is_complete(&self, e: &Expr) -> bool { self.restrictions.contains(Restrictions::STMT_EXPR) && !classify::expr_requires_semi_to_be_stmt(e) } /// Parses `x..y`, `x..=y`, and `x..`/`x..=`. /// The other two variants are handled in `parse_prefix_range_expr` below. fn parse_expr_range( &mut self, prec: usize, lhs: P, op: AssocOp, cur_op_span: Span, ) -> PResult<'a, P> { let rhs = if self.is_at_start_of_range_notation_rhs() { Some(self.parse_expr_assoc_with(prec + 1, LhsExpr::NotYetParsed)?) } else { None }; let rhs_span = rhs.as_ref().map_or(cur_op_span, |x| x.span); let span = self.mk_expr_sp(&lhs, lhs.span, rhs_span); let limits = if op == AssocOp::DotDot { RangeLimits::HalfOpen } else { RangeLimits::Closed }; let range = self.mk_range(Some(lhs), rhs, limits); Ok(self.mk_expr(span, range)) } fn is_at_start_of_range_notation_rhs(&self) -> bool { if self.token.can_begin_expr() { // Parse `for i in 1.. { }` as infinite loop, not as `for i in (1..{})`. if self.token == token::OpenDelim(Delimiter::Brace) { return !self.restrictions.contains(Restrictions::NO_STRUCT_LITERAL); } true } else { false } } /// Parses prefix-forms of range notation: `..expr`, `..`, `..=expr`. fn parse_expr_prefix_range(&mut self, attrs: Option) -> PResult<'a, P> { // Check for deprecated `...` syntax. if self.token == token::DotDotDot { self.err_dotdotdot_syntax(self.token.span); } debug_assert!( self.token.is_range_separator(), "parse_prefix_range_expr: token {:?} is not DotDot/DotDotEq", self.token ); let limits = match self.token.kind { token::DotDot => RangeLimits::HalfOpen, _ => RangeLimits::Closed, }; let op = AssocOp::from_token(&self.token); // FIXME: `parse_prefix_range_expr` is called when the current // token is `DotDot`, `DotDotDot`, or `DotDotEq`. If we haven't already // parsed attributes, then trying to parse them here will always fail. // We should figure out how we want attributes on range expressions to work. let attrs = self.parse_or_use_outer_attributes(attrs)?; self.collect_tokens_for_expr(attrs, |this, attrs| { let lo = this.token.span; this.bump(); let (span, opt_end) = if this.is_at_start_of_range_notation_rhs() { // RHS must be parsed with more associativity than the dots. 
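                // Using `precedence() + 1` keeps ranges non-associative: while
                // parsing the RHS, another `..` at the same precedence falls
                // below `min_prec` and stops the inner call, so `..1..2` is not
                // parsed as `..(1..2)`.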
this.parse_expr_assoc_with(op.unwrap().precedence() + 1, LhsExpr::NotYetParsed) .map(|x| (lo.to(x.span), Some(x)))? } else { (lo, None) }; let range = this.mk_range(None, opt_end, limits); Ok(this.mk_expr_with_attrs(span, range, attrs)) }) } /// Parses a prefix-unary-operator expr. fn parse_expr_prefix(&mut self, attrs: Option) -> PResult<'a, P> { let attrs = self.parse_or_use_outer_attributes(attrs)?; let lo = self.token.span; macro_rules! make_it { ($this:ident, $attrs:expr, |this, _| $body:expr) => { $this.collect_tokens_for_expr($attrs, |$this, attrs| { let (hi, ex) = $body?; Ok($this.mk_expr_with_attrs(lo.to(hi), ex, attrs)) }) }; } let this = self; // Note: when adding new unary operators, don't forget to adjust TokenKind::can_begin_expr() match this.token.uninterpolate().kind { // `!expr` token::Not => make_it!(this, attrs, |this, _| this.parse_expr_unary(lo, UnOp::Not)), // `~expr` token::Tilde => make_it!(this, attrs, |this, _| this.recover_tilde_expr(lo)), // `-expr` token::BinOp(token::Minus) => { make_it!(this, attrs, |this, _| this.parse_expr_unary(lo, UnOp::Neg)) } // `*expr` token::BinOp(token::Star) => { make_it!(this, attrs, |this, _| this.parse_expr_unary(lo, UnOp::Deref)) } // `&expr` and `&&expr` token::BinOp(token::And) | token::AndAnd => { make_it!(this, attrs, |this, _| this.parse_expr_borrow(lo)) } // `+lit` token::BinOp(token::Plus) if this.look_ahead(1, |tok| tok.is_numeric_lit()) => { let mut err = errors::LeadingPlusNotSupported { span: lo, remove_plus: None, add_parentheses: None, }; // a block on the LHS might have been intended to be an expression instead if let Some(sp) = this.sess.ambiguous_block_expr_parse.borrow().get(&lo) { err.add_parentheses = Some(ExprParenthesesNeeded::surrounding(*sp)); } else { err.remove_plus = Some(lo); } this.sess.emit_err(err); this.bump(); this.parse_expr_prefix(None) } // Recover from `++x`: token::BinOp(token::Plus) if this.look_ahead(1, |t| *t == token::BinOp(token::Plus)) => { let starts_stmt = this.prev_token == token::Semi || this.prev_token == token::CloseDelim(Delimiter::Brace); let pre_span = this.token.span.to(this.look_ahead(1, |t| t.span)); // Eat both `+`s. this.bump(); this.bump(); let operand_expr = this.parse_expr_dot_or_call(Default::default())?; this.recover_from_prefix_increment(operand_expr, pre_span, starts_stmt) } token::Ident(..) if this.token.is_keyword(kw::Box) => { make_it!(this, attrs, |this, _| this.parse_expr_box(lo)) } token::Ident(..) if this.may_recover() && this.is_mistaken_not_ident_negation() => { make_it!(this, attrs, |this, _| this.recover_not_expr(lo)) } _ => return this.parse_expr_dot_or_call(Some(attrs)), } } fn parse_expr_prefix_common(&mut self, lo: Span) -> PResult<'a, (Span, P)> { self.bump(); let expr = self.parse_expr_prefix(None)?; let span = self.interpolated_or_expr_span(&expr); Ok((lo.to(span), expr)) } fn parse_expr_unary(&mut self, lo: Span, op: UnOp) -> PResult<'a, (Span, ExprKind)> { let (span, expr) = self.parse_expr_prefix_common(lo)?; Ok((span, self.mk_unary(op, expr))) } /// Recover on `~expr` in favor of `!expr`. 
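    /// For example, `~x` is reported via `errors::TildeAsUnaryOperator` and
    /// then re-parsed as `!x`.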
fn recover_tilde_expr(&mut self, lo: Span) -> PResult<'a, (Span, ExprKind)> { self.sess.emit_err(errors::TildeAsUnaryOperator(lo)); self.parse_expr_unary(lo, UnOp::Not) } /// Parse `box expr` - this syntax has been removed, but we still parse this /// for now to provide an automated way to fix usages of it fn parse_expr_box(&mut self, lo: Span) -> PResult<'a, (Span, ExprKind)> { let (span, expr) = self.parse_expr_prefix_common(lo)?; let code = self.sess.source_map().span_to_snippet(span.with_lo(lo.hi())).unwrap(); self.sess.emit_err(errors::BoxSyntaxRemoved { span, code: code.trim() }); // So typechecking works, parse `box ` as `::std::boxed::Box::new(expr)` let path = Path { span, segments: [ PathSegment::from_ident(Ident::with_dummy_span(PathRoot)), PathSegment::from_ident(Ident::with_dummy_span(sym::std)), PathSegment::from_ident(Ident::from_str("boxed")), PathSegment::from_ident(Ident::from_str("Box")), PathSegment::from_ident(Ident::with_dummy_span(sym::new)), ] .into(), tokens: None, }; let path = self.mk_expr(span, ExprKind::Path(None, path)); Ok((span, self.mk_call(path, ThinVec::from([expr])))) } fn is_mistaken_not_ident_negation(&self) -> bool { let token_cannot_continue_expr = |t: &Token| match t.uninterpolate().kind { // These tokens can start an expression after `!`, but // can't continue an expression after an ident token::Ident(name, is_raw) => token::ident_can_begin_expr(name, t.span, is_raw), token::Literal(..) | token::Pound => true, _ => t.is_whole_expr(), }; self.token.is_ident_named(sym::not) && self.look_ahead(1, token_cannot_continue_expr) } /// Recover on `not expr` in favor of `!expr`. fn recover_not_expr(&mut self, lo: Span) -> PResult<'a, (Span, ExprKind)> { let negated_token = self.look_ahead(1, |t| t.clone()); let sub_diag = if negated_token.is_numeric_lit() { errors::NotAsNegationOperatorSub::SuggestNotBitwise } else if negated_token.is_bool_lit() { errors::NotAsNegationOperatorSub::SuggestNotLogical } else { errors::NotAsNegationOperatorSub::SuggestNotDefault }; self.sess.emit_err(errors::NotAsNegationOperator { negated: negated_token.span, negated_desc: super::token_descr(&negated_token), // Span the `not` plus trailing whitespace to avoid // trailing whitespace after the `!` in our suggestion sub: sub_diag( self.sess.source_map().span_until_non_whitespace(lo.to(negated_token.span)), ), }); self.parse_expr_unary(lo, UnOp::Not) } /// Returns the span of expr if it was not interpolated, or the span of the interpolated token. fn interpolated_or_expr_span(&self, expr: &Expr) -> Span { match self.prev_token.kind { TokenKind::Interpolated(..) => self.prev_token.span, _ => expr.span, } } fn parse_assoc_op_cast( &mut self, lhs: P, lhs_span: Span, expr_kind: fn(P, P) -> ExprKind, ) -> PResult<'a, P> { let mk_expr = |this: &mut Self, lhs: P, rhs: P| { this.mk_expr(this.mk_expr_sp(&lhs, lhs_span, rhs.span), expr_kind(lhs, rhs)) }; // Save the state of the parser before parsing type normally, in case there is a // LessThan comparison after this cast. let parser_snapshot_before_type = self.clone(); let cast_expr = match self.parse_as_cast_ty() { Ok(rhs) => mk_expr(self, lhs, rhs), Err(type_err) => { if !self.may_recover() { return Err(type_err); } // Rewind to before attempting to parse the type with generics, to recover // from situations like `x as usize < y` in which we first tried to parse // `usize < y` as a type with generic arguments. 
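                // `mem::replace` puts the pre-type snapshot back into `self` and
                // keeps the failed post-type state around so it can be restored
                // below if none of the recovery paths apply.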
let parser_snapshot_after_type = mem::replace(self, parser_snapshot_before_type); // Check for typo of `'a: loop { break 'a }` with a missing `'`. match (&lhs.kind, &self.token.kind) { ( // `foo: ` ExprKind::Path(None, ast::Path { segments, .. }), token::Ident(kw::For | kw::Loop | kw::While, false), ) if segments.len() == 1 => { let snapshot = self.create_snapshot_for_diagnostic(); let label = Label { ident: Ident::from_str_and_span( &format!("'{}", segments[0].ident), segments[0].ident.span, ), }; match self.parse_expr_labeled(label, false) { Ok(expr) => { type_err.cancel(); self.sess.emit_err(errors::MalformedLoopLabel { span: label.ident.span, correct_label: label.ident, }); return Ok(expr); } Err(err) => { err.cancel(); self.restore_snapshot(snapshot); } } } _ => {} } match self.parse_path(PathStyle::Expr) { Ok(path) => { let span_after_type = parser_snapshot_after_type.token.span; let expr = mk_expr( self, lhs, self.mk_ty(path.span, TyKind::Path(None, path.clone())), ); let args_span = self.look_ahead(1, |t| t.span).to(span_after_type); let suggestion = errors::ComparisonOrShiftInterpretedAsGenericSugg { left: expr.span.shrink_to_lo(), right: expr.span.shrink_to_hi(), }; match self.token.kind { token::Lt => { self.sess.emit_err(errors::ComparisonInterpretedAsGeneric { comparison: self.token.span, r#type: path, args: args_span, suggestion, }) } token::BinOp(token::Shl) => { self.sess.emit_err(errors::ShiftInterpretedAsGeneric { shift: self.token.span, r#type: path, args: args_span, suggestion, }) } _ => { // We can end up here even without `<` being the next token, for // example because `parse_ty_no_plus` returns `Err` on keywords, // but `parse_path` returns `Ok` on them due to error recovery. // Return original error and parser state. *self = parser_snapshot_after_type; return Err(type_err); } }; // Successfully parsed the type path leaving a `<` yet to parse. type_err.cancel(); // Keep `x as usize` as an expression in AST and continue parsing. expr } Err(path_err) => { // Couldn't parse as a path, return original error and parser state. path_err.cancel(); *self = parser_snapshot_after_type; return Err(type_err); } } } }; self.parse_and_disallow_postfix_after_cast(cast_expr) } /// Parses a postfix operators such as `.`, `?`, or index (`[]`) after a cast, /// then emits an error and returns the newly parsed tree. /// The resulting parse tree for `&x as T[0]` has a precedence of `((&x) as T)[0]`. fn parse_and_disallow_postfix_after_cast( &mut self, cast_expr: P, ) -> PResult<'a, P> { if let ExprKind::Type(_, _) = cast_expr.kind { panic!("ExprKind::Type must not be parsed"); } let span = cast_expr.span; let with_postfix = self.parse_expr_dot_or_call_with_(cast_expr, span)?; // Check if an illegal postfix operator has been added after the cast. // If the resulting expression is not a cast, it is an illegal postfix operator. if !matches!(with_postfix.kind, ExprKind::Cast(_, _)) { let msg = format!( "cast cannot be followed by {}", match with_postfix.kind { ExprKind::Index(..) 
=> "indexing", ExprKind::Try(_) => "`?`", ExprKind::Field(_, _) => "a field access", ExprKind::MethodCall(_) => "a method call", ExprKind::Call(_, _) => "a function call", ExprKind::Await(_, _) => "`.await`", ExprKind::Err => return Ok(with_postfix), _ => unreachable!("parse_dot_or_call_expr_with_ shouldn't produce this"), } ); let mut err = self.struct_span_err(span, msg); let suggest_parens = |err: &mut Diagnostic| { let suggestions = vec![ (span.shrink_to_lo(), "(".to_string()), (span.shrink_to_hi(), ")".to_string()), ]; err.multipart_suggestion( "try surrounding the expression in parentheses", suggestions, Applicability::MachineApplicable, ); }; suggest_parens(&mut err); err.emit(); }; Ok(with_postfix) } /// Parse `& mut? ` or `& raw [ const | mut ] `. fn parse_expr_borrow(&mut self, lo: Span) -> PResult<'a, (Span, ExprKind)> { self.expect_and()?; let has_lifetime = self.token.is_lifetime() && self.look_ahead(1, |t| t != &token::Colon); let lifetime = has_lifetime.then(|| self.expect_lifetime()); // For recovery, see below. let (borrow_kind, mutbl) = self.parse_borrow_modifiers(lo); let expr = if self.token.is_range_separator() { self.parse_expr_prefix_range(None) } else { self.parse_expr_prefix(None) }?; let hi = self.interpolated_or_expr_span(&expr); let span = lo.to(hi); if let Some(lt) = lifetime { self.error_remove_borrow_lifetime(span, lt.ident.span); } Ok((span, ExprKind::AddrOf(borrow_kind, mutbl, expr))) } fn error_remove_borrow_lifetime(&self, span: Span, lt_span: Span) { self.sess.emit_err(errors::LifetimeInBorrowExpression { span, lifetime_span: lt_span }); } /// Parse `mut?` or `raw [ const | mut ]`. fn parse_borrow_modifiers(&mut self, lo: Span) -> (ast::BorrowKind, ast::Mutability) { if self.check_keyword(kw::Raw) && self.look_ahead(1, Token::is_mutability) { // `raw [ const | mut ]`. let found_raw = self.eat_keyword(kw::Raw); assert!(found_raw); let mutability = self.parse_const_or_mut().unwrap(); self.sess.gated_spans.gate(sym::raw_ref_op, lo.to(self.prev_token.span)); (ast::BorrowKind::Raw, mutability) } else { // `mut?` (ast::BorrowKind::Ref, self.parse_mutability()) } } /// Parses `a.b` or `a(13)` or `a[4]` or just `a`. fn parse_expr_dot_or_call(&mut self, attrs: Option) -> PResult<'a, P> { let attrs = self.parse_or_use_outer_attributes(attrs)?; self.collect_tokens_for_expr(attrs, |this, attrs| { let base = this.parse_expr_bottom()?; let span = this.interpolated_or_expr_span(&base); this.parse_expr_dot_or_call_with(base, span, attrs) }) } pub(super) fn parse_expr_dot_or_call_with( &mut self, e0: P, lo: Span, mut attrs: ast::AttrVec, ) -> PResult<'a, P> { // Stitch the list of outer attributes onto the return value. 
// A little bit ugly, but the best way given the current code // structure let res = self.parse_expr_dot_or_call_with_(e0, lo); if attrs.is_empty() { res } else { res.map(|expr| { expr.map(|mut expr| { attrs.extend(expr.attrs); expr.attrs = attrs; expr }) }) } } fn parse_expr_dot_or_call_with_(&mut self, mut e: P, lo: Span) -> PResult<'a, P> { loop { let has_question = if self.prev_token.kind == TokenKind::Ident(kw::Return, false) { // we are using noexpect here because we don't expect a `?` directly after a `return` // which could be suggested otherwise self.eat_noexpect(&token::Question) } else { self.eat(&token::Question) }; if has_question { // `expr?` e = self.mk_expr(lo.to(self.prev_token.span), ExprKind::Try(e)); continue; } let has_dot = if self.prev_token.kind == TokenKind::Ident(kw::Return, false) { // we are using noexpect here because we don't expect a `.` directly after a `return` // which could be suggested otherwise self.eat_noexpect(&token::Dot) } else { self.eat(&token::Dot) }; if has_dot { // expr.f e = self.parse_dot_suffix_expr(lo, e)?; continue; } if self.expr_is_complete(&e) { return Ok(e); } e = match self.token.kind { token::OpenDelim(Delimiter::Parenthesis) => self.parse_expr_fn_call(lo, e), token::OpenDelim(Delimiter::Bracket) => self.parse_expr_index(lo, e)?, _ => return Ok(e), } } } fn parse_dot_suffix_expr(&mut self, lo: Span, base: P) -> PResult<'a, P> { match self.token.uninterpolate().kind { token::Ident(..) => self.parse_dot_suffix(base, lo), token::Literal(token::Lit { kind: token::Integer, symbol, suffix }) => { Ok(self.parse_expr_tuple_field_access(lo, base, symbol, suffix, None)) } token::Literal(token::Lit { kind: token::Float, symbol, suffix }) => { Ok(self.parse_expr_tuple_field_access_float(lo, base, symbol, suffix)) } _ => { self.error_unexpected_after_dot(); Ok(base) } } } fn error_unexpected_after_dot(&self) { let actual = pprust::token_to_string(&self.token); let span = self.token.span; let sm = self.sess.source_map(); let (span, actual) = match (&self.token.kind, self.subparser_name) { (token::Eof, Some(_)) if let Ok(actual) = sm.span_to_snippet(sm.next_point(span)) => (span.shrink_to_hi(), actual.into()), _ => (span, actual), }; self.sess.emit_err(errors::UnexpectedTokenAfterDot { span, actual }); } // We need an identifier or integer, but the next token is a float. // Break the float into components to extract the identifier or integer. // FIXME: With current `TokenCursor` it's hard to break tokens into more than 2 // parts unless those parts are processed immediately. `TokenCursor` should either // support pushing "future tokens" (would be also helpful to `break_and_eat`), or // we should break everything including floats into more basic proc-macro style // tokens in the lexer (probably preferable). // See also `TokenKind::break_two_token_op` which does similar splitting of `>>` into `>`. fn break_up_float(&mut self, float: Symbol) -> DestructuredFloat { #[derive(Debug)] enum FloatComponent { IdentLike(String), Punct(char), } use FloatComponent::*; let float_str = float.as_str(); let mut components = Vec::new(); let mut ident_like = String::new(); for c in float_str.chars() { if c == '_' || c.is_ascii_alphanumeric() { ident_like.push(c); } else if matches!(c, '.' 
| '+' | '-') { if !ident_like.is_empty() { components.push(IdentLike(mem::take(&mut ident_like))); } components.push(Punct(c)); } else { panic!("unexpected character in a float token: {c:?}") } } if !ident_like.is_empty() { components.push(IdentLike(ident_like)); } // With proc macros the span can refer to anything, the source may be too short, // or too long, or non-ASCII. It only makes sense to break our span into components // if its underlying text is identical to our float literal. let span = self.token.span; let can_take_span_apart = || self.span_to_snippet(span).as_deref() == Ok(float_str).as_deref(); match &*components { // 1e2 [IdentLike(i)] => { DestructuredFloat::Single(Symbol::intern(&i), span) } // 1. [IdentLike(i), Punct('.')] => { let (ident_span, dot_span) = if can_take_span_apart() { let (span, ident_len) = (span.data(), BytePos::from_usize(i.len())); let ident_span = span.with_hi(span.lo + ident_len); let dot_span = span.with_lo(span.lo + ident_len); (ident_span, dot_span) } else { (span, span) }; let symbol = Symbol::intern(&i); DestructuredFloat::TrailingDot(symbol, ident_span, dot_span) } // 1.2 | 1.2e3 [IdentLike(i1), Punct('.'), IdentLike(i2)] => { let (ident1_span, dot_span, ident2_span) = if can_take_span_apart() { let (span, ident1_len) = (span.data(), BytePos::from_usize(i1.len())); let ident1_span = span.with_hi(span.lo + ident1_len); let dot_span = span .with_lo(span.lo + ident1_len) .with_hi(span.lo + ident1_len + BytePos(1)); let ident2_span = self.token.span.with_lo(span.lo + ident1_len + BytePos(1)); (ident1_span, dot_span, ident2_span) } else { (span, span, span) }; let symbol1 = Symbol::intern(&i1); let symbol2 = Symbol::intern(&i2); DestructuredFloat::MiddleDot(symbol1, ident1_span, dot_span, symbol2, ident2_span) } // 1e+ | 1e- (recovered) [IdentLike(_), Punct('+' | '-')] | // 1e+2 | 1e-2 [IdentLike(_), Punct('+' | '-'), IdentLike(_)] | // 1.2e+ | 1.2e- [IdentLike(_), Punct('.'), IdentLike(_), Punct('+' | '-')] | // 1.2e+3 | 1.2e-3 [IdentLike(_), Punct('.'), IdentLike(_), Punct('+' | '-'), IdentLike(_)] => { // See the FIXME about `TokenCursor` above. self.error_unexpected_after_dot(); DestructuredFloat::Error } _ => panic!("unexpected components in a float token: {components:?}"), } } fn parse_expr_tuple_field_access_float( &mut self, lo: Span, base: P, float: Symbol, suffix: Option, ) -> P { match self.break_up_float(float) { // 1e2 DestructuredFloat::Single(sym, _sp) => { self.parse_expr_tuple_field_access(lo, base, sym, suffix, None) } // 1. DestructuredFloat::TrailingDot(sym, ident_span, dot_span) => { assert!(suffix.is_none()); self.token = Token::new(token::Ident(sym, false), ident_span); let next_token = (Token::new(token::Dot, dot_span), self.token_spacing); self.parse_expr_tuple_field_access(lo, base, sym, None, Some(next_token)) } // 1.2 | 1.2e3 DestructuredFloat::MiddleDot(symbol1, ident1_span, dot_span, symbol2, ident2_span) => { self.token = Token::new(token::Ident(symbol1, false), ident1_span); // This needs to be `Spacing::Alone` to prevent regressions. 
// See issue #76399 and PR #76285 for more details let next_token1 = (Token::new(token::Dot, dot_span), Spacing::Alone); let base1 = self.parse_expr_tuple_field_access(lo, base, symbol1, None, Some(next_token1)); let next_token2 = Token::new(token::Ident(symbol2, false), ident2_span); self.bump_with((next_token2, self.token_spacing)); // `.` self.parse_expr_tuple_field_access(lo, base1, symbol2, suffix, None) } DestructuredFloat::Error => base, } } fn parse_field_name_maybe_tuple(&mut self) -> PResult<'a, ThinVec> { let token::Literal(token::Lit { kind: token::Float, symbol, suffix }) = self.token.kind else { return Ok(thin_vec![self.parse_field_name()?]); }; Ok(match self.break_up_float(symbol) { // 1e2 DestructuredFloat::Single(sym, sp) => { self.bump(); thin_vec![Ident::new(sym, sp)] } // 1. DestructuredFloat::TrailingDot(sym, sym_span, dot_span) => { assert!(suffix.is_none()); // Analogous to `Self::break_and_eat` self.break_last_token = true; // This might work, in cases like `1. 2`, and might not, // in cases like `offset_of!(Ty, 1.)`. It depends on what comes // after the float-like token, and therefore we have to make // the other parts of the parser think that there is a dot literal. self.token = Token::new(token::Ident(sym, false), sym_span); self.bump_with((Token::new(token::Dot, dot_span), self.token_spacing)); thin_vec![Ident::new(sym, sym_span)] } // 1.2 | 1.2e3 DestructuredFloat::MiddleDot(symbol1, ident1_span, _dot_span, symbol2, ident2_span) => { self.bump(); thin_vec![Ident::new(symbol1, ident1_span), Ident::new(symbol2, ident2_span)] } DestructuredFloat::Error => { self.bump(); thin_vec![Ident::new(symbol, self.prev_token.span)] } }) } fn parse_expr_tuple_field_access( &mut self, lo: Span, base: P, field: Symbol, suffix: Option, next_token: Option<(Token, Spacing)>, ) -> P { match next_token { Some(next_token) => self.bump_with(next_token), None => self.bump(), } let span = self.prev_token.span; let field = ExprKind::Field(base, Ident::new(field, span)); if let Some(suffix) = suffix { self.expect_no_tuple_index_suffix(span, suffix); } self.mk_expr(lo.to(span), field) } /// Parse a function call expression, `expr(...)`. fn parse_expr_fn_call(&mut self, lo: Span, fun: P) -> P { let snapshot = if self.token.kind == token::OpenDelim(Delimiter::Parenthesis) { Some((self.create_snapshot_for_diagnostic(), fun.kind.clone())) } else { None }; let open_paren = self.token.span; let mut seq = self .parse_expr_paren_seq() .map(|args| self.mk_expr(lo.to(self.prev_token.span), self.mk_call(fun, args))); if let Some(expr) = self.maybe_recover_struct_lit_bad_delims(lo, open_paren, &mut seq, snapshot) { return expr; } self.recover_seq_parse_error(Delimiter::Parenthesis, lo, seq) } /// If we encounter a parser state that looks like the user has written a `struct` literal with /// parentheses instead of braces, recover the parser state and provide suggestions. 
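    ///
    /// For example, `Enum::Foo(a: 3, b: 4)` gets a suggestion to use braces,
    /// `Enum::Foo { a: 3, b: 4 }`, or to drop the field names, as in
    /// `Enum::Foo(3, 4)`.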
#[instrument(skip(self, seq, snapshot), level = "trace")] fn maybe_recover_struct_lit_bad_delims( &mut self, lo: Span, open_paren: Span, seq: &mut PResult<'a, P>, snapshot: Option<(SnapshotParser<'a>, ExprKind)>, ) -> Option> { if !self.may_recover() { return None; } match (seq.as_mut(), snapshot) { (Err(err), Some((mut snapshot, ExprKind::Path(None, path)))) => { snapshot.bump(); // `(` match snapshot.parse_struct_fields(path.clone(), false, Delimiter::Parenthesis) { Ok((fields, ..)) if snapshot.eat(&token::CloseDelim(Delimiter::Parenthesis)) => { // We are certain we have `Enum::Foo(a: 3, b: 4)`, suggest // `Enum::Foo { a: 3, b: 4 }` or `Enum::Foo(3, 4)`. self.restore_snapshot(snapshot); let close_paren = self.prev_token.span; let span = lo.to(close_paren); // filter shorthand fields let fields: Vec<_> = fields.into_iter().filter(|field| !field.is_shorthand).collect(); if !fields.is_empty() && // `token.kind` should not be compared here. // This is because the `snapshot.token.kind` is treated as the same as // that of the open delim in `TokenTreesReader::parse_token_tree`, even if they are different. self.span_to_snippet(close_paren).is_ok_and(|snippet| snippet == ")") { let mut replacement_err = errors::ParenthesesWithStructFields { span, r#type: path, braces_for_struct: errors::BracesForStructLiteral { first: open_paren, second: close_paren, }, no_fields_for_fn: errors::NoFieldsForFnCall { fields: fields .into_iter() .map(|field| field.span.until(field.expr.span)) .collect(), }, } .into_diagnostic(&self.sess.span_diagnostic); replacement_err.emit(); let old_err = mem::replace(err, replacement_err); old_err.cancel(); } else { err.emit(); } return Some(self.mk_expr_err(span)); } Ok(_) => {} Err(err) => err.cancel(), } } _ => {} } None } /// Parse an indexing expression `expr[...]`. fn parse_expr_index(&mut self, lo: Span, base: P) -> PResult<'a, P> { let prev_span = self.prev_token.span; let open_delim_span = self.token.span; self.bump(); // `[` let index = self.parse_expr()?; self.suggest_missing_semicolon_before_array(prev_span, open_delim_span)?; self.expect(&token::CloseDelim(Delimiter::Bracket))?; Ok(self.mk_expr( lo.to(self.prev_token.span), self.mk_index(base, index, open_delim_span.to(self.prev_token.span)), )) } /// Assuming we have just parsed `.`, continue parsing into an expression. fn parse_dot_suffix(&mut self, self_arg: P, lo: Span) -> PResult<'a, P> { if self.token.uninterpolated_span().at_least_rust_2018() && self.eat_keyword(kw::Await) { return Ok(self.mk_await_expr(self_arg, lo)); } let fn_span_lo = self.token.span; let mut seg = self.parse_path_segment(PathStyle::Expr, None)?; self.check_trailing_angle_brackets(&seg, &[&token::OpenDelim(Delimiter::Parenthesis)]); self.check_turbofish_missing_angle_brackets(&mut seg); if self.check(&token::OpenDelim(Delimiter::Parenthesis)) { // Method call `expr.f()` let args = self.parse_expr_paren_seq()?; let fn_span = fn_span_lo.to(self.prev_token.span); let span = lo.to(self.prev_token.span); Ok(self.mk_expr( span, ExprKind::MethodCall(Box::new(ast::MethodCall { seg, receiver: self_arg, args, span: fn_span, })), )) } else { // Field access `expr.f` if let Some(args) = seg.args { self.sess.emit_err(errors::FieldExpressionWithGeneric(args.span())); } let span = lo.to(self.prev_token.span); Ok(self.mk_expr(span, ExprKind::Field(self_arg, seg.ident))) } } /// At the bottom (top?) of the precedence hierarchy, /// Parses things like parenthesized exprs, macros, `return`, etc. 
/// /// N.B., this does not parse outer attributes, and is private because it only works /// correctly if called from `parse_dot_or_call_expr()`. fn parse_expr_bottom(&mut self) -> PResult<'a, P> { maybe_recover_from_interpolated_ty_qpath!(self, true); maybe_whole_expr!(self); // Outer attributes are already parsed and will be // added to the return value after the fact. let restrictions = self.restrictions; self.with_res(restrictions - Restrictions::ALLOW_LET, |this| { // Note: when adding new syntax here, don't forget to adjust `TokenKind::can_begin_expr()`. let lo = this.token.span; if let token::Literal(_) = this.token.kind { // This match arm is a special-case of the `_` match arm below and // could be removed without changing functionality, but it's faster // to have it here, especially for programs with large constants. this.parse_expr_lit() } else if this.check(&token::OpenDelim(Delimiter::Parenthesis)) { this.parse_expr_tuple_parens(restrictions) } else if this.check(&token::OpenDelim(Delimiter::Brace)) { this.parse_expr_block(None, lo, BlockCheckMode::Default) } else if this.check(&token::BinOp(token::Or)) || this.check(&token::OrOr) { this.parse_expr_closure().map_err(|mut err| { // If the input is something like `if a { 1 } else { 2 } | if a { 3 } else { 4 }` // then suggest parens around the lhs. if let Some(sp) = this.sess.ambiguous_block_expr_parse.borrow().get(&lo) { err.subdiagnostic(ExprParenthesesNeeded::surrounding(*sp)); } err }) } else if this.check(&token::OpenDelim(Delimiter::Bracket)) { this.parse_expr_array_or_repeat(Delimiter::Bracket) } else if this.is_builtin() { this.parse_expr_builtin() } else if this.check_path() { this.parse_expr_path_start() } else if this.check_keyword(kw::Move) || this.check_keyword(kw::Static) || this.check_const_closure() { this.parse_expr_closure() } else if this.eat_keyword(kw::If) { this.parse_expr_if() } else if this.check_keyword(kw::For) { if this.choose_generics_over_qpath(1) { this.parse_expr_closure() } else { assert!(this.eat_keyword(kw::For)); this.parse_expr_for(None, this.prev_token.span) } } else if this.eat_keyword(kw::While) { this.parse_expr_while(None, this.prev_token.span) } else if let Some(label) = this.eat_label() { this.parse_expr_labeled(label, true) } else if this.eat_keyword(kw::Loop) { let sp = this.prev_token.span; this.parse_expr_loop(None, this.prev_token.span).map_err(|mut err| { err.span_label(sp, "while parsing this `loop` expression"); err }) } else if this.eat_keyword(kw::Match) { let match_sp = this.prev_token.span; this.parse_expr_match().map_err(|mut err| { err.span_label(match_sp, "while parsing this `match` expression"); err }) } else if this.eat_keyword(kw::Unsafe) { let sp = this.prev_token.span; this.parse_expr_block(None, lo, BlockCheckMode::Unsafe(ast::UserProvided)).map_err( |mut err| { err.span_label(sp, "while parsing this `unsafe` expression"); err }, ) } else if this.check_inline_const(0) { this.parse_const_block(lo.to(this.token.span), false) } else if this.may_recover() && this.is_do_catch_block() { this.recover_do_catch() } else if this.is_try_block() { this.expect_keyword(kw::Try)?; this.parse_try_block(lo) } else if this.eat_keyword(kw::Return) { this.parse_expr_return() } else if this.eat_keyword(kw::Continue) { this.parse_expr_continue(lo) } else if this.eat_keyword(kw::Break) { this.parse_expr_break() } else if this.eat_keyword(kw::Yield) { this.parse_expr_yield() } else if this.is_do_yeet() { this.parse_expr_yeet() } else if this.eat_keyword(kw::Become) { 
this.parse_expr_become() } else if this.check_keyword(kw::Let) { this.parse_expr_let(restrictions) } else if this.eat_keyword(kw::Underscore) { Ok(this.mk_expr(this.prev_token.span, ExprKind::Underscore)) } else if this.token.uninterpolated_span().at_least_rust_2018() { // `Span:.at_least_rust_2018()` is somewhat expensive; don't get it repeatedly. if this.check_keyword(kw::Async) { if this.is_async_block() { // Check for `async {` and `async move {`. this.parse_async_block() } else { this.parse_expr_closure() } } else if this.eat_keyword(kw::Await) { this.recover_incorrect_await_syntax(lo, this.prev_token.span) } else { this.parse_expr_lit() } } else { this.parse_expr_lit() } }) } fn parse_expr_lit(&mut self) -> PResult<'a, P> { let lo = self.token.span; match self.parse_opt_token_lit() { Some((token_lit, _)) => { let expr = self.mk_expr(lo.to(self.prev_token.span), ExprKind::Lit(token_lit)); self.maybe_recover_from_bad_qpath(expr) } None => self.try_macro_suggestion(), } } fn parse_expr_tuple_parens(&mut self, restrictions: Restrictions) -> PResult<'a, P> { let lo = self.token.span; self.expect(&token::OpenDelim(Delimiter::Parenthesis))?; let (es, trailing_comma) = match self.parse_seq_to_end( &token::CloseDelim(Delimiter::Parenthesis), SeqSep::trailing_allowed(token::Comma), |p| p.parse_expr_catch_underscore(restrictions.intersection(Restrictions::ALLOW_LET)), ) { Ok(x) => x, Err(err) => { return Ok(self.recover_seq_parse_error(Delimiter::Parenthesis, lo, Err(err))); } }; let kind = if es.len() == 1 && !trailing_comma { // `(e)` is parenthesized `e`. ExprKind::Paren(es.into_iter().next().unwrap()) } else { // `(e,)` is a tuple with only one field, `e`. ExprKind::Tup(es) }; let expr = self.mk_expr(lo.to(self.prev_token.span), kind); self.maybe_recover_from_bad_qpath(expr) } fn parse_expr_array_or_repeat(&mut self, close_delim: Delimiter) -> PResult<'a, P> { let lo = self.token.span; self.bump(); // `[` or other open delim let close = &token::CloseDelim(close_delim); let kind = if self.eat(close) { // Empty vector ExprKind::Array(ThinVec::new()) } else { // Non-empty vector let first_expr = self.parse_expr()?; if self.eat(&token::Semi) { // Repeating array syntax: `[ 0; 512 ]` let count = self.parse_expr_anon_const()?; self.expect(close)?; ExprKind::Repeat(first_expr, count) } else if self.eat(&token::Comma) { // Vector with two or more elements. let sep = SeqSep::trailing_allowed(token::Comma); let (mut exprs, _) = self.parse_seq_to_end(close, sep, |p| p.parse_expr())?; exprs.insert(0, first_expr); ExprKind::Array(exprs) } else { // Vector with one element self.expect(close)?; ExprKind::Array(thin_vec![first_expr]) } }; let expr = self.mk_expr(lo.to(self.prev_token.span), kind); self.maybe_recover_from_bad_qpath(expr) } fn parse_expr_path_start(&mut self) -> PResult<'a, P> { let maybe_eq_tok = self.prev_token.clone(); let (qself, path) = if self.eat_lt() { let lt_span = self.prev_token.span; let (qself, path) = self.parse_qpath(PathStyle::Expr).map_err(|mut err| { // Suggests using '<=' if there is an error parsing qpath when the previous token // is an '=' token. Only emits suggestion if the '<' token and '=' token are // directly adjacent (i.e. '=<') if maybe_eq_tok.kind == TokenKind::Eq && maybe_eq_tok.span.hi() == lt_span.lo() { let eq_lt = maybe_eq_tok.span.to(lt_span); err.span_suggestion(eq_lt, "did you mean", "<=", Applicability::Unspecified); } err })?; (Some(qself), path) } else { (None, self.parse_path(PathStyle::Expr)?) 
}; // `!`, as an operator, is prefix, so we know this isn't that. let (span, kind) = if self.eat(&token::Not) { // MACRO INVOCATION expression if qself.is_some() { self.sess.emit_err(errors::MacroInvocationWithQualifiedPath(path.span)); } let lo = path.span; let mac = P(MacCall { path, args: self.parse_delim_args()?, }); (lo.to(self.prev_token.span), ExprKind::MacCall(mac)) } else if self.check(&token::OpenDelim(Delimiter::Brace)) && let Some(expr) = self.maybe_parse_struct_expr(&qself, &path) { if qself.is_some() { self.sess.gated_spans.gate(sym::more_qualified_paths, path.span); } return expr; } else { (path.span, ExprKind::Path(qself, path)) }; let expr = self.mk_expr(span, kind); self.maybe_recover_from_bad_qpath(expr) } /// Parse `'label: $expr`. The label is already parsed. pub(super) fn parse_expr_labeled( &mut self, label_: Label, mut consume_colon: bool, ) -> PResult<'a, P> { let lo = label_.ident.span; let label = Some(label_); let ate_colon = self.eat(&token::Colon); let expr = if self.eat_keyword(kw::While) { self.parse_expr_while(label, lo) } else if self.eat_keyword(kw::For) { self.parse_expr_for(label, lo) } else if self.eat_keyword(kw::Loop) { self.parse_expr_loop(label, lo) } else if self.check_noexpect(&token::OpenDelim(Delimiter::Brace)) || self.token.is_whole_block() { self.parse_expr_block(label, lo, BlockCheckMode::Default) } else if !ate_colon && self.may_recover() && (matches!(self.token.kind, token::CloseDelim(_) | token::Comma) || self.token.is_op()) { let (lit, _) = self.recover_unclosed_char(label_.ident, Parser::mk_token_lit_char, |self_| { self_.sess.create_err(errors::UnexpectedTokenAfterLabel { span: self_.token.span, remove_label: None, enclose_in_block: None, }) }); consume_colon = false; Ok(self.mk_expr(lo, ExprKind::Lit(lit))) } else if !ate_colon && (self.check_noexpect(&TokenKind::Comma) || self.check_noexpect(&TokenKind::Gt)) { // We're probably inside of a `Path<'a>` that needs a turbofish self.sess.emit_err(errors::UnexpectedTokenAfterLabel { span: self.token.span, remove_label: None, enclose_in_block: None, }); consume_colon = false; Ok(self.mk_expr_err(lo)) } else { let mut err = errors::UnexpectedTokenAfterLabel { span: self.token.span, remove_label: None, enclose_in_block: None, }; // Continue as an expression in an effort to recover on `'label: non_block_expr`. let expr = self.parse_expr().map(|expr| { let span = expr.span; let found_labeled_breaks = { struct FindLabeledBreaksVisitor(bool); impl<'ast> Visitor<'ast> for FindLabeledBreaksVisitor { fn visit_expr_post(&mut self, ex: &'ast Expr) { if let ExprKind::Break(Some(_label), _) = ex.kind { self.0 = true; } } } let mut vis = FindLabeledBreaksVisitor(false); vis.visit_expr(&expr); vis.0 }; // Suggestion involves adding a labeled block. // // If there are no breaks that may use this label, suggest removing the label and // recover to the unmodified expression. if !found_labeled_breaks { err.remove_label = Some(lo.until(span)); return expr; } err.enclose_in_block = Some(errors::UnexpectedTokenAfterLabelSugg { left: span.shrink_to_lo(), right: span.shrink_to_hi(), }); // Replace `'label: non_block_expr` with `'label: {non_block_expr}` in order to suppress future errors about `break 'label`. 
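                // For example, `'l: match x { _ => break 'l }` is wrapped as
                // `'l: { match x { _ => break 'l } }`, so later stages see a
                // block for the labeled `break` to target.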
let stmt = self.mk_stmt(span, StmtKind::Expr(expr)); let blk = self.mk_block(thin_vec![stmt], BlockCheckMode::Default, span); self.mk_expr(span, ExprKind::Block(blk, label)) }); self.sess.emit_err(err); expr }?; if !ate_colon && consume_colon { self.sess.emit_err(errors::RequireColonAfterLabeledExpression { span: expr.span, label: lo, label_end: lo.shrink_to_hi(), }); } Ok(expr) } /// Emit an error when a char is parsed as a lifetime because of a missing quote. pub(super) fn recover_unclosed_char( &self, lifetime: Ident, mk_lit_char: impl FnOnce(Symbol, Span) -> L, err: impl FnOnce(&Self) -> DiagnosticBuilder<'a, ErrorGuaranteed>, ) -> L { if let Some(mut diag) = self.sess.span_diagnostic.steal_diagnostic(lifetime.span, StashKey::LifetimeIsChar) { diag.span_suggestion_verbose( lifetime.span.shrink_to_hi(), "add `'` to close the char literal", "'", Applicability::MaybeIncorrect, ) .emit(); } else { err(self) .span_suggestion_verbose( lifetime.span.shrink_to_hi(), "add `'` to close the char literal", "'", Applicability::MaybeIncorrect, ) .emit(); } let name = lifetime.without_first_quote().name; mk_lit_char(name, lifetime.span) } /// Recover on the syntax `do catch { ... }` suggesting `try { ... }` instead. fn recover_do_catch(&mut self) -> PResult<'a, P> { let lo = self.token.span; self.bump(); // `do` self.bump(); // `catch` let span = lo.to(self.prev_token.span); self.sess.emit_err(errors::DoCatchSyntaxRemoved { span }); self.parse_try_block(lo) } /// Parse an expression if the token can begin one. fn parse_expr_opt(&mut self) -> PResult<'a, Option>> { Ok(if self.token.can_begin_expr() { Some(self.parse_expr()?) } else { None }) } /// Parse `"return" expr?`. fn parse_expr_return(&mut self) -> PResult<'a, P> { let lo = self.prev_token.span; let kind = ExprKind::Ret(self.parse_expr_opt()?); let expr = self.mk_expr(lo.to(self.prev_token.span), kind); self.maybe_recover_from_bad_qpath(expr) } /// Parse `"do" "yeet" expr?`. fn parse_expr_yeet(&mut self) -> PResult<'a, P> { let lo = self.token.span; self.bump(); // `do` self.bump(); // `yeet` let kind = ExprKind::Yeet(self.parse_expr_opt()?); let span = lo.to(self.prev_token.span); self.sess.gated_spans.gate(sym::yeet_expr, span); let expr = self.mk_expr(span, kind); self.maybe_recover_from_bad_qpath(expr) } /// Parse `"become" expr`, with `"become"` token already eaten. fn parse_expr_become(&mut self) -> PResult<'a, P> { let lo = self.prev_token.span; let kind = ExprKind::Become(self.parse_expr()?); let span = lo.to(self.prev_token.span); self.sess.gated_spans.gate(sym::explicit_tail_calls, span); let expr = self.mk_expr(span, kind); self.maybe_recover_from_bad_qpath(expr) } /// Parse `"break" (('label (:? expr)?) | expr?)` with `"break"` token already eaten. /// If the label is followed immediately by a `:` token, the label and `:` are /// parsed as part of the expression (i.e. a labeled loop). The language team has /// decided in #87026 to require parentheses as a visual aid to avoid confusion if /// the break expression of an unlabeled break is a labeled loop (as in /// `break 'lbl: loop {}`); a labeled break with an unlabeled loop as its value /// expression only gets a warning for compatibility reasons; and a labeled break /// with a labeled loop does not even get a warning because there is no ambiguity. 
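    ///
    /// For example (informal illustration of the cases above):
    /// ```text
    /// break 'lbl: loop {};      // error: requires `break ('lbl: loop {})`
    /// break 'out loop {};       // lint warning (label plus unlabeled loop value)
    /// break 'out 'lbl: loop {}; // accepted without a warning
    /// ```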
fn parse_expr_break(&mut self) -> PResult<'a, P> { let lo = self.prev_token.span; let mut label = self.eat_label(); let kind = if self.token == token::Colon && let Some(label) = label.take() { // The value expression can be a labeled loop, see issue #86948, e.g.: // `loop { break 'label: loop { break 'label 42; }; }` let lexpr = self.parse_expr_labeled(label, true)?; self.sess.emit_err(errors::LabeledLoopInBreak { span: lexpr.span, sub: errors::WrapExpressionInParentheses { left: lexpr.span.shrink_to_lo(), right: lexpr.span.shrink_to_hi(), }, }); Some(lexpr) } else if self.token != token::OpenDelim(Delimiter::Brace) || !self.restrictions.contains(Restrictions::NO_STRUCT_LITERAL) { let mut expr = self.parse_expr_opt()?; if let Some(expr) = &mut expr { if label.is_some() && matches!( expr.kind, ExprKind::While(_, _, None) | ExprKind::ForLoop(_, _, _, None) | ExprKind::Loop(_, None, _) | ExprKind::Block(_, None) ) { self.sess.buffer_lint_with_diagnostic( BREAK_WITH_LABEL_AND_LOOP, lo.to(expr.span), ast::CRATE_NODE_ID, "this labeled break expression is easy to confuse with an unlabeled break with a labeled value expression", BuiltinLintDiagnostics::BreakWithLabelAndLoop(expr.span), ); } // Recover `break label aaaaa` if self.may_recover() && let ExprKind::Path(None, p) = &expr.kind && let [segment] = &*p.segments && let &ast::PathSegment { ident, args: None, .. } = segment && let Some(next) = self.parse_expr_opt()? { label = Some(self.recover_ident_into_label(ident)); *expr = next; } } expr } else { None }; let expr = self.mk_expr(lo.to(self.prev_token.span), ExprKind::Break(label, kind)); self.maybe_recover_from_bad_qpath(expr) } /// Parse `"continue" label?`. fn parse_expr_continue(&mut self, lo: Span) -> PResult<'a, P> { let mut label = self.eat_label(); // Recover `continue label` -> `continue 'label` if self.may_recover() && label.is_none() && let Some((ident, _)) = self.token.ident() { self.bump(); label = Some(self.recover_ident_into_label(ident)); } let kind = ExprKind::Continue(label); Ok(self.mk_expr(lo.to(self.prev_token.span), kind)) } /// Parse `"yield" expr?`. fn parse_expr_yield(&mut self) -> PResult<'a, P> { let lo = self.prev_token.span; let kind = ExprKind::Yield(self.parse_expr_opt()?); let span = lo.to(self.prev_token.span); self.sess.gated_spans.gate(sym::generators, span); let expr = self.mk_expr(span, kind); self.maybe_recover_from_bad_qpath(expr) } /// Parse `builtin # ident(args,*)`. fn parse_expr_builtin(&mut self) -> PResult<'a, P> { self.parse_builtin(|this, lo, ident| { if ident.name == sym::offset_of { return Ok(Some(this.parse_expr_offset_of(lo)?)); } Ok(None) }) } pub(crate) fn parse_builtin( &mut self, parse: impl FnOnce(&mut Parser<'a>, Span, Ident) -> PResult<'a, Option>, ) -> PResult<'a, T> { let lo = self.token.span; self.bump(); // `builtin` self.bump(); // `#` let Some((ident, false)) = self.token.ident() else { let err = errors::ExpectedBuiltinIdent { span: self.token.span } .into_diagnostic(&self.sess.span_diagnostic); return Err(err); }; self.sess.gated_spans.gate(sym::builtin_syntax, ident.span); self.bump(); self.expect(&TokenKind::OpenDelim(Delimiter::Parenthesis))?; let ret = if let Some(res) = parse(self, lo, ident)? 
            Ok(res)
        } else {
            let err = errors::UnknownBuiltinConstruct { span: lo.to(ident.span), name: ident.name }
                .into_diagnostic(&self.sess.span_diagnostic);
            return Err(err);
        };
        self.expect(&TokenKind::CloseDelim(Delimiter::Parenthesis))?;

        ret
    }

    pub(crate) fn parse_expr_offset_of(&mut self, lo: Span) -> PResult<'a, P<Expr>> {
        let container = self.parse_ty()?;
        self.expect(&TokenKind::Comma)?;

        let seq_sep = SeqSep { sep: Some(token::Dot), trailing_sep_allowed: false };
        let (fields, _trailing, _recovered) = self.parse_seq_to_before_end(
            &TokenKind::CloseDelim(Delimiter::Parenthesis),
            seq_sep,
            Parser::parse_field_name_maybe_tuple,
        )?;
        let fields = fields.into_iter().flatten().collect::<Vec<_>>();
        let span = lo.to(self.token.span);
        Ok(self.mk_expr(span, ExprKind::OffsetOf(container, fields.into())))
    }

    /// Returns a string literal if the next token is a string literal.
    /// In case of error returns `Some(lit)` if the next token is a literal with a wrong kind,
    /// and returns `None` if the next token is not literal at all.
    pub fn parse_str_lit(&mut self) -> Result<ast::StrLit, Option<MetaItemLit>> {
        match self.parse_opt_meta_item_lit() {
            Some(lit) => match lit.kind {
                ast::LitKind::Str(symbol_unescaped, style) => Ok(ast::StrLit {
                    style,
                    symbol: lit.symbol,
                    suffix: lit.suffix,
                    span: lit.span,
                    symbol_unescaped,
                }),
                _ => Err(Some(lit)),
            },
            None => Err(None),
        }
    }

    pub(crate) fn mk_token_lit_char(name: Symbol, span: Span) -> (token::Lit, Span) {
        (token::Lit { symbol: name, suffix: None, kind: token::Char }, span)
    }

    fn mk_meta_item_lit_char(name: Symbol, span: Span) -> MetaItemLit {
        ast::MetaItemLit {
            symbol: name,
            suffix: None,
            kind: ast::LitKind::Char(name.as_str().chars().next().unwrap_or('_')),
            span,
        }
    }

    fn handle_missing_lit<L>(
        &mut self,
        mk_lit_char: impl FnOnce(Symbol, Span) -> L,
    ) -> PResult<'a, L> {
        if let token::Interpolated(nt) = &self.token.kind
            && let token::NtExpr(e) | token::NtLiteral(e) = &**nt
            && matches!(e.kind, ExprKind::Err)
        {
            let mut err = errors::InvalidInterpolatedExpression { span: self.token.span }
                .into_diagnostic(&self.sess.span_diagnostic);
            err.downgrade_to_delayed_bug();
            return Err(err);
        }
        let token = self.token.clone();
        let err = |self_: &Self| {
            let msg = format!("unexpected token: {}", super::token_descr(&token));
            self_.struct_span_err(token.span, msg)
        };
        // On an error path, eagerly consider a lifetime to be an unclosed character lit
        if self.token.is_lifetime() {
            let lt = self.expect_lifetime();
            Ok(self.recover_unclosed_char(lt.ident, mk_lit_char, err))
        } else {
            Err(err(self))
        }
    }

    pub(super) fn parse_token_lit(&mut self) -> PResult<'a, (token::Lit, Span)> {
        self.parse_opt_token_lit()
            .ok_or(())
            .or_else(|()| self.handle_missing_lit(Parser::mk_token_lit_char))
    }

    pub(super) fn parse_meta_item_lit(&mut self) -> PResult<'a, MetaItemLit> {
        self.parse_opt_meta_item_lit()
            .ok_or(())
            .or_else(|()| self.handle_missing_lit(Parser::mk_meta_item_lit_char))
    }

    fn recover_after_dot(&mut self) -> Option<Token> {
        let mut recovered = None;
        if self.token == token::Dot {
            // Attempt to recover `.4` as `0.4`. We don't currently have any syntax where
            // dot would follow an optional literal, so we do this unconditionally.
            recovered = self.look_ahead(1, |next_token| {
                if let token::Literal(token::Lit { kind: token::Integer, symbol, suffix }) =
                    next_token.kind
                {
                    // If this integer looks like a float, then recover as such.
                    //
                    // We will never encounter the exponent part of a floating
                    // point literal here, since there's no use of the exponent
                    // syntax that also constitutes a valid integer, so we need
                    // not check for that.
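                    //
                    // For example, a stray `.4` (possibly carrying an `f32`/`f64` suffix)
                    // is rebuilt below as the float literal `0.4`, provided the dot and
                    // the integer are directly adjacent in the source.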
                    if suffix.map_or(true, |s| s == sym::f32 || s == sym::f64)
                        && symbol.as_str().chars().all(|c| c.is_numeric() || c == '_')
                        && self.token.span.hi() == next_token.span.lo()
                    {
                        let s = String::from("0.") + symbol.as_str();
                        let kind = TokenKind::lit(token::Float, Symbol::intern(&s), suffix);
                        return Some(Token::new(kind, self.token.span.to(next_token.span)));
                    }
                }
                None
            });
            if let Some(token) = &recovered {
                self.bump();
                self.sess.emit_err(errors::FloatLiteralRequiresIntegerPart {
                    span: token.span,
                    correct: pprust::token_to_string(token).into_owned(),
                });
            }
        }

        recovered
    }

    /// Matches `lit = true | false | token_lit`.
    /// Returns `None` if the next token is not a literal.
    pub(super) fn parse_opt_token_lit(&mut self) -> Option<(token::Lit, Span)> {
        let recovered = self.recover_after_dot();
        let token = recovered.as_ref().unwrap_or(&self.token);
        let span = token.span;
        token::Lit::from_token(token).map(|token_lit| {
            self.bump();
            (token_lit, span)
        })
    }

    /// Matches `lit = true | false | token_lit`.
    /// Returns `None` if the next token is not a literal.
    pub(super) fn parse_opt_meta_item_lit(&mut self) -> Option<MetaItemLit> {
        let recovered = self.recover_after_dot();
        let token = recovered.as_ref().unwrap_or(&self.token);
        match token::Lit::from_token(token) {
            Some(lit) => {
                match MetaItemLit::from_token_lit(lit, token.span) {
                    Ok(lit) => {
                        self.bump();
                        Some(lit)
                    }
                    Err(err) => {
                        let span = token.uninterpolated_span();
                        self.bump();
                        report_lit_error(&self.sess, err, lit, span);
                        // Pack possible quotes and prefixes from the original literal into
                        // the error literal's symbol so they can be pretty-printed faithfully.
                        let suffixless_lit = token::Lit::new(lit.kind, lit.symbol, None);
                        let symbol = Symbol::intern(&suffixless_lit.to_string());
                        let lit = token::Lit::new(token::Err, symbol, lit.suffix);
                        Some(
                            MetaItemLit::from_token_lit(lit, span)
                                .unwrap_or_else(|_| unreachable!()),
                        )
                    }
                }
            }
            None => None,
        }
    }

    pub(super) fn expect_no_tuple_index_suffix(&self, span: Span, suffix: Symbol) {
        if [sym::i32, sym::u32, sym::isize, sym::usize].contains(&suffix) {
            // #59553: warn instead of reject out of hand to allow the fix to percolate
            // through the ecosystem when people fix their macros
            self.sess.emit_warning(errors::InvalidLiteralSuffixOnTupleIndex {
                span,
                suffix,
                exception: Some(()),
            });
        } else {
            self.sess.emit_err(errors::InvalidLiteralSuffixOnTupleIndex {
                span,
                suffix,
                exception: None,
            });
        }
    }

    /// Matches `'-' lit | lit` (cf. `ast_validation::AstValidator::check_expr_within_pat`).
    /// Keep this in sync with `Token::can_begin_literal_maybe_minus`.
    pub fn parse_literal_maybe_minus(&mut self) -> PResult<'a, P<Expr>> {
        maybe_whole_expr!(self);

        let lo = self.token.span;
        let minus_present = self.eat(&token::BinOp(token::Minus));
        let (token_lit, span) = self.parse_token_lit()?;
        let expr = self.mk_expr(span, ExprKind::Lit(token_lit));

        if minus_present {
            Ok(self.mk_expr(lo.to(self.prev_token.span), self.mk_unary(UnOp::Neg, expr)))
        } else {
            Ok(expr)
        }
    }

    fn is_array_like_block(&mut self) -> bool {
        self.look_ahead(1, |t| matches!(t.kind, TokenKind::Ident(..) | TokenKind::Literal(_)))
            && self.look_ahead(2, |t| t == &token::Comma)
            && self.look_ahead(3, |t| t.can_begin_expr())
    }

    /// Emits a suggestion if it looks like the user meant an array but
    /// accidentally used braces, causing the code to be interpreted as a block
    /// expression.
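    ///
    /// For illustration (not a doctest), source such as `{1, 2, 3}` is re-parsed
    /// here as an array literal, and the emitted suggestion is to write
    /// `[1, 2, 3]` instead.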
    fn maybe_suggest_brackets_instead_of_braces(&mut self, lo: Span) -> Option<P<Expr>> {
        let mut snapshot = self.create_snapshot_for_diagnostic();
        match snapshot.parse_expr_array_or_repeat(Delimiter::Brace) {
            Ok(arr) => {
                self.sess.emit_err(errors::ArrayBracketsInsteadOfSpaces {
                    span: arr.span,
                    sub: errors::ArrayBracketsInsteadOfSpacesSugg {
                        left: lo,
                        right: snapshot.prev_token.span,
                    },
                });

                self.restore_snapshot(snapshot);
                Some(self.mk_expr_err(arr.span))
            }
            Err(e) => {
                e.cancel();
                None
            }
        }
    }

    fn suggest_missing_semicolon_before_array(
        &self,
        prev_span: Span,
        open_delim_span: Span,
    ) -> PResult<'a, ()> {
        if !self.may_recover() {
            return Ok(());
        }

        if self.token.kind == token::Comma {
            if !self.sess.source_map().is_multiline(prev_span.until(self.token.span)) {
                return Ok(());
            }
            let mut snapshot = self.create_snapshot_for_diagnostic();
            snapshot.bump();
            match snapshot.parse_seq_to_before_end(
                &token::CloseDelim(Delimiter::Bracket),
                SeqSep::trailing_allowed(token::Comma),
                |p| p.parse_expr(),
            ) {
                Ok(_)
                    // When the close delim is `)`, `token.kind` is expected to be `token::CloseDelim(Delimiter::Parenthesis)`,
                    // but the actual `token.kind` is `token::CloseDelim(Delimiter::Bracket)`.
                    // This is because the `token.kind` of the close delim is treated as the same as
                    // that of the open delim in `TokenTreesReader::parse_token_tree`, even if the delimiters of them are different.
                    // Therefore, `token.kind` should not be compared here.
                    if snapshot
                        .span_to_snippet(snapshot.token.span)
                        .is_ok_and(|snippet| snippet == "]") =>
                {
                    return Err(errors::MissingSemicolonBeforeArray {
                        open_delim: open_delim_span,
                        semicolon: prev_span.shrink_to_hi(),
                    }.into_diagnostic(&self.sess.span_diagnostic));
                }
                Ok(_) => (),
                Err(err) => err.cancel(),
            }
        }
        Ok(())
    }

    /// Parses a block or unsafe block.
    pub(super) fn parse_expr_block(
        &mut self,
        opt_label: Option<Label>