use super::errors::{
    AsyncGeneratorsNotSupported, AsyncNonMoveClosureNotSupported, AwaitOnlyInAsyncFnAndBlocks,
    BaseExpressionDoubleDot, ClosureCannotBeStatic, FunctionalRecordUpdateDestructuringAssignemnt,
    GeneratorTooManyParameters, InclusiveRangeWithNoEnd, NotSupportedForLifetimeBinderAsyncClosure,
    RustcBoxAttributeError, UnderscoreExprLhsAssign,
};
use super::ResolverAstLoweringExt;
use super::{ImplTraitContext, LoweringContext, ParamMode, ParenthesizedGenericArgs};
use crate::{FnDeclKind, ImplTraitPosition};
use rustc_ast::attr;
use rustc_ast::ptr::P as AstP;
use rustc_ast::*;
use rustc_data_structures::stack::ensure_sufficient_stack;
use rustc_hir as hir;
use rustc_hir::def::Res;
use rustc_hir::definitions::DefPathData;
use rustc_session::errors::report_lit_error;
use rustc_span::source_map::{respan, DesugaringKind, Span, Spanned};
use rustc_span::symbol::{sym, Ident};
use rustc_span::DUMMY_SP;
use thin_vec::thin_vec;

impl<'hir> LoweringContext<'_, 'hir> {
    /// Lowers a slice of AST expressions into an arena-allocated slice of HIR
    /// expressions.
    // NOTE(review): the element type reads `&[AstP]` here — the generic
    // argument appears to have been stripped during extraction; presumably
    // `&[AstP<Expr>]`. Confirm against the original source.
    fn lower_exprs(&mut self, exprs: &[AstP]) -> &'hir [hir::Expr<'hir>] {
        self.arena.alloc_from_iter(exprs.iter().map(|x| self.lower_expr_mut(x)))
    }

    /// Lowers a single AST expression and allocates the result in the arena,
    /// returning a shared reference to it.
    pub(super) fn lower_expr(&mut self, e: &Expr) -> &'hir hir::Expr<'hir> {
        self.arena.alloc(self.lower_expr_mut(e))
    }

    /// Lowers a single AST expression into an owned HIR expression.
    ///
    /// This is the main dispatch point for expression lowering: it handles
    /// every `ExprKind` variant. Lowering is recursive, so the whole body is
    /// wrapped in `ensure_sufficient_stack` to grow the stack for deeply
    /// nested expressions instead of overflowing.
    pub(super) fn lower_expr_mut(&mut self, e: &Expr) -> hir::Expr<'hir> {
        ensure_sufficient_stack(|| {
            // Two variants are handled up front because they do not produce a
            // HIR node whose `HirId` corresponds to `e.id`; everything else
            // falls through to the main `match` below.
            match &e.kind {
                // Parenthesis expression does not have a HirId and is handled specially.
                ExprKind::Paren(ex) => {
                    let mut ex = self.lower_expr_mut(ex);
                    // Include parens in span, but only if it is a super-span.
                    if e.span.contains(ex.span) {
                        ex.span = self.lower_span(e.span);
                    }
                    // Merge attributes into the inner expression.
                    if !e.attrs.is_empty() {
                        // The inner expression may already carry lowered
                        // attributes (keyed by its local id); prepend the
                        // paren expression's attributes to them.
                        let old_attrs =
                            self.attrs.get(&ex.hir_id.local_id).map(|la| *la).unwrap_or(&[]);
                        self.attrs.insert(
                            ex.hir_id.local_id,
                            &*self.arena.alloc_from_iter(
                                e.attrs
                                    .iter()
                                    .map(|a| self.lower_attr(a))
                                    .chain(old_attrs.iter().cloned()),
                            ),
                        );
                    }
                    return ex;
                }
                // Desugar `ExprForLoop`
                // from: `[opt_ident]: for <pat> in <head> <body>`
                // (NOTE(review): the pattern/head/body placeholders in this
                // comment were stripped by extraction; restored here.)
                //
                // This also needs special handling because the HirId of the returned `hir::Expr` will not
                // correspond to the `e.id`, so `lower_expr_for` handles attribute lowering itself.
                ExprKind::ForLoop(pat, head, body, opt_label) => {
                    return self.lower_expr_for(e, pat, head, body, *opt_label);
                }
                _ => (),
            }

            // From here on, the produced HIR node's id corresponds to `e.id`,
            // so attributes can be lowered against it directly.
            let hir_id = self.lower_node_id(e.id);
            self.lower_attrs(hir_id, &e.attrs);

            let kind = match &e.kind {
                ExprKind::Box(inner) => hir::ExprKind::Box(self.lower_expr(inner)),
                ExprKind::Array(exprs) => hir::ExprKind::Array(self.lower_exprs(exprs)),
                ExprKind::ConstBlock(anon_const) => {
                    let anon_const = self.lower_anon_const(anon_const);
                    hir::ExprKind::ConstBlock(anon_const)
                }
                ExprKind::Repeat(expr, count) => {
                    let expr = self.lower_expr(expr);
                    let count = self.lower_array_length(count);
                    hir::ExprKind::Repeat(expr, count)
                }
                ExprKind::Tup(elts) => hir::ExprKind::Tup(self.lower_exprs(elts)),
                ExprKind::Call(f, args) => {
                    // `#[rustc_box] f(x)` is the compiler-internal way to
                    // build a `Box`; it must be the only attribute and the
                    // call must have exactly one argument.
                    if e.attrs.get(0).map_or(false, |a| a.has_name(sym::rustc_box)) {
                        if let [inner] = &args[..]
                            && e.attrs.len() == 1
                        {
                            let kind = hir::ExprKind::Box(self.lower_expr(&inner));
                            // Early return: the attribute is consumed here and
                            // must not be re-lowered by the fallthrough path.
                            return hir::Expr { hir_id, kind, span: self.lower_span(e.span) };
                        } else {
                            // Misused `#[rustc_box]`: report and lower to an
                            // error expression rather than ICE-ing.
                            self.tcx.sess.emit_err(RustcBoxAttributeError { span: e.span });
                            hir::ExprKind::Err
                        }
                    } else if let Some(legacy_args) = self.resolver.legacy_const_generic_args(f) {
                        // Calls to functions with `#[rustc_legacy_const_generics]`
                        // move selected value arguments into const generic args.
                        self.lower_legacy_const_generics((**f).clone(), args.clone(), &legacy_args)
                    } else {
                        let f = self.lower_expr(f);
                        hir::ExprKind::Call(f, self.lower_exprs(args))
                    }
                }
                ExprKind::MethodCall(box MethodCall { seg, receiver, args, span }) => {
                    // Parenthesized generic args (`m::<(T)>`) are an error in
                    // method position.
                    let hir_seg = self.arena.alloc(self.lower_path_segment(
                        e.span,
                        seg,
                        ParamMode::Optional,
                        ParenthesizedGenericArgs::Err,
                        &ImplTraitContext::Disallowed(ImplTraitPosition::Path),
                    ));
                    let receiver = self.lower_expr(receiver);
                    let args =
                        self.arena.alloc_from_iter(args.iter().map(|x| self.lower_expr_mut(x)));
                    hir::ExprKind::MethodCall(hir_seg, receiver, args, self.lower_span(*span))
                }
                ExprKind::Binary(binop, lhs, rhs) => {
                    let binop = self.lower_binop(*binop);
                    let lhs = self.lower_expr(lhs);
                    let rhs = self.lower_expr(rhs);
                    hir::ExprKind::Binary(binop, lhs, rhs)
                }
                ExprKind::Unary(op, ohs) => {
                    let op = self.lower_unop(*op);
                    let ohs = self.lower_expr(ohs);
                    hir::ExprKind::Unary(op, ohs)
                }
                ExprKind::Lit(token_lit) => {
                    // Convert the token-level literal to a semantic literal;
                    // malformed literals are reported and replaced by
                    // `LitKind::Err` so lowering can continue.
                    let lit_kind = match LitKind::from_token_lit(*token_lit) {
                        Ok(lit_kind) => lit_kind,
                        Err(err) => {
                            report_lit_error(&self.tcx.sess.parse_sess, err, *token_lit, e.span);
                            LitKind::Err
                        }
                    };
                    hir::ExprKind::Lit(respan(self.lower_span(e.span), lit_kind))
                }
                // `include_bytes!` output becomes a cooked byte-string literal.
                ExprKind::IncludedBytes(bytes) => hir::ExprKind::Lit(respan(
                    self.lower_span(e.span),
                    LitKind::ByteStr(bytes.clone(), StrStyle::Cooked),
                )),
                ExprKind::Cast(expr, ty) => {
                    let expr = self.lower_expr(expr);
                    let ty =
                        self.lower_ty(ty, &ImplTraitContext::Disallowed(ImplTraitPosition::Type));
                    hir::ExprKind::Cast(expr, ty)
                }
                ExprKind::Type(expr, ty) => {
                    let expr = self.lower_expr(expr);
                    let ty =
                        self.lower_ty(ty, &ImplTraitContext::Disallowed(ImplTraitPosition::Type));
                    hir::ExprKind::Type(expr, ty)
                }
                ExprKind::AddrOf(k, m, ohs) => {
                    let ohs = self.lower_expr(ohs);
                    hir::ExprKind::AddrOf(*k, *m, ohs)
                }
                ExprKind::Let(pat, scrutinee, span) => {
                    hir::ExprKind::Let(self.arena.alloc(hir::Let {
                        hir_id: self.next_id(),
                        span: self.lower_span(*span),
                        pat: self.lower_pat(pat),
                        // `let` expressions in conditions never carry an
                        // explicit type ascription.
                        ty: None,
                        init: self.lower_expr(scrutinee),
                    }))
                }
                ExprKind::If(cond, then, else_opt) => {
                    self.lower_expr_if(cond, then, else_opt.as_deref())
                }
                // `while` is desugared inside a fresh loop scope; the span is
                // marked so diagnostics know this loop came from a `while`.
                ExprKind::While(cond, body, opt_label) => self.with_loop_scope(e.id, |this| {
                    let span = this.mark_span_with_reason(DesugaringKind::WhileLoop, e.span, None);
                    this.lower_expr_while_in_loop_scope(span, cond, body, *opt_label)
                }),
                ExprKind::Loop(body, opt_label, span) => self.with_loop_scope(e.id, |this| {
                    hir::ExprKind::Loop(
                        this.lower_block(body, false),
                        this.lower_label(*opt_label),
                        hir::LoopSource::Loop,
                        this.lower_span(*span),
                    )
                }),
                ExprKind::TryBlock(body) => self.lower_expr_try_block(body),
                ExprKind::Match(expr, arms) => hir::ExprKind::Match(
                    self.lower_expr(expr),
                    self.arena.alloc_from_iter(arms.iter().map(|x| self.lower_arm(x))),
                    hir::MatchSource::Normal,
                ),
                // `async { .. }` blocks lower to a generator-backed closure.
                ExprKind::Async(capture_clause, closure_node_id, block) => self.make_async_expr(
                    *capture_clause,
                    hir_id,
                    *closure_node_id,
                    None,
                    e.span,
                    hir::AsyncGeneratorKind::Block,
                    |this| this.with_new_scopes(|this| this.lower_block_expr(block)),
                ),
                ExprKind::Await(expr) => {
                    // Compute the span of the `.await` itself (including any
                    // whitespace between the awaited expression and the dot)
                    // for use in diagnostics.
                    let dot_await_span = if expr.span.hi() < e.span.hi() {
                        let span_with_whitespace = self
                            .tcx
                            .sess
                            .source_map()
                            .span_extend_while(expr.span, char::is_whitespace)
                            .unwrap_or(expr.span);
                        span_with_whitespace.shrink_to_hi().with_hi(e.span.hi())
                    } else {
                        // this is a recovered `await expr`
                        e.span
                    };
                    self.lower_expr_await(dot_await_span, expr)
                }
                ExprKind::Closure(box Closure {
                    binder,
                    capture_clause,
                    constness,
                    asyncness,
                    movability,
                    fn_decl,
                    body,
                    fn_decl_span,
                    fn_arg_span,
                }) => {
                    // Async and non-async closures lower through different
                    // paths (async closures wrap the body in a generator).
                    if let Async::Yes {
                        closure_id, ..
                    } = asyncness
                    {
                        self.lower_expr_async_closure(
                            binder,
                            *capture_clause,
                            e.id,
                            hir_id,
                            *closure_id,
                            fn_decl,
                            body,
                            *fn_decl_span,
                            *fn_arg_span,
                        )
                    } else {
                        self.lower_expr_closure(
                            binder,
                            *capture_clause,
                            e.id,
                            *constness,
                            *movability,
                            fn_decl,
                            body,
                            *fn_decl_span,
                            *fn_arg_span,
                        )
                    }
                }
                ExprKind::Block(blk, opt_label) => {
                    let opt_label = self.lower_label(*opt_label);
                    hir::ExprKind::Block(self.lower_block(blk, opt_label.is_some()), opt_label)
                }
                ExprKind::Assign(el, er, span) => self.lower_expr_assign(el, er, *span, e.span),
                ExprKind::AssignOp(op, el, er) => hir::ExprKind::AssignOp(
                    self.lower_binop(*op),
                    self.lower_expr(el),
                    self.lower_expr(er),
                ),
                ExprKind::Field(el, ident) => {
                    hir::ExprKind::Field(self.lower_expr(el), self.lower_ident(*ident))
                }
                ExprKind::Index(el, er) => {
                    hir::ExprKind::Index(self.lower_expr(el), self.lower_expr(er))
                }
                // `a..=b` with both endpoints present has its own desugaring;
                // every other range form goes through `lower_expr_range`.
                ExprKind::Range(Some(e1), Some(e2), RangeLimits::Closed) => {
                    self.lower_expr_range_closed(e.span, e1, e2)
                }
                ExprKind::Range(e1, e2, lims) => {
                    self.lower_expr_range(e.span, e1.as_deref(), e2.as_deref(), *lims)
                }
                // A bare `_` expression is only meaningful as an assignment
                // LHS, which is handled in `lower_expr_assign`; here it is an
                // error.
                ExprKind::Underscore => {
                    self.tcx.sess.emit_err(UnderscoreExprLhsAssign { span: e.span });
                    hir::ExprKind::Err
                }
                ExprKind::Path(qself, path) => {
                    let qpath = self.lower_qpath(
                        e.id,
                        qself,
                        path,
                        ParamMode::Optional,
                        &ImplTraitContext::Disallowed(ImplTraitPosition::Path),
                    );
                    hir::ExprKind::Path(qpath)
                }
                ExprKind::Break(opt_label, opt_expr) => {
                    let opt_expr = opt_expr.as_ref().map(|x| self.lower_expr(x));
                    hir::ExprKind::Break(self.lower_jump_destination(e.id, *opt_label), opt_expr)
                }
                ExprKind::Continue(opt_label) => {
                    hir::ExprKind::Continue(self.lower_jump_destination(e.id, *opt_label))
                }
                ExprKind::Ret(e) => {
                    let e = e.as_ref().map(|x| self.lower_expr(x));
                    hir::ExprKind::Ret(e)
                }
                ExprKind::Yeet(sub_expr) => self.lower_expr_yeet(e.span, sub_expr.as_deref()),
                ExprKind::InlineAsm(asm) => {
                    hir::ExprKind::InlineAsm(self.lower_inline_asm(e.span, asm))
                }
                ExprKind::Struct(se) => {
                    // Lower the functional-update tail (`..base`), if any.
                    let rest = match &se.rest {
                        StructRest::Base(e)
                            => Some(self.lower_expr(e)),
                        StructRest::Rest(sp) => {
                            // A bare `..` with no base expression is an error;
                            // recover with an error expression.
                            self.tcx.sess.emit_err(BaseExpressionDoubleDot { span: *sp });
                            Some(&*self.arena.alloc(self.expr_err(*sp)))
                        }
                        StructRest::None => None,
                    };
                    hir::ExprKind::Struct(
                        self.arena.alloc(self.lower_qpath(
                            e.id,
                            &se.qself,
                            &se.path,
                            ParamMode::Optional,
                            &ImplTraitContext::Disallowed(ImplTraitPosition::Path),
                        )),
                        self.arena
                            .alloc_from_iter(se.fields.iter().map(|x| self.lower_expr_field(x))),
                        rest,
                    )
                }
                ExprKind::Yield(opt_expr) => self.lower_expr_yield(e.span, opt_expr.as_deref()),
                ExprKind::Err => hir::ExprKind::Err,
                ExprKind::Try(sub_expr) => self.lower_expr_try(e.span, sub_expr),
                // Handled by the early-return match at the top of this
                // function, or expanded away before lowering.
                ExprKind::Paren(_) | ExprKind::ForLoop(..) => unreachable!("already handled"),
                ExprKind::MacCall(_) => panic!("{:?} shouldn't exist here", e.span),
            };

            hir::Expr { hir_id, kind, span: self.lower_span(e.span) }
        })
    }

    /// Maps an AST unary operator to its HIR counterpart (1:1).
    fn lower_unop(&mut self, u: UnOp) -> hir::UnOp {
        match u {
            UnOp::Deref => hir::UnOp::Deref,
            UnOp::Not => hir::UnOp::Not,
            UnOp::Neg => hir::UnOp::Neg,
        }
    }

    /// Maps an AST binary operator to its HIR counterpart (1:1), lowering the
    /// operator's span as well.
    fn lower_binop(&mut self, b: BinOp) -> hir::BinOp {
        Spanned {
            node: match b.node {
                BinOpKind::Add => hir::BinOpKind::Add,
                BinOpKind::Sub => hir::BinOpKind::Sub,
                BinOpKind::Mul => hir::BinOpKind::Mul,
                BinOpKind::Div => hir::BinOpKind::Div,
                BinOpKind::Rem => hir::BinOpKind::Rem,
                BinOpKind::And => hir::BinOpKind::And,
                BinOpKind::Or => hir::BinOpKind::Or,
                BinOpKind::BitXor => hir::BinOpKind::BitXor,
                BinOpKind::BitAnd => hir::BinOpKind::BitAnd,
                BinOpKind::BitOr => hir::BinOpKind::BitOr,
                BinOpKind::Shl => hir::BinOpKind::Shl,
                BinOpKind::Shr => hir::BinOpKind::Shr,
                BinOpKind::Eq => hir::BinOpKind::Eq,
                BinOpKind::Lt => hir::BinOpKind::Lt,
                BinOpKind::Le => hir::BinOpKind::Le,
                BinOpKind::Ne => hir::BinOpKind::Ne,
                BinOpKind::Ge => hir::BinOpKind::Ge,
                BinOpKind::Gt => hir::BinOpKind::Gt,
            },
            span: self.lower_span(b.span),
        }
    }

    /// Lowers a call to a `#[rustc_legacy_const_generics]` function: the
    /// value arguments at the indices in `legacy_args_idx` are moved out of
    /// the argument list and turned into const generic arguments on the
    /// callee path, then the rewritten call is lowered normally.
    // NOTE(review): the `args` type reads `Vec>` here — the inner generic
    // argument appears to have been stripped during extraction; presumably
    // `Vec<P<Expr>>`. Confirm against the original source.
    fn lower_legacy_const_generics(
        &mut self,
        mut f: Expr,
        args: Vec>,
        legacy_args_idx: &[usize],
    ) -> hir::ExprKind<'hir> {
        // The resolver only reports legacy const generics for plain paths.
        let ExprKind::Path(None, path) = &mut f.kind else
        {
            unreachable!();
        };

        // Split the arguments into const generics and normal arguments
        let mut real_args = vec![];
        let mut generic_args = vec![];
        for (idx, arg) in args.into_iter().enumerate() {
            if legacy_args_idx.contains(&idx) {
                let parent_def_id = self.current_hir_id_owner;
                let node_id = self.next_node_id();

                // Add a definition for the in-band const def.
                self.create_def(parent_def_id.def_id, node_id, DefPathData::AnonConst, f.span);

                let anon_const = AnonConst { id: node_id, value: arg };
                generic_args.push(AngleBracketedArg::Arg(GenericArg::Const(anon_const)));
            } else {
                real_args.push(arg);
            }
        }

        // Add generic args to the last element of the path.
        let last_segment = path.segments.last_mut().unwrap();
        // The user cannot have written explicit generic args on a legacy
        // const generics call, so this slot must be empty.
        assert!(last_segment.args.is_none());
        last_segment.args = Some(AstP(GenericArgs::AngleBracketed(AngleBracketedArgs {
            span: DUMMY_SP,
            args: generic_args,
        })));

        // Now lower everything as normal.
        let f = self.lower_expr(&f);
        hir::ExprKind::Call(f, self.lower_exprs(&real_args))
    }

    /// Lowers an `if` expression. The condition is wrapped in a terminating
    /// scope via `lower_cond`; the `then` block becomes a block expression
    /// and the optional `else` expression is lowered as-is.
    fn lower_expr_if(
        &mut self,
        cond: &Expr,
        then: &Block,
        else_opt: Option<&Expr>,
    ) -> hir::ExprKind<'hir> {
        let lowered_cond = self.lower_cond(cond);
        let then_expr = self.lower_block_expr(then);
        if let Some(rslt) = else_opt {
            hir::ExprKind::If(
                lowered_cond,
                self.arena.alloc(then_expr),
                Some(self.lower_expr(rslt)),
            )
        } else {
            hir::ExprKind::If(lowered_cond, self.arena.alloc(then_expr), None)
        }
    }

    // Lowers a condition (i.e. `cond` in `if cond` or `while cond`), wrapping it in a terminating scope
    // so that temporaries created in the condition don't live beyond it.
    fn lower_cond(&mut self, cond: &Expr) -> &'hir hir::Expr<'hir> {
        // Returns `true` if the expression contains a `let` anywhere in a
        // chain of `&&`/binary operators (i.e. it is a let-chain).
        fn has_let_expr(expr: &Expr) -> bool {
            match &expr.kind {
                ExprKind::Binary(_, lhs, rhs) => has_let_expr(lhs) || has_let_expr(rhs),
                ExprKind::Let(..) => true,
                _ => false,
            }
        }

        // We have to take special care for `let` exprs in the condition, e.g. in
        // `if let pat = val` or `if foo && let pat = val`, as we _do_ want `val` to live beyond the
        // condition in this case.
// // In order to mantain the drop behavior for the non `let` parts of the condition, // we still wrap them in terminating scopes, e.g. `if foo && let pat = val` essentially // gets transformed into `if { let _t = foo; _t } && let pat = val` match &cond.kind { ExprKind::Binary(op @ Spanned { node: ast::BinOpKind::And, .. }, lhs, rhs) if has_let_expr(cond) => { let op = self.lower_binop(*op); let lhs = self.lower_cond(lhs); let rhs = self.lower_cond(rhs); self.arena.alloc(self.expr(cond.span, hir::ExprKind::Binary(op, lhs, rhs))) } ExprKind::Let(..) => self.lower_expr(cond), _ => { let cond = self.lower_expr(cond); let reason = DesugaringKind::CondTemporary; let span_block = self.mark_span_with_reason(reason, cond.span, None); self.expr_drop_temps(span_block, cond) } } } // We desugar: `'label: while $cond $body` into: // // ``` // 'label: loop { // if { let _t = $cond; _t } { // $body // } // else { // break; // } // } // ``` // // Wrap in a construct equivalent to `{ let _t = $cond; _t }` // to preserve drop semantics since `while $cond { ... }` does not // let temporaries live outside of `cond`. fn lower_expr_while_in_loop_scope( &mut self, span: Span, cond: &Expr, body: &Block, opt_label: Option