From 698f8c2f01ea549d77d7dc3338a12e04c11057b9 Mon Sep 17 00:00:00 2001
From: Daniel Baumann
Date: Wed, 17 Apr 2024 14:02:58 +0200
Subject: Adding upstream version 1.64.0+dfsg1.

Signed-off-by: Daniel Baumann
---
 compiler/rustc_ast_lowering/src/expr.rs | 1914 +++++++++++++++++++++++++++++++
 1 file changed, 1914 insertions(+)
 create mode 100644 compiler/rustc_ast_lowering/src/expr.rs

diff --git a/compiler/rustc_ast_lowering/src/expr.rs b/compiler/rustc_ast_lowering/src/expr.rs
new file mode 100644
index 000000000..fb6715ff1
--- /dev/null
+++ b/compiler/rustc_ast_lowering/src/expr.rs
@@ -0,0 +1,1914 @@
+use super::ResolverAstLoweringExt;
+use super::{ImplTraitContext, LoweringContext, ParamMode, ParenthesizedGenericArgs};
+use crate::{FnDeclKind, ImplTraitPosition};
+
+use rustc_ast::attr;
+use rustc_ast::ptr::P as AstP;
+use rustc_ast::*;
+use rustc_data_structures::stack::ensure_sufficient_stack;
+use rustc_data_structures::thin_vec::ThinVec;
+use rustc_errors::struct_span_err;
+use rustc_hir as hir;
+use rustc_hir::def::Res;
+use rustc_hir::definitions::DefPathData;
+use rustc_span::source_map::{respan, DesugaringKind, Span, Spanned};
+use rustc_span::symbol::{sym, Ident};
+use rustc_span::DUMMY_SP;
+
+impl<'hir> LoweringContext<'_, 'hir> {
+    fn lower_exprs(&mut self, exprs: &[AstP<Expr>]) -> &'hir [hir::Expr<'hir>] {
+        self.arena.alloc_from_iter(exprs.iter().map(|x| self.lower_expr_mut(x)))
+    }
+
+    pub(super) fn lower_expr(&mut self, e: &Expr) -> &'hir hir::Expr<'hir> {
+        self.arena.alloc(self.lower_expr_mut(e))
+    }
+
+    pub(super) fn lower_expr_mut(&mut self, e: &Expr) -> hir::Expr<'hir> {
+        ensure_sufficient_stack(|| {
+            let kind = match e.kind {
+                ExprKind::Box(ref inner) => hir::ExprKind::Box(self.lower_expr(inner)),
+                ExprKind::Array(ref exprs) => hir::ExprKind::Array(self.lower_exprs(exprs)),
+                ExprKind::ConstBlock(ref anon_const) => {
+                    let anon_const = self.lower_anon_const(anon_const);
+                    hir::ExprKind::ConstBlock(anon_const)
+                }
+                ExprKind::Repeat(ref expr, ref count) => {
+                    let expr = self.lower_expr(expr);
+                    let count = self.lower_array_length(count);
+                    hir::ExprKind::Repeat(expr, count)
+                }
+                ExprKind::Tup(ref elts) => hir::ExprKind::Tup(self.lower_exprs(elts)),
+                ExprKind::Call(ref f, ref args) => {
+                    if e.attrs.get(0).map_or(false, |a| a.has_name(sym::rustc_box)) {
+                        if let [inner] = &args[..] && e.attrs.len() == 1 {
+                            let kind = hir::ExprKind::Box(self.lower_expr(&inner));
+                            let hir_id = self.lower_node_id(e.id);
+                            return hir::Expr { hir_id, kind, span: self.lower_span(e.span) };
+                        } else {
+                            self.tcx.sess
+                                .struct_span_err(
+                                    e.span,
+                                    "#[rustc_box] requires precisely one argument \
+                                    and no other attributes are allowed",
+                                )
+                                .emit();
+                            hir::ExprKind::Err
+                        }
+                    } else if let Some(legacy_args) = self.resolver.legacy_const_generic_args(f) {
+                        self.lower_legacy_const_generics((**f).clone(), args.clone(), &legacy_args)
+                    } else {
+                        let f = self.lower_expr(f);
+                        hir::ExprKind::Call(f, self.lower_exprs(args))
+                    }
+                }
+                ExprKind::MethodCall(ref seg, ref args, span) => {
+                    let hir_seg = self.arena.alloc(self.lower_path_segment(
+                        e.span,
+                        seg,
+                        ParamMode::Optional,
+                        ParenthesizedGenericArgs::Err,
+                        ImplTraitContext::Disallowed(ImplTraitPosition::Path),
+                    ));
+                    let args = self.lower_exprs(args);
+                    hir::ExprKind::MethodCall(hir_seg, args, self.lower_span(span))
+                }
+                ExprKind::Binary(binop, ref lhs, ref rhs) => {
+                    let binop = self.lower_binop(binop);
+                    let lhs = self.lower_expr(lhs);
+                    let rhs = self.lower_expr(rhs);
+                    hir::ExprKind::Binary(binop, lhs, rhs)
+                }
+                ExprKind::Unary(op, ref ohs) => {
+                    let op = self.lower_unop(op);
+                    let ohs = self.lower_expr(ohs);
+                    hir::ExprKind::Unary(op, ohs)
+                }
+                ExprKind::Lit(ref l) => {
+                    hir::ExprKind::Lit(respan(self.lower_span(l.span), l.kind.clone()))
+                }
+                ExprKind::Cast(ref expr, ref ty) => {
+                    let expr = self.lower_expr(expr);
+                    let ty =
+                        self.lower_ty(ty, ImplTraitContext::Disallowed(ImplTraitPosition::Type));
+                    hir::ExprKind::Cast(expr, ty)
+                }
+                ExprKind::Type(ref expr, ref ty) => {
+                    let expr = self.lower_expr(expr);
+                    let ty =
+                        self.lower_ty(ty, ImplTraitContext::Disallowed(ImplTraitPosition::Type));
+                    hir::ExprKind::Type(expr, ty)
+                }
+                ExprKind::AddrOf(k, m, ref ohs) => {
+                    let ohs = self.lower_expr(ohs);
+                    hir::ExprKind::AddrOf(k, m, ohs)
+                }
+                ExprKind::Let(ref pat, ref scrutinee, span) => {
+                    hir::ExprKind::Let(self.arena.alloc(hir::Let {
+                        hir_id: self.next_id(),
+                        span: self.lower_span(span),
+                        pat: self.lower_pat(pat),
+                        ty: None,
+                        init: self.lower_expr(scrutinee),
+                    }))
+                }
+                ExprKind::If(ref cond, ref then, ref else_opt) => {
+                    self.lower_expr_if(cond, then, else_opt.as_deref())
+                }
+                ExprKind::While(ref cond, ref body, opt_label) => {
+                    self.with_loop_scope(e.id, |this| {
+                        let span =
+                            this.mark_span_with_reason(DesugaringKind::WhileLoop, e.span, None);
+                        this.lower_expr_while_in_loop_scope(span, cond, body, opt_label)
+                    })
+                }
+                ExprKind::Loop(ref body, opt_label) => self.with_loop_scope(e.id, |this| {
+                    hir::ExprKind::Loop(
+                        this.lower_block(body, false),
+                        this.lower_label(opt_label),
+                        hir::LoopSource::Loop,
+                        DUMMY_SP,
+                    )
+                }),
+                ExprKind::TryBlock(ref body) => self.lower_expr_try_block(body),
+                ExprKind::Match(ref expr, ref arms) => hir::ExprKind::Match(
+                    self.lower_expr(expr),
+                    self.arena.alloc_from_iter(arms.iter().map(|x| self.lower_arm(x))),
+                    hir::MatchSource::Normal,
+                ),
+                ExprKind::Async(capture_clause, closure_node_id, ref block) => self
+                    .make_async_expr(
+                        capture_clause,
+                        closure_node_id,
+                        None,
+                        block.span,
+                        hir::AsyncGeneratorKind::Block,
+                        |this| this.with_new_scopes(|this| this.lower_block_expr(block)),
+                    ),
+                ExprKind::Await(ref expr) => {
+                    let span = if expr.span.hi() < e.span.hi() {
+                        expr.span.shrink_to_hi().with_hi(e.span.hi())
+                    } else {
+                        // this is a recovered `await expr`
+                        e.span
+                    };
+                    self.lower_expr_await(span, expr)
+                }
+                ExprKind::Closure(
+                    ref binder,
+                    capture_clause,
+                    asyncness,
+                    movability,
+                    ref decl,
+                    ref body,
+                    fn_decl_span,
+                ) => {
+                    if let Async::Yes { closure_id, .. } = asyncness {
+                        self.lower_expr_async_closure(
+                            binder,
+                            capture_clause,
+                            e.id,
+                            closure_id,
+                            decl,
+                            body,
+                            fn_decl_span,
+                        )
+                    } else {
+                        self.lower_expr_closure(
+                            binder,
+                            capture_clause,
+                            e.id,
+                            movability,
+                            decl,
+                            body,
+                            fn_decl_span,
+                        )
+                    }
+                }
+                ExprKind::Block(ref blk, opt_label) => {
+                    let opt_label = self.lower_label(opt_label);
+                    hir::ExprKind::Block(self.lower_block(blk, opt_label.is_some()), opt_label)
+                }
+                ExprKind::Assign(ref el, ref er, span) => {
+                    self.lower_expr_assign(el, er, span, e.span)
+                }
+                ExprKind::AssignOp(op, ref el, ref er) => hir::ExprKind::AssignOp(
+                    self.lower_binop(op),
+                    self.lower_expr(el),
+                    self.lower_expr(er),
+                ),
+                ExprKind::Field(ref el, ident) => {
+                    hir::ExprKind::Field(self.lower_expr(el), self.lower_ident(ident))
+                }
+                ExprKind::Index(ref el, ref er) => {
+                    hir::ExprKind::Index(self.lower_expr(el), self.lower_expr(er))
+                }
+                ExprKind::Range(Some(ref e1), Some(ref e2), RangeLimits::Closed) => {
+                    self.lower_expr_range_closed(e.span, e1, e2)
+                }
+                ExprKind::Range(ref e1, ref e2, lims) => {
+                    self.lower_expr_range(e.span, e1.as_deref(), e2.as_deref(), lims)
+                }
+                ExprKind::Underscore => {
+                    self.tcx
+                        .sess.struct_span_err(
+                            e.span,
+                            "in expressions, `_` can only be used on the left-hand side of an assignment",
+                        )
+                        .span_label(e.span, "`_` not allowed here")
+                        .emit();
+                    hir::ExprKind::Err
+                }
+                ExprKind::Path(ref qself, ref path) => {
+                    let qpath = self.lower_qpath(
+                        e.id,
+                        qself,
+                        path,
+                        ParamMode::Optional,
+                        ImplTraitContext::Disallowed(ImplTraitPosition::Path),
+                    );
+                    hir::ExprKind::Path(qpath)
+                }
+                ExprKind::Break(opt_label, ref opt_expr) => {
+                    let opt_expr = opt_expr.as_ref().map(|x| self.lower_expr(x));
+                    hir::ExprKind::Break(self.lower_jump_destination(e.id, opt_label), opt_expr)
+                }
+                ExprKind::Continue(opt_label) => {
+                    hir::ExprKind::Continue(self.lower_jump_destination(e.id, opt_label))
+                }
+                ExprKind::Ret(ref e) => {
+                    let e = e.as_ref().map(|x| self.lower_expr(x));
+                    hir::ExprKind::Ret(e)
+                }
+                ExprKind::Yeet(ref sub_expr) => self.lower_expr_yeet(e.span, sub_expr.as_deref()),
+                ExprKind::InlineAsm(ref asm) => {
+                    hir::ExprKind::InlineAsm(self.lower_inline_asm(e.span, asm))
+                }
+                ExprKind::Struct(ref se) => {
+                    let rest = match &se.rest {
+                        StructRest::Base(e) => Some(self.lower_expr(e)),
+                        StructRest::Rest(sp) => {
+                            self.tcx
+                                .sess
+                                .struct_span_err(*sp, "base expression required after `..`")
+                                .span_label(*sp, "add a base expression here")
+                                .emit();
+                            Some(&*self.arena.alloc(self.expr_err(*sp)))
+                        }
+                        StructRest::None => None,
+                    };
+                    hir::ExprKind::Struct(
+                        self.arena.alloc(self.lower_qpath(
+                            e.id,
+                            &se.qself,
+                            &se.path,
+                            ParamMode::Optional,
+                            ImplTraitContext::Disallowed(ImplTraitPosition::Path),
+                        )),
+                        self.arena
+                            .alloc_from_iter(se.fields.iter().map(|x| self.lower_expr_field(x))),
+                        rest,
+                    )
+                }
+                ExprKind::Yield(ref opt_expr) => self.lower_expr_yield(e.span, opt_expr.as_deref()),
+                ExprKind::Err => hir::ExprKind::Err,
+                ExprKind::Try(ref sub_expr) => self.lower_expr_try(e.span, sub_expr),
+                ExprKind::Paren(ref ex) => {
+                    let mut ex = self.lower_expr_mut(ex);
+                    // Include parens in span, but only if it is a super-span.
+                    if e.span.contains(ex.span) {
+                        ex.span = self.lower_span(e.span);
+                    }
+                    // Merge attributes into the inner expression.
+                    if !e.attrs.is_empty() {
+                        let old_attrs =
+                            self.attrs.get(&ex.hir_id.local_id).map(|la| *la).unwrap_or(&[]);
+                        self.attrs.insert(
+                            ex.hir_id.local_id,
+                            &*self.arena.alloc_from_iter(
+                                e.attrs
+                                    .iter()
+                                    .map(|a| self.lower_attr(a))
+                                    .chain(old_attrs.iter().cloned()),
+                            ),
+                        );
+                    }
+                    return ex;
+                }
+
+                // Desugar `ExprForLoop`
+                // from: `[opt_ident]: for <pat> in <head> <body>`
+                ExprKind::ForLoop(ref pat, ref head, ref body, opt_label) => {
+                    return self.lower_expr_for(e, pat, head, body, opt_label);
+                }
+                ExprKind::MacCall(_) => panic!("{:?} shouldn't exist here", e.span),
+            };
+
+            let hir_id = self.lower_node_id(e.id);
+            self.lower_attrs(hir_id, &e.attrs);
+            hir::Expr { hir_id, kind, span: self.lower_span(e.span) }
+        })
+    }
+
+    fn lower_unop(&mut self, u: UnOp) -> hir::UnOp {
+        match u {
+            UnOp::Deref => hir::UnOp::Deref,
+            UnOp::Not => hir::UnOp::Not,
+            UnOp::Neg => hir::UnOp::Neg,
+        }
+    }
+
+    fn lower_binop(&mut self, b: BinOp) -> hir::BinOp {
+        Spanned {
+            node: match b.node {
+                BinOpKind::Add => hir::BinOpKind::Add,
+                BinOpKind::Sub => hir::BinOpKind::Sub,
+                BinOpKind::Mul => hir::BinOpKind::Mul,
+                BinOpKind::Div => hir::BinOpKind::Div,
+                BinOpKind::Rem => hir::BinOpKind::Rem,
+                BinOpKind::And => hir::BinOpKind::And,
+                BinOpKind::Or => hir::BinOpKind::Or,
+                BinOpKind::BitXor => hir::BinOpKind::BitXor,
+                BinOpKind::BitAnd => hir::BinOpKind::BitAnd,
+                BinOpKind::BitOr => hir::BinOpKind::BitOr,
+                BinOpKind::Shl => hir::BinOpKind::Shl,
+                BinOpKind::Shr => hir::BinOpKind::Shr,
+                BinOpKind::Eq => hir::BinOpKind::Eq,
+                BinOpKind::Lt => hir::BinOpKind::Lt,
+                BinOpKind::Le => hir::BinOpKind::Le,
+                BinOpKind::Ne => hir::BinOpKind::Ne,
+                BinOpKind::Ge => hir::BinOpKind::Ge,
+                BinOpKind::Gt => hir::BinOpKind::Gt,
+            },
+            span: self.lower_span(b.span),
+        }
+    }
+
+    fn lower_legacy_const_generics(
+        &mut self,
+        mut f: Expr,
+        args: Vec<AstP<Expr>>,
+        legacy_args_idx: &[usize],
+    ) -> hir::ExprKind<'hir> {
+        let ExprKind::Path(None, ref mut path) = f.kind else {
+            unreachable!();
+        };
+
+        // Split the arguments into const generics and normal arguments
+        let mut real_args = vec![];
+        let mut generic_args = vec![];
+        for (idx, arg) in args.into_iter().enumerate() {
+            if legacy_args_idx.contains(&idx) {
+                let parent_def_id = self.current_hir_id_owner;
+                let node_id = self.next_node_id();
+
+                // Add a definition for the in-band const def.
+                self.create_def(parent_def_id, node_id, DefPathData::AnonConst);
+
+                let anon_const = AnonConst { id: node_id, value: arg };
+                generic_args.push(AngleBracketedArg::Arg(GenericArg::Const(anon_const)));
+            } else {
+                real_args.push(arg);
+            }
+        }
+
+        // Add generic args to the last element of the path.
+        let last_segment = path.segments.last_mut().unwrap();
+        assert!(last_segment.args.is_none());
+        last_segment.args = Some(AstP(GenericArgs::AngleBracketed(AngleBracketedArgs {
+            span: DUMMY_SP,
+            args: generic_args,
+        })));
+
+        // Now lower everything as normal.
+        let f = self.lower_expr(&f);
+        hir::ExprKind::Call(f, self.lower_exprs(&real_args))
+    }
+
+    fn lower_expr_if(
+        &mut self,
+        cond: &Expr,
+        then: &Block,
+        else_opt: Option<&Expr>,
+    ) -> hir::ExprKind<'hir> {
+        let lowered_cond = self.lower_expr(cond);
+        let new_cond = self.manage_let_cond(lowered_cond);
+        let then_expr = self.lower_block_expr(then);
+        if let Some(rslt) = else_opt {
+            hir::ExprKind::If(new_cond, self.arena.alloc(then_expr), Some(self.lower_expr(rslt)))
+        } else {
+            hir::ExprKind::If(new_cond, self.arena.alloc(then_expr), None)
+        }
+    }
+
+    // If `cond` kind is `let`, returns `let`. Otherwise, wraps and returns `cond`
+    // in a temporary block.
+    fn manage_let_cond(&mut self, cond: &'hir hir::Expr<'hir>) -> &'hir hir::Expr<'hir> {
+        fn has_let_expr<'hir>(expr: &'hir hir::Expr<'hir>) -> bool {
+            match expr.kind {
+                hir::ExprKind::Binary(_, lhs, rhs) => has_let_expr(lhs) || has_let_expr(rhs),
+                hir::ExprKind::Let(..) => true,
+                _ => false,
+            }
+        }
+        if has_let_expr(cond) {
+            cond
+        } else {
+            let reason = DesugaringKind::CondTemporary;
+            let span_block = self.mark_span_with_reason(reason, cond.span, None);
+            self.expr_drop_temps(span_block, cond, AttrVec::new())
+        }
+    }
+
+    // We desugar: `'label: while $cond $body` into:
+    //
+    // ```
+    // 'label: loop {
+    //     if { let _t = $cond; _t } {
+    //         $body
+    //     }
+    //     else {
+    //         break;
+    //     }
+    // }
+    // ```
+    //
+    // Wrap in a construct equivalent to `{ let _t = $cond; _t }`
+    // to preserve drop semantics since `while $cond { ... }` does not
+    // let temporaries live outside of `cond`.
+    fn lower_expr_while_in_loop_scope(
+        &mut self,
+        span: Span,
+        cond: &Expr,
+        body: &Block,
+        opt_label: Option<Label>,