diff options
author | Daniel Baumann <daniel.baumann@progress-linux.org> | 2024-04-17 12:02:58 +0000 |
---|---|---|
committer | Daniel Baumann <daniel.baumann@progress-linux.org> | 2024-04-17 12:02:58 +0000 |
commit | 698f8c2f01ea549d77d7dc3338a12e04c11057b9 (patch) | |
tree | 173a775858bd501c378080a10dca74132f05bc50 /compiler/rustc_mir_build/src/build/expr | |
parent | Initial commit. (diff) | |
download | rustc-698f8c2f01ea549d77d7dc3338a12e04c11057b9.tar.xz rustc-698f8c2f01ea549d77d7dc3338a12e04c11057b9.zip |
Adding upstream version 1.64.0+dfsg1.upstream/1.64.0+dfsg1
Signed-off-by: Daniel Baumann <daniel.baumann@progress-linux.org>
Diffstat (limited to '')
-rw-r--r-- | compiler/rustc_mir_build/src/build/expr/as_constant.rs | 152 | ||||
-rw-r--r-- | compiler/rustc_mir_build/src/build/expr/as_operand.rs | 184 | ||||
-rw-r--r-- | compiler/rustc_mir_build/src/build/expr/as_place.rs | 820 | ||||
-rw-r--r-- | compiler/rustc_mir_build/src/build/expr/as_rvalue.rs | 694 | ||||
-rw-r--r-- | compiler/rustc_mir_build/src/build/expr/as_temp.rs | 119 | ||||
-rw-r--r-- | compiler/rustc_mir_build/src/build/expr/category.rs | 92 | ||||
-rw-r--r-- | compiler/rustc_mir_build/src/build/expr/into.rs | 599 | ||||
-rw-r--r-- | compiler/rustc_mir_build/src/build/expr/mod.rs | 70 | ||||
-rw-r--r-- | compiler/rustc_mir_build/src/build/expr/stmt.rs | 149 |
9 files changed, 2879 insertions, 0 deletions
diff --git a/compiler/rustc_mir_build/src/build/expr/as_constant.rs b/compiler/rustc_mir_build/src/build/expr/as_constant.rs new file mode 100644 index 000000000..648d10b9e --- /dev/null +++ b/compiler/rustc_mir_build/src/build/expr/as_constant.rs @@ -0,0 +1,152 @@ +//! See docs in build/expr/mod.rs + +use crate::build::{parse_float_into_constval, Builder}; +use rustc_ast as ast; +use rustc_hir::def_id::DefId; +use rustc_middle::mir::interpret::{ + Allocation, ConstValue, LitToConstError, LitToConstInput, Scalar, +}; +use rustc_middle::mir::*; +use rustc_middle::thir::*; +use rustc_middle::ty::subst::SubstsRef; +use rustc_middle::ty::{self, CanonicalUserTypeAnnotation, Ty, TyCtxt}; +use rustc_target::abi::Size; + +impl<'a, 'tcx> Builder<'a, 'tcx> { + /// Compile `expr`, yielding a compile-time constant. Assumes that + /// `expr` is a valid compile-time constant! + pub(crate) fn as_constant(&mut self, expr: &Expr<'tcx>) -> Constant<'tcx> { + let create_uneval_from_def_id = + |tcx: TyCtxt<'tcx>, def_id: DefId, ty: Ty<'tcx>, substs: SubstsRef<'tcx>| { + let uneval = ty::Unevaluated::new(ty::WithOptConstParam::unknown(def_id), substs); + tcx.mk_const(ty::ConstS { kind: ty::ConstKind::Unevaluated(uneval), ty }) + }; + + let this = self; + let tcx = this.tcx; + let Expr { ty, temp_lifetime: _, span, ref kind } = *expr; + match *kind { + ExprKind::Scope { region_scope: _, lint_level: _, value } => { + this.as_constant(&this.thir[value]) + } + ExprKind::Literal { lit, neg } => { + let literal = + match lit_to_mir_constant(tcx, LitToConstInput { lit: &lit.node, ty, neg }) { + Ok(c) => c, + Err(LitToConstError::Reported) => ConstantKind::Ty(tcx.const_error(ty)), + Err(LitToConstError::TypeError) => { + bug!("encountered type error in `lit_to_mir_constant") + } + }; + + Constant { span, user_ty: None, literal } + } + ExprKind::NonHirLiteral { lit, user_ty } => { + let user_ty = user_ty.map(|user_ty| { + this.canonical_user_type_annotations.push(CanonicalUserTypeAnnotation { + 
span, + user_ty, + inferred_ty: ty, + }) + }); + let literal = ConstantKind::Val(ConstValue::Scalar(Scalar::Int(lit)), ty); + + Constant { span, user_ty: user_ty, literal } + } + ExprKind::ZstLiteral { user_ty } => { + let user_ty = user_ty.map(|user_ty| { + this.canonical_user_type_annotations.push(CanonicalUserTypeAnnotation { + span, + user_ty, + inferred_ty: ty, + }) + }); + let literal = ConstantKind::Val(ConstValue::ZeroSized, ty); + + Constant { span, user_ty: user_ty, literal } + } + ExprKind::NamedConst { def_id, substs, user_ty } => { + let user_ty = user_ty.map(|user_ty| { + this.canonical_user_type_annotations.push(CanonicalUserTypeAnnotation { + span, + user_ty, + inferred_ty: ty, + }) + }); + let literal = ConstantKind::Ty(create_uneval_from_def_id(tcx, def_id, ty, substs)); + + Constant { user_ty, span, literal } + } + ExprKind::ConstParam { param, def_id: _ } => { + let const_param = + tcx.mk_const(ty::ConstS { kind: ty::ConstKind::Param(param), ty: expr.ty }); + let literal = ConstantKind::Ty(const_param); + + Constant { user_ty: None, span, literal } + } + ExprKind::ConstBlock { did: def_id, substs } => { + let literal = ConstantKind::Ty(create_uneval_from_def_id(tcx, def_id, ty, substs)); + + Constant { user_ty: None, span, literal } + } + ExprKind::StaticRef { alloc_id, ty, .. 
} => { + let const_val = ConstValue::Scalar(Scalar::from_pointer(alloc_id.into(), &tcx)); + let literal = ConstantKind::Val(const_val, ty); + + Constant { span, user_ty: None, literal } + } + _ => span_bug!(span, "expression is not a valid constant {:?}", kind), + } + } +} + +#[instrument(skip(tcx, lit_input))] +pub(crate) fn lit_to_mir_constant<'tcx>( + tcx: TyCtxt<'tcx>, + lit_input: LitToConstInput<'tcx>, +) -> Result<ConstantKind<'tcx>, LitToConstError> { + let LitToConstInput { lit, ty, neg } = lit_input; + let trunc = |n| { + let param_ty = ty::ParamEnv::reveal_all().and(ty); + let width = tcx.layout_of(param_ty).map_err(|_| LitToConstError::Reported)?.size; + trace!("trunc {} with size {} and shift {}", n, width.bits(), 128 - width.bits()); + let result = width.truncate(n); + trace!("trunc result: {}", result); + Ok(ConstValue::Scalar(Scalar::from_uint(result, width))) + }; + + let value = match (lit, &ty.kind()) { + (ast::LitKind::Str(s, _), ty::Ref(_, inner_ty, _)) if inner_ty.is_str() => { + let s = s.as_str(); + let allocation = Allocation::from_bytes_byte_aligned_immutable(s.as_bytes()); + let allocation = tcx.intern_const_alloc(allocation); + ConstValue::Slice { data: allocation, start: 0, end: s.len() } + } + (ast::LitKind::ByteStr(data), ty::Ref(_, inner_ty, _)) + if matches!(inner_ty.kind(), ty::Slice(_)) => + { + let allocation = Allocation::from_bytes_byte_aligned_immutable(data as &[u8]); + let allocation = tcx.intern_const_alloc(allocation); + ConstValue::Slice { data: allocation, start: 0, end: data.len() } + } + (ast::LitKind::ByteStr(data), ty::Ref(_, inner_ty, _)) if inner_ty.is_array() => { + let id = tcx.allocate_bytes(data); + ConstValue::Scalar(Scalar::from_pointer(id.into(), &tcx)) + } + (ast::LitKind::Byte(n), ty::Uint(ty::UintTy::U8)) => { + ConstValue::Scalar(Scalar::from_uint(*n, Size::from_bytes(1))) + } + (ast::LitKind::Int(n, _), ty::Uint(_)) | (ast::LitKind::Int(n, _), ty::Int(_)) => { + trunc(if neg { (*n as 
i128).overflowing_neg().0 as u128 } else { *n })? + } + (ast::LitKind::Float(n, _), ty::Float(fty)) => { + parse_float_into_constval(*n, *fty, neg).ok_or(LitToConstError::Reported)? + } + (ast::LitKind::Bool(b), ty::Bool) => ConstValue::Scalar(Scalar::from_bool(*b)), + (ast::LitKind::Char(c), ty::Char) => ConstValue::Scalar(Scalar::from_char(*c)), + (ast::LitKind::Err(_), _) => return Err(LitToConstError::Reported), + _ => return Err(LitToConstError::TypeError), + }; + + Ok(ConstantKind::Val(value, ty)) +} diff --git a/compiler/rustc_mir_build/src/build/expr/as_operand.rs b/compiler/rustc_mir_build/src/build/expr/as_operand.rs new file mode 100644 index 000000000..e707c373f --- /dev/null +++ b/compiler/rustc_mir_build/src/build/expr/as_operand.rs @@ -0,0 +1,184 @@ +//! See docs in build/expr/mod.rs + +use crate::build::expr::category::Category; +use crate::build::{BlockAnd, BlockAndExtension, Builder, NeedsTemporary}; +use rustc_middle::middle::region; +use rustc_middle::mir::*; +use rustc_middle::thir::*; + +impl<'a, 'tcx> Builder<'a, 'tcx> { + /// Returns an operand suitable for use until the end of the current + /// scope expression. + /// + /// The operand returned from this function will *not be valid* + /// after the current enclosing `ExprKind::Scope` has ended, so + /// please do *not* return it from functions to avoid bad + /// miscompiles. + pub(crate) fn as_local_operand( + &mut self, + block: BasicBlock, + expr: &Expr<'tcx>, + ) -> BlockAnd<Operand<'tcx>> { + let local_scope = self.local_scope(); + self.as_operand(block, Some(local_scope), expr, None, NeedsTemporary::Maybe) + } + + /// Returns an operand suitable for use until the end of the current scope expression and + /// suitable also to be passed as function arguments. + /// + /// The operand returned from this function will *not be valid* after an ExprKind::Scope is + /// passed, so please do *not* return it from functions to avoid bad miscompiles. 
Returns an + /// operand suitable for use as a call argument. This is almost always equivalent to + /// `as_operand`, except for the particular case of passing values of (potentially) unsized + /// types "by value" (see details below). + /// + /// The operand returned from this function will *not be valid* + /// after the current enclosing `ExprKind::Scope` has ended, so + /// please do *not* return it from functions to avoid bad + /// miscompiles. + /// + /// # Parameters of unsized types + /// + /// We tweak the handling of parameters of unsized type slightly to avoid the need to create a + /// local variable of unsized type. For example, consider this program: + /// + /// ``` + /// #![feature(unsized_locals, unsized_fn_params)] + /// # use core::fmt::Debug; + /// fn foo(p: dyn Debug) { dbg!(p); } + /// + /// fn bar(box_p: Box<dyn Debug>) { foo(*box_p); } + /// ``` + /// + /// Ordinarily, for sized types, we would compile the call `foo(*p)` like so: + /// + /// ```ignore (illustrative) + /// let tmp0 = *box_p; // tmp0 would be the operand returned by this function call + /// foo(tmp0) + /// ``` + /// + /// But because the parameter to `foo` is of the unsized type `dyn Debug`, and because it is + /// being moved the deref of a box, we compile it slightly differently. The temporary `tmp0` + /// that we create *stores the entire box*, and the parameter to the call itself will be + /// `*tmp0`: + /// + /// ```ignore (illustrative) + /// let tmp0 = box_p; call foo(*tmp0) + /// ``` + /// + /// This way, the temporary `tmp0` that we create has type `Box<dyn Debug>`, which is sized. + /// The value passed to the call (`*tmp0`) still has the `dyn Debug` type -- but the way that + /// calls are compiled means that this parameter will be passed "by reference", meaning that we + /// will actually provide a pointer to the interior of the box, and not move the `dyn Debug` + /// value to the stack. + /// + /// See #68034 for more details. 
+ pub(crate) fn as_local_call_operand( + &mut self, + block: BasicBlock, + expr: &Expr<'tcx>, + ) -> BlockAnd<Operand<'tcx>> { + let local_scope = self.local_scope(); + self.as_call_operand(block, Some(local_scope), expr) + } + + /// Compile `expr` into a value that can be used as an operand. + /// If `expr` is a place like `x`, this will introduce a + /// temporary `tmp = x`, so that we capture the value of `x` at + /// this time. + /// + /// If we end up needing to create a temporary, then we will use + /// `local_info` as its `LocalInfo`, unless `as_temporary` + /// has already assigned it a non-`None` `LocalInfo`. + /// Normally, you should use `None` for `local_info` + /// + /// The operand is known to be live until the end of `scope`. + /// + /// Like `as_local_call_operand`, except that the argument will + /// not be valid once `scope` ends. + #[instrument(level = "debug", skip(self, scope))] + pub(crate) fn as_operand( + &mut self, + mut block: BasicBlock, + scope: Option<region::Scope>, + expr: &Expr<'tcx>, + local_info: Option<Box<LocalInfo<'tcx>>>, + needs_temporary: NeedsTemporary, + ) -> BlockAnd<Operand<'tcx>> { + let this = self; + + if let ExprKind::Scope { region_scope, lint_level, value } = expr.kind { + let source_info = this.source_info(expr.span); + let region_scope = (region_scope, source_info); + return this.in_scope(region_scope, lint_level, |this| { + this.as_operand(block, scope, &this.thir[value], local_info, needs_temporary) + }); + } + + let category = Category::of(&expr.kind).unwrap(); + debug!(?category, ?expr.kind); + match category { + Category::Constant if let NeedsTemporary::No = needs_temporary || !expr.ty.needs_drop(this.tcx, this.param_env) => { + let constant = this.as_constant(expr); + block.and(Operand::Constant(Box::new(constant))) + } + Category::Constant | Category::Place | Category::Rvalue(..) 
=> { + let operand = unpack!(block = this.as_temp(block, scope, expr, Mutability::Mut)); + if this.local_decls[operand].local_info.is_none() { + this.local_decls[operand].local_info = local_info; + } + block.and(Operand::Move(Place::from(operand))) + } + } + } + + pub(crate) fn as_call_operand( + &mut self, + mut block: BasicBlock, + scope: Option<region::Scope>, + expr: &Expr<'tcx>, + ) -> BlockAnd<Operand<'tcx>> { + debug!("as_call_operand(block={:?}, expr={:?})", block, expr); + let this = self; + + if let ExprKind::Scope { region_scope, lint_level, value } = expr.kind { + let source_info = this.source_info(expr.span); + let region_scope = (region_scope, source_info); + return this.in_scope(region_scope, lint_level, |this| { + this.as_call_operand(block, scope, &this.thir[value]) + }); + } + + let tcx = this.tcx; + + if tcx.features().unsized_fn_params { + let ty = expr.ty; + let span = expr.span; + let param_env = this.param_env; + + if !ty.is_sized(tcx.at(span), param_env) { + // !sized means !copy, so this is an unsized move + assert!(!ty.is_copy_modulo_regions(tcx.at(span), param_env)); + + // As described above, detect the case where we are passing a value of unsized + // type, and that value is coming from the deref of a box. + if let ExprKind::Deref { arg } = expr.kind { + // Generate let tmp0 = arg0 + let operand = unpack!( + block = this.as_temp(block, scope, &this.thir[arg], Mutability::Mut) + ); + + // Return the operand *tmp0 to be used as the call argument + let place = Place { + local: operand, + projection: tcx.intern_place_elems(&[PlaceElem::Deref]), + }; + + return block.and(Operand::Move(place)); + } + } + } + + this.as_operand(block, scope, expr, None, NeedsTemporary::Maybe) + } +} diff --git a/compiler/rustc_mir_build/src/build/expr/as_place.rs b/compiler/rustc_mir_build/src/build/expr/as_place.rs new file mode 100644 index 000000000..0c06aad4e --- /dev/null +++ b/compiler/rustc_mir_build/src/build/expr/as_place.rs @@ -0,0 +1,820 @@ +//! 
See docs in build/expr/mod.rs + +use crate::build::expr::category::Category; +use crate::build::ForGuard::{OutsideGuard, RefWithinGuard}; +use crate::build::{BlockAnd, BlockAndExtension, Builder}; +use rustc_hir::def_id::LocalDefId; +use rustc_middle::hir::place::Projection as HirProjection; +use rustc_middle::hir::place::ProjectionKind as HirProjectionKind; +use rustc_middle::middle::region; +use rustc_middle::mir::AssertKind::BoundsCheck; +use rustc_middle::mir::*; +use rustc_middle::thir::*; +use rustc_middle::ty::AdtDef; +use rustc_middle::ty::{self, CanonicalUserTypeAnnotation, Ty, TyCtxt, Variance}; +use rustc_span::Span; +use rustc_target::abi::VariantIdx; + +use rustc_index::vec::Idx; + +use std::iter; + +/// The "outermost" place that holds this value. +#[derive(Copy, Clone, Debug, PartialEq)] +pub(crate) enum PlaceBase { + /// Denotes the start of a `Place`. + Local(Local), + + /// When building place for an expression within a closure, the place might start off a + /// captured path. When `capture_disjoint_fields` is enabled, we might not know the capture + /// index (within the desugared closure) of the captured path until most of the projections + /// are applied. We use `PlaceBase::Upvar` to keep track of the root variable off of which the + /// captured path starts, the closure the capture belongs to and the trait the closure + /// implements. + /// + /// Once we have figured out the capture index, we can convert the place builder to start from + /// `PlaceBase::Local`. 
+ /// + /// Consider the following example + /// ```rust + /// let t = (((10, 10), 10), 10); + /// + /// let c = || { + /// println!("{}", t.0.0.0); + /// }; + /// ``` + /// Here the THIR expression for `t.0.0.0` will be something like + /// + /// ```ignore (illustrative) + /// * Field(0) + /// * Field(0) + /// * Field(0) + /// * UpvarRef(t) + /// ``` + /// + /// When `capture_disjoint_fields` is enabled, `t.0.0.0` is captured and we won't be able to + /// figure out that it is captured until all the `Field` projections are applied. + Upvar { + /// HirId of the upvar + var_hir_id: LocalVarId, + /// DefId of the closure + closure_def_id: LocalDefId, + /// The trait closure implements, `Fn`, `FnMut`, `FnOnce` + closure_kind: ty::ClosureKind, + }, +} + +/// `PlaceBuilder` is used to create places during MIR construction. It allows you to "build up" a +/// place by pushing more and more projections onto the end, and then convert the final set into a +/// place using the `into_place` method. +/// +/// This is used internally when building a place for an expression like `a.b.c`. The fields `b` +/// and `c` can be progressively pushed onto the place builder that is created when converting `a`. +#[derive(Clone, Debug, PartialEq)] +pub(crate) struct PlaceBuilder<'tcx> { + base: PlaceBase, + projection: Vec<PlaceElem<'tcx>>, +} + +/// Given a list of MIR projections, convert them to list of HIR ProjectionKind. +/// The projections are truncated to represent a path that might be captured by a +/// closure/generator. This implies the vector returned from this function doesn't contain +/// ProjectionElems `Downcast`, `ConstantIndex`, `Index`, or `Subslice` because those will never be +/// part of a path that is captured by a closure. We stop applying projections once we see the first +/// projection that isn't captured by a closure. 
+fn convert_to_hir_projections_and_truncate_for_capture<'tcx>( + mir_projections: &[PlaceElem<'tcx>], +) -> Vec<HirProjectionKind> { + let mut hir_projections = Vec::new(); + let mut variant = None; + + for mir_projection in mir_projections { + let hir_projection = match mir_projection { + ProjectionElem::Deref => HirProjectionKind::Deref, + ProjectionElem::Field(field, _) => { + let variant = variant.unwrap_or(VariantIdx::new(0)); + HirProjectionKind::Field(field.index() as u32, variant) + } + ProjectionElem::Downcast(.., idx) => { + // We don't expect to see multi-variant enums here, as earlier + // phases will have truncated them already. However, there can + // still be downcasts, thanks to single-variant enums. + // We keep track of VariantIdx so we can use this information + // if the next ProjectionElem is a Field. + variant = Some(*idx); + continue; + } + ProjectionElem::Index(..) + | ProjectionElem::ConstantIndex { .. } + | ProjectionElem::Subslice { .. } => { + // We don't capture array-access projections. + // We can stop here as arrays are captured completely. + break; + } + }; + variant = None; + hir_projections.push(hir_projection); + } + + hir_projections +} + +/// Return true if the `proj_possible_ancestor` represents an ancestor path +/// to `proj_capture` or `proj_possible_ancestor` is same as `proj_capture`, +/// assuming they both start off of the same root variable. +/// +/// **Note:** It's the caller's responsibility to ensure that both lists of projections +/// start off of the same root variable. +/// +/// Eg: 1. `foo.x` which is represented using `projections=[Field(x)]` is an ancestor of +/// `foo.x.y` which is represented using `projections=[Field(x), Field(y)]`. +/// Note both `foo.x` and `foo.x.y` start off of the same root variable `foo`. +/// 2. Since we only look at the projections here function will return `bar.x` as an a valid +/// ancestor of `foo.x.y`. 
It's the caller's responsibility to ensure that both projections +/// list are being applied to the same root variable. +fn is_ancestor_or_same_capture( + proj_possible_ancestor: &[HirProjectionKind], + proj_capture: &[HirProjectionKind], +) -> bool { + // We want to make sure `is_ancestor_or_same_capture("x.0.0", "x.0")` to return false. + // Therefore we can't just check if all projections are same in the zipped iterator below. + if proj_possible_ancestor.len() > proj_capture.len() { + return false; + } + + iter::zip(proj_possible_ancestor, proj_capture).all(|(a, b)| a == b) +} + +/// Computes the index of a capture within the desugared closure provided the closure's +/// `closure_min_captures` and the capture's index of the capture in the +/// `ty::MinCaptureList` of the root variable `var_hir_id`. +fn compute_capture_idx<'tcx>( + closure_min_captures: &ty::RootVariableMinCaptureList<'tcx>, + var_hir_id: LocalVarId, + root_var_idx: usize, +) -> usize { + let mut res = 0; + for (var_id, capture_list) in closure_min_captures { + if *var_id == var_hir_id.0 { + res += root_var_idx; + break; + } else { + res += capture_list.len(); + } + } + + res +} + +/// Given a closure, returns the index of a capture within the desugared closure struct and the +/// `ty::CapturedPlace` which is the ancestor of the Place represented using the `var_hir_id` +/// and `projection`. +/// +/// Note there will be at most one ancestor for any given Place. +/// +/// Returns None, when the ancestor is not found. 
+fn find_capture_matching_projections<'a, 'tcx>( + typeck_results: &'a ty::TypeckResults<'tcx>, + var_hir_id: LocalVarId, + closure_def_id: LocalDefId, + projections: &[PlaceElem<'tcx>], +) -> Option<(usize, &'a ty::CapturedPlace<'tcx>)> { + let closure_min_captures = typeck_results.closure_min_captures.get(&closure_def_id)?; + let root_variable_min_captures = closure_min_captures.get(&var_hir_id.0)?; + + let hir_projections = convert_to_hir_projections_and_truncate_for_capture(projections); + + // If an ancestor is found, `idx` is the index within the list of captured places + // for root variable `var_hir_id` and `capture` is the `ty::CapturedPlace` itself. + let (idx, capture) = root_variable_min_captures.iter().enumerate().find(|(_, capture)| { + let possible_ancestor_proj_kinds: Vec<_> = + capture.place.projections.iter().map(|proj| proj.kind).collect(); + is_ancestor_or_same_capture(&possible_ancestor_proj_kinds, &hir_projections) + })?; + + // Convert index to be from the perspective of the entire closure_min_captures map + // instead of just the root variable capture list + Some((compute_capture_idx(closure_min_captures, var_hir_id, idx), capture)) +} + +/// Takes a PlaceBuilder and resolves the upvar (if any) within it, so that the +/// `PlaceBuilder` now starts from `PlaceBase::Local`. +/// +/// Returns a Result with the error being the PlaceBuilder (`from_builder`) that was not found. 
+fn to_upvars_resolved_place_builder<'a, 'tcx>( + from_builder: PlaceBuilder<'tcx>, + tcx: TyCtxt<'tcx>, + typeck_results: &'a ty::TypeckResults<'tcx>, +) -> Result<PlaceBuilder<'tcx>, PlaceBuilder<'tcx>> { + match from_builder.base { + PlaceBase::Local(_) => Ok(from_builder), + PlaceBase::Upvar { var_hir_id, closure_def_id, closure_kind } => { + let mut upvar_resolved_place_builder = PlaceBuilder::from(ty::CAPTURE_STRUCT_LOCAL); + match closure_kind { + ty::ClosureKind::Fn | ty::ClosureKind::FnMut => { + upvar_resolved_place_builder = upvar_resolved_place_builder.deref(); + } + ty::ClosureKind::FnOnce => {} + } + + let Some((capture_index, capture)) = + find_capture_matching_projections( + typeck_results, + var_hir_id, + closure_def_id, + &from_builder.projection, + ) else { + let closure_span = tcx.def_span(closure_def_id); + if !enable_precise_capture(tcx, closure_span) { + bug!( + "No associated capture found for {:?}[{:#?}] even though \ + capture_disjoint_fields isn't enabled", + var_hir_id, + from_builder.projection + ) + } else { + debug!( + "No associated capture found for {:?}[{:#?}]", + var_hir_id, from_builder.projection, + ); + } + return Err(from_builder); + }; + + // We won't be building MIR if the closure wasn't local + let closure_hir_id = tcx.hir().local_def_id_to_hir_id(closure_def_id); + let closure_ty = typeck_results.node_type(closure_hir_id); + + let substs = match closure_ty.kind() { + ty::Closure(_, substs) => ty::UpvarSubsts::Closure(substs), + ty::Generator(_, substs, _) => ty::UpvarSubsts::Generator(substs), + _ => bug!("Lowering capture for non-closure type {:?}", closure_ty), + }; + + // Access the capture by accessing the field within the Closure struct. + // + // We must have inferred the capture types since we are building MIR, therefore + // it's safe to call `tuple_element_ty` and we can unwrap here because + // we know that the capture exists and is the `capture_index`-th capture. 
+ let var_ty = substs.tupled_upvars_ty().tuple_fields()[capture_index]; + + upvar_resolved_place_builder = + upvar_resolved_place_builder.field(Field::new(capture_index), var_ty); + + // If the variable is captured via ByRef(Immutable/Mutable) Borrow, + // we need to deref it + upvar_resolved_place_builder = match capture.info.capture_kind { + ty::UpvarCapture::ByRef(_) => upvar_resolved_place_builder.deref(), + ty::UpvarCapture::ByValue => upvar_resolved_place_builder, + }; + + // We used some of the projections to build the capture itself, + // now we apply the remaining to the upvar resolved place. + let remaining_projections = strip_prefix( + capture.place.base_ty, + from_builder.projection, + &capture.place.projections, + ); + upvar_resolved_place_builder.projection.extend(remaining_projections); + + Ok(upvar_resolved_place_builder) + } + } +} + +/// Returns projections remaining after stripping an initial prefix of HIR +/// projections. +/// +/// Supports only HIR projection kinds that represent a path that might be +/// captured by a closure or a generator, i.e., an `Index` or a `Subslice` +/// projection kinds are unsupported. +fn strip_prefix<'tcx>( + mut base_ty: Ty<'tcx>, + projections: Vec<PlaceElem<'tcx>>, + prefix_projections: &[HirProjection<'tcx>], +) -> impl Iterator<Item = PlaceElem<'tcx>> { + let mut iter = projections.into_iter(); + for projection in prefix_projections { + match projection.kind { + HirProjectionKind::Deref => { + assert!(matches!(iter.next(), Some(ProjectionElem::Deref))); + } + HirProjectionKind::Field(..) 
=> { + if base_ty.is_enum() { + assert!(matches!(iter.next(), Some(ProjectionElem::Downcast(..)))); + } + assert!(matches!(iter.next(), Some(ProjectionElem::Field(..)))); + } + HirProjectionKind::Index | HirProjectionKind::Subslice => { + bug!("unexpected projection kind: {:?}", projection); + } + } + base_ty = projection.ty; + } + iter +} + +impl<'tcx> PlaceBuilder<'tcx> { + pub(crate) fn into_place<'a>( + self, + tcx: TyCtxt<'tcx>, + typeck_results: &'a ty::TypeckResults<'tcx>, + ) -> Place<'tcx> { + if let PlaceBase::Local(local) = self.base { + Place { local, projection: tcx.intern_place_elems(&self.projection) } + } else { + self.expect_upvars_resolved(tcx, typeck_results).into_place(tcx, typeck_results) + } + } + + fn expect_upvars_resolved<'a>( + self, + tcx: TyCtxt<'tcx>, + typeck_results: &'a ty::TypeckResults<'tcx>, + ) -> PlaceBuilder<'tcx> { + to_upvars_resolved_place_builder(self, tcx, typeck_results).unwrap() + } + + /// Attempts to resolve the `PlaceBuilder`. + /// On success, it will return the resolved `PlaceBuilder`. + /// On failure, it will return itself. + /// + /// Upvars resolve may fail for a `PlaceBuilder` when attempting to + /// resolve a disjoint field whose root variable is not captured + /// (destructured assignments) or when attempting to resolve a root + /// variable (discriminant matching with only wildcard arm) that is + /// not captured. This can happen because the final mir that will be + /// generated doesn't require a read for this place. Failures will only + /// happen inside closures. 
+ pub(crate) fn try_upvars_resolved<'a>( + self, + tcx: TyCtxt<'tcx>, + typeck_results: &'a ty::TypeckResults<'tcx>, + ) -> Result<PlaceBuilder<'tcx>, PlaceBuilder<'tcx>> { + to_upvars_resolved_place_builder(self, tcx, typeck_results) + } + + pub(crate) fn base(&self) -> PlaceBase { + self.base + } + + pub(crate) fn field(self, f: Field, ty: Ty<'tcx>) -> Self { + self.project(PlaceElem::Field(f, ty)) + } + + pub(crate) fn deref(self) -> Self { + self.project(PlaceElem::Deref) + } + + pub(crate) fn downcast(self, adt_def: AdtDef<'tcx>, variant_index: VariantIdx) -> Self { + self.project(PlaceElem::Downcast(Some(adt_def.variant(variant_index).name), variant_index)) + } + + fn index(self, index: Local) -> Self { + self.project(PlaceElem::Index(index)) + } + + pub(crate) fn project(mut self, elem: PlaceElem<'tcx>) -> Self { + self.projection.push(elem); + self + } +} + +impl<'tcx> From<Local> for PlaceBuilder<'tcx> { + fn from(local: Local) -> Self { + Self { base: PlaceBase::Local(local), projection: Vec::new() } + } +} + +impl<'tcx> From<PlaceBase> for PlaceBuilder<'tcx> { + fn from(base: PlaceBase) -> Self { + Self { base, projection: Vec::new() } + } +} + +impl<'a, 'tcx> Builder<'a, 'tcx> { + /// Compile `expr`, yielding a place that we can move from etc. + /// + /// WARNING: Any user code might: + /// * Invalidate any slice bounds checks performed. + /// * Change the address that this `Place` refers to. + /// * Modify the memory that this place refers to. + /// * Invalidate the memory that this place refers to, this will be caught + /// by borrow checking. + /// + /// Extra care is needed if any user code is allowed to run between calling + /// this method and using it, as is the case for `match` and index + /// expressions. 
    /// Compile `expr` down to a full `Place`, resolving all intermediate
    /// projections. `block` is threaded through because lowering the expression
    /// may append statements / new blocks to the CFG.
    pub(crate) fn as_place(
        &mut self,
        mut block: BasicBlock,
        expr: &Expr<'tcx>,
    ) -> BlockAnd<Place<'tcx>> {
        let place_builder = unpack!(block = self.as_place_builder(block, expr));
        block.and(place_builder.into_place(self.tcx, self.typeck_results))
    }

    /// This is used when constructing a compound `Place`, so that we can avoid creating
    /// intermediate `Place` values until we know the full set of projections.
    pub(crate) fn as_place_builder(
        &mut self,
        block: BasicBlock,
        expr: &Expr<'tcx>,
    ) -> BlockAnd<PlaceBuilder<'tcx>> {
        self.expr_as_place(block, expr, Mutability::Mut, None)
    }

    /// Compile `expr`, yielding a place that we can move from etc.
    /// Mutability note: The caller of this method promises only to read from the resulting
    /// place. The place itself may or may not be mutable:
    /// * If this expr is a place expr like a.b, then we will return that place.
    /// * Otherwise, a temporary is created: in that event, it will be an immutable temporary.
    pub(crate) fn as_read_only_place(
        &mut self,
        mut block: BasicBlock,
        expr: &Expr<'tcx>,
    ) -> BlockAnd<Place<'tcx>> {
        let place_builder = unpack!(block = self.as_read_only_place_builder(block, expr));
        block.and(place_builder.into_place(self.tcx, self.typeck_results))
    }

    /// This is used when constructing a compound `Place`, so that we can avoid creating
    /// intermediate `Place` values until we know the full set of projections.
    /// Mutability note: The caller of this method promises only to read from the resulting
    /// place. The place itself may or may not be mutable:
    /// * If this expr is a place expr like a.b, then we will return that place.
    /// * Otherwise, a temporary is created: in that event, it will be an immutable temporary.
    fn as_read_only_place_builder(
        &mut self,
        block: BasicBlock,
        expr: &Expr<'tcx>,
    ) -> BlockAnd<PlaceBuilder<'tcx>> {
        self.expr_as_place(block, expr, Mutability::Not, None)
    }

    /// Workhorse shared by all `as_place*` entry points.
    ///
    /// `fake_borrow_temps`: when `Some`, we are lowering a sub-place of an index
    /// expression and collect the "fake borrow" temporaries needed to keep the
    /// enclosing bounds check valid (see `lower_index_expression`). `None` means
    /// this is not inside an index lowering.
    fn expr_as_place(
        &mut self,
        mut block: BasicBlock,
        expr: &Expr<'tcx>,
        mutability: Mutability,
        fake_borrow_temps: Option<&mut Vec<Local>>,
    ) -> BlockAnd<PlaceBuilder<'tcx>> {
        debug!("expr_as_place(block={:?}, expr={:?}, mutability={:?})", block, expr, mutability);

        let this = self;
        let expr_span = expr.span;
        let source_info = this.source_info(expr_span);
        match expr.kind {
            ExprKind::Scope { region_scope, lint_level, value } => {
                this.in_scope((region_scope, source_info), lint_level, |this| {
                    this.expr_as_place(block, &this.thir[value], mutability, fake_borrow_temps)
                })
            }
            ExprKind::Field { lhs, variant_index, name } => {
                let lhs = &this.thir[lhs];
                let mut place_builder =
                    unpack!(block = this.expr_as_place(block, lhs, mutability, fake_borrow_temps,));
                if let ty::Adt(adt_def, _) = lhs.ty.kind() {
                    if adt_def.is_enum() {
                        // Field access on an enum goes through a variant downcast
                        // projection before the field projection.
                        place_builder = place_builder.downcast(*adt_def, variant_index);
                    }
                }
                block.and(place_builder.field(name, expr.ty))
            }
            ExprKind::Deref { arg } => {
                let place_builder = unpack!(
                    block =
                        this.expr_as_place(block, &this.thir[arg], mutability, fake_borrow_temps,)
                );
                block.and(place_builder.deref())
            }
            ExprKind::Index { lhs, index } => this.lower_index_expression(
                block,
                &this.thir[lhs],
                &this.thir[index],
                mutability,
                fake_borrow_temps,
                expr.temp_lifetime,
                expr_span,
                source_info,
            ),
            ExprKind::UpvarRef { closure_def_id, var_hir_id } => {
                this.lower_captured_upvar(block, closure_def_id.expect_local(), var_hir_id)
            }

            ExprKind::VarRef { id } => {
                // Variables bound in a match guard are accessed through an extra
                // reference, hence the `.deref()` in the guard case.
                let place_builder = if this.is_bound_var_in_guard(id) {
                    let index = this.var_local_id(id, RefWithinGuard);
                    PlaceBuilder::from(index).deref()
                } else {
                    let index = this.var_local_id(id, OutsideGuard);
                    PlaceBuilder::from(index)
                };
                block.and(place_builder)
            }

            ExprKind::PlaceTypeAscription { source, user_ty } => {
                let place_builder = unpack!(
                    block = this.expr_as_place(
                        block,
                        &this.thir[source],
                        mutability,
                        fake_borrow_temps,
                    )
                );
                if let Some(user_ty) = user_ty {
                    // Record the user-written type so borrowck/NLL can relate it
                    // to the inferred type of the place.
                    let annotation_index =
                        this.canonical_user_type_annotations.push(CanonicalUserTypeAnnotation {
                            span: source_info.span,
                            user_ty,
                            inferred_ty: expr.ty,
                        });

                    let place = place_builder.clone().into_place(this.tcx, this.typeck_results);
                    this.cfg.push(
                        block,
                        Statement {
                            source_info,
                            kind: StatementKind::AscribeUserType(
                                Box::new((
                                    place,
                                    UserTypeProjection { base: annotation_index, projs: vec![] },
                                )),
                                Variance::Invariant,
                            ),
                        },
                    );
                }
                block.and(place_builder)
            }
            ExprKind::ValueTypeAscription { source, user_ty } => {
                let source = &this.thir[source];
                // A value ascription needs a temporary to ascribe the type onto.
                let temp =
                    unpack!(block = this.as_temp(block, source.temp_lifetime, source, mutability));
                if let Some(user_ty) = user_ty {
                    let annotation_index =
                        this.canonical_user_type_annotations.push(CanonicalUserTypeAnnotation {
                            span: source_info.span,
                            user_ty,
                            inferred_ty: expr.ty,
                        });
                    this.cfg.push(
                        block,
                        Statement {
                            source_info,
                            kind: StatementKind::AscribeUserType(
                                Box::new((
                                    Place::from(temp),
                                    UserTypeProjection { base: annotation_index, projs: vec![] },
                                )),
                                Variance::Invariant,
                            ),
                        },
                    );
                }
                block.and(PlaceBuilder::from(temp))
            }

            ExprKind::Array { .. }
            | ExprKind::Tuple { .. }
            | ExprKind::Adt { .. }
            | ExprKind::Closure { .. }
            | ExprKind::Unary { .. }
            | ExprKind::Binary { .. }
            | ExprKind::LogicalOp { .. }
            | ExprKind::Box { .. }
            | ExprKind::Cast { .. }
            | ExprKind::Use { .. }
            | ExprKind::NeverToAny { .. }
            | ExprKind::Pointer { .. }
            | ExprKind::Repeat { .. }
            | ExprKind::Borrow { .. }
            | ExprKind::AddressOf { .. }
            | ExprKind::Match { .. }
            | ExprKind::If { .. }
            | ExprKind::Loop { .. }
            | ExprKind::Block { .. }
            | ExprKind::Let { .. }
            | ExprKind::Assign { .. }
            | ExprKind::AssignOp { .. }
            | ExprKind::Break { .. }
            | ExprKind::Continue { .. }
            | ExprKind::Return { .. }
            | ExprKind::Literal { .. }
            | ExprKind::NamedConst { .. }
            | ExprKind::NonHirLiteral { .. }
            | ExprKind::ZstLiteral { .. }
            | ExprKind::ConstParam { .. }
            | ExprKind::ConstBlock { .. }
            | ExprKind::StaticRef { .. }
            | ExprKind::InlineAsm { .. }
            | ExprKind::Yield { .. }
            | ExprKind::ThreadLocalRef(_)
            | ExprKind::Call { .. } => {
                // these are not places, so we need to make a temporary.
                debug_assert!(!matches!(Category::of(&expr.kind), Some(Category::Place)));
                let temp =
                    unpack!(block = this.as_temp(block, expr.temp_lifetime, expr, mutability));
                block.and(PlaceBuilder::from(temp))
            }
        }
    }

    /// Lower a captured upvar. Note we might not know the actual capture index,
    /// so we create a place starting from `PlaceBase::Upvar`, which will be resolved
    /// once all projections that allow us to identify a capture have been applied.
    fn lower_captured_upvar(
        &mut self,
        block: BasicBlock,
        closure_def_id: LocalDefId,
        var_hir_id: LocalVarId,
    ) -> BlockAnd<PlaceBuilder<'tcx>> {
        let closure_ty =
            self.typeck_results.node_type(self.tcx.hir().local_def_id_to_hir_id(closure_def_id));

        let closure_kind = if let ty::Closure(_, closure_substs) = closure_ty.kind() {
            self.infcx.closure_kind(closure_substs).unwrap()
        } else {
            // Generators are considered FnOnce.
            ty::ClosureKind::FnOnce
        };

        block.and(PlaceBuilder::from(PlaceBase::Upvar { var_hir_id, closure_def_id, closure_kind }))
    }

    /// Lower an index expression
    ///
    /// This has two complications;
    ///
    /// * We need to do a bounds check.
    /// * We need to ensure that the bounds check can't be invalidated using an
    ///   expression like `x[1][{x = y; 2}]`. We use fake borrows here to ensure
    ///   that this is the case.
+ fn lower_index_expression( + &mut self, + mut block: BasicBlock, + base: &Expr<'tcx>, + index: &Expr<'tcx>, + mutability: Mutability, + fake_borrow_temps: Option<&mut Vec<Local>>, + temp_lifetime: Option<region::Scope>, + expr_span: Span, + source_info: SourceInfo, + ) -> BlockAnd<PlaceBuilder<'tcx>> { + let base_fake_borrow_temps = &mut Vec::new(); + let is_outermost_index = fake_borrow_temps.is_none(); + let fake_borrow_temps = fake_borrow_temps.unwrap_or(base_fake_borrow_temps); + + let mut base_place = + unpack!(block = self.expr_as_place(block, base, mutability, Some(fake_borrow_temps),)); + + // Making this a *fresh* temporary means we do not have to worry about + // the index changing later: Nothing will ever change this temporary. + // The "retagging" transformation (for Stacked Borrows) relies on this. + let idx = unpack!(block = self.as_temp(block, temp_lifetime, index, Mutability::Not,)); + + block = self.bounds_check(block, base_place.clone(), idx, expr_span, source_info); + + if is_outermost_index { + self.read_fake_borrows(block, fake_borrow_temps, source_info) + } else { + base_place = base_place.expect_upvars_resolved(self.tcx, self.typeck_results); + self.add_fake_borrows_of_base( + &base_place, + block, + fake_borrow_temps, + expr_span, + source_info, + ); + } + + block.and(base_place.index(idx)) + } + + fn bounds_check( + &mut self, + block: BasicBlock, + slice: PlaceBuilder<'tcx>, + index: Local, + expr_span: Span, + source_info: SourceInfo, + ) -> BasicBlock { + let usize_ty = self.tcx.types.usize; + let bool_ty = self.tcx.types.bool; + // bounds check: + let len = self.temp(usize_ty, expr_span); + let lt = self.temp(bool_ty, expr_span); + + // len = len(slice) + self.cfg.push_assign( + block, + source_info, + len, + Rvalue::Len(slice.into_place(self.tcx, self.typeck_results)), + ); + // lt = idx < len + self.cfg.push_assign( + block, + source_info, + lt, + Rvalue::BinaryOp( + BinOp::Lt, + Box::new((Operand::Copy(Place::from(index)), 
Operand::Copy(len))), + ), + ); + let msg = BoundsCheck { len: Operand::Move(len), index: Operand::Copy(Place::from(index)) }; + // assert!(lt, "...") + self.assert(block, Operand::Move(lt), true, msg, expr_span) + } + + fn add_fake_borrows_of_base( + &mut self, + base_place: &PlaceBuilder<'tcx>, + block: BasicBlock, + fake_borrow_temps: &mut Vec<Local>, + expr_span: Span, + source_info: SourceInfo, + ) { + let tcx = self.tcx; + let local = match base_place.base { + PlaceBase::Local(local) => local, + PlaceBase::Upvar { .. } => bug!("Expected PlacseBase::Local found Upvar"), + }; + + let place_ty = Place::ty_from(local, &base_place.projection, &self.local_decls, tcx); + if let ty::Slice(_) = place_ty.ty.kind() { + // We need to create fake borrows to ensure that the bounds + // check that we just did stays valid. Since we can't assign to + // unsized values, we only need to ensure that none of the + // pointers in the base place are modified. + for (idx, elem) in base_place.projection.iter().enumerate().rev() { + match elem { + ProjectionElem::Deref => { + let fake_borrow_deref_ty = Place::ty_from( + local, + &base_place.projection[..idx], + &self.local_decls, + tcx, + ) + .ty; + let fake_borrow_ty = + tcx.mk_imm_ref(tcx.lifetimes.re_erased, fake_borrow_deref_ty); + let fake_borrow_temp = + self.local_decls.push(LocalDecl::new(fake_borrow_ty, expr_span)); + let projection = tcx.intern_place_elems(&base_place.projection[..idx]); + self.cfg.push_assign( + block, + source_info, + fake_borrow_temp.into(), + Rvalue::Ref( + tcx.lifetimes.re_erased, + BorrowKind::Shallow, + Place { local, projection }, + ), + ); + fake_borrow_temps.push(fake_borrow_temp); + } + ProjectionElem::Index(_) => { + let index_ty = Place::ty_from( + local, + &base_place.projection[..idx], + &self.local_decls, + tcx, + ); + match index_ty.ty.kind() { + // The previous index expression has already + // done any index expressions needed here. + ty::Slice(_) => break, + ty::Array(..) 
=> (), + _ => bug!("unexpected index base"), + } + } + ProjectionElem::Field(..) + | ProjectionElem::Downcast(..) + | ProjectionElem::ConstantIndex { .. } + | ProjectionElem::Subslice { .. } => (), + } + } + } + } + + fn read_fake_borrows( + &mut self, + bb: BasicBlock, + fake_borrow_temps: &mut Vec<Local>, + source_info: SourceInfo, + ) { + // All indexes have been evaluated now, read all of the + // fake borrows so that they are live across those index + // expressions. + for temp in fake_borrow_temps { + self.cfg.push_fake_read(bb, source_info, FakeReadCause::ForIndex, Place::from(*temp)); + } + } +} + +/// Precise capture is enabled if the feature gate `capture_disjoint_fields` is enabled or if +/// user is using Rust Edition 2021 or higher. +fn enable_precise_capture(tcx: TyCtxt<'_>, closure_span: Span) -> bool { + tcx.features().capture_disjoint_fields || closure_span.rust_2021() +} diff --git a/compiler/rustc_mir_build/src/build/expr/as_rvalue.rs b/compiler/rustc_mir_build/src/build/expr/as_rvalue.rs new file mode 100644 index 000000000..15f2d17c4 --- /dev/null +++ b/compiler/rustc_mir_build/src/build/expr/as_rvalue.rs @@ -0,0 +1,694 @@ +//! See docs in `build/expr/mod.rs`. + +use rustc_index::vec::Idx; +use rustc_middle::ty::util::IntTypeExt; + +use crate::build::expr::as_place::PlaceBase; +use crate::build::expr::category::{Category, RvalueFunc}; +use crate::build::{BlockAnd, BlockAndExtension, Builder, NeedsTemporary}; +use rustc_hir::lang_items::LangItem; +use rustc_middle::middle::region; +use rustc_middle::mir::AssertKind; +use rustc_middle::mir::Place; +use rustc_middle::mir::*; +use rustc_middle::thir::*; +use rustc_middle::ty::cast::CastTy; +use rustc_middle::ty::{self, Ty, UpvarSubsts}; +use rustc_span::Span; + +impl<'a, 'tcx> Builder<'a, 'tcx> { + /// Returns an rvalue suitable for use until the end of the current + /// scope expression. 
    ///
    /// The operand returned from this function will *not be valid* after
    /// an ExprKind::Scope is passed, so please do *not* return it from
    /// functions to avoid bad miscompiles.
    pub(crate) fn as_local_rvalue(
        &mut self,
        block: BasicBlock,
        expr: &Expr<'tcx>,
    ) -> BlockAnd<Rvalue<'tcx>> {
        let local_scope = self.local_scope();
        self.as_rvalue(block, Some(local_scope), expr)
    }

    /// Compile `expr`, yielding an rvalue.
    /// `scope` is the temporary scope for any temporaries created while
    /// lowering the operands (`None` means "promoted / no drop scheduling").
    pub(crate) fn as_rvalue(
        &mut self,
        mut block: BasicBlock,
        scope: Option<region::Scope>,
        expr: &Expr<'tcx>,
    ) -> BlockAnd<Rvalue<'tcx>> {
        debug!("expr_as_rvalue(block={:?}, scope={:?}, expr={:?})", block, scope, expr);

        let this = self;
        let expr_span = expr.span;
        let source_info = this.source_info(expr_span);

        match expr.kind {
            ExprKind::ThreadLocalRef(did) => block.and(Rvalue::ThreadLocalRef(did)),
            ExprKind::Scope { region_scope, lint_level, value } => {
                let region_scope = (region_scope, source_info);
                this.in_scope(region_scope, lint_level, |this| {
                    this.as_rvalue(block, scope, &this.thir[value])
                })
            }
            ExprKind::Repeat { value, count } => {
                // `[e; 0]` must still evaluate (and possibly drop) `e` once;
                // that special case is handled by `build_zero_repeat`.
                if Some(0) == count.try_eval_usize(this.tcx, this.param_env) {
                    this.build_zero_repeat(block, value, scope, source_info)
                } else {
                    let value_operand = unpack!(
                        block = this.as_operand(
                            block,
                            scope,
                            &this.thir[value],
                            None,
                            NeedsTemporary::No
                        )
                    );
                    block.and(Rvalue::Repeat(value_operand, count))
                }
            }
            ExprKind::Binary { op, lhs, rhs } => {
                let lhs = unpack!(
                    block =
                        this.as_operand(block, scope, &this.thir[lhs], None, NeedsTemporary::Maybe)
                );
                let rhs = unpack!(
                    block =
                        this.as_operand(block, scope, &this.thir[rhs], None, NeedsTemporary::No)
                );
                this.build_binary_op(block, op, expr_span, expr.ty, lhs, rhs)
            }
            ExprKind::Unary { op, arg } => {
                let arg = unpack!(
                    block =
                        this.as_operand(block, scope, &this.thir[arg], None, NeedsTemporary::No)
                );
                // Check for -MIN on signed integers
                if this.check_overflow && op == UnOp::Neg && expr.ty.is_signed() {
                    let bool_ty = this.tcx.types.bool;

                    let minval = this.minval_literal(expr_span, expr.ty);
                    let is_min = this.temp(bool_ty, expr_span);

                    this.cfg.push_assign(
                        block,
                        source_info,
                        is_min,
                        Rvalue::BinaryOp(BinOp::Eq, Box::new((arg.to_copy(), minval))),
                    );

                    block = this.assert(
                        block,
                        Operand::Move(is_min),
                        false,
                        AssertKind::OverflowNeg(arg.to_copy()),
                        expr_span,
                    );
                }
                block.and(Rvalue::UnaryOp(op, arg))
            }
            ExprKind::Box { value } => {
                let value = &this.thir[value];
                let tcx = this.tcx;

                // `exchange_malloc` is unsafe but box is safe, so need a new scope.
                let synth_scope = this.new_source_scope(
                    expr_span,
                    LintLevel::Inherited,
                    Some(Safety::BuiltinUnsafe),
                );
                let synth_info = SourceInfo { span: expr_span, scope: synth_scope };

                let size = this.temp(tcx.types.usize, expr_span);
                this.cfg.push_assign(
                    block,
                    synth_info,
                    size,
                    Rvalue::NullaryOp(NullOp::SizeOf, value.ty),
                );

                let align = this.temp(tcx.types.usize, expr_span);
                this.cfg.push_assign(
                    block,
                    synth_info,
                    align,
                    Rvalue::NullaryOp(NullOp::AlignOf, value.ty),
                );

                // malloc some memory of suitable size and align:
                let exchange_malloc = Operand::function_handle(
                    tcx,
                    tcx.require_lang_item(LangItem::ExchangeMalloc, Some(expr_span)),
                    ty::List::empty(),
                    expr_span,
                );
                let storage = this.temp(tcx.mk_mut_ptr(tcx.types.u8), expr_span);
                let success = this.cfg.start_new_block();
                this.cfg.terminate(
                    block,
                    synth_info,
                    TerminatorKind::Call {
                        func: exchange_malloc,
                        args: vec![Operand::Move(size), Operand::Move(align)],
                        destination: storage,
                        target: Some(success),
                        cleanup: None,
                        from_hir_call: false,
                        fn_span: expr_span,
                    },
                );
                this.diverge_from(block);
                block = success;

                // The `Box<T>` temporary created here is not a part of the HIR,
                // and therefore is not considered during generator auto-trait
                // determination. See the comment about `box` at `yield_in_scope`.
                let result = this.local_decls.push(LocalDecl::new(expr.ty, expr_span).internal());
                this.cfg.push(
                    block,
                    Statement { source_info, kind: StatementKind::StorageLive(result) },
                );
                if let Some(scope) = scope {
                    // schedule a shallow free of that memory, lest we unwind:
                    this.schedule_drop_storage_and_value(expr_span, scope, result);
                }

                // Transmute `*mut u8` to the box (thus far, uninitialized):
                let box_ = Rvalue::ShallowInitBox(Operand::Move(storage), value.ty);
                this.cfg.push_assign(block, source_info, Place::from(result), box_);

                // initialize the box contents:
                unpack!(
                    block = this.expr_into_dest(
                        this.tcx.mk_place_deref(Place::from(result)),
                        block,
                        value
                    )
                );
                block.and(Rvalue::Use(Operand::Move(Place::from(result))))
            }
            ExprKind::Cast { source } => {
                let source = &this.thir[source];

                // Casting an enum to an integer is equivalent to computing the discriminant and casting the
                // discriminant. Previously every backend had to repeat the logic for this operation. Now we
                // create all the steps directly in MIR with operations all backends need to support anyway.
                let (source, ty) = if let ty::Adt(adt_def, ..) = source.ty.kind() && adt_def.is_enum() {
                    let discr_ty = adt_def.repr().discr_type().to_ty(this.tcx);
                    let place = unpack!(block = this.as_place(block, source));
                    let discr = this.temp(discr_ty, source.span);
                    this.cfg.push_assign(
                        block,
                        source_info,
                        discr,
                        Rvalue::Discriminant(place),
                    );

                    (Operand::Move(discr), discr_ty)
                } else {
                    let ty = source.ty;
                    let source = unpack!(
                        block = this.as_operand(block, scope, source, None, NeedsTemporary::No)
                    );
                    (source, ty)
                };
                let from_ty = CastTy::from_ty(ty);
                let cast_ty = CastTy::from_ty(expr.ty);
                // ptr-to-int and int-to-ptr casts participate in provenance
                // "exposure" and get dedicated cast kinds.
                let cast_kind = match (from_ty, cast_ty) {
                    (Some(CastTy::Ptr(_) | CastTy::FnPtr), Some(CastTy::Int(_))) => {
                        CastKind::PointerExposeAddress
                    }
                    (Some(CastTy::Int(_)), Some(CastTy::Ptr(_))) => {
                        CastKind::PointerFromExposedAddress
                    }
                    (_, _) => CastKind::Misc,
                };
                block.and(Rvalue::Cast(cast_kind, source, expr.ty))
            }
            ExprKind::Pointer { cast, source } => {
                let source = unpack!(
                    block =
                        this.as_operand(block, scope, &this.thir[source], None, NeedsTemporary::No)
                );
                block.and(Rvalue::Cast(CastKind::Pointer(cast), source, expr.ty))
            }
            ExprKind::Array { ref fields } => {
                // (*) We would (maybe) be closer to codegen if we
                // handled this and other aggregate cases via
                // `into()`, not `as_rvalue` -- in that case, instead
                // of generating
                //
                //     let tmp1 = ...1;
                //     let tmp2 = ...2;
                //     dest = Rvalue::Aggregate(Foo, [tmp1, tmp2])
                //
                // we could just generate
                //
                //     dest.f = ...1;
                //     dest.g = ...2;
                //
                // The problem is that then we would need to:
                //
                // (a) have a more complex mechanism for handling
                //     partial cleanup;
                // (b) distinguish the case where the type `Foo` has a
                //     destructor, in which case creating an instance
                //     as a whole "arms" the destructor, and you can't
                //     write individual fields; and,
                // (c) handle the case where the type Foo has no
                //     fields. We don't want `let x: ();` to compile
                //     to the same MIR as `let x = ();`.

                // first process the set of fields
                let el_ty = expr.ty.sequence_element_type(this.tcx);
                let fields: Vec<_> = fields
                    .into_iter()
                    .copied()
                    .map(|f| {
                        unpack!(
                            block = this.as_operand(
                                block,
                                scope,
                                &this.thir[f],
                                None,
                                NeedsTemporary::Maybe
                            )
                        )
                    })
                    .collect();

                block.and(Rvalue::Aggregate(Box::new(AggregateKind::Array(el_ty)), fields))
            }
            ExprKind::Tuple { ref fields } => {
                // see (*) above
                // first process the set of fields
                let fields: Vec<_> = fields
                    .into_iter()
                    .copied()
                    .map(|f| {
                        unpack!(
                            block = this.as_operand(
                                block,
                                scope,
                                &this.thir[f],
                                None,
                                NeedsTemporary::Maybe
                            )
                        )
                    })
                    .collect();

                block.and(Rvalue::Aggregate(Box::new(AggregateKind::Tuple), fields))
            }
            ExprKind::Closure { closure_id, substs, ref upvars, movability, ref fake_reads } => {
                // Convert the closure fake reads, if any, from `ExprRef` to mir `Place`
                // and push the fake reads.
                // This must come before creating the operands. This is required in case
                // there is a fake read and a borrow of the same path, since otherwise the
                // fake read might interfere with the borrow. Consider an example like this
                // one:
                // ```
                // let mut x = 0;
                // let c = || {
                //     &mut x; // mutable borrow of `x`
                //     match x { _ => () } // fake read of `x`
                // };
                // ```
                //
                for (thir_place, cause, hir_id) in fake_reads.into_iter() {
                    let place_builder =
                        unpack!(block = this.as_place_builder(block, &this.thir[*thir_place]));

                    if let Ok(place_builder_resolved) =
                        place_builder.try_upvars_resolved(this.tcx, this.typeck_results)
                    {
                        let mir_place =
                            place_builder_resolved.into_place(this.tcx, this.typeck_results);
                        this.cfg.push_fake_read(
                            block,
                            this.source_info(this.tcx.hir().span(*hir_id)),
                            *cause,
                            mir_place,
                        );
                    }
                }

                // see (*) above
                let operands: Vec<_> = upvars
                    .into_iter()
                    .copied()
                    .map(|upvar| {
                        let upvar = &this.thir[upvar];
                        match Category::of(&upvar.kind) {
                            // Use as_place to avoid creating a temporary when
                            // moving a variable into a closure, so that
                            // borrowck knows which variables to mark as being
                            // used as mut. This is OK here because the upvar
                            // expressions have no side effects and act on
                            // disjoint places.
                            // This occurs when capturing by copy/move, while
                            // by reference captures use as_operand
                            Some(Category::Place) => {
                                let place = unpack!(block = this.as_place(block, upvar));
                                this.consume_by_copy_or_move(place)
                            }
                            _ => {
                                // Turn mutable borrow captures into unique
                                // borrow captures when capturing an immutable
                                // variable. This is sound because the mutation
                                // that caused the capture will cause an error.
                                match upvar.kind {
                                    ExprKind::Borrow {
                                        borrow_kind:
                                            BorrowKind::Mut { allow_two_phase_borrow: false },
                                        arg,
                                    } => unpack!(
                                        block = this.limit_capture_mutability(
                                            upvar.span,
                                            upvar.ty,
                                            scope,
                                            block,
                                            &this.thir[arg],
                                        )
                                    ),
                                    _ => {
                                        unpack!(
                                            block = this.as_operand(
                                                block,
                                                scope,
                                                upvar,
                                                None,
                                                NeedsTemporary::Maybe
                                            )
                                        )
                                    }
                                }
                            }
                        }
                    })
                    .collect();

                let result = match substs {
                    UpvarSubsts::Generator(substs) => {
                        // We implicitly set the discriminant to 0. See
                        // librustc_mir/transform/deaggregator.rs for details.
                        let movability = movability.unwrap();
                        Box::new(AggregateKind::Generator(closure_id, substs, movability))
                    }
                    UpvarSubsts::Closure(substs) => {
                        Box::new(AggregateKind::Closure(closure_id, substs))
                    }
                };
                block.and(Rvalue::Aggregate(result, operands))
            }
            ExprKind::Assign { .. } | ExprKind::AssignOp { .. } => {
                // Assignments evaluate to `()`; lower them as statements and
                // produce a zero-sized unit constant as the rvalue.
                block = unpack!(this.stmt_expr(block, expr, None));
                block.and(Rvalue::Use(Operand::Constant(Box::new(Constant {
                    span: expr_span,
                    user_ty: None,
                    literal: ConstantKind::zero_sized(this.tcx.types.unit),
                }))))
            }

            ExprKind::Literal { .. }
            | ExprKind::NamedConst { .. }
            | ExprKind::NonHirLiteral { .. }
            | ExprKind::ZstLiteral { .. }
            | ExprKind::ConstParam { .. }
            | ExprKind::ConstBlock { .. }
            | ExprKind::StaticRef { .. } => {
                let constant = this.as_constant(expr);
                block.and(Rvalue::Use(Operand::Constant(Box::new(constant))))
            }

            ExprKind::Yield { .. }
            | ExprKind::Block { .. }
            | ExprKind::Match { .. }
            | ExprKind::If { .. }
            | ExprKind::NeverToAny { .. }
            | ExprKind::Use { .. }
            | ExprKind::Borrow { .. }
            | ExprKind::AddressOf { .. }
            | ExprKind::Adt { .. }
            | ExprKind::Loop { .. }
            | ExprKind::LogicalOp { .. }
            | ExprKind::Call { .. }
            | ExprKind::Field { .. }
            | ExprKind::Let { .. }
            | ExprKind::Deref { .. }
            | ExprKind::Index { .. }
            | ExprKind::VarRef { .. }
            | ExprKind::UpvarRef { .. }
            | ExprKind::Break { .. }
            | ExprKind::Continue { .. }
            | ExprKind::Return { .. }
            | ExprKind::InlineAsm { .. }
            | ExprKind::PlaceTypeAscription { .. }
            | ExprKind::ValueTypeAscription { .. } => {
                // these do not have corresponding `Rvalue` variants,
                // so make an operand and then return that
                debug_assert!(!matches!(
                    Category::of(&expr.kind),
                    Some(Category::Rvalue(RvalueFunc::AsRvalue) | Category::Constant)
                ));
                let operand =
                    unpack!(block = this.as_operand(block, scope, expr, None, NeedsTemporary::No));
                block.and(Rvalue::Use(operand))
            }
        }
    }

    /// Lower `lhs <op> rhs`, inserting overflow / divide-by-zero assertions
    /// when `check_overflow` is enabled and the operation can fail.
    pub(crate) fn build_binary_op(
        &mut self,
        mut block: BasicBlock,
        op: BinOp,
        span: Span,
        ty: Ty<'tcx>,
        lhs: Operand<'tcx>,
        rhs: Operand<'tcx>,
    ) -> BlockAnd<Rvalue<'tcx>> {
        let source_info = self.source_info(span);
        let bool_ty = self.tcx.types.bool;
        if self.check_overflow && op.is_checkable() && ty.is_integral() {
            let result_tup = self.tcx.intern_tup(&[ty, bool_ty]);
            let result_value = self.temp(result_tup, span);

            self.cfg.push_assign(
                block,
                source_info,
                result_value,
                Rvalue::CheckedBinaryOp(op, Box::new((lhs.to_copy(), rhs.to_copy()))),
            );
            let val_fld = Field::new(0);
            let of_fld = Field::new(1);

            let tcx = self.tcx;
            let val = tcx.mk_place_field(result_value, val_fld, ty);
            let of = tcx.mk_place_field(result_value, of_fld, bool_ty);

            let err = AssertKind::Overflow(op, lhs, rhs);

            block = self.assert(block, Operand::Move(of), false, err, span);

            block.and(Rvalue::Use(Operand::Move(val)))
        } else {
            if ty.is_integral() && (op == BinOp::Div || op == BinOp::Rem) {
                // Checking division and remainder is more complex, since we 1. always check
                // and 2. there are two possible failure cases, divide-by-zero and overflow.

                let zero_err = if op == BinOp::Div {
                    AssertKind::DivisionByZero(lhs.to_copy())
                } else {
                    AssertKind::RemainderByZero(lhs.to_copy())
                };
                let overflow_err = AssertKind::Overflow(op, lhs.to_copy(), rhs.to_copy());

                // Check for / 0
                let is_zero = self.temp(bool_ty, span);
                let zero = self.zero_literal(span, ty);
                self.cfg.push_assign(
                    block,
                    source_info,
                    is_zero,
                    Rvalue::BinaryOp(BinOp::Eq, Box::new((rhs.to_copy(), zero))),
                );

                block = self.assert(block, Operand::Move(is_zero), false, zero_err, span);

                // We only need to check for the overflow in one case:
                // MIN / -1, and only for signed values.
                if ty.is_signed() {
                    let neg_1 = self.neg_1_literal(span, ty);
                    let min = self.minval_literal(span, ty);

                    let is_neg_1 = self.temp(bool_ty, span);
                    let is_min = self.temp(bool_ty, span);
                    let of = self.temp(bool_ty, span);

                    // this does (rhs == -1) & (lhs == MIN). It could short-circuit instead

                    self.cfg.push_assign(
                        block,
                        source_info,
                        is_neg_1,
                        Rvalue::BinaryOp(BinOp::Eq, Box::new((rhs.to_copy(), neg_1))),
                    );
                    self.cfg.push_assign(
                        block,
                        source_info,
                        is_min,
                        Rvalue::BinaryOp(BinOp::Eq, Box::new((lhs.to_copy(), min))),
                    );

                    let is_neg_1 = Operand::Move(is_neg_1);
                    let is_min = Operand::Move(is_min);
                    self.cfg.push_assign(
                        block,
                        source_info,
                        of,
                        Rvalue::BinaryOp(BinOp::BitAnd, Box::new((is_neg_1, is_min))),
                    );

                    block = self.assert(block, Operand::Move(of), false, overflow_err, span);
                }
            }

            block.and(Rvalue::BinaryOp(op, Box::new((lhs, rhs))))
        }
    }

    /// Lower `[value; 0]`: the element expression is still evaluated once (and
    /// dropped, if it produced an owned value) even though the array is empty.
    fn build_zero_repeat(
        &mut self,
        mut block: BasicBlock,
        value: ExprId,
        scope: Option<region::Scope>,
        outer_source_info: SourceInfo,
    ) -> BlockAnd<Rvalue<'tcx>> {
        let this = self;
        let value = &this.thir[value];
        let elem_ty = value.ty;
        if let Some(Category::Constant) = Category::of(&value.kind) {
            // Repeating a const does nothing
        } else {
            // For a non-const, we may need to generate an appropriate `Drop`
            let value_operand =
                unpack!(block = this.as_operand(block, scope, value, None, NeedsTemporary::No));
            if let Operand::Move(to_drop) = value_operand {
                let success = this.cfg.start_new_block();
                this.cfg.terminate(
                    block,
                    outer_source_info,
                    TerminatorKind::Drop { place: to_drop, target: success, unwind: None },
                );
                this.diverge_from(block);
                block = success;
            }
            this.record_operands_moved(&[value_operand]);
        }
        block.and(Rvalue::Aggregate(Box::new(AggregateKind::Array(elem_ty)), Vec::new()))
    }

    /// Capture an upvar by the weakest borrow that is sound: a unique borrow
    /// for immutable bindings, a (non-two-phase) mutable borrow otherwise.
    fn limit_capture_mutability(
        &mut self,
        upvar_span: Span,
        upvar_ty: Ty<'tcx>,
        temp_lifetime: Option<region::Scope>,
        mut block: BasicBlock,
        arg: &Expr<'tcx>,
    ) -> BlockAnd<Operand<'tcx>> {
        let this = self;

        let source_info = this.source_info(upvar_span);
        let temp = this.local_decls.push(LocalDecl::new(upvar_ty, upvar_span));

        this.cfg.push(block, Statement { source_info, kind: StatementKind::StorageLive(temp) });

        let arg_place_builder = unpack!(block = this.as_place_builder(block, arg));

        let mutability = match arg_place_builder.base() {
            // We are capturing a path that starts off a local variable in the parent.
            // The mutability of the current capture is same as the mutability
            // of the local declaration in the parent.
            PlaceBase::Local(local) => this.local_decls[local].mutability,
            // Parent is a closure and we are capturing a path that is captured
            // by the parent itself. The mutability of the current capture
            // is same as that of the capture in the parent closure.
            PlaceBase::Upvar { .. } => {
                let enclosing_upvars_resolved =
                    arg_place_builder.clone().into_place(this.tcx, this.typeck_results);

                match enclosing_upvars_resolved.as_ref() {
                    PlaceRef {
                        local,
                        projection: &[ProjectionElem::Field(upvar_index, _), ..],
                    }
                    | PlaceRef {
                        local,
                        projection:
                            &[ProjectionElem::Deref, ProjectionElem::Field(upvar_index, _), ..],
                    } => {
                        // Not in a closure
                        debug_assert!(
                            local == ty::CAPTURE_STRUCT_LOCAL,
                            "Expected local to be Local(1), found {:?}",
                            local
                        );
                        // Not in a closure
                        debug_assert!(
                            this.upvar_mutbls.len() > upvar_index.index(),
                            "Unexpected capture place, upvar_mutbls={:#?}, upvar_index={:?}",
                            this.upvar_mutbls,
                            upvar_index
                        );
                        this.upvar_mutbls[upvar_index.index()]
                    }
                    _ => bug!("Unexpected capture place"),
                }
            }
        };

        let borrow_kind = match mutability {
            Mutability::Not => BorrowKind::Unique,
            Mutability::Mut => BorrowKind::Mut { allow_two_phase_borrow: false },
        };

        let arg_place = arg_place_builder.into_place(this.tcx, this.typeck_results);

        this.cfg.push_assign(
            block,
            source_info,
            Place::from(temp),
            Rvalue::Ref(this.tcx.lifetimes.re_erased, borrow_kind, arg_place),
        );

        // See the comment in `expr_as_temp` and on the `rvalue_scopes` field for why
        // this can be `None`.
+ if let Some(temp_lifetime) = temp_lifetime { + this.schedule_drop_storage_and_value(upvar_span, temp_lifetime, temp); + } + + block.and(Operand::Move(Place::from(temp))) + } + + // Helper to get a `-1` value of the appropriate type + fn neg_1_literal(&mut self, span: Span, ty: Ty<'tcx>) -> Operand<'tcx> { + let param_ty = ty::ParamEnv::empty().and(ty); + let size = self.tcx.layout_of(param_ty).unwrap().size; + let literal = ConstantKind::from_bits(self.tcx, size.unsigned_int_max(), param_ty); + + self.literal_operand(span, literal) + } + + // Helper to get the minimum value of the appropriate type + fn minval_literal(&mut self, span: Span, ty: Ty<'tcx>) -> Operand<'tcx> { + assert!(ty.is_signed()); + let param_ty = ty::ParamEnv::empty().and(ty); + let bits = self.tcx.layout_of(param_ty).unwrap().size.bits(); + let n = 1 << (bits - 1); + let literal = ConstantKind::from_bits(self.tcx, n, param_ty); + + self.literal_operand(span, literal) + } +} diff --git a/compiler/rustc_mir_build/src/build/expr/as_temp.rs b/compiler/rustc_mir_build/src/build/expr/as_temp.rs new file mode 100644 index 000000000..724b72f87 --- /dev/null +++ b/compiler/rustc_mir_build/src/build/expr/as_temp.rs @@ -0,0 +1,119 @@ +//! See docs in build/expr/mod.rs + +use crate::build::scope::DropKind; +use crate::build::{BlockAnd, BlockAndExtension, Builder}; +use rustc_data_structures::stack::ensure_sufficient_stack; +use rustc_middle::middle::region; +use rustc_middle::mir::*; +use rustc_middle::thir::*; + +impl<'a, 'tcx> Builder<'a, 'tcx> { + /// Compile `expr` into a fresh temporary. This is used when building + /// up rvalues so as to freeze the value that will be consumed. + pub(crate) fn as_temp( + &mut self, + block: BasicBlock, + temp_lifetime: Option<region::Scope>, + expr: &Expr<'tcx>, + mutability: Mutability, + ) -> BlockAnd<Local> { + // this is the only place in mir building that we need to truly need to worry about + // infinite recursion. 
Everything else does recurse, too, but it always gets broken up + // at some point by inserting an intermediate temporary + ensure_sufficient_stack(|| self.as_temp_inner(block, temp_lifetime, expr, mutability)) + } + + fn as_temp_inner( + &mut self, + mut block: BasicBlock, + temp_lifetime: Option<region::Scope>, + expr: &Expr<'tcx>, + mutability: Mutability, + ) -> BlockAnd<Local> { + debug!( + "as_temp(block={:?}, temp_lifetime={:?}, expr={:?}, mutability={:?})", + block, temp_lifetime, expr, mutability + ); + let this = self; + + let expr_span = expr.span; + let source_info = this.source_info(expr_span); + if let ExprKind::Scope { region_scope, lint_level, value } = expr.kind { + return this.in_scope((region_scope, source_info), lint_level, |this| { + this.as_temp(block, temp_lifetime, &this.thir[value], mutability) + }); + } + + let expr_ty = expr.ty; + let temp = { + let mut local_decl = LocalDecl::new(expr_ty, expr_span); + if mutability == Mutability::Not { + local_decl = local_decl.immutable(); + } + + debug!("creating temp {:?} with block_context: {:?}", local_decl, this.block_context); + // Find out whether this temp is being created within the + // tail expression of a block whose result is ignored. + if let Some(tail_info) = this.block_context.currently_in_block_tail() { + local_decl = local_decl.block_tail(tail_info); + } + match expr.kind { + ExprKind::StaticRef { def_id, .. } => { + assert!(!this.tcx.is_thread_local_static(def_id)); + local_decl.internal = true; + local_decl.local_info = + Some(Box::new(LocalInfo::StaticRef { def_id, is_thread_local: false })); + } + ExprKind::ThreadLocalRef(def_id) => { + assert!(this.tcx.is_thread_local_static(def_id)); + local_decl.internal = true; + local_decl.local_info = + Some(Box::new(LocalInfo::StaticRef { def_id, is_thread_local: true })); + } + ExprKind::NamedConst { def_id, .. } | ExprKind::ConstParam { def_id, .. 
} => { + local_decl.local_info = Some(Box::new(LocalInfo::ConstRef { def_id })); + } + _ => {} + } + this.local_decls.push(local_decl) + }; + let temp_place = Place::from(temp); + + match expr.kind { + // Don't bother with StorageLive and Dead for these temporaries, + // they are never assigned. + ExprKind::Break { .. } | ExprKind::Continue { .. } | ExprKind::Return { .. } => (), + ExprKind::Block { body: Block { expr: None, targeted_by_break: false, .. } } + if expr_ty.is_never() => {} + _ => { + this.cfg + .push(block, Statement { source_info, kind: StatementKind::StorageLive(temp) }); + + // In constants, `temp_lifetime` is `None` for temporaries that + // live for the `'static` lifetime. Thus we do not drop these + // temporaries and simply leak them. + // This is equivalent to what `let x = &foo();` does in + // functions. The temporary is lifted to their surrounding + // scope. In a function that means the temporary lives until + // just before the function returns. In constants that means it + // outlives the constant's initialization value computation. + // Anything outliving a constant must have the `'static` + // lifetime and live forever. + // Anything with a shorter lifetime (e.g the `&foo()` in + // `bar(&foo())` or anything within a block will keep the + // regular drops just like runtime code. 
+ if let Some(temp_lifetime) = temp_lifetime { + this.schedule_drop(expr_span, temp_lifetime, temp, DropKind::Storage); + } + } + } + + unpack!(block = this.expr_into_dest(temp_place, block, expr)); + + if let Some(temp_lifetime) = temp_lifetime { + this.schedule_drop(expr_span, temp_lifetime, temp, DropKind::Value); + } + + block.and(temp) + } +} diff --git a/compiler/rustc_mir_build/src/build/expr/category.rs b/compiler/rustc_mir_build/src/build/expr/category.rs new file mode 100644 index 000000000..a4386319d --- /dev/null +++ b/compiler/rustc_mir_build/src/build/expr/category.rs @@ -0,0 +1,92 @@ +use rustc_middle::thir::*; + +#[derive(Debug, PartialEq)] +pub(crate) enum Category { + // An assignable memory location like `x`, `x.f`, `foo()[3]`, that + // sort of thing. Something that could appear on the LHS of an `=` + // sign. + Place, + + // A literal like `23` or `"foo"`. Does not include constant + // expressions like `3 + 5`. + Constant, + + // Something that generates a new value at runtime, like `x + y` + // or `foo()`. + Rvalue(RvalueFunc), +} + +// Rvalues fall into different "styles" that will determine which fn +// is best suited to generate them. +#[derive(Debug, PartialEq)] +pub(crate) enum RvalueFunc { + // Best generated by `into`. This is generally exprs that + // cause branching, like `match`, but also includes calls. + Into, + + // Best generated by `as_rvalue`. This is usually the case. + AsRvalue, +} + +/// Determines the category for a given expression. Note that scope +/// and paren expressions have no category. +impl Category { + pub(crate) fn of(ek: &ExprKind<'_>) -> Option<Category> { + match *ek { + ExprKind::Scope { .. } => None, + + ExprKind::Field { .. } + | ExprKind::Deref { .. } + | ExprKind::Index { .. } + | ExprKind::UpvarRef { .. } + | ExprKind::VarRef { .. } + | ExprKind::PlaceTypeAscription { .. } + | ExprKind::ValueTypeAscription { .. } => Some(Category::Place), + + ExprKind::LogicalOp { .. } + | ExprKind::Match { .. 
} + | ExprKind::If { .. } + | ExprKind::Let { .. } + | ExprKind::NeverToAny { .. } + | ExprKind::Use { .. } + | ExprKind::Adt { .. } + | ExprKind::Borrow { .. } + | ExprKind::AddressOf { .. } + | ExprKind::Yield { .. } + | ExprKind::Call { .. } + | ExprKind::InlineAsm { .. } => Some(Category::Rvalue(RvalueFunc::Into)), + + ExprKind::Array { .. } + | ExprKind::Tuple { .. } + | ExprKind::Closure { .. } + | ExprKind::Unary { .. } + | ExprKind::Binary { .. } + | ExprKind::Box { .. } + | ExprKind::Cast { .. } + | ExprKind::Pointer { .. } + | ExprKind::Repeat { .. } + | ExprKind::Assign { .. } + | ExprKind::AssignOp { .. } + | ExprKind::ThreadLocalRef(_) => Some(Category::Rvalue(RvalueFunc::AsRvalue)), + + ExprKind::ConstBlock { .. } + | ExprKind::Literal { .. } + | ExprKind::NonHirLiteral { .. } + | ExprKind::ZstLiteral { .. } + | ExprKind::ConstParam { .. } + | ExprKind::StaticRef { .. } + | ExprKind::NamedConst { .. } => Some(Category::Constant), + + ExprKind::Loop { .. } + | ExprKind::Block { .. } + | ExprKind::Break { .. } + | ExprKind::Continue { .. } + | ExprKind::Return { .. } => + // FIXME(#27840) these probably want their own + // category, like "nonterminating" + { + Some(Category::Rvalue(RvalueFunc::Into)) + } + } + } +} diff --git a/compiler/rustc_mir_build/src/build/expr/into.rs b/compiler/rustc_mir_build/src/build/expr/into.rs new file mode 100644 index 000000000..017d43d10 --- /dev/null +++ b/compiler/rustc_mir_build/src/build/expr/into.rs @@ -0,0 +1,599 @@ +//! 
See docs in build/expr/mod.rs + +use crate::build::expr::category::{Category, RvalueFunc}; +use crate::build::{BlockAnd, BlockAndExtension, BlockFrame, Builder, NeedsTemporary}; +use rustc_ast::InlineAsmOptions; +use rustc_data_structures::fx::FxHashMap; +use rustc_data_structures::stack::ensure_sufficient_stack; +use rustc_hir as hir; +use rustc_index::vec::Idx; +use rustc_middle::mir::*; +use rustc_middle::thir::*; +use rustc_middle::ty::CanonicalUserTypeAnnotation; +use std::iter; + +impl<'a, 'tcx> Builder<'a, 'tcx> { + /// Compile `expr`, storing the result into `destination`, which + /// is assumed to be uninitialized. + pub(crate) fn expr_into_dest( + &mut self, + destination: Place<'tcx>, + mut block: BasicBlock, + expr: &Expr<'tcx>, + ) -> BlockAnd<()> { + debug!("expr_into_dest(destination={:?}, block={:?}, expr={:?})", destination, block, expr); + + // since we frequently have to reference `self` from within a + // closure, where `self` would be shadowed, it's easier to + // just use the name `this` uniformly + let this = self; + let expr_span = expr.span; + let source_info = this.source_info(expr_span); + + let expr_is_block_or_scope = + matches!(expr.kind, ExprKind::Block { .. } | ExprKind::Scope { .. 
}); + + if !expr_is_block_or_scope { + this.block_context.push(BlockFrame::SubExpr); + } + + let block_and = match expr.kind { + ExprKind::Scope { region_scope, lint_level, value } => { + let region_scope = (region_scope, source_info); + ensure_sufficient_stack(|| { + this.in_scope(region_scope, lint_level, |this| { + this.expr_into_dest(destination, block, &this.thir[value]) + }) + }) + } + ExprKind::Block { body: ref ast_block } => { + this.ast_block(destination, block, ast_block, source_info) + } + ExprKind::Match { scrutinee, ref arms } => { + this.match_expr(destination, expr_span, block, &this.thir[scrutinee], arms) + } + ExprKind::If { cond, then, else_opt, if_then_scope } => { + let then_blk; + let then_expr = &this.thir[then]; + let then_source_info = this.source_info(then_expr.span); + let condition_scope = this.local_scope(); + + let mut else_blk = unpack!( + then_blk = this.in_scope( + (if_then_scope, then_source_info), + LintLevel::Inherited, + |this| { + let source_info = if this.is_let(cond) { + let variable_scope = this.new_source_scope( + then_expr.span, + LintLevel::Inherited, + None, + ); + this.source_scope = variable_scope; + SourceInfo { span: then_expr.span, scope: variable_scope } + } else { + this.source_info(then_expr.span) + }; + let (then_block, else_block) = + this.in_if_then_scope(condition_scope, |this| { + let then_blk = unpack!(this.then_else_break( + block, + &this.thir[cond], + Some(condition_scope), + condition_scope, + source_info + )); + + this.expr_into_dest(destination, then_blk, then_expr) + }); + then_block.and(else_block) + }, + ) + ); + + else_blk = if let Some(else_opt) = else_opt { + unpack!(this.expr_into_dest(destination, else_blk, &this.thir[else_opt])) + } else { + // Body of the `if` expression without an `else` clause must return `()`, thus + // we implicitly generate an `else {}` if it is not specified. 
+ let correct_si = this.source_info(expr_span.shrink_to_hi()); + this.cfg.push_assign_unit(else_blk, correct_si, destination, this.tcx); + else_blk + }; + + let join_block = this.cfg.start_new_block(); + this.cfg.goto(then_blk, source_info, join_block); + this.cfg.goto(else_blk, source_info, join_block); + join_block.unit() + } + ExprKind::Let { expr, ref pat } => { + let scope = this.local_scope(); + let (true_block, false_block) = this.in_if_then_scope(scope, |this| { + this.lower_let_expr(block, &this.thir[expr], pat, scope, None, expr_span) + }); + + this.cfg.push_assign_constant( + true_block, + source_info, + destination, + Constant { + span: expr_span, + user_ty: None, + literal: ConstantKind::from_bool(this.tcx, true), + }, + ); + + this.cfg.push_assign_constant( + false_block, + source_info, + destination, + Constant { + span: expr_span, + user_ty: None, + literal: ConstantKind::from_bool(this.tcx, false), + }, + ); + + let join_block = this.cfg.start_new_block(); + this.cfg.goto(true_block, source_info, join_block); + this.cfg.goto(false_block, source_info, join_block); + join_block.unit() + } + ExprKind::NeverToAny { source } => { + let source = &this.thir[source]; + let is_call = + matches!(source.kind, ExprKind::Call { .. } | ExprKind::InlineAsm { .. }); + + // (#66975) Source could be a const of type `!`, so has to + // exist in the generated MIR. + unpack!( + block = this.as_temp(block, Some(this.local_scope()), source, Mutability::Mut,) + ); + + // This is an optimization. If the expression was a call then we already have an + // unreachable block. Don't bother to terminate it and create a new one. 
+ if is_call { + block.unit() + } else { + this.cfg.terminate(block, source_info, TerminatorKind::Unreachable); + let end_block = this.cfg.start_new_block(); + end_block.unit() + } + } + ExprKind::LogicalOp { op, lhs, rhs } => { + // And: + // + // [block: If(lhs)] -true-> [else_block: dest = (rhs)] + // | (false) + // [shortcurcuit_block: dest = false] + // + // Or: + // + // [block: If(lhs)] -false-> [else_block: dest = (rhs)] + // | (true) + // [shortcurcuit_block: dest = true] + + let (shortcircuit_block, mut else_block, join_block) = ( + this.cfg.start_new_block(), + this.cfg.start_new_block(), + this.cfg.start_new_block(), + ); + + let lhs = unpack!(block = this.as_local_operand(block, &this.thir[lhs])); + let blocks = match op { + LogicalOp::And => (else_block, shortcircuit_block), + LogicalOp::Or => (shortcircuit_block, else_block), + }; + let term = TerminatorKind::if_(this.tcx, lhs, blocks.0, blocks.1); + this.cfg.terminate(block, source_info, term); + + this.cfg.push_assign_constant( + shortcircuit_block, + source_info, + destination, + Constant { + span: expr_span, + user_ty: None, + literal: match op { + LogicalOp::And => ConstantKind::from_bool(this.tcx, false), + LogicalOp::Or => ConstantKind::from_bool(this.tcx, true), + }, + }, + ); + this.cfg.goto(shortcircuit_block, source_info, join_block); + + let rhs = unpack!(else_block = this.as_local_operand(else_block, &this.thir[rhs])); + this.cfg.push_assign(else_block, source_info, destination, Rvalue::Use(rhs)); + this.cfg.goto(else_block, source_info, join_block); + + join_block.unit() + } + ExprKind::Loop { body } => { + // [block] + // | + // [loop_block] -> [body_block] -/eval. 
body/-> [body_block_end] + // | ^ | + // false link | | + // | +-----------------------------------------+ + // +-> [diverge_cleanup] + // The false link is required to make sure borrowck considers unwinds through the + // body, even when the exact code in the body cannot unwind + + let loop_block = this.cfg.start_new_block(); + + // Start the loop. + this.cfg.goto(block, source_info, loop_block); + + this.in_breakable_scope(Some(loop_block), destination, expr_span, move |this| { + // conduct the test, if necessary + let body_block = this.cfg.start_new_block(); + this.cfg.terminate( + loop_block, + source_info, + TerminatorKind::FalseUnwind { real_target: body_block, unwind: None }, + ); + this.diverge_from(loop_block); + + // The “return” value of the loop body must always be a unit. We therefore + // introduce a unit temporary as the destination for the loop body. + let tmp = this.get_unit_temp(); + // Execute the body, branching back to the test. + let body_block_end = + unpack!(this.expr_into_dest(tmp, body_block, &this.thir[body])); + this.cfg.goto(body_block_end, source_info, loop_block); + + // Loops are only exited by `break` expressions. + None + }) + } + ExprKind::Call { ty: _, fun, ref args, from_hir_call, fn_span } => { + let fun = unpack!(block = this.as_local_operand(block, &this.thir[fun])); + let args: Vec<_> = args + .into_iter() + .copied() + .map(|arg| unpack!(block = this.as_local_call_operand(block, &this.thir[arg]))) + .collect(); + + let success = this.cfg.start_new_block(); + + this.record_operands_moved(&args); + + debug!("expr_into_dest: fn_span={:?}", fn_span); + + this.cfg.terminate( + block, + source_info, + TerminatorKind::Call { + func: fun, + args, + cleanup: None, + destination, + // The presence or absence of a return edge affects control-flow sensitive + // MIR checks and ultimately whether code is accepted or not. We can only + // omit the return edge if a return type is visibly uninhabited to a module + // that makes the call. 
+ target: if this.tcx.is_ty_uninhabited_from( + this.parent_module, + expr.ty, + this.param_env, + ) { + None + } else { + Some(success) + }, + from_hir_call, + fn_span, + }, + ); + this.diverge_from(block); + success.unit() + } + ExprKind::Use { source } => this.expr_into_dest(destination, block, &this.thir[source]), + ExprKind::Borrow { arg, borrow_kind } => { + let arg = &this.thir[arg]; + // We don't do this in `as_rvalue` because we use `as_place` + // for borrow expressions, so we cannot create an `RValue` that + // remains valid across user code. `as_rvalue` is usually called + // by this method anyway, so this shouldn't cause too many + // unnecessary temporaries. + let arg_place = match borrow_kind { + BorrowKind::Shared => unpack!(block = this.as_read_only_place(block, arg)), + _ => unpack!(block = this.as_place(block, arg)), + }; + let borrow = Rvalue::Ref(this.tcx.lifetimes.re_erased, borrow_kind, arg_place); + this.cfg.push_assign(block, source_info, destination, borrow); + block.unit() + } + ExprKind::AddressOf { mutability, arg } => { + let arg = &this.thir[arg]; + let place = match mutability { + hir::Mutability::Not => this.as_read_only_place(block, arg), + hir::Mutability::Mut => this.as_place(block, arg), + }; + let address_of = Rvalue::AddressOf(mutability, unpack!(block = place)); + this.cfg.push_assign(block, source_info, destination, address_of); + block.unit() + } + ExprKind::Adt(box Adt { + adt_def, + variant_index, + substs, + user_ty, + ref fields, + ref base, + }) => { + // See the notes for `ExprKind::Array` in `as_rvalue` and for + // `ExprKind::Borrow` above. 
+ let is_union = adt_def.is_union(); + let active_field_index = if is_union { Some(fields[0].name.index()) } else { None }; + + let scope = this.local_scope(); + + // first process the set of fields that were provided + // (evaluating them in order given by user) + let fields_map: FxHashMap<_, _> = fields + .into_iter() + .map(|f| { + let local_info = Box::new(LocalInfo::AggregateTemp); + ( + f.name, + unpack!( + block = this.as_operand( + block, + Some(scope), + &this.thir[f.expr], + Some(local_info), + NeedsTemporary::Maybe, + ) + ), + ) + }) + .collect(); + + let field_names: Vec<_> = + (0..adt_def.variant(variant_index).fields.len()).map(Field::new).collect(); + + let fields: Vec<_> = if let Some(FruInfo { base, field_types }) = base { + let place_builder = + unpack!(block = this.as_place_builder(block, &this.thir[*base])); + + // MIR does not natively support FRU, so for each + // base-supplied field, generate an operand that + // reads it from the base. + iter::zip(field_names, &**field_types) + .map(|(n, ty)| match fields_map.get(&n) { + Some(v) => v.clone(), + None => { + let place_builder = place_builder.clone(); + this.consume_by_copy_or_move( + place_builder + .field(n, *ty) + .into_place(this.tcx, this.typeck_results), + ) + } + }) + .collect() + } else { + field_names.iter().filter_map(|n| fields_map.get(n).cloned()).collect() + }; + + let inferred_ty = expr.ty; + let user_ty = user_ty.map(|ty| { + this.canonical_user_type_annotations.push(CanonicalUserTypeAnnotation { + span: source_info.span, + user_ty: ty, + inferred_ty, + }) + }); + let adt = Box::new(AggregateKind::Adt( + adt_def.did(), + variant_index, + substs, + user_ty, + active_field_index, + )); + this.cfg.push_assign( + block, + source_info, + destination, + Rvalue::Aggregate(adt, fields), + ); + block.unit() + } + ExprKind::InlineAsm { template, ref operands, options, line_spans } => { + use rustc_middle::{mir, thir}; + let operands = operands + .into_iter() + .map(|op| match *op { + 
thir::InlineAsmOperand::In { reg, expr } => mir::InlineAsmOperand::In { + reg, + value: unpack!(block = this.as_local_operand(block, &this.thir[expr])), + }, + thir::InlineAsmOperand::Out { reg, late, expr } => { + mir::InlineAsmOperand::Out { + reg, + late, + place: expr.map(|expr| { + unpack!(block = this.as_place(block, &this.thir[expr])) + }), + } + } + thir::InlineAsmOperand::InOut { reg, late, expr } => { + let place = unpack!(block = this.as_place(block, &this.thir[expr])); + mir::InlineAsmOperand::InOut { + reg, + late, + // This works because asm operands must be Copy + in_value: Operand::Copy(place), + out_place: Some(place), + } + } + thir::InlineAsmOperand::SplitInOut { reg, late, in_expr, out_expr } => { + mir::InlineAsmOperand::InOut { + reg, + late, + in_value: unpack!( + block = this.as_local_operand(block, &this.thir[in_expr]) + ), + out_place: out_expr.map(|out_expr| { + unpack!(block = this.as_place(block, &this.thir[out_expr])) + }), + } + } + thir::InlineAsmOperand::Const { value, span } => { + mir::InlineAsmOperand::Const { + value: Box::new(Constant { span, user_ty: None, literal: value }), + } + } + thir::InlineAsmOperand::SymFn { value, span } => { + mir::InlineAsmOperand::SymFn { + value: Box::new(Constant { span, user_ty: None, literal: value }), + } + } + thir::InlineAsmOperand::SymStatic { def_id } => { + mir::InlineAsmOperand::SymStatic { def_id } + } + }) + .collect(); + + if !options.contains(InlineAsmOptions::NORETURN) { + this.cfg.push_assign_unit(block, source_info, destination, this.tcx); + } + + let destination_block = this.cfg.start_new_block(); + this.cfg.terminate( + block, + source_info, + TerminatorKind::InlineAsm { + template, + operands, + options, + line_spans, + destination: if options.contains(InlineAsmOptions::NORETURN) { + None + } else { + Some(destination_block) + }, + cleanup: None, + }, + ); + if options.contains(InlineAsmOptions::MAY_UNWIND) { + this.diverge_from(block); + } + destination_block.unit() + } + + // 
These cases don't actually need a destination + ExprKind::Assign { .. } | ExprKind::AssignOp { .. } => { + unpack!(block = this.stmt_expr(block, expr, None)); + this.cfg.push_assign_unit(block, source_info, destination, this.tcx); + block.unit() + } + + ExprKind::Continue { .. } | ExprKind::Break { .. } | ExprKind::Return { .. } => { + unpack!(block = this.stmt_expr(block, expr, None)); + // No assign, as these have type `!`. + block.unit() + } + + // Avoid creating a temporary + ExprKind::VarRef { .. } + | ExprKind::UpvarRef { .. } + | ExprKind::PlaceTypeAscription { .. } + | ExprKind::ValueTypeAscription { .. } => { + debug_assert!(Category::of(&expr.kind) == Some(Category::Place)); + + let place = unpack!(block = this.as_place(block, expr)); + let rvalue = Rvalue::Use(this.consume_by_copy_or_move(place)); + this.cfg.push_assign(block, source_info, destination, rvalue); + block.unit() + } + ExprKind::Index { .. } | ExprKind::Deref { .. } | ExprKind::Field { .. } => { + debug_assert_eq!(Category::of(&expr.kind), Some(Category::Place)); + + // Create a "fake" temporary variable so that we check that the + // value is Sized. Usually, this is caught in type checking, but + // in the case of box expr there is no such check. 
+ if !destination.projection.is_empty() { + this.local_decls.push(LocalDecl::new(expr.ty, expr.span)); + } + + let place = unpack!(block = this.as_place(block, expr)); + let rvalue = Rvalue::Use(this.consume_by_copy_or_move(place)); + this.cfg.push_assign(block, source_info, destination, rvalue); + block.unit() + } + + ExprKind::Yield { value } => { + let scope = this.local_scope(); + let value = unpack!( + block = this.as_operand( + block, + Some(scope), + &this.thir[value], + None, + NeedsTemporary::No + ) + ); + let resume = this.cfg.start_new_block(); + this.cfg.terminate( + block, + source_info, + TerminatorKind::Yield { value, resume, resume_arg: destination, drop: None }, + ); + this.generator_drop_cleanup(block); + resume.unit() + } + + // these are the cases that are more naturally handled by some other mode + ExprKind::Unary { .. } + | ExprKind::Binary { .. } + | ExprKind::Box { .. } + | ExprKind::Cast { .. } + | ExprKind::Pointer { .. } + | ExprKind::Repeat { .. } + | ExprKind::Array { .. } + | ExprKind::Tuple { .. } + | ExprKind::Closure { .. } + | ExprKind::ConstBlock { .. } + | ExprKind::Literal { .. } + | ExprKind::NamedConst { .. } + | ExprKind::NonHirLiteral { .. } + | ExprKind::ZstLiteral { .. } + | ExprKind::ConstParam { .. } + | ExprKind::ThreadLocalRef(_) + | ExprKind::StaticRef { .. 
} => { + debug_assert!(match Category::of(&expr.kind).unwrap() { + // should be handled above + Category::Rvalue(RvalueFunc::Into) => false, + + // must be handled above or else we get an + // infinite loop in the builder; see + // e.g., `ExprKind::VarRef` above + Category::Place => false, + + _ => true, + }); + + let rvalue = unpack!(block = this.as_local_rvalue(block, expr)); + this.cfg.push_assign(block, source_info, destination, rvalue); + block.unit() + } + }; + + if !expr_is_block_or_scope { + let popped = this.block_context.pop(); + assert!(popped.is_some()); + } + + block_and + } + + fn is_let(&self, expr: ExprId) -> bool { + match self.thir[expr].kind { + ExprKind::Let { .. } => true, + ExprKind::Scope { value, .. } => self.is_let(value), + _ => false, + } + } +} diff --git a/compiler/rustc_mir_build/src/build/expr/mod.rs b/compiler/rustc_mir_build/src/build/expr/mod.rs new file mode 100644 index 000000000..f5ae060d6 --- /dev/null +++ b/compiler/rustc_mir_build/src/build/expr/mod.rs @@ -0,0 +1,70 @@ +//! Builds MIR from expressions. As a caller into this module, you +//! have many options, but the first thing you have to decide is +//! whether you are evaluating this expression for its *value*, its +//! *location*, or as a *constant*. +//! +//! Typically, you want the value: e.g., if you are doing `expr_a + +//! expr_b`, you want the values of those expressions. In that case, +//! you want one of the following functions. Note that if the expr has +//! a type that is not `Copy`, then using any of these functions will +//! "move" the value out of its current home (if any). +//! +//! - `expr_into_dest` -- writes the value into a specific location, which +//! should be uninitialized +//! - `as_operand` -- evaluates the value and yields an `Operand`, +//! suitable for use as an argument to an `Rvalue` +//! - `as_temp` -- evaluates into a temporary; this is similar to `as_operand` +//! except it always returns a fresh place, even for constants +//! 
- `as_rvalue` -- yields an `Rvalue`, suitable for use in an assignment; +//! as of this writing, never needed outside of the `expr` module itself +//! +//! Sometimes though want the expression's *location*. An example +//! would be during a match statement, or the operand of the `&` +//! operator. In that case, you want `as_place`. This will create a +//! temporary if necessary. +//! +//! Finally, if it's a constant you seek, then call +//! `as_constant`. This creates a `Constant<H>`, but naturally it can +//! only be used on constant expressions and hence is needed only in +//! very limited contexts. +//! +//! ### Implementation notes +//! +//! For any given kind of expression, there is generally one way that +//! can be lowered most naturally. This is specified by the +//! `Category::of` function in the `category` module. For example, a +//! struct expression (or other expression that creates a new value) +//! is typically easiest to write in terms of `as_rvalue` or `into`, +//! whereas a reference to a field is easiest to write in terms of +//! `as_place`. (The exception to this is scope and paren +//! expressions, which have no category.) +//! +//! Therefore, the various functions above make use of one another in +//! a descending fashion. For any given expression, you should pick +//! the most suitable spot to implement it, and then just let the +//! other fns cycle around. The handoff works like this: +//! +//! - `into(place)` -> fallback is to create a rvalue with `as_rvalue` and assign it to `place` +//! - `as_rvalue` -> fallback is to create an Operand with `as_operand` and use `Rvalue::use` +//! - `as_operand` -> either invokes `as_constant` or `as_temp` +//! - `as_constant` -> (no fallback) +//! - `as_temp` -> creates a temporary and either calls `as_place` or `into` +//! - `as_place` -> for rvalues, falls back to `as_temp` and returns that +//! +//! As you can see, there is a cycle where `into` can (in theory) fallback to `as_temp` +//! 
which can fallback to `into`. So if one of the `ExprKind` variants is not, in fact, +//! implemented in the category where it is supposed to be, there will be a problem. +//! +//! Of those fallbacks, the most interesting one is `into`, because +//! it discriminates based on the category of the expression. This is +//! basically the point where the "by value" operations are bridged +//! over to the "by reference" mode (`as_place`). + +pub(crate) mod as_constant; +mod as_operand; +pub mod as_place; +mod as_rvalue; +mod as_temp; +pub mod category; +mod into; +mod stmt; diff --git a/compiler/rustc_mir_build/src/build/expr/stmt.rs b/compiler/rustc_mir_build/src/build/expr/stmt.rs new file mode 100644 index 000000000..a7e1331aa --- /dev/null +++ b/compiler/rustc_mir_build/src/build/expr/stmt.rs @@ -0,0 +1,149 @@ +use crate::build::scope::BreakableTarget; +use crate::build::{BlockAnd, BlockAndExtension, BlockFrame, Builder}; +use rustc_middle::middle::region; +use rustc_middle::mir::*; +use rustc_middle::thir::*; + +impl<'a, 'tcx> Builder<'a, 'tcx> { + /// Builds a block of MIR statements to evaluate the THIR `expr`. + /// If the original expression was an AST statement, + /// (e.g., `some().code(&here());`) then `opt_stmt_span` is the + /// span of that statement (including its semicolon, if any). + /// The scope is used if a statement temporary must be dropped. + pub(crate) fn stmt_expr( + &mut self, + mut block: BasicBlock, + expr: &Expr<'tcx>, + statement_scope: Option<region::Scope>, + ) -> BlockAnd<()> { + let this = self; + let expr_span = expr.span; + let source_info = this.source_info(expr.span); + // Handle a number of expressions that don't need a destination at all. This + // avoids needing a mountain of temporary `()` variables. 
+ match expr.kind { + ExprKind::Scope { region_scope, lint_level, value } => { + this.in_scope((region_scope, source_info), lint_level, |this| { + this.stmt_expr(block, &this.thir[value], statement_scope) + }) + } + ExprKind::Assign { lhs, rhs } => { + let lhs = &this.thir[lhs]; + let rhs = &this.thir[rhs]; + let lhs_span = lhs.span; + + // Note: we evaluate assignments right-to-left. This + // is better for borrowck interaction with overloaded + // operators like x[j] = x[i]. + + debug!("stmt_expr Assign block_context.push(SubExpr) : {:?}", expr); + this.block_context.push(BlockFrame::SubExpr); + + // Generate better code for things that don't need to be + // dropped. + if lhs.ty.needs_drop(this.tcx, this.param_env) { + let rhs = unpack!(block = this.as_local_operand(block, rhs)); + let lhs = unpack!(block = this.as_place(block, lhs)); + unpack!(block = this.build_drop_and_replace(block, lhs_span, lhs, rhs)); + } else { + let rhs = unpack!(block = this.as_local_rvalue(block, rhs)); + let lhs = unpack!(block = this.as_place(block, lhs)); + this.cfg.push_assign(block, source_info, lhs, rhs); + } + + this.block_context.pop(); + block.unit() + } + ExprKind::AssignOp { op, lhs, rhs } => { + // FIXME(#28160) there is an interesting semantics + // question raised here -- should we "freeze" the + // value of the lhs here? I'm inclined to think not, + // since it seems closer to the semantics of the + // overloaded version, which takes `&mut self`. This + // only affects weird things like `x += {x += 1; x}` + // -- is that equal to `x + (x + 1)` or `2*(x+1)`? + + let lhs = &this.thir[lhs]; + let rhs = &this.thir[rhs]; + let lhs_ty = lhs.ty; + + debug!("stmt_expr AssignOp block_context.push(SubExpr) : {:?}", expr); + this.block_context.push(BlockFrame::SubExpr); + + // As above, RTL. 
+ let rhs = unpack!(block = this.as_local_operand(block, rhs)); + let lhs = unpack!(block = this.as_place(block, lhs)); + + // we don't have to drop prior contents or anything + // because AssignOp is only legal for Copy types + // (overloaded ops should be desugared into a call). + let result = unpack!( + block = + this.build_binary_op(block, op, expr_span, lhs_ty, Operand::Copy(lhs), rhs) + ); + this.cfg.push_assign(block, source_info, lhs, result); + + this.block_context.pop(); + block.unit() + } + ExprKind::Continue { label } => { + this.break_scope(block, None, BreakableTarget::Continue(label), source_info) + } + ExprKind::Break { label, value } => this.break_scope( + block, + value.map(|value| &this.thir[value]), + BreakableTarget::Break(label), + source_info, + ), + ExprKind::Return { value } => this.break_scope( + block, + value.map(|value| &this.thir[value]), + BreakableTarget::Return, + source_info, + ), + _ => { + assert!( + statement_scope.is_some(), + "Should not be calling `stmt_expr` on a general expression \ + without a statement scope", + ); + + // Issue #54382: When creating temp for the value of + // expression like: + // + // `{ side_effects(); { let l = stuff(); the_value } }` + // + // it is usually better to focus on `the_value` rather + // than the entirety of block(s) surrounding it. + let adjusted_span = (|| { + if let ExprKind::Block { body } = &expr.kind && let Some(tail_ex) = body.expr { + let mut expr = &this.thir[tail_ex]; + while let ExprKind::Block { + body: Block { expr: Some(nested_expr), .. }, + } + | ExprKind::Scope { value: nested_expr, .. 
} = expr.kind + { + expr = &this.thir[nested_expr]; + } + this.block_context.push(BlockFrame::TailExpr { + tail_result_is_ignored: true, + span: expr.span, + }); + return Some(expr.span); + } + None + })(); + + let temp = + unpack!(block = this.as_temp(block, statement_scope, expr, Mutability::Not)); + + if let Some(span) = adjusted_span { + this.local_decls[temp].source_info.span = span; + this.block_context.pop(); + } + + block.unit() + } + } + } +} |