Diffstat (limited to 'compiler/rustc_mir_transform/src/elaborate_drops.rs')
 compiler/rustc_mir_transform/src/elaborate_drops.rs | 192 ++++++-----------
 1 file changed, 55 insertions(+), 137 deletions(-)
diff --git a/compiler/rustc_mir_transform/src/elaborate_drops.rs b/compiler/rustc_mir_transform/src/elaborate_drops.rs
index bdfd8dc6e..a702113bd 100644
--- a/compiler/rustc_mir_transform/src/elaborate_drops.rs
+++ b/compiler/rustc_mir_transform/src/elaborate_drops.rs
@@ -14,19 +14,18 @@ use rustc_mir_dataflow::un_derefer::UnDerefer;
use rustc_mir_dataflow::MoveDataParamEnv;
use rustc_mir_dataflow::{on_all_children_bits, on_all_drop_children_bits};
use rustc_mir_dataflow::{Analysis, ResultsCursor};
-use rustc_span::Span;
-use rustc_target::abi::VariantIdx;
+use rustc_span::{DesugaringKind, Span};
+use rustc_target::abi::{FieldIdx, VariantIdx};
use std::fmt;
-/// During MIR building, Drop and DropAndReplace terminators are inserted in every place where a drop may occur.
+/// During MIR building, Drop terminators are inserted in every place where a drop may occur.
/// However, in this phase, the presence of these terminators does not guarantee that a destructor will run,
/// as the target of the drop may be uninitialized.
/// In general, the compiler cannot determine at compile time whether a destructor will run or not.
///
-/// At a high level, this pass refines Drop and DropAndReplace to only run the destructor if the
+/// At a high level, this pass refines Drop to only run the destructor if the
/// target is initialized. The way this is achieved is by inserting drop flags for every variable
/// that may be dropped, and then using those flags to determine whether a destructor should run.
-/// This pass also removes DropAndReplace, replacing it with a Drop paired with an assign statement.
/// Once this is complete, Drop terminators in the MIR correspond to a call to the "drop glue" or
/// "drop shim" for the type of the dropped place.
///
@@ -121,8 +120,7 @@ fn remove_dead_unwinds<'tcx>(
.into_results_cursor(body);
for (bb, bb_data) in body.basic_blocks.iter_enumerated() {
let place = match bb_data.terminator().kind {
- TerminatorKind::Drop { ref place, unwind: Some(_), .. }
- | TerminatorKind::DropAndReplace { ref place, unwind: Some(_), .. } => {
+ TerminatorKind::Drop { ref place, unwind: UnwindAction::Cleanup(_), .. } => {
und.derefer(place.as_ref(), body).unwrap_or(*place)
}
_ => continue,
@@ -162,7 +160,7 @@ fn remove_dead_unwinds<'tcx>(
let basic_blocks = body.basic_blocks.as_mut();
for &bb in dead_unwinds.iter() {
if let Some(unwind) = basic_blocks[bb].terminator_mut().unwind_mut() {
- *unwind = None;
+ *unwind = UnwindAction::Unreachable;
}
}
}
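
The `UnwindAction::Unreachable` written above, and the other variants matched later in this diff, come from the enum that replaced the old `Option<BasicBlock>` unwind field on terminators. For reference, a paraphrased sketch of its shape at the time of this change (see `rustc_middle::mir::UnwindAction` for the authoritative definition; `BasicBlock` is the MIR block index type):

```rust
/// Paraphrased sketch, not the compiler's own definition.
pub enum UnwindAction {
    /// Unwinding continues into the caller; corresponds to the old `unwind: None`.
    Continue,
    /// Unwinding cannot happen here; dead unwind edges are rewritten to this.
    Unreachable,
    /// Unwinding aborts the process.
    Terminate,
    /// Unwinding jumps to the given cleanup block; the old `unwind: Some(bb)`.
    Cleanup(BasicBlock),
}
```
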
@@ -254,7 +252,7 @@ impl<'a, 'tcx> DropElaborator<'a, 'tcx> for Elaborator<'a, '_, 'tcx> {
}
}
- fn field_subpath(&self, path: Self::Path, field: Field) -> Option<Self::Path> {
+ fn field_subpath(&self, path: Self::Path, field: FieldIdx) -> Option<Self::Path> {
rustc_mir_dataflow::move_path_children_matching(self.ctxt.move_data(), path, |e| match e {
ProjectionElem::Field(idx, _) => idx == field,
_ => false,
@@ -343,8 +341,7 @@ impl<'b, 'tcx> ElaborateDropsCtxt<'b, 'tcx> {
}
let terminator = data.terminator();
let place = match terminator.kind {
- TerminatorKind::Drop { ref place, .. }
- | TerminatorKind::DropAndReplace { ref place, .. } => {
+ TerminatorKind::Drop { ref place, .. } => {
self.un_derefer.derefer(place.as_ref(), self.body).unwrap_or(*place)
}
_ => continue,
@@ -402,7 +399,6 @@ impl<'b, 'tcx> ElaborateDropsCtxt<'b, 'tcx> {
let loc = Location { block: bb, statement_index: data.statements.len() };
let terminator = data.terminator();
- let resume_block = self.patch.resume_block();
match terminator.kind {
TerminatorKind::Drop { mut place, target, unwind } => {
if let Some(new_place) = self.un_derefer.derefer(place.as_ref(), self.body) {
@@ -411,124 +407,53 @@ impl<'b, 'tcx> ElaborateDropsCtxt<'b, 'tcx> {
self.init_data.seek_before(loc);
match self.move_data().rev_lookup.find(place.as_ref()) {
- LookupResult::Exact(path) => elaborate_drop(
- &mut Elaborator { ctxt: self },
- terminator.source_info,
- place,
- path,
- target,
- if data.is_cleanup {
+ LookupResult::Exact(path) => {
+ let unwind = if data.is_cleanup {
Unwind::InCleanup
} else {
- Unwind::To(Option::unwrap_or(unwind, resume_block))
- },
- bb,
- ),
+ match unwind {
+ UnwindAction::Cleanup(cleanup) => Unwind::To(cleanup),
+ UnwindAction::Continue => Unwind::To(self.patch.resume_block()),
+ UnwindAction::Unreachable => {
+ Unwind::To(self.patch.unreachable_cleanup_block())
+ }
+ UnwindAction::Terminate => {
+ Unwind::To(self.patch.terminate_block())
+ }
+ }
+ };
+ elaborate_drop(
+ &mut Elaborator { ctxt: self },
+ terminator.source_info,
+ place,
+ path,
+ target,
+ unwind,
+ bb,
+ )
+ }
LookupResult::Parent(..) => {
- self.tcx.sess.delay_span_bug(
- terminator.source_info.span,
- &format!("drop of untracked value {:?}", bb),
- );
+ if !matches!(
+ terminator.source_info.span.desugaring_kind(),
+ Some(DesugaringKind::Replace),
+ ) {
+ self.tcx.sess.delay_span_bug(
+ terminator.source_info.span,
+ &format!("drop of untracked value {:?}", bb),
+ );
+ }
+ // A drop and replace behind a pointer/array/whatever.
+ // The borrow checker requires that these locations are initialized before the assignment,
+ // so we just leave an unconditional drop.
+ assert!(!data.is_cleanup);
}
}
}
- TerminatorKind::DropAndReplace { mut place, ref value, target, unwind } => {
- assert!(!data.is_cleanup);
-
- if let Some(new_place) = self.un_derefer.derefer(place.as_ref(), self.body) {
- place = new_place;
- }
- self.elaborate_replace(loc, place, value, target, unwind);
- }
_ => continue,
}
}
}
- /// Elaborate a MIR `replace` terminator. This instruction
- /// is not directly handled by codegen, and therefore
- /// must be desugared.
- ///
- /// The desugaring drops the location if needed, and then writes
- /// the value (including setting the drop flag) over it in *both* arms.
- ///
- /// The `replace` terminator can also be called on places that
- /// are not tracked by elaboration (for example,
- /// `replace x[i] <- tmp0`). The borrow checker requires that
- /// these locations are initialized before the assignment,
- /// so we just generate an unconditional drop.
- fn elaborate_replace(
- &mut self,
- loc: Location,
- place: Place<'tcx>,
- value: &Operand<'tcx>,
- target: BasicBlock,
- unwind: Option<BasicBlock>,
- ) {
- let bb = loc.block;
- let data = &self.body[bb];
- let terminator = data.terminator();
- assert!(!data.is_cleanup, "DropAndReplace in unwind path not supported");
-
- let assign = Statement {
- kind: StatementKind::Assign(Box::new((place, Rvalue::Use(value.clone())))),
- source_info: terminator.source_info,
- };
-
- let unwind = unwind.unwrap_or_else(|| self.patch.resume_block());
- let unwind = self.patch.new_block(BasicBlockData {
- statements: vec![assign.clone()],
- terminator: Some(Terminator {
- kind: TerminatorKind::Goto { target: unwind },
- ..*terminator
- }),
- is_cleanup: true,
- });
-
- let target = self.patch.new_block(BasicBlockData {
- statements: vec![assign],
- terminator: Some(Terminator { kind: TerminatorKind::Goto { target }, ..*terminator }),
- is_cleanup: false,
- });
-
- match self.move_data().rev_lookup.find(place.as_ref()) {
- LookupResult::Exact(path) => {
- debug!("elaborate_drop_and_replace({:?}) - tracked {:?}", terminator, path);
- self.init_data.seek_before(loc);
- elaborate_drop(
- &mut Elaborator { ctxt: self },
- terminator.source_info,
- place,
- path,
- target,
- Unwind::To(unwind),
- bb,
- );
- on_all_children_bits(self.tcx, self.body, self.move_data(), path, |child| {
- self.set_drop_flag(
- Location { block: target, statement_index: 0 },
- child,
- DropFlagState::Present,
- );
- self.set_drop_flag(
- Location { block: unwind, statement_index: 0 },
- child,
- DropFlagState::Present,
- );
- });
- }
- LookupResult::Parent(parent) => {
- // drop and replace behind a pointer/array/whatever. The location
- // must be initialized.
- debug!("elaborate_drop_and_replace({:?}) - untracked {:?}", terminator, parent);
- self.patch.patch_terminator(
- bb,
- TerminatorKind::Drop { place, target, unwind: Some(unwind) },
- );
- }
- }
- }
-
fn constant_bool(&self, span: Span, val: bool) -> Rvalue<'tcx> {
Rvalue::Use(Operand::Constant(Box::new(Constant {
span,
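
With `elaborate_replace` removed, the `DropAndReplace` desugaring is assumed to happen during MIR building instead: an assignment to a place whose old value needs dropping is lowered to an ordinary `Drop` (with its span tagged `DesugaringKind::Replace`) followed by the assignment. A simplified illustration, with the MIR sketched in comments rather than quoted from the compiler:

```rust
// Surface Rust that previously produced a `DropAndReplace` terminator:
fn replace(slot: &mut String, new: String) {
    *slot = new;
}

// With this change, MIR building is assumed to emit roughly:
//
//   bb0: drop((*slot)) -> [return: bb1, unwind: bb2]
//        // the Drop's span carries DesugaringKind::Replace
//   bb1: (*slot) = move new;
//        goto -> bb3;
//
// so this pass only has to elaborate plain `Drop` terminators, and the
// `DesugaringKind::Replace` check above suppresses the "drop of untracked
// value" bug report for places such as `(*slot)` that move analysis only
// tracks through a parent path.
```
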
@@ -560,7 +485,10 @@ impl<'b, 'tcx> ElaborateDropsCtxt<'b, 'tcx> {
continue;
}
if let TerminatorKind::Call {
- destination, target: Some(tgt), cleanup: Some(_), ..
+ destination,
+ target: Some(tgt),
+ unwind: UnwindAction::Cleanup(_),
+ ..
} = data.terminator().kind
{
assert!(!self.patch.is_patched(bb));
@@ -600,22 +528,12 @@ impl<'b, 'tcx> ElaborateDropsCtxt<'b, 'tcx> {
debug!("drop_flags_for_locs({:?})", data);
for i in 0..(data.statements.len() + 1) {
debug!("drop_flag_for_locs: stmt {}", i);
- let mut allow_initializations = true;
if i == data.statements.len() {
match data.terminator().kind {
TerminatorKind::Drop { .. } => {
// drop elaboration should handle that by itself
continue;
}
- TerminatorKind::DropAndReplace { .. } => {
- // this contains the move of the source and
- // the initialization of the destination. We
- // only want the former - the latter is handled
- // by the elaboration code and must be done
- // *after* the destination is dropped.
- assert!(self.patch.is_patched(bb));
- allow_initializations = false;
- }
TerminatorKind::Resume => {
// It is possible for `Resume` to be patched
// (in particular it can be patched to be replaced with
@@ -632,19 +550,19 @@ impl<'b, 'tcx> ElaborateDropsCtxt<'b, 'tcx> {
self.body,
self.env,
loc,
- |path, ds| {
- if ds == DropFlagState::Absent || allow_initializations {
- self.set_drop_flag(loc, path, ds)
- }
- },
+ |path, ds| self.set_drop_flag(loc, path, ds),
)
}
// There may be a critical edge after this call,
// so mark the return as initialized *before* the
// call.
- if let TerminatorKind::Call { destination, target: Some(_), cleanup: None, .. } =
- data.terminator().kind
+ if let TerminatorKind::Call {
+ destination,
+ target: Some(_),
+ unwind: UnwindAction::Continue | UnwindAction::Unreachable | UnwindAction::Terminate,
+ ..
+ } = data.terminator().kind
{
assert!(!self.patch.is_patched(bb));