author     Daniel Baumann <daniel.baumann@progress-linux.org>   2024-05-30 03:59:35 +0000
committer  Daniel Baumann <daniel.baumann@progress-linux.org>   2024-05-30 03:59:35 +0000
commit     d1b2d29528b7794b41e66fc2136e395a02f8529b (patch)
tree       a4a17504b260206dec3cf55b2dca82929a348ac2 /compiler/rustc_mir_dataflow/src/impls
parent     Releasing progress-linux version 1.72.1+dfsg1-1~progress7.99u1. (diff)
Merging upstream version 1.73.0+dfsg1.
Signed-off-by: Daniel Baumann <daniel.baumann@progress-linux.org>
Diffstat (limited to 'compiler/rustc_mir_dataflow/src/impls')
-rw-r--r--  compiler/rustc_mir_dataflow/src/impls/borrowed_locals.rs   |  65
-rw-r--r--  compiler/rustc_mir_dataflow/src/impls/initialized.rs       | 778
-rw-r--r--  compiler/rustc_mir_dataflow/src/impls/liveness.rs          |  92
-rw-r--r--  compiler/rustc_mir_dataflow/src/impls/mod.rs                | 757
-rw-r--r--  compiler/rustc_mir_dataflow/src/impls/storage_liveness.rs  |  83
5 files changed, 911 insertions, 864 deletions
diff --git a/compiler/rustc_mir_dataflow/src/impls/borrowed_locals.rs b/compiler/rustc_mir_dataflow/src/impls/borrowed_locals.rs
index b88ed32b6..8d7b50796 100644
--- a/compiler/rustc_mir_dataflow/src/impls/borrowed_locals.rs
+++ b/compiler/rustc_mir_dataflow/src/impls/borrowed_locals.rs
@@ -1,9 +1,9 @@
-use super::*;
-
-use crate::{AnalysisDomain, CallReturnPlaces, GenKill, GenKillAnalysis};
+use rustc_index::bit_set::BitSet;
use rustc_middle::mir::visit::Visitor;
use rustc_middle::mir::*;
+use crate::{AnalysisDomain, GenKill, GenKillAnalysis};
+
/// A dataflow analysis that tracks whether a pointer or reference could possibly exist that points
/// to a given local.
///
@@ -14,7 +14,7 @@ use rustc_middle::mir::*;
pub struct MaybeBorrowedLocals;
impl MaybeBorrowedLocals {
- fn transfer_function<'a, T>(&'a self, trans: &'a mut T) -> TransferFunction<'a, T> {
+ pub(super) fn transfer_function<'a, T>(&'a self, trans: &'a mut T) -> TransferFunction<'a, T> {
TransferFunction { trans }
}
}
@@ -23,12 +23,12 @@ impl<'tcx> AnalysisDomain<'tcx> for MaybeBorrowedLocals {
type Domain = BitSet<Local>;
const NAME: &'static str = "maybe_borrowed_locals";
- fn bottom_value(&self, body: &mir::Body<'tcx>) -> Self::Domain {
+ fn bottom_value(&self, body: &Body<'tcx>) -> Self::Domain {
// bottom = unborrowed
BitSet::new_empty(body.local_decls().len())
}
- fn initialize_start_block(&self, _: &mir::Body<'tcx>, _: &mut Self::Domain) {
+ fn initialize_start_block(&self, _: &Body<'tcx>, _: &mut Self::Domain) {
// No locals are aliased on function entry
}
}
@@ -36,35 +36,40 @@ impl<'tcx> AnalysisDomain<'tcx> for MaybeBorrowedLocals {
impl<'tcx> GenKillAnalysis<'tcx> for MaybeBorrowedLocals {
type Idx = Local;
+ fn domain_size(&self, body: &Body<'tcx>) -> usize {
+ body.local_decls.len()
+ }
+
fn statement_effect(
&mut self,
trans: &mut impl GenKill<Self::Idx>,
- statement: &mir::Statement<'tcx>,
+ statement: &Statement<'tcx>,
location: Location,
) {
self.transfer_function(trans).visit_statement(statement, location);
}
- fn terminator_effect(
+ fn terminator_effect<'mir>(
&mut self,
- trans: &mut impl GenKill<Self::Idx>,
- terminator: &mir::Terminator<'tcx>,
+ trans: &mut Self::Domain,
+ terminator: &'mir Terminator<'tcx>,
location: Location,
- ) {
+ ) -> TerminatorEdges<'mir, 'tcx> {
self.transfer_function(trans).visit_terminator(terminator, location);
+ terminator.edges()
}
fn call_return_effect(
&mut self,
_trans: &mut impl GenKill<Self::Idx>,
- _block: mir::BasicBlock,
+ _block: BasicBlock,
_return_places: CallReturnPlaces<'_, 'tcx>,
) {
}
}
/// A `Visitor` that defines the transfer function for `MaybeBorrowedLocals`.
-struct TransferFunction<'a, T> {
+pub(super) struct TransferFunction<'a, T> {
trans: &'a mut T,
}
@@ -82,37 +87,37 @@ where
}
}
- fn visit_rvalue(&mut self, rvalue: &mir::Rvalue<'tcx>, location: Location) {
+ fn visit_rvalue(&mut self, rvalue: &Rvalue<'tcx>, location: Location) {
self.super_rvalue(rvalue, location);
match rvalue {
- mir::Rvalue::AddressOf(_, borrowed_place) | mir::Rvalue::Ref(_, _, borrowed_place) => {
+ Rvalue::AddressOf(_, borrowed_place) | Rvalue::Ref(_, _, borrowed_place) => {
if !borrowed_place.is_indirect() {
self.trans.gen(borrowed_place.local);
}
}
- mir::Rvalue::Cast(..)
- | mir::Rvalue::ShallowInitBox(..)
- | mir::Rvalue::Use(..)
- | mir::Rvalue::ThreadLocalRef(..)
- | mir::Rvalue::Repeat(..)
- | mir::Rvalue::Len(..)
- | mir::Rvalue::BinaryOp(..)
- | mir::Rvalue::CheckedBinaryOp(..)
- | mir::Rvalue::NullaryOp(..)
- | mir::Rvalue::UnaryOp(..)
- | mir::Rvalue::Discriminant(..)
- | mir::Rvalue::Aggregate(..)
- | mir::Rvalue::CopyForDeref(..) => {}
+ Rvalue::Cast(..)
+ | Rvalue::ShallowInitBox(..)
+ | Rvalue::Use(..)
+ | Rvalue::ThreadLocalRef(..)
+ | Rvalue::Repeat(..)
+ | Rvalue::Len(..)
+ | Rvalue::BinaryOp(..)
+ | Rvalue::CheckedBinaryOp(..)
+ | Rvalue::NullaryOp(..)
+ | Rvalue::UnaryOp(..)
+ | Rvalue::Discriminant(..)
+ | Rvalue::Aggregate(..)
+ | Rvalue::CopyForDeref(..) => {}
}
}
- fn visit_terminator(&mut self, terminator: &mir::Terminator<'tcx>, location: Location) {
+ fn visit_terminator(&mut self, terminator: &Terminator<'tcx>, location: Location) {
self.super_terminator(terminator, location);
match terminator.kind {
- mir::TerminatorKind::Drop { place: dropped_place, .. } => {
+ TerminatorKind::Drop { place: dropped_place, .. } => {
// Drop terminators may call custom drop glue (`Drop::drop`), which takes `&mut
// self` as a parameter. In the general case, a drop impl could launder that
// reference into the surrounding environment through a raw pointer, thus creating
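
The `borrowed_locals.rs` hunks above already show the two framework changes that recur through the rest of this diff: `GenKillAnalysis` gains a required `domain_size`, and `terminator_effect` now operates on the concrete `Self::Domain` and returns the `TerminatorEdges` along which the engine should propagate the state. Below is a minimal sketch of a gen/kill analysis written against that 1.73 shape. `MaybeTouchedLocals` and its semantics are invented for illustration; only the trait signatures come from the hunks above, and the code builds only against the compiler's internal crates (`rustc_private`), not on stable Rust.

```rust
// Sketch only: the analysis is hypothetical; the trait shape matches the diff above.
use rustc_index::bit_set::BitSet;
use rustc_middle::mir::{
    BasicBlock, Body, CallReturnPlaces, Local, Location, Statement, Terminator, TerminatorEdges,
};
use rustc_mir_dataflow::{AnalysisDomain, GenKill, GenKillAnalysis};

/// Marks a local once it appears as the destination of an assignment.
pub struct MaybeTouchedLocals;

impl<'tcx> AnalysisDomain<'tcx> for MaybeTouchedLocals {
    type Domain = BitSet<Local>;
    const NAME: &'static str = "maybe_touched_locals";

    fn bottom_value(&self, body: &Body<'tcx>) -> Self::Domain {
        // bottom = untouched
        BitSet::new_empty(body.local_decls.len())
    }

    fn initialize_start_block(&self, _: &Body<'tcx>, _: &mut Self::Domain) {}
}

impl<'tcx> GenKillAnalysis<'tcx> for MaybeTouchedLocals {
    type Idx = Local;

    // New in 1.73: gen/kill analyses report their domain size up front.
    fn domain_size(&self, body: &Body<'tcx>) -> usize {
        body.local_decls.len()
    }

    fn statement_effect(
        &mut self,
        trans: &mut impl GenKill<Self::Idx>,
        statement: &Statement<'tcx>,
        _location: Location,
    ) {
        if let Some((place, _rvalue)) = statement.kind.as_assign() {
            trans.gen(place.local);
        }
    }

    // New in 1.73: the terminator effect takes the concrete domain and returns
    // the edges the engine should follow out of this terminator.
    fn terminator_effect<'mir>(
        &mut self,
        _state: &mut Self::Domain,
        terminator: &'mir Terminator<'tcx>,
        _location: Location,
    ) -> TerminatorEdges<'mir, 'tcx> {
        terminator.edges()
    }

    fn call_return_effect(
        &mut self,
        trans: &mut impl GenKill<Self::Idx>,
        _block: BasicBlock,
        return_places: CallReturnPlaces<'_, 'tcx>,
    ) {
        return_places.for_each(|place| trans.gen(place.local));
    }
}
```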
diff --git a/compiler/rustc_mir_dataflow/src/impls/initialized.rs b/compiler/rustc_mir_dataflow/src/impls/initialized.rs
new file mode 100644
index 000000000..e6d383d62
--- /dev/null
+++ b/compiler/rustc_mir_dataflow/src/impls/initialized.rs
@@ -0,0 +1,778 @@
+use rustc_index::bit_set::{BitSet, ChunkedBitSet};
+use rustc_index::Idx;
+use rustc_middle::mir::{self, Body, CallReturnPlaces, Location, TerminatorEdges};
+use rustc_middle::ty::{self, TyCtxt};
+
+use crate::drop_flag_effects_for_function_entry;
+use crate::drop_flag_effects_for_location;
+use crate::elaborate_drops::DropFlagState;
+use crate::framework::SwitchIntEdgeEffects;
+use crate::move_paths::{HasMoveData, InitIndex, InitKind, LookupResult, MoveData, MovePathIndex};
+use crate::on_lookup_result_bits;
+use crate::MoveDataParamEnv;
+use crate::{drop_flag_effects, on_all_children_bits, on_all_drop_children_bits};
+use crate::{lattice, AnalysisDomain, GenKill, GenKillAnalysis, MaybeReachable};
+
+/// `MaybeInitializedPlaces` tracks all places that might be
+/// initialized upon reaching a particular point in the control flow
+/// for a function.
+///
+/// For example, in code like the following, we have corresponding
+/// dataflow information shown in the right-hand comments.
+///
+/// ```rust
+/// struct S;
+/// fn foo(pred: bool) { // maybe-init:
+/// // {}
+/// let a = S; let mut b = S; let c; let d; // {a, b}
+///
+/// if pred {
+/// drop(a); // { b}
+/// b = S; // { b}
+///
+/// } else {
+/// drop(b); // {a}
+/// d = S; // {a, d}
+///
+/// } // {a, b, d}
+///
+/// c = S; // {a, b, c, d}
+/// }
+/// ```
+///
+/// To determine whether a place *must* be initialized at a
+/// particular control-flow point, one can take the set-difference
+/// between this data and the data from `MaybeUninitializedPlaces` at the
+/// corresponding control-flow point.
+///
+/// Similarly, at a given `drop` statement, the set-intersection
+/// between this data and `MaybeUninitializedPlaces` yields the set of
+/// places that would require a dynamic drop-flag at that statement.
+pub struct MaybeInitializedPlaces<'a, 'tcx> {
+ tcx: TyCtxt<'tcx>,
+ body: &'a Body<'tcx>,
+ mdpe: &'a MoveDataParamEnv<'tcx>,
+ skip_unreachable_unwind: bool,
+}
+
+impl<'a, 'tcx> MaybeInitializedPlaces<'a, 'tcx> {
+ pub fn new(tcx: TyCtxt<'tcx>, body: &'a Body<'tcx>, mdpe: &'a MoveDataParamEnv<'tcx>) -> Self {
+ MaybeInitializedPlaces { tcx, body, mdpe, skip_unreachable_unwind: false }
+ }
+
+ pub fn skipping_unreachable_unwind(mut self) -> Self {
+ self.skip_unreachable_unwind = true;
+ self
+ }
+
+ pub fn is_unwind_dead(
+ &self,
+ place: mir::Place<'tcx>,
+ state: &MaybeReachable<ChunkedBitSet<MovePathIndex>>,
+ ) -> bool {
+ if let LookupResult::Exact(path) = self.move_data().rev_lookup.find(place.as_ref()) {
+ let mut maybe_live = false;
+ on_all_drop_children_bits(self.tcx, self.body, self.mdpe, path, |child| {
+ maybe_live |= state.contains(child);
+ });
+ !maybe_live
+ } else {
+ false
+ }
+ }
+}
+
+impl<'a, 'tcx> HasMoveData<'tcx> for MaybeInitializedPlaces<'a, 'tcx> {
+ fn move_data(&self) -> &MoveData<'tcx> {
+ &self.mdpe.move_data
+ }
+}
+
+/// `MaybeUninitializedPlaces` tracks all places that might be
+/// uninitialized upon reaching a particular point in the control flow
+/// for a function.
+///
+/// For example, in code like the following, we have corresponding
+/// dataflow information shown in the right-hand comments.
+///
+/// ```rust
+/// struct S;
+/// fn foo(pred: bool) { // maybe-uninit:
+/// // {a, b, c, d}
+/// let a = S; let mut b = S; let c; let d; // { c, d}
+///
+/// if pred {
+/// drop(a); // {a, c, d}
+/// b = S; // {a, c, d}
+///
+/// } else {
+/// drop(b); // { b, c, d}
+/// d = S; // { b, c }
+///
+/// } // {a, b, c, d}
+///
+/// c = S; // {a, b, d}
+/// }
+/// ```
+///
+/// To determine whether a place *must* be uninitialized at a
+/// particular control-flow point, one can take the set-difference
+/// between this data and the data from `MaybeInitializedPlaces` at the
+/// corresponding control-flow point.
+///
+/// Similarly, at a given `drop` statement, the set-intersection
+/// between this data and `MaybeInitializedPlaces` yields the set of
+/// places that would require a dynamic drop-flag at that statement.
+pub struct MaybeUninitializedPlaces<'a, 'tcx> {
+ tcx: TyCtxt<'tcx>,
+ body: &'a Body<'tcx>,
+ mdpe: &'a MoveDataParamEnv<'tcx>,
+
+ mark_inactive_variants_as_uninit: bool,
+ skip_unreachable_unwind: BitSet<mir::BasicBlock>,
+}
+
+impl<'a, 'tcx> MaybeUninitializedPlaces<'a, 'tcx> {
+ pub fn new(tcx: TyCtxt<'tcx>, body: &'a Body<'tcx>, mdpe: &'a MoveDataParamEnv<'tcx>) -> Self {
+ MaybeUninitializedPlaces {
+ tcx,
+ body,
+ mdpe,
+ mark_inactive_variants_as_uninit: false,
+ skip_unreachable_unwind: BitSet::new_empty(body.basic_blocks.len()),
+ }
+ }
+
+ /// Causes inactive enum variants to be marked as "maybe uninitialized" after a switch on an
+ /// enum discriminant.
+ ///
+ /// This is correct in a vacuum but is not the default because it causes problems in the borrow
+ /// checker, where this information gets propagated along `FakeEdge`s.
+ pub fn mark_inactive_variants_as_uninit(mut self) -> Self {
+ self.mark_inactive_variants_as_uninit = true;
+ self
+ }
+
+ pub fn skipping_unreachable_unwind(
+ mut self,
+ unreachable_unwind: BitSet<mir::BasicBlock>,
+ ) -> Self {
+ self.skip_unreachable_unwind = unreachable_unwind;
+ self
+ }
+}
+
+impl<'a, 'tcx> HasMoveData<'tcx> for MaybeUninitializedPlaces<'a, 'tcx> {
+ fn move_data(&self) -> &MoveData<'tcx> {
+ &self.mdpe.move_data
+ }
+}
+
+/// `DefinitelyInitializedPlaces` tracks all places that are definitely
+/// initialized upon reaching a particular point in the control flow
+/// for a function.
+///
+/// For example, in code like the following, we have corresponding
+/// dataflow information shown in the right-hand comments.
+///
+/// ```rust
+/// struct S;
+/// fn foo(pred: bool) { // definite-init:
+/// // { }
+/// let a = S; let mut b = S; let c; let d; // {a, b }
+///
+/// if pred {
+/// drop(a); // { b, }
+/// b = S; // { b, }
+///
+/// } else {
+/// drop(b); // {a, }
+/// d = S; // {a, d}
+///
+/// } // { }
+///
+/// c = S; // { c }
+/// }
+/// ```
+///
+/// To determine whether a place *may* be uninitialized at a
+/// particular control-flow point, one can take the set-complement
+/// of this data.
+///
+/// Similarly, at a given `drop` statement, the set-difference between
+/// this data and `MaybeInitializedPlaces` yields the set of places
+/// that would require a dynamic drop-flag at that statement.
+pub struct DefinitelyInitializedPlaces<'a, 'tcx> {
+ tcx: TyCtxt<'tcx>,
+ body: &'a Body<'tcx>,
+ mdpe: &'a MoveDataParamEnv<'tcx>,
+}
+
+impl<'a, 'tcx> DefinitelyInitializedPlaces<'a, 'tcx> {
+ pub fn new(tcx: TyCtxt<'tcx>, body: &'a Body<'tcx>, mdpe: &'a MoveDataParamEnv<'tcx>) -> Self {
+ DefinitelyInitializedPlaces { tcx, body, mdpe }
+ }
+}
+
+impl<'a, 'tcx> HasMoveData<'tcx> for DefinitelyInitializedPlaces<'a, 'tcx> {
+ fn move_data(&self) -> &MoveData<'tcx> {
+ &self.mdpe.move_data
+ }
+}
+
+/// `EverInitializedPlaces` tracks all places that might have ever been
+/// initialized upon reaching a particular point in the control flow
+/// for a function, without an intervening `StorageDead`.
+///
+/// This dataflow is used to determine if an immutable local variable may
+/// be assigned to.
+///
+/// For example, in code like the following, we have corresponding
+/// dataflow information shown in the right-hand comments.
+///
+/// ```rust
+/// struct S;
+/// fn foo(pred: bool) { // ever-init:
+/// // { }
+/// let a = S; let mut b = S; let c; let d; // {a, b }
+///
+/// if pred {
+/// drop(a); // {a, b, }
+/// b = S; // {a, b, }
+///
+/// } else {
+/// drop(b); // {a, b, }
+/// d = S; // {a, b, d }
+///
+/// } // {a, b, d }
+///
+/// c = S; // {a, b, c, d }
+/// }
+/// ```
+pub struct EverInitializedPlaces<'a, 'tcx> {
+ #[allow(dead_code)]
+ tcx: TyCtxt<'tcx>,
+ body: &'a Body<'tcx>,
+ mdpe: &'a MoveDataParamEnv<'tcx>,
+}
+
+impl<'a, 'tcx> EverInitializedPlaces<'a, 'tcx> {
+ pub fn new(tcx: TyCtxt<'tcx>, body: &'a Body<'tcx>, mdpe: &'a MoveDataParamEnv<'tcx>) -> Self {
+ EverInitializedPlaces { tcx, body, mdpe }
+ }
+}
+
+impl<'a, 'tcx> HasMoveData<'tcx> for EverInitializedPlaces<'a, 'tcx> {
+ fn move_data(&self) -> &MoveData<'tcx> {
+ &self.mdpe.move_data
+ }
+}
+
+impl<'a, 'tcx> MaybeInitializedPlaces<'a, 'tcx> {
+ fn update_bits(
+ trans: &mut impl GenKill<MovePathIndex>,
+ path: MovePathIndex,
+ state: DropFlagState,
+ ) {
+ match state {
+ DropFlagState::Absent => trans.kill(path),
+ DropFlagState::Present => trans.gen(path),
+ }
+ }
+}
+
+impl<'a, 'tcx> MaybeUninitializedPlaces<'a, 'tcx> {
+ fn update_bits(
+ trans: &mut impl GenKill<MovePathIndex>,
+ path: MovePathIndex,
+ state: DropFlagState,
+ ) {
+ match state {
+ DropFlagState::Absent => trans.gen(path),
+ DropFlagState::Present => trans.kill(path),
+ }
+ }
+}
+
+impl<'a, 'tcx> DefinitelyInitializedPlaces<'a, 'tcx> {
+ fn update_bits(
+ trans: &mut impl GenKill<MovePathIndex>,
+ path: MovePathIndex,
+ state: DropFlagState,
+ ) {
+ match state {
+ DropFlagState::Absent => trans.kill(path),
+ DropFlagState::Present => trans.gen(path),
+ }
+ }
+}
+
+impl<'tcx> AnalysisDomain<'tcx> for MaybeInitializedPlaces<'_, 'tcx> {
+ type Domain = MaybeReachable<ChunkedBitSet<MovePathIndex>>;
+ const NAME: &'static str = "maybe_init";
+
+ fn bottom_value(&self, _: &mir::Body<'tcx>) -> Self::Domain {
+ // bottom = uninitialized
+ MaybeReachable::Unreachable
+ }
+
+ fn initialize_start_block(&self, _: &mir::Body<'tcx>, state: &mut Self::Domain) {
+ *state =
+ MaybeReachable::Reachable(ChunkedBitSet::new_empty(self.move_data().move_paths.len()));
+ drop_flag_effects_for_function_entry(self.tcx, self.body, self.mdpe, |path, s| {
+ assert!(s == DropFlagState::Present);
+ state.gen(path);
+ });
+ }
+}
+
+impl<'tcx> GenKillAnalysis<'tcx> for MaybeInitializedPlaces<'_, 'tcx> {
+ type Idx = MovePathIndex;
+
+ fn domain_size(&self, _: &Body<'tcx>) -> usize {
+ self.move_data().move_paths.len()
+ }
+
+ fn statement_effect(
+ &mut self,
+ trans: &mut impl GenKill<Self::Idx>,
+ statement: &mir::Statement<'tcx>,
+ location: Location,
+ ) {
+ drop_flag_effects_for_location(self.tcx, self.body, self.mdpe, location, |path, s| {
+ Self::update_bits(trans, path, s)
+ });
+
+ // Mark all places as "maybe init" if they are mutably borrowed. See #90752.
+ if self.tcx.sess.opts.unstable_opts.precise_enum_drop_elaboration
+ && let Some((_, rvalue)) = statement.kind.as_assign()
+ && let mir::Rvalue::Ref(_, mir::BorrowKind::Mut { .. }, place)
+ // FIXME: Does `&raw const foo` allow mutation? See #90413.
+ | mir::Rvalue::AddressOf(_, place) = rvalue
+ && let LookupResult::Exact(mpi) = self.move_data().rev_lookup.find(place.as_ref())
+ {
+ on_all_children_bits(self.tcx, self.body, self.move_data(), mpi, |child| {
+ trans.gen(child);
+ })
+ }
+ }
+
+ fn terminator_effect<'mir>(
+ &mut self,
+ state: &mut Self::Domain,
+ terminator: &'mir mir::Terminator<'tcx>,
+ location: Location,
+ ) -> TerminatorEdges<'mir, 'tcx> {
+ let mut edges = terminator.edges();
+ if self.skip_unreachable_unwind
+ && let mir::TerminatorKind::Drop { target, unwind, place, replace: _ } = terminator.kind
+ && matches!(unwind, mir::UnwindAction::Cleanup(_))
+ && self.is_unwind_dead(place, state)
+ {
+ edges = TerminatorEdges::Single(target);
+ }
+ drop_flag_effects_for_location(self.tcx, self.body, self.mdpe, location, |path, s| {
+ Self::update_bits(state, path, s)
+ });
+ edges
+ }
+
+ fn call_return_effect(
+ &mut self,
+ trans: &mut impl GenKill<Self::Idx>,
+ _block: mir::BasicBlock,
+ return_places: CallReturnPlaces<'_, 'tcx>,
+ ) {
+ return_places.for_each(|place| {
+ // when a call returns successfully, that means we need to set
+ // the bits for that dest_place to 1 (initialized).
+ on_lookup_result_bits(
+ self.tcx,
+ self.body,
+ self.move_data(),
+ self.move_data().rev_lookup.find(place.as_ref()),
+ |mpi| {
+ trans.gen(mpi);
+ },
+ );
+ });
+ }
+
+ fn switch_int_edge_effects<G: GenKill<Self::Idx>>(
+ &mut self,
+ block: mir::BasicBlock,
+ discr: &mir::Operand<'tcx>,
+ edge_effects: &mut impl SwitchIntEdgeEffects<G>,
+ ) {
+ if !self.tcx.sess.opts.unstable_opts.precise_enum_drop_elaboration {
+ return;
+ }
+
+ let enum_ = discr.place().and_then(|discr| {
+ switch_on_enum_discriminant(self.tcx, &self.body, &self.body[block], discr)
+ });
+
+ let Some((enum_place, enum_def)) = enum_ else {
+ return;
+ };
+
+ let mut discriminants = enum_def.discriminants(self.tcx);
+ edge_effects.apply(|trans, edge| {
+ let Some(value) = edge.value else {
+ return;
+ };
+
+ // MIR building adds discriminants to the `values` array in the same order as they
+ // are yielded by `AdtDef::discriminants`. We rely on this to match each
+ // discriminant in `values` to its corresponding variant in linear time.
+ let (variant, _) = discriminants
+ .find(|&(_, discr)| discr.val == value)
+ .expect("Order of `AdtDef::discriminants` differed from `SwitchInt::values`");
+
+ // Kill all move paths that correspond to variants we know to be inactive along this
+ // particular outgoing edge of a `SwitchInt`.
+ drop_flag_effects::on_all_inactive_variants(
+ self.tcx,
+ self.body,
+ self.move_data(),
+ enum_place,
+ variant,
+ |mpi| trans.kill(mpi),
+ );
+ });
+ }
+}
+
+impl<'tcx> AnalysisDomain<'tcx> for MaybeUninitializedPlaces<'_, 'tcx> {
+ type Domain = ChunkedBitSet<MovePathIndex>;
+
+ const NAME: &'static str = "maybe_uninit";
+
+ fn bottom_value(&self, _: &mir::Body<'tcx>) -> Self::Domain {
+ // bottom = initialized (start_block_effect counters this at outset)
+ ChunkedBitSet::new_empty(self.move_data().move_paths.len())
+ }
+
+ // sets on_entry bits for Arg places
+ fn initialize_start_block(&self, _: &mir::Body<'tcx>, state: &mut Self::Domain) {
+ // set all bits to 1 (uninit) before gathering counter-evidence
+ state.insert_all();
+
+ drop_flag_effects_for_function_entry(self.tcx, self.body, self.mdpe, |path, s| {
+ assert!(s == DropFlagState::Present);
+ state.remove(path);
+ });
+ }
+}
+
+impl<'tcx> GenKillAnalysis<'tcx> for MaybeUninitializedPlaces<'_, 'tcx> {
+ type Idx = MovePathIndex;
+
+ fn domain_size(&self, _: &Body<'tcx>) -> usize {
+ self.move_data().move_paths.len()
+ }
+
+ fn statement_effect(
+ &mut self,
+ trans: &mut impl GenKill<Self::Idx>,
+ _statement: &mir::Statement<'tcx>,
+ location: Location,
+ ) {
+ drop_flag_effects_for_location(self.tcx, self.body, self.mdpe, location, |path, s| {
+ Self::update_bits(trans, path, s)
+ });
+
+ // Unlike in `MaybeInitializedPlaces` above, we don't need to change the state when a
+ // mutable borrow occurs. Places cannot become uninitialized through a mutable reference.
+ }
+
+ fn terminator_effect<'mir>(
+ &mut self,
+ trans: &mut Self::Domain,
+ terminator: &'mir mir::Terminator<'tcx>,
+ location: Location,
+ ) -> TerminatorEdges<'mir, 'tcx> {
+ drop_flag_effects_for_location(self.tcx, self.body, self.mdpe, location, |path, s| {
+ Self::update_bits(trans, path, s)
+ });
+ if self.skip_unreachable_unwind.contains(location.block) {
+ let mir::TerminatorKind::Drop { target, unwind, .. } = terminator.kind else { bug!() };
+ assert!(matches!(unwind, mir::UnwindAction::Cleanup(_)));
+ TerminatorEdges::Single(target)
+ } else {
+ terminator.edges()
+ }
+ }
+
+ fn call_return_effect(
+ &mut self,
+ trans: &mut impl GenKill<Self::Idx>,
+ _block: mir::BasicBlock,
+ return_places: CallReturnPlaces<'_, 'tcx>,
+ ) {
+ return_places.for_each(|place| {
+ // when a call returns successfully, that means we need to set
+ // the bits for that dest_place to 0 (initialized).
+ on_lookup_result_bits(
+ self.tcx,
+ self.body,
+ self.move_data(),
+ self.move_data().rev_lookup.find(place.as_ref()),
+ |mpi| {
+ trans.kill(mpi);
+ },
+ );
+ });
+ }
+
+ fn switch_int_edge_effects<G: GenKill<Self::Idx>>(
+ &mut self,
+ block: mir::BasicBlock,
+ discr: &mir::Operand<'tcx>,
+ edge_effects: &mut impl SwitchIntEdgeEffects<G>,
+ ) {
+ if !self.tcx.sess.opts.unstable_opts.precise_enum_drop_elaboration {
+ return;
+ }
+
+ if !self.mark_inactive_variants_as_uninit {
+ return;
+ }
+
+ let enum_ = discr.place().and_then(|discr| {
+ switch_on_enum_discriminant(self.tcx, &self.body, &self.body[block], discr)
+ });
+
+ let Some((enum_place, enum_def)) = enum_ else {
+ return;
+ };
+
+ let mut discriminants = enum_def.discriminants(self.tcx);
+ edge_effects.apply(|trans, edge| {
+ let Some(value) = edge.value else {
+ return;
+ };
+
+ // MIR building adds discriminants to the `values` array in the same order as they
+ // are yielded by `AdtDef::discriminants`. We rely on this to match each
+ // discriminant in `values` to its corresponding variant in linear time.
+ let (variant, _) = discriminants
+ .find(|&(_, discr)| discr.val == value)
+ .expect("Order of `AdtDef::discriminants` differed from `SwitchInt::values`");
+
+ // Mark all move paths that correspond to variants other than this one as maybe
+ // uninitialized (in reality, they are *definitely* uninitialized).
+ drop_flag_effects::on_all_inactive_variants(
+ self.tcx,
+ self.body,
+ self.move_data(),
+ enum_place,
+ variant,
+ |mpi| trans.gen(mpi),
+ );
+ });
+ }
+}
+
+impl<'a, 'tcx> AnalysisDomain<'tcx> for DefinitelyInitializedPlaces<'a, 'tcx> {
+ /// Use set intersection as the join operator.
+ type Domain = lattice::Dual<BitSet<MovePathIndex>>;
+
+ const NAME: &'static str = "definite_init";
+
+ fn bottom_value(&self, _: &mir::Body<'tcx>) -> Self::Domain {
+ // bottom = initialized (start_block_effect counters this at outset)
+ lattice::Dual(BitSet::new_filled(self.move_data().move_paths.len()))
+ }
+
+ // sets on_entry bits for Arg places
+ fn initialize_start_block(&self, _: &mir::Body<'tcx>, state: &mut Self::Domain) {
+ state.0.clear();
+
+ drop_flag_effects_for_function_entry(self.tcx, self.body, self.mdpe, |path, s| {
+ assert!(s == DropFlagState::Present);
+ state.0.insert(path);
+ });
+ }
+}
+
+impl<'tcx> GenKillAnalysis<'tcx> for DefinitelyInitializedPlaces<'_, 'tcx> {
+ type Idx = MovePathIndex;
+
+ fn domain_size(&self, _: &Body<'tcx>) -> usize {
+ self.move_data().move_paths.len()
+ }
+
+ fn statement_effect(
+ &mut self,
+ trans: &mut impl GenKill<Self::Idx>,
+ _statement: &mir::Statement<'tcx>,
+ location: Location,
+ ) {
+ drop_flag_effects_for_location(self.tcx, self.body, self.mdpe, location, |path, s| {
+ Self::update_bits(trans, path, s)
+ })
+ }
+
+ fn terminator_effect<'mir>(
+ &mut self,
+ trans: &mut Self::Domain,
+ terminator: &'mir mir::Terminator<'tcx>,
+ location: Location,
+ ) -> TerminatorEdges<'mir, 'tcx> {
+ drop_flag_effects_for_location(self.tcx, self.body, self.mdpe, location, |path, s| {
+ Self::update_bits(trans, path, s)
+ });
+ terminator.edges()
+ }
+
+ fn call_return_effect(
+ &mut self,
+ trans: &mut impl GenKill<Self::Idx>,
+ _block: mir::BasicBlock,
+ return_places: CallReturnPlaces<'_, 'tcx>,
+ ) {
+ return_places.for_each(|place| {
+ // when a call returns successfully, that means we need to set
+ // the bits for that dest_place to 1 (initialized).
+ on_lookup_result_bits(
+ self.tcx,
+ self.body,
+ self.move_data(),
+ self.move_data().rev_lookup.find(place.as_ref()),
+ |mpi| {
+ trans.gen(mpi);
+ },
+ );
+ });
+ }
+}
+
+impl<'tcx> AnalysisDomain<'tcx> for EverInitializedPlaces<'_, 'tcx> {
+ type Domain = ChunkedBitSet<InitIndex>;
+
+ const NAME: &'static str = "ever_init";
+
+ fn bottom_value(&self, _: &mir::Body<'tcx>) -> Self::Domain {
+ // bottom = no initialized variables by default
+ ChunkedBitSet::new_empty(self.move_data().inits.len())
+ }
+
+ fn initialize_start_block(&self, body: &mir::Body<'tcx>, state: &mut Self::Domain) {
+ for arg_init in 0..body.arg_count {
+ state.insert(InitIndex::new(arg_init));
+ }
+ }
+}
+
+impl<'tcx> GenKillAnalysis<'tcx> for EverInitializedPlaces<'_, 'tcx> {
+ type Idx = InitIndex;
+
+ fn domain_size(&self, _: &Body<'tcx>) -> usize {
+ self.move_data().inits.len()
+ }
+
+ #[instrument(skip(self, trans), level = "debug")]
+ fn statement_effect(
+ &mut self,
+ trans: &mut impl GenKill<Self::Idx>,
+ stmt: &mir::Statement<'tcx>,
+ location: Location,
+ ) {
+ let move_data = self.move_data();
+ let init_path_map = &move_data.init_path_map;
+ let init_loc_map = &move_data.init_loc_map;
+ let rev_lookup = &move_data.rev_lookup;
+
+ debug!("initializes move_indexes {:?}", &init_loc_map[location]);
+ trans.gen_all(init_loc_map[location].iter().copied());
+
+ if let mir::StatementKind::StorageDead(local) = stmt.kind {
+ // End inits for StorageDead, so that an immutable variable can
+ // be reinitialized on the next iteration of the loop.
+ let move_path_index = rev_lookup.find_local(local);
+ debug!("clears the ever initialized status of {:?}", init_path_map[move_path_index]);
+ trans.kill_all(init_path_map[move_path_index].iter().copied());
+ }
+ }
+
+ #[instrument(skip(self, trans, terminator), level = "debug")]
+ fn terminator_effect<'mir>(
+ &mut self,
+ trans: &mut Self::Domain,
+ terminator: &'mir mir::Terminator<'tcx>,
+ location: Location,
+ ) -> TerminatorEdges<'mir, 'tcx> {
+ let (body, move_data) = (self.body, self.move_data());
+ let term = body[location.block].terminator();
+ let init_loc_map = &move_data.init_loc_map;
+ debug!(?term);
+ debug!("initializes move_indexes {:?}", init_loc_map[location]);
+ trans.gen_all(
+ init_loc_map[location]
+ .iter()
+ .filter(|init_index| {
+ move_data.inits[**init_index].kind != InitKind::NonPanicPathOnly
+ })
+ .copied(),
+ );
+ terminator.edges()
+ }
+
+ fn call_return_effect(
+ &mut self,
+ trans: &mut impl GenKill<Self::Idx>,
+ block: mir::BasicBlock,
+ _return_places: CallReturnPlaces<'_, 'tcx>,
+ ) {
+ let move_data = self.move_data();
+ let init_loc_map = &move_data.init_loc_map;
+
+ let call_loc = self.body.terminator_loc(block);
+ for init_index in &init_loc_map[call_loc] {
+ trans.gen(*init_index);
+ }
+ }
+}
+
+/// Inspect a `SwitchInt`-terminated basic block to see if the condition of that `SwitchInt` is
+/// an enum discriminant.
+///
+/// We expect such blocks to have a call to `discriminant` as their last statement like so:
+///
+/// ```text
+/// ...
+/// _42 = discriminant(_1)
+/// SwitchInt(_42, ..)
+/// ```
+///
+/// If the basic block matches this pattern, this function returns the place corresponding to the
+/// enum (`_1` in the example above) as well as the `AdtDef` of that enum.
+fn switch_on_enum_discriminant<'mir, 'tcx>(
+ tcx: TyCtxt<'tcx>,
+ body: &'mir mir::Body<'tcx>,
+ block: &'mir mir::BasicBlockData<'tcx>,
+ switch_on: mir::Place<'tcx>,
+) -> Option<(mir::Place<'tcx>, ty::AdtDef<'tcx>)> {
+ for statement in block.statements.iter().rev() {
+ match &statement.kind {
+ mir::StatementKind::Assign(box (lhs, mir::Rvalue::Discriminant(discriminated)))
+ if *lhs == switch_on =>
+ {
+ match discriminated.ty(body, tcx).ty.kind() {
+ ty::Adt(def, _) => return Some((*discriminated, *def)),
+
+ // `Rvalue::Discriminant` is also used to get the active yield point for a
+ // generator, but we do not need edge-specific effects in that case. This may
+ // change in the future.
+ ty::Generator(..) => return None,
+
+ t => bug!("`discriminant` called on unexpected type {:?}", t),
+ }
+ }
+ mir::StatementKind::Coverage(_) => continue,
+ _ => return None,
+ }
+ }
+ None
+}
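
The most consequential change in the new `initialized.rs` is that `MaybeInitializedPlaces` now uses `MaybeReachable<ChunkedBitSet<MovePathIndex>>` as its domain (bottom is `Unreachable`) and can optionally prune drop-unwind edges via `skipping_unreachable_unwind` / `is_unwind_dead`. The sketch below shows how a caller might drive the analysis. It assumes the crate's usual engine and cursor API (`into_engine`, `iterate_to_fixpoint`, `into_results_cursor`, `seek_before_primary_effect`) and the re-export paths shown in the `mod.rs` diff further down; the helper function itself is invented for illustration.

```rust
// Sketch under the assumptions stated above; not taken verbatim from the compiler.
use rustc_middle::mir::{Body, Location};
use rustc_middle::ty::TyCtxt;
use rustc_mir_dataflow::impls::MaybeInitializedPlaces;
use rustc_mir_dataflow::{Analysis, MaybeReachable, MoveDataParamEnv};

/// Returns true if any move path might still be initialized just before `loc`.
fn anything_maybe_init_at<'tcx>(
    tcx: TyCtxt<'tcx>,
    body: &Body<'tcx>,
    mdpe: &MoveDataParamEnv<'tcx>,
    loc: Location,
) -> bool {
    let mut cursor = MaybeInitializedPlaces::new(tcx, body, mdpe)
        // New in this version: treat the unwind edge of a `Drop` as dead when
        // the dropped place cannot be initialized there (see `is_unwind_dead`).
        .skipping_unreachable_unwind()
        .into_engine(tcx, body)
        .iterate_to_fixpoint()
        .into_results_cursor(body);

    cursor.seek_before_primary_effect(loc);

    // The domain is now `MaybeReachable<ChunkedBitSet<MovePathIndex>>`, so an
    // unreachable program point is reported explicitly rather than as an empty set.
    match cursor.get() {
        MaybeReachable::Unreachable => false,
        MaybeReachable::Reachable(places) => !places.is_empty(),
    }
}
```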
diff --git a/compiler/rustc_mir_dataflow/src/impls/liveness.rs b/compiler/rustc_mir_dataflow/src/impls/liveness.rs
index 9662c1977..5aa73c7a9 100644
--- a/compiler/rustc_mir_dataflow/src/impls/liveness.rs
+++ b/compiler/rustc_mir_dataflow/src/impls/liveness.rs
@@ -1,8 +1,10 @@
use rustc_index::bit_set::{BitSet, ChunkedBitSet};
use rustc_middle::mir::visit::{MutatingUseContext, NonMutatingUseContext, PlaceContext, Visitor};
-use rustc_middle::mir::{self, Local, Location, Place, StatementKind};
+use rustc_middle::mir::{
+ self, CallReturnPlaces, Local, Location, Place, StatementKind, TerminatorEdges,
+};
-use crate::{Analysis, AnalysisDomain, Backward, CallReturnPlaces, GenKill, GenKillAnalysis};
+use crate::{Analysis, AnalysisDomain, Backward, GenKill, GenKillAnalysis};
/// A [live-variable dataflow analysis][liveness].
///
@@ -43,6 +45,10 @@ impl<'tcx> AnalysisDomain<'tcx> for MaybeLiveLocals {
impl<'tcx> GenKillAnalysis<'tcx> for MaybeLiveLocals {
type Idx = Local;
+ fn domain_size(&self, body: &mir::Body<'tcx>) -> usize {
+ body.local_decls.len()
+ }
+
fn statement_effect(
&mut self,
trans: &mut impl GenKill<Self::Idx>,
@@ -52,13 +58,14 @@ impl<'tcx> GenKillAnalysis<'tcx> for MaybeLiveLocals {
TransferFunction(trans).visit_statement(statement, location);
}
- fn terminator_effect(
+ fn terminator_effect<'mir>(
&mut self,
- trans: &mut impl GenKill<Self::Idx>,
- terminator: &mir::Terminator<'tcx>,
+ trans: &mut Self::Domain,
+ terminator: &'mir mir::Terminator<'tcx>,
location: Location,
- ) {
+ ) -> TerminatorEdges<'mir, 'tcx> {
TransferFunction(trans).visit_terminator(terminator, location);
+ terminator.edges()
}
fn call_return_effect(
@@ -67,28 +74,23 @@ impl<'tcx> GenKillAnalysis<'tcx> for MaybeLiveLocals {
_block: mir::BasicBlock,
return_places: CallReturnPlaces<'_, 'tcx>,
) {
- return_places.for_each(|place| {
- if let Some(local) = place.as_local() {
- trans.kill(local);
- }
- });
- }
-
- fn yield_resume_effect(
- &mut self,
- trans: &mut impl GenKill<Self::Idx>,
- _resume_block: mir::BasicBlock,
- resume_place: mir::Place<'tcx>,
- ) {
- YieldResumeEffect(trans).visit_place(
- &resume_place,
- PlaceContext::MutatingUse(MutatingUseContext::Yield),
- Location::START,
- )
+ if let CallReturnPlaces::Yield(resume_place) = return_places {
+ YieldResumeEffect(trans).visit_place(
+ &resume_place,
+ PlaceContext::MutatingUse(MutatingUseContext::Yield),
+ Location::START,
+ )
+ } else {
+ return_places.for_each(|place| {
+ if let Some(local) = place.as_local() {
+ trans.kill(local);
+ }
+ });
+ }
}
}
-struct TransferFunction<'a, T>(&'a mut T);
+pub struct TransferFunction<'a, T>(pub &'a mut T);
impl<'tcx, T> Visitor<'tcx> for TransferFunction<'_, T>
where
@@ -97,7 +99,7 @@ where
fn visit_place(&mut self, place: &mir::Place<'tcx>, context: PlaceContext, location: Location) {
if let PlaceContext::MutatingUse(MutatingUseContext::Yield) = context {
// The resume place is evaluated and assigned to only after generator resumes, so its
- // effect is handled separately in `yield_resume_effect`.
+ // effect is handled separately in `call_resume_effect`.
return;
}
@@ -283,13 +285,14 @@ impl<'a, 'tcx> Analysis<'tcx> for MaybeTransitiveLiveLocals<'a> {
TransferFunction(trans).visit_statement(statement, location);
}
- fn apply_terminator_effect(
+ fn apply_terminator_effect<'mir>(
&mut self,
trans: &mut Self::Domain,
- terminator: &mir::Terminator<'tcx>,
+ terminator: &'mir mir::Terminator<'tcx>,
location: Location,
- ) {
+ ) -> TerminatorEdges<'mir, 'tcx> {
TransferFunction(trans).visit_terminator(terminator, location);
+ terminator.edges()
}
fn apply_call_return_effect(
@@ -298,23 +301,18 @@ impl<'a, 'tcx> Analysis<'tcx> for MaybeTransitiveLiveLocals<'a> {
_block: mir::BasicBlock,
return_places: CallReturnPlaces<'_, 'tcx>,
) {
- return_places.for_each(|place| {
- if let Some(local) = place.as_local() {
- trans.remove(local);
- }
- });
- }
-
- fn apply_yield_resume_effect(
- &mut self,
- trans: &mut Self::Domain,
- _resume_block: mir::BasicBlock,
- resume_place: mir::Place<'tcx>,
- ) {
- YieldResumeEffect(trans).visit_place(
- &resume_place,
- PlaceContext::MutatingUse(MutatingUseContext::Yield),
- Location::START,
- )
+ if let CallReturnPlaces::Yield(resume_place) = return_places {
+ YieldResumeEffect(trans).visit_place(
+ &resume_place,
+ PlaceContext::MutatingUse(MutatingUseContext::Yield),
+ Location::START,
+ )
+ } else {
+ return_places.for_each(|place| {
+ if let Some(local) = place.as_local() {
+ trans.remove(local);
+ }
+ });
+ }
}
}
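
The `liveness.rs` diff also shows the removal of the dedicated `yield_resume_effect` / `apply_yield_resume_effect` hooks: generator resume is now reported through the same callback as call returns, via the `CallReturnPlaces::Yield` variant. The sketch below mirrors the `if let ... else for_each` shape used above; the helper name is invented, and the gen/kill choice (a forward analysis that marks written locals, roughly what `MaybeRequiresStorage`'s old `yield_resume_effect` did) is only for illustration.

```rust
use rustc_middle::mir::{BasicBlock, CallReturnPlaces, Local};
use rustc_mir_dataflow::GenKill;

// One callback now covers ordinary call returns, inline-asm outputs, and
// generator resumes. Analyses that treat them uniformly can just use
// `for_each`; analyses that care (like `MaybeLiveLocals` above) match on
// `CallReturnPlaces::Yield` first.
fn mark_written_locals<'tcx>(
    trans: &mut impl GenKill<Local>,
    _block: BasicBlock,
    return_places: CallReturnPlaces<'_, 'tcx>,
) {
    if let CallReturnPlaces::Yield(resume_place) = return_places {
        // Generator resume: the resume argument is written into the resume place.
        trans.gen(resume_place.local);
    } else {
        return_places.for_each(|place| {
            if let Some(local) = place.as_local() {
                trans.gen(local);
            }
        });
    }
}
```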
diff --git a/compiler/rustc_mir_dataflow/src/impls/mod.rs b/compiler/rustc_mir_dataflow/src/impls/mod.rs
index 98cec1c67..f8db18fc1 100644
--- a/compiler/rustc_mir_dataflow/src/impls/mod.rs
+++ b/compiler/rustc_mir_dataflow/src/impls/mod.rs
@@ -2,763 +2,18 @@
//! bitvectors attached to each basic block, represented via a
//! zero-sized structure.
-use rustc_index::bit_set::{BitSet, ChunkedBitSet};
-use rustc_index::Idx;
-use rustc_middle::mir::visit::{MirVisitable, Visitor};
-use rustc_middle::mir::{self, Body, Location};
-use rustc_middle::ty::{self, TyCtxt};
-
-use crate::drop_flag_effects_for_function_entry;
-use crate::drop_flag_effects_for_location;
-use crate::elaborate_drops::DropFlagState;
-use crate::framework::{CallReturnPlaces, SwitchIntEdgeEffects};
-use crate::move_paths::{HasMoveData, InitIndex, InitKind, LookupResult, MoveData, MovePathIndex};
-use crate::on_lookup_result_bits;
-use crate::MoveDataParamEnv;
-use crate::{drop_flag_effects, on_all_children_bits};
-use crate::{lattice, AnalysisDomain, GenKill, GenKillAnalysis};
-
mod borrowed_locals;
+mod initialized;
mod liveness;
mod storage_liveness;
pub use self::borrowed_locals::borrowed_locals;
pub use self::borrowed_locals::MaybeBorrowedLocals;
+pub use self::initialized::{
+ DefinitelyInitializedPlaces, EverInitializedPlaces, MaybeInitializedPlaces,
+ MaybeUninitializedPlaces,
+};
pub use self::liveness::MaybeLiveLocals;
pub use self::liveness::MaybeTransitiveLiveLocals;
+pub use self::liveness::TransferFunction as LivenessTransferFunction;
pub use self::storage_liveness::{MaybeRequiresStorage, MaybeStorageDead, MaybeStorageLive};
-
-/// `MaybeInitializedPlaces` tracks all places that might be
-/// initialized upon reaching a particular point in the control flow
-/// for a function.
-///
-/// For example, in code like the following, we have corresponding
-/// dataflow information shown in the right-hand comments.
-///
-/// ```rust
-/// struct S;
-/// fn foo(pred: bool) { // maybe-init:
-/// // {}
-/// let a = S; let mut b = S; let c; let d; // {a, b}
-///
-/// if pred {
-/// drop(a); // { b}
-/// b = S; // { b}
-///
-/// } else {
-/// drop(b); // {a}
-/// d = S; // {a, d}
-///
-/// } // {a, b, d}
-///
-/// c = S; // {a, b, c, d}
-/// }
-/// ```
-///
-/// To determine whether a place *must* be initialized at a
-/// particular control-flow point, one can take the set-difference
-/// between this data and the data from `MaybeUninitializedPlaces` at the
-/// corresponding control-flow point.
-///
-/// Similarly, at a given `drop` statement, the set-intersection
-/// between this data and `MaybeUninitializedPlaces` yields the set of
-/// places that would require a dynamic drop-flag at that statement.
-pub struct MaybeInitializedPlaces<'a, 'tcx> {
- tcx: TyCtxt<'tcx>,
- body: &'a Body<'tcx>,
- mdpe: &'a MoveDataParamEnv<'tcx>,
-}
-
-impl<'a, 'tcx> MaybeInitializedPlaces<'a, 'tcx> {
- pub fn new(tcx: TyCtxt<'tcx>, body: &'a Body<'tcx>, mdpe: &'a MoveDataParamEnv<'tcx>) -> Self {
- MaybeInitializedPlaces { tcx, body, mdpe }
- }
-}
-
-impl<'a, 'tcx> HasMoveData<'tcx> for MaybeInitializedPlaces<'a, 'tcx> {
- fn move_data(&self) -> &MoveData<'tcx> {
- &self.mdpe.move_data
- }
-}
-
-/// `MaybeUninitializedPlaces` tracks all places that might be
-/// uninitialized upon reaching a particular point in the control flow
-/// for a function.
-///
-/// For example, in code like the following, we have corresponding
-/// dataflow information shown in the right-hand comments.
-///
-/// ```rust
-/// struct S;
-/// fn foo(pred: bool) { // maybe-uninit:
-/// // {a, b, c, d}
-/// let a = S; let mut b = S; let c; let d; // { c, d}
-///
-/// if pred {
-/// drop(a); // {a, c, d}
-/// b = S; // {a, c, d}
-///
-/// } else {
-/// drop(b); // { b, c, d}
-/// d = S; // { b, c }
-///
-/// } // {a, b, c, d}
-///
-/// c = S; // {a, b, d}
-/// }
-/// ```
-///
-/// To determine whether a place *must* be uninitialized at a
-/// particular control-flow point, one can take the set-difference
-/// between this data and the data from `MaybeInitializedPlaces` at the
-/// corresponding control-flow point.
-///
-/// Similarly, at a given `drop` statement, the set-intersection
-/// between this data and `MaybeInitializedPlaces` yields the set of
-/// places that would require a dynamic drop-flag at that statement.
-pub struct MaybeUninitializedPlaces<'a, 'tcx> {
- tcx: TyCtxt<'tcx>,
- body: &'a Body<'tcx>,
- mdpe: &'a MoveDataParamEnv<'tcx>,
-
- mark_inactive_variants_as_uninit: bool,
-}
-
-impl<'a, 'tcx> MaybeUninitializedPlaces<'a, 'tcx> {
- pub fn new(tcx: TyCtxt<'tcx>, body: &'a Body<'tcx>, mdpe: &'a MoveDataParamEnv<'tcx>) -> Self {
- MaybeUninitializedPlaces { tcx, body, mdpe, mark_inactive_variants_as_uninit: false }
- }
-
- /// Causes inactive enum variants to be marked as "maybe uninitialized" after a switch on an
- /// enum discriminant.
- ///
- /// This is correct in a vacuum but is not the default because it causes problems in the borrow
- /// checker, where this information gets propagated along `FakeEdge`s.
- pub fn mark_inactive_variants_as_uninit(mut self) -> Self {
- self.mark_inactive_variants_as_uninit = true;
- self
- }
-}
-
-impl<'a, 'tcx> HasMoveData<'tcx> for MaybeUninitializedPlaces<'a, 'tcx> {
- fn move_data(&self) -> &MoveData<'tcx> {
- &self.mdpe.move_data
- }
-}
-
-/// `DefinitelyInitializedPlaces` tracks all places that are definitely
-/// initialized upon reaching a particular point in the control flow
-/// for a function.
-///
-/// For example, in code like the following, we have corresponding
-/// dataflow information shown in the right-hand comments.
-///
-/// ```rust
-/// struct S;
-/// fn foo(pred: bool) { // definite-init:
-/// // { }
-/// let a = S; let mut b = S; let c; let d; // {a, b }
-///
-/// if pred {
-/// drop(a); // { b, }
-/// b = S; // { b, }
-///
-/// } else {
-/// drop(b); // {a, }
-/// d = S; // {a, d}
-///
-/// } // { }
-///
-/// c = S; // { c }
-/// }
-/// ```
-///
-/// To determine whether a place *may* be uninitialized at a
-/// particular control-flow point, one can take the set-complement
-/// of this data.
-///
-/// Similarly, at a given `drop` statement, the set-difference between
-/// this data and `MaybeInitializedPlaces` yields the set of places
-/// that would require a dynamic drop-flag at that statement.
-pub struct DefinitelyInitializedPlaces<'a, 'tcx> {
- tcx: TyCtxt<'tcx>,
- body: &'a Body<'tcx>,
- mdpe: &'a MoveDataParamEnv<'tcx>,
-}
-
-impl<'a, 'tcx> DefinitelyInitializedPlaces<'a, 'tcx> {
- pub fn new(tcx: TyCtxt<'tcx>, body: &'a Body<'tcx>, mdpe: &'a MoveDataParamEnv<'tcx>) -> Self {
- DefinitelyInitializedPlaces { tcx, body, mdpe }
- }
-}
-
-impl<'a, 'tcx> HasMoveData<'tcx> for DefinitelyInitializedPlaces<'a, 'tcx> {
- fn move_data(&self) -> &MoveData<'tcx> {
- &self.mdpe.move_data
- }
-}
-
-/// `EverInitializedPlaces` tracks all places that might have ever been
-/// initialized upon reaching a particular point in the control flow
-/// for a function, without an intervening `StorageDead`.
-///
-/// This dataflow is used to determine if an immutable local variable may
-/// be assigned to.
-///
-/// For example, in code like the following, we have corresponding
-/// dataflow information shown in the right-hand comments.
-///
-/// ```rust
-/// struct S;
-/// fn foo(pred: bool) { // ever-init:
-/// // { }
-/// let a = S; let mut b = S; let c; let d; // {a, b }
-///
-/// if pred {
-/// drop(a); // {a, b, }
-/// b = S; // {a, b, }
-///
-/// } else {
-/// drop(b); // {a, b, }
-/// d = S; // {a, b, d }
-///
-/// } // {a, b, d }
-///
-/// c = S; // {a, b, c, d }
-/// }
-/// ```
-pub struct EverInitializedPlaces<'a, 'tcx> {
- #[allow(dead_code)]
- tcx: TyCtxt<'tcx>,
- body: &'a Body<'tcx>,
- mdpe: &'a MoveDataParamEnv<'tcx>,
-}
-
-impl<'a, 'tcx> EverInitializedPlaces<'a, 'tcx> {
- pub fn new(tcx: TyCtxt<'tcx>, body: &'a Body<'tcx>, mdpe: &'a MoveDataParamEnv<'tcx>) -> Self {
- EverInitializedPlaces { tcx, body, mdpe }
- }
-}
-
-impl<'a, 'tcx> HasMoveData<'tcx> for EverInitializedPlaces<'a, 'tcx> {
- fn move_data(&self) -> &MoveData<'tcx> {
- &self.mdpe.move_data
- }
-}
-
-impl<'a, 'tcx> MaybeInitializedPlaces<'a, 'tcx> {
- fn update_bits(
- trans: &mut impl GenKill<MovePathIndex>,
- path: MovePathIndex,
- state: DropFlagState,
- ) {
- match state {
- DropFlagState::Absent => trans.kill(path),
- DropFlagState::Present => trans.gen(path),
- }
- }
-}
-
-impl<'a, 'tcx> MaybeUninitializedPlaces<'a, 'tcx> {
- fn update_bits(
- trans: &mut impl GenKill<MovePathIndex>,
- path: MovePathIndex,
- state: DropFlagState,
- ) {
- match state {
- DropFlagState::Absent => trans.gen(path),
- DropFlagState::Present => trans.kill(path),
- }
- }
-}
-
-impl<'a, 'tcx> DefinitelyInitializedPlaces<'a, 'tcx> {
- fn update_bits(
- trans: &mut impl GenKill<MovePathIndex>,
- path: MovePathIndex,
- state: DropFlagState,
- ) {
- match state {
- DropFlagState::Absent => trans.kill(path),
- DropFlagState::Present => trans.gen(path),
- }
- }
-}
-
-impl<'tcx> AnalysisDomain<'tcx> for MaybeInitializedPlaces<'_, 'tcx> {
- type Domain = ChunkedBitSet<MovePathIndex>;
- const NAME: &'static str = "maybe_init";
-
- fn bottom_value(&self, _: &mir::Body<'tcx>) -> Self::Domain {
- // bottom = uninitialized
- ChunkedBitSet::new_empty(self.move_data().move_paths.len())
- }
-
- fn initialize_start_block(&self, _: &mir::Body<'tcx>, state: &mut Self::Domain) {
- drop_flag_effects_for_function_entry(self.tcx, self.body, self.mdpe, |path, s| {
- assert!(s == DropFlagState::Present);
- state.insert(path);
- });
- }
-}
-
-impl<'tcx> GenKillAnalysis<'tcx> for MaybeInitializedPlaces<'_, 'tcx> {
- type Idx = MovePathIndex;
-
- fn statement_effect(
- &mut self,
- trans: &mut impl GenKill<Self::Idx>,
- statement: &mir::Statement<'tcx>,
- location: Location,
- ) {
- drop_flag_effects_for_location(self.tcx, self.body, self.mdpe, location, |path, s| {
- Self::update_bits(trans, path, s)
- });
-
- if !self.tcx.sess.opts.unstable_opts.precise_enum_drop_elaboration {
- return;
- }
-
- // Mark all places as "maybe init" if they are mutably borrowed. See #90752.
- for_each_mut_borrow(statement, location, |place| {
- let LookupResult::Exact(mpi) = self.move_data().rev_lookup.find(place.as_ref()) else { return };
- on_all_children_bits(self.tcx, self.body, self.move_data(), mpi, |child| {
- trans.gen(child);
- })
- })
- }
-
- fn terminator_effect(
- &mut self,
- trans: &mut impl GenKill<Self::Idx>,
- terminator: &mir::Terminator<'tcx>,
- location: Location,
- ) {
- drop_flag_effects_for_location(self.tcx, self.body, self.mdpe, location, |path, s| {
- Self::update_bits(trans, path, s)
- });
-
- if !self.tcx.sess.opts.unstable_opts.precise_enum_drop_elaboration {
- return;
- }
-
- for_each_mut_borrow(terminator, location, |place| {
- let LookupResult::Exact(mpi) = self.move_data().rev_lookup.find(place.as_ref()) else { return };
- on_all_children_bits(self.tcx, self.body, self.move_data(), mpi, |child| {
- trans.gen(child);
- })
- })
- }
-
- fn call_return_effect(
- &mut self,
- trans: &mut impl GenKill<Self::Idx>,
- _block: mir::BasicBlock,
- return_places: CallReturnPlaces<'_, 'tcx>,
- ) {
- return_places.for_each(|place| {
- // when a call returns successfully, that means we need to set
- // the bits for that dest_place to 1 (initialized).
- on_lookup_result_bits(
- self.tcx,
- self.body,
- self.move_data(),
- self.move_data().rev_lookup.find(place.as_ref()),
- |mpi| {
- trans.gen(mpi);
- },
- );
- });
- }
-
- fn switch_int_edge_effects<G: GenKill<Self::Idx>>(
- &mut self,
- block: mir::BasicBlock,
- discr: &mir::Operand<'tcx>,
- edge_effects: &mut impl SwitchIntEdgeEffects<G>,
- ) {
- if !self.tcx.sess.opts.unstable_opts.precise_enum_drop_elaboration {
- return;
- }
-
- let enum_ = discr.place().and_then(|discr| {
- switch_on_enum_discriminant(self.tcx, &self.body, &self.body[block], discr)
- });
-
- let Some((enum_place, enum_def)) = enum_ else {
- return;
- };
-
- let mut discriminants = enum_def.discriminants(self.tcx);
- edge_effects.apply(|trans, edge| {
- let Some(value) = edge.value else {
- return;
- };
-
- // MIR building adds discriminants to the `values` array in the same order as they
- // are yielded by `AdtDef::discriminants`. We rely on this to match each
- // discriminant in `values` to its corresponding variant in linear time.
- let (variant, _) = discriminants
- .find(|&(_, discr)| discr.val == value)
- .expect("Order of `AdtDef::discriminants` differed from `SwitchInt::values`");
-
- // Kill all move paths that correspond to variants we know to be inactive along this
- // particular outgoing edge of a `SwitchInt`.
- drop_flag_effects::on_all_inactive_variants(
- self.tcx,
- self.body,
- self.move_data(),
- enum_place,
- variant,
- |mpi| trans.kill(mpi),
- );
- });
- }
-}
-
-impl<'tcx> AnalysisDomain<'tcx> for MaybeUninitializedPlaces<'_, 'tcx> {
- type Domain = ChunkedBitSet<MovePathIndex>;
-
- const NAME: &'static str = "maybe_uninit";
-
- fn bottom_value(&self, _: &mir::Body<'tcx>) -> Self::Domain {
- // bottom = initialized (start_block_effect counters this at outset)
- ChunkedBitSet::new_empty(self.move_data().move_paths.len())
- }
-
- // sets on_entry bits for Arg places
- fn initialize_start_block(&self, _: &mir::Body<'tcx>, state: &mut Self::Domain) {
- // set all bits to 1 (uninit) before gathering counter-evidence
- state.insert_all();
-
- drop_flag_effects_for_function_entry(self.tcx, self.body, self.mdpe, |path, s| {
- assert!(s == DropFlagState::Present);
- state.remove(path);
- });
- }
-}
-
-impl<'tcx> GenKillAnalysis<'tcx> for MaybeUninitializedPlaces<'_, 'tcx> {
- type Idx = MovePathIndex;
-
- fn statement_effect(
- &mut self,
- trans: &mut impl GenKill<Self::Idx>,
- _statement: &mir::Statement<'tcx>,
- location: Location,
- ) {
- drop_flag_effects_for_location(self.tcx, self.body, self.mdpe, location, |path, s| {
- Self::update_bits(trans, path, s)
- });
-
- // Unlike in `MaybeInitializedPlaces` above, we don't need to change the state when a
- // mutable borrow occurs. Places cannot become uninitialized through a mutable reference.
- }
-
- fn terminator_effect(
- &mut self,
- trans: &mut impl GenKill<Self::Idx>,
- _terminator: &mir::Terminator<'tcx>,
- location: Location,
- ) {
- drop_flag_effects_for_location(self.tcx, self.body, self.mdpe, location, |path, s| {
- Self::update_bits(trans, path, s)
- });
- }
-
- fn call_return_effect(
- &mut self,
- trans: &mut impl GenKill<Self::Idx>,
- _block: mir::BasicBlock,
- return_places: CallReturnPlaces<'_, 'tcx>,
- ) {
- return_places.for_each(|place| {
- // when a call returns successfully, that means we need to set
- // the bits for that dest_place to 0 (initialized).
- on_lookup_result_bits(
- self.tcx,
- self.body,
- self.move_data(),
- self.move_data().rev_lookup.find(place.as_ref()),
- |mpi| {
- trans.kill(mpi);
- },
- );
- });
- }
-
- fn switch_int_edge_effects<G: GenKill<Self::Idx>>(
- &mut self,
- block: mir::BasicBlock,
- discr: &mir::Operand<'tcx>,
- edge_effects: &mut impl SwitchIntEdgeEffects<G>,
- ) {
- if !self.tcx.sess.opts.unstable_opts.precise_enum_drop_elaboration {
- return;
- }
-
- if !self.mark_inactive_variants_as_uninit {
- return;
- }
-
- let enum_ = discr.place().and_then(|discr| {
- switch_on_enum_discriminant(self.tcx, &self.body, &self.body[block], discr)
- });
-
- let Some((enum_place, enum_def)) = enum_ else {
- return;
- };
-
- let mut discriminants = enum_def.discriminants(self.tcx);
- edge_effects.apply(|trans, edge| {
- let Some(value) = edge.value else {
- return;
- };
-
- // MIR building adds discriminants to the `values` array in the same order as they
- // are yielded by `AdtDef::discriminants`. We rely on this to match each
- // discriminant in `values` to its corresponding variant in linear time.
- let (variant, _) = discriminants
- .find(|&(_, discr)| discr.val == value)
- .expect("Order of `AdtDef::discriminants` differed from `SwitchInt::values`");
-
- // Mark all move paths that correspond to variants other than this one as maybe
- // uninitialized (in reality, they are *definitely* uninitialized).
- drop_flag_effects::on_all_inactive_variants(
- self.tcx,
- self.body,
- self.move_data(),
- enum_place,
- variant,
- |mpi| trans.gen(mpi),
- );
- });
- }
-}
-
-impl<'a, 'tcx> AnalysisDomain<'tcx> for DefinitelyInitializedPlaces<'a, 'tcx> {
- /// Use set intersection as the join operator.
- type Domain = lattice::Dual<BitSet<MovePathIndex>>;
-
- const NAME: &'static str = "definite_init";
-
- fn bottom_value(&self, _: &mir::Body<'tcx>) -> Self::Domain {
- // bottom = initialized (start_block_effect counters this at outset)
- lattice::Dual(BitSet::new_filled(self.move_data().move_paths.len()))
- }
-
- // sets on_entry bits for Arg places
- fn initialize_start_block(&self, _: &mir::Body<'tcx>, state: &mut Self::Domain) {
- state.0.clear();
-
- drop_flag_effects_for_function_entry(self.tcx, self.body, self.mdpe, |path, s| {
- assert!(s == DropFlagState::Present);
- state.0.insert(path);
- });
- }
-}
-
-impl<'tcx> GenKillAnalysis<'tcx> for DefinitelyInitializedPlaces<'_, 'tcx> {
- type Idx = MovePathIndex;
-
- fn statement_effect(
- &mut self,
- trans: &mut impl GenKill<Self::Idx>,
- _statement: &mir::Statement<'tcx>,
- location: Location,
- ) {
- drop_flag_effects_for_location(self.tcx, self.body, self.mdpe, location, |path, s| {
- Self::update_bits(trans, path, s)
- })
- }
-
- fn terminator_effect(
- &mut self,
- trans: &mut impl GenKill<Self::Idx>,
- _terminator: &mir::Terminator<'tcx>,
- location: Location,
- ) {
- drop_flag_effects_for_location(self.tcx, self.body, self.mdpe, location, |path, s| {
- Self::update_bits(trans, path, s)
- })
- }
-
- fn call_return_effect(
- &mut self,
- trans: &mut impl GenKill<Self::Idx>,
- _block: mir::BasicBlock,
- return_places: CallReturnPlaces<'_, 'tcx>,
- ) {
- return_places.for_each(|place| {
- // when a call returns successfully, that means we need to set
- // the bits for that dest_place to 1 (initialized).
- on_lookup_result_bits(
- self.tcx,
- self.body,
- self.move_data(),
- self.move_data().rev_lookup.find(place.as_ref()),
- |mpi| {
- trans.gen(mpi);
- },
- );
- });
- }
-}
-
-impl<'tcx> AnalysisDomain<'tcx> for EverInitializedPlaces<'_, 'tcx> {
- type Domain = ChunkedBitSet<InitIndex>;
-
- const NAME: &'static str = "ever_init";
-
- fn bottom_value(&self, _: &mir::Body<'tcx>) -> Self::Domain {
- // bottom = no initialized variables by default
- ChunkedBitSet::new_empty(self.move_data().inits.len())
- }
-
- fn initialize_start_block(&self, body: &mir::Body<'tcx>, state: &mut Self::Domain) {
- for arg_init in 0..body.arg_count {
- state.insert(InitIndex::new(arg_init));
- }
- }
-}
-
-impl<'tcx> GenKillAnalysis<'tcx> for EverInitializedPlaces<'_, 'tcx> {
- type Idx = InitIndex;
-
- #[instrument(skip(self, trans), level = "debug")]
- fn statement_effect(
- &mut self,
- trans: &mut impl GenKill<Self::Idx>,
- stmt: &mir::Statement<'tcx>,
- location: Location,
- ) {
- let move_data = self.move_data();
- let init_path_map = &move_data.init_path_map;
- let init_loc_map = &move_data.init_loc_map;
- let rev_lookup = &move_data.rev_lookup;
-
- debug!("initializes move_indexes {:?}", &init_loc_map[location]);
- trans.gen_all(init_loc_map[location].iter().copied());
-
- if let mir::StatementKind::StorageDead(local) = stmt.kind {
- // End inits for StorageDead, so that an immutable variable can
- // be reinitialized on the next iteration of the loop.
- let move_path_index = rev_lookup.find_local(local);
- debug!("clears the ever initialized status of {:?}", init_path_map[move_path_index]);
- trans.kill_all(init_path_map[move_path_index].iter().copied());
- }
- }
-
- #[instrument(skip(self, trans, _terminator), level = "debug")]
- fn terminator_effect(
- &mut self,
- trans: &mut impl GenKill<Self::Idx>,
- _terminator: &mir::Terminator<'tcx>,
- location: Location,
- ) {
- let (body, move_data) = (self.body, self.move_data());
- let term = body[location.block].terminator();
- let init_loc_map = &move_data.init_loc_map;
- debug!(?term);
- debug!("initializes move_indexes {:?}", init_loc_map[location]);
- trans.gen_all(
- init_loc_map[location]
- .iter()
- .filter(|init_index| {
- move_data.inits[**init_index].kind != InitKind::NonPanicPathOnly
- })
- .copied(),
- );
- }
-
- fn call_return_effect(
- &mut self,
- trans: &mut impl GenKill<Self::Idx>,
- block: mir::BasicBlock,
- _return_places: CallReturnPlaces<'_, 'tcx>,
- ) {
- let move_data = self.move_data();
- let init_loc_map = &move_data.init_loc_map;
-
- let call_loc = self.body.terminator_loc(block);
- for init_index in &init_loc_map[call_loc] {
- trans.gen(*init_index);
- }
- }
-}
-
-/// Inspect a `SwitchInt`-terminated basic block to see if the condition of that `SwitchInt` is
-/// an enum discriminant.
-///
-/// We expect such blocks to have a call to `discriminant` as their last statement like so:
-///
-/// ```text
-/// ...
-/// _42 = discriminant(_1)
-/// SwitchInt(_42, ..)
-/// ```
-///
-/// If the basic block matches this pattern, this function returns the place corresponding to the
-/// enum (`_1` in the example above) as well as the `AdtDef` of that enum.
-fn switch_on_enum_discriminant<'mir, 'tcx>(
- tcx: TyCtxt<'tcx>,
- body: &'mir mir::Body<'tcx>,
- block: &'mir mir::BasicBlockData<'tcx>,
- switch_on: mir::Place<'tcx>,
-) -> Option<(mir::Place<'tcx>, ty::AdtDef<'tcx>)> {
- for statement in block.statements.iter().rev() {
- match &statement.kind {
- mir::StatementKind::Assign(box (lhs, mir::Rvalue::Discriminant(discriminated)))
- if *lhs == switch_on =>
- {
- match discriminated.ty(body, tcx).ty.kind() {
- ty::Adt(def, _) => return Some((*discriminated, *def)),
-
- // `Rvalue::Discriminant` is also used to get the active yield point for a
- // generator, but we do not need edge-specific effects in that case. This may
- // change in the future.
- ty::Generator(..) => return None,
-
- t => bug!("`discriminant` called on unexpected type {:?}", t),
- }
- }
- mir::StatementKind::Coverage(_) => continue,
- _ => return None,
- }
- }
- None
-}
-
-struct OnMutBorrow<F>(F);
-
-impl<F> Visitor<'_> for OnMutBorrow<F>
-where
- F: FnMut(&mir::Place<'_>),
-{
- fn visit_rvalue(&mut self, rvalue: &mir::Rvalue<'_>, location: Location) {
- // FIXME: Does `&raw const foo` allow mutation? See #90413.
- match rvalue {
- mir::Rvalue::Ref(_, mir::BorrowKind::Mut { .. }, place)
- | mir::Rvalue::AddressOf(_, place) => (self.0)(place),
-
- _ => {}
- }
-
- self.super_rvalue(rvalue, location)
- }
-}
-
-/// Calls `f` for each mutable borrow or raw reference in the program.
-///
-/// This DOES NOT call `f` for a shared borrow of a type with interior mutability. That's okay for
-/// initializedness, because we cannot move from an `UnsafeCell` (outside of `core::cell`), but
-/// other analyses will likely need to check for `!Freeze`.
-fn for_each_mut_borrow<'tcx>(
- mir: &impl MirVisitable<'tcx>,
- location: Location,
- f: impl FnMut(&mir::Place<'_>),
-) {
- let mut vis = OnMutBorrow(f);
-
- mir.apply(location, &mut vis);
-}
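
The net effect of the `mod.rs` diff is organizational: the initialized-places analyses move into the new `initialized.rs`, and `mod.rs` shrinks to module declarations plus re-exports (including the new `LivenessTransferFunction` alias). Downstream import paths are unchanged; an illustrative import list, based only on the `pub use` lines above, would be:

```rust
// The analyses are still reachable at their old paths under
// `rustc_mir_dataflow::impls`, even though their definitions moved files.
use rustc_mir_dataflow::impls::{
    DefinitelyInitializedPlaces, EverInitializedPlaces, MaybeInitializedPlaces,
    MaybeLiveLocals, MaybeUninitializedPlaces,
};
// Newly re-exported here: the liveness transfer function, usable outside the module.
use rustc_mir_dataflow::impls::LivenessTransferFunction;
```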
diff --git a/compiler/rustc_mir_dataflow/src/impls/storage_liveness.rs b/compiler/rustc_mir_dataflow/src/impls/storage_liveness.rs
index 666c8d50a..bea23b7f7 100644
--- a/compiler/rustc_mir_dataflow/src/impls/storage_liveness.rs
+++ b/compiler/rustc_mir_dataflow/src/impls/storage_liveness.rs
@@ -1,10 +1,12 @@
-pub use super::*;
-
-use crate::{CallReturnPlaces, GenKill, ResultsClonedCursor};
+use rustc_index::bit_set::BitSet;
use rustc_middle::mir::visit::{NonMutatingUseContext, PlaceContext, Visitor};
use rustc_middle::mir::*;
+
use std::borrow::Cow;
+use super::MaybeBorrowedLocals;
+use crate::{GenKill, ResultsClonedCursor};
+
#[derive(Clone)]
pub struct MaybeStorageLive<'a> {
always_live_locals: Cow<'a, BitSet<Local>>,
@@ -27,12 +29,12 @@ impl<'tcx, 'a> crate::AnalysisDomain<'tcx> for MaybeStorageLive<'a> {
const NAME: &'static str = "maybe_storage_live";
- fn bottom_value(&self, body: &mir::Body<'tcx>) -> Self::Domain {
+ fn bottom_value(&self, body: &Body<'tcx>) -> Self::Domain {
// bottom = dead
BitSet::new_empty(body.local_decls.len())
}
- fn initialize_start_block(&self, body: &mir::Body<'tcx>, on_entry: &mut Self::Domain) {
+ fn initialize_start_block(&self, body: &Body<'tcx>, on_entry: &mut Self::Domain) {
assert_eq!(body.local_decls.len(), self.always_live_locals.domain_size());
for local in self.always_live_locals.iter() {
on_entry.insert(local);
@@ -47,10 +49,14 @@ impl<'tcx, 'a> crate::AnalysisDomain<'tcx> for MaybeStorageLive<'a> {
impl<'tcx, 'a> crate::GenKillAnalysis<'tcx> for MaybeStorageLive<'a> {
type Idx = Local;
+ fn domain_size(&self, body: &Body<'tcx>) -> usize {
+ body.local_decls.len()
+ }
+
fn statement_effect(
&mut self,
trans: &mut impl GenKill<Self::Idx>,
- stmt: &mir::Statement<'tcx>,
+ stmt: &Statement<'tcx>,
_: Location,
) {
match stmt.kind {
@@ -60,13 +66,14 @@ impl<'tcx, 'a> crate::GenKillAnalysis<'tcx> for MaybeStorageLive<'a> {
}
}
- fn terminator_effect(
+ fn terminator_effect<'mir>(
&mut self,
- _trans: &mut impl GenKill<Self::Idx>,
- _: &mir::Terminator<'tcx>,
+ _trans: &mut Self::Domain,
+ terminator: &'mir Terminator<'tcx>,
_: Location,
- ) {
+ ) -> TerminatorEdges<'mir, 'tcx> {
// Terminators have no effect
+ terminator.edges()
}
fn call_return_effect(
@@ -95,12 +102,12 @@ impl<'tcx> crate::AnalysisDomain<'tcx> for MaybeStorageDead {
const NAME: &'static str = "maybe_storage_dead";
- fn bottom_value(&self, body: &mir::Body<'tcx>) -> Self::Domain {
+ fn bottom_value(&self, body: &Body<'tcx>) -> Self::Domain {
// bottom = live
BitSet::new_empty(body.local_decls.len())
}
- fn initialize_start_block(&self, body: &mir::Body<'tcx>, on_entry: &mut Self::Domain) {
+ fn initialize_start_block(&self, body: &Body<'tcx>, on_entry: &mut Self::Domain) {
assert_eq!(body.local_decls.len(), self.always_live_locals.domain_size());
// Do not iterate on return place and args, as they are trivially always live.
for local in body.vars_and_temps_iter() {
@@ -114,10 +121,14 @@ impl<'tcx> crate::AnalysisDomain<'tcx> for MaybeStorageDead {
impl<'tcx> crate::GenKillAnalysis<'tcx> for MaybeStorageDead {
type Idx = Local;
+ fn domain_size(&self, body: &Body<'tcx>) -> usize {
+ body.local_decls.len()
+ }
+
fn statement_effect(
&mut self,
trans: &mut impl GenKill<Self::Idx>,
- stmt: &mir::Statement<'tcx>,
+ stmt: &Statement<'tcx>,
_: Location,
) {
match stmt.kind {
@@ -127,13 +138,14 @@ impl<'tcx> crate::GenKillAnalysis<'tcx> for MaybeStorageDead {
}
}
- fn terminator_effect(
+ fn terminator_effect<'mir>(
&mut self,
- _trans: &mut impl GenKill<Self::Idx>,
- _: &mir::Terminator<'tcx>,
+ _: &mut Self::Domain,
+ terminator: &'mir Terminator<'tcx>,
_: Location,
- ) {
+ ) -> TerminatorEdges<'mir, 'tcx> {
// Terminators have no effect
+ terminator.edges()
}
fn call_return_effect(
@@ -172,12 +184,12 @@ impl<'tcx> crate::AnalysisDomain<'tcx> for MaybeRequiresStorage<'_, '_, 'tcx> {
const NAME: &'static str = "requires_storage";
- fn bottom_value(&self, body: &mir::Body<'tcx>) -> Self::Domain {
+ fn bottom_value(&self, body: &Body<'tcx>) -> Self::Domain {
// bottom = dead
BitSet::new_empty(body.local_decls.len())
}
- fn initialize_start_block(&self, body: &mir::Body<'tcx>, on_entry: &mut Self::Domain) {
+ fn initialize_start_block(&self, body: &Body<'tcx>, on_entry: &mut Self::Domain) {
// The resume argument is live on function entry (we don't care about
// the `self` argument)
for arg in body.args_iter().skip(1) {
@@ -189,10 +201,14 @@ impl<'tcx> crate::AnalysisDomain<'tcx> for MaybeRequiresStorage<'_, '_, 'tcx> {
impl<'tcx> crate::GenKillAnalysis<'tcx> for MaybeRequiresStorage<'_, '_, 'tcx> {
type Idx = Local;
+ fn domain_size(&self, body: &Body<'tcx>) -> usize {
+ body.local_decls.len()
+ }
+
fn before_statement_effect(
&mut self,
trans: &mut impl GenKill<Self::Idx>,
- stmt: &mir::Statement<'tcx>,
+ stmt: &Statement<'tcx>,
loc: Location,
) {
// If a place is borrowed in a statement, it needs storage for that statement.
@@ -225,7 +241,7 @@ impl<'tcx> crate::GenKillAnalysis<'tcx> for MaybeRequiresStorage<'_, '_, 'tcx> {
fn statement_effect(
&mut self,
trans: &mut impl GenKill<Self::Idx>,
- _: &mir::Statement<'tcx>,
+ _: &Statement<'tcx>,
loc: Location,
) {
// If we move from a place then it only stops needing storage *after*
@@ -236,11 +252,14 @@ impl<'tcx> crate::GenKillAnalysis<'tcx> for MaybeRequiresStorage<'_, '_, 'tcx> {
fn before_terminator_effect(
&mut self,
trans: &mut impl GenKill<Self::Idx>,
- terminator: &mir::Terminator<'tcx>,
+ terminator: &Terminator<'tcx>,
loc: Location,
) {
// If a place is borrowed in a terminator, it needs storage for that terminator.
- self.borrowed_locals.mut_analysis().terminator_effect(trans, terminator, loc);
+ self.borrowed_locals
+ .mut_analysis()
+ .transfer_function(trans)
+ .visit_terminator(terminator, loc);
match &terminator.kind {
TerminatorKind::Call { destination, .. } => {
@@ -286,12 +305,12 @@ impl<'tcx> crate::GenKillAnalysis<'tcx> for MaybeRequiresStorage<'_, '_, 'tcx> {
}
}
- fn terminator_effect(
+ fn terminator_effect<'t>(
&mut self,
- trans: &mut impl GenKill<Self::Idx>,
- terminator: &mir::Terminator<'tcx>,
+ trans: &mut Self::Domain,
+ terminator: &'t Terminator<'tcx>,
loc: Location,
- ) {
+ ) -> TerminatorEdges<'t, 'tcx> {
match terminator.kind {
// For call terminators the destination requires storage for the call
// and after the call returns successfully, but not after a panic.
@@ -323,6 +342,7 @@ impl<'tcx> crate::GenKillAnalysis<'tcx> for MaybeRequiresStorage<'_, '_, 'tcx> {
}
self.check_for_move(trans, loc);
+ terminator.edges()
}
fn call_return_effect(
@@ -333,15 +353,6 @@ impl<'tcx> crate::GenKillAnalysis<'tcx> for MaybeRequiresStorage<'_, '_, 'tcx> {
) {
return_places.for_each(|place| trans.gen(place.local));
}
-
- fn yield_resume_effect(
- &mut self,
- trans: &mut impl GenKill<Self::Idx>,
- _resume_block: BasicBlock,
- resume_place: mir::Place<'tcx>,
- ) {
- trans.gen(resume_place.local);
- }
}
impl<'tcx> MaybeRequiresStorage<'_, '_, 'tcx> {