Diffstat (limited to 'compiler/rustc_mir_dataflow/src/impls')
-rw-r--r--  compiler/rustc_mir_dataflow/src/impls/borrowed_locals.rs    162
-rw-r--r--  compiler/rustc_mir_dataflow/src/impls/init_locals.rs        122
-rw-r--r--  compiler/rustc_mir_dataflow/src/impls/liveness.rs           297
-rw-r--r--  compiler/rustc_mir_dataflow/src/impls/mod.rs                766
-rw-r--r--  compiler/rustc_mir_dataflow/src/impls/storage_liveness.rs   300
5 files changed, 1647 insertions(+), 0 deletions(-)
diff --git a/compiler/rustc_mir_dataflow/src/impls/borrowed_locals.rs b/compiler/rustc_mir_dataflow/src/impls/borrowed_locals.rs
new file mode 100644
index 000000000..0f8e86d1d
--- /dev/null
+++ b/compiler/rustc_mir_dataflow/src/impls/borrowed_locals.rs
@@ -0,0 +1,162 @@
+use super::*;
+
+use crate::{AnalysisDomain, CallReturnPlaces, GenKill, GenKillAnalysis};
+use rustc_middle::mir::visit::Visitor;
+use rustc_middle::mir::*;
+
+/// A dataflow analysis that tracks whether a pointer or reference could possibly exist that points
+/// to a given local.
+///
+/// At present, this is used as a very limited form of alias analysis. For example,
+/// `MaybeBorrowedLocals` is used to compute which locals are live during a yield expression for
+/// immovable generators.
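+///
+/// For example (an illustrative sketch), the right-hand comments show the
+/// maybe-borrowed set after each statement:
+///
+/// ```rust
+/// fn foo(mut x: i32, mut y: i32) {
+///     // {}
+///     let p = &mut x; // {x}
+///     *p = 1; // {x}
+///     let q = std::ptr::addr_of_mut!(y); // {x, y}
+///     // `x` and `y` remain in the set until their `StorageDead`.
+/// }
+/// ```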
+pub struct MaybeBorrowedLocals;
+
+impl MaybeBorrowedLocals {
+ fn transfer_function<'a, T>(&'a self, trans: &'a mut T) -> TransferFunction<'a, T> {
+ TransferFunction { trans }
+ }
+}
+
+impl<'tcx> AnalysisDomain<'tcx> for MaybeBorrowedLocals {
+ type Domain = BitSet<Local>;
+ const NAME: &'static str = "maybe_borrowed_locals";
+
+ fn bottom_value(&self, body: &mir::Body<'tcx>) -> Self::Domain {
+ // bottom = unborrowed
+ BitSet::new_empty(body.local_decls().len())
+ }
+
+ fn initialize_start_block(&self, _: &mir::Body<'tcx>, _: &mut Self::Domain) {
+ // No locals are aliased on function entry
+ }
+}
+
+impl<'tcx> GenKillAnalysis<'tcx> for MaybeBorrowedLocals {
+ type Idx = Local;
+
+ fn statement_effect(
+ &self,
+ trans: &mut impl GenKill<Self::Idx>,
+ statement: &mir::Statement<'tcx>,
+ location: Location,
+ ) {
+ self.transfer_function(trans).visit_statement(statement, location);
+ }
+
+ fn terminator_effect(
+ &self,
+ trans: &mut impl GenKill<Self::Idx>,
+ terminator: &mir::Terminator<'tcx>,
+ location: Location,
+ ) {
+ self.transfer_function(trans).visit_terminator(terminator, location);
+ }
+
+ fn call_return_effect(
+ &self,
+ _trans: &mut impl GenKill<Self::Idx>,
+ _block: mir::BasicBlock,
+ _return_places: CallReturnPlaces<'_, 'tcx>,
+ ) {
+ }
+}
+
+/// A `Visitor` that defines the transfer function for `MaybeBorrowedLocals`.
+struct TransferFunction<'a, T> {
+ trans: &'a mut T,
+}
+
+impl<'tcx, T> Visitor<'tcx> for TransferFunction<'_, T>
+where
+ T: GenKill<Local>,
+{
+ fn visit_statement(&mut self, stmt: &Statement<'tcx>, location: Location) {
+ self.super_statement(stmt, location);
+
+ // When we reach a `StorageDead` statement, we can assume that any pointers to this memory
+ // are now invalid.
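+        // For example, after `let p = &x;` the local `x` is in the set; once
+        // `StorageDead(x)` is reached, any remaining copy of `p` is dangling,
+        // so `x` can leave the maybe-borrowed set.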
+ if let StatementKind::StorageDead(local) = stmt.kind {
+ self.trans.kill(local);
+ }
+ }
+
+ fn visit_rvalue(&mut self, rvalue: &mir::Rvalue<'tcx>, location: Location) {
+ self.super_rvalue(rvalue, location);
+
+ match rvalue {
+ mir::Rvalue::AddressOf(_, borrowed_place) | mir::Rvalue::Ref(_, _, borrowed_place) => {
+ if !borrowed_place.is_indirect() {
+ self.trans.gen(borrowed_place.local);
+ }
+ }
+
+ mir::Rvalue::Cast(..)
+ | mir::Rvalue::ShallowInitBox(..)
+ | mir::Rvalue::Use(..)
+ | mir::Rvalue::ThreadLocalRef(..)
+ | mir::Rvalue::Repeat(..)
+ | mir::Rvalue::Len(..)
+ | mir::Rvalue::BinaryOp(..)
+ | mir::Rvalue::CheckedBinaryOp(..)
+ | mir::Rvalue::NullaryOp(..)
+ | mir::Rvalue::UnaryOp(..)
+ | mir::Rvalue::Discriminant(..)
+ | mir::Rvalue::Aggregate(..)
+ | mir::Rvalue::CopyForDeref(..) => {}
+ }
+ }
+
+ fn visit_terminator(&mut self, terminator: &mir::Terminator<'tcx>, location: Location) {
+ self.super_terminator(terminator, location);
+
+ match terminator.kind {
+ mir::TerminatorKind::Drop { place: dropped_place, .. }
+ | mir::TerminatorKind::DropAndReplace { place: dropped_place, .. } => {
+ // Drop terminators may call custom drop glue (`Drop::drop`), which takes `&mut
+ // self` as a parameter. In the general case, a drop impl could launder that
+ // reference into the surrounding environment through a raw pointer, thus creating
+ // a valid `*mut` pointing to the dropped local. We are not yet willing to declare
+ // this particular case UB, so we must treat all dropped locals as mutably borrowed
+ // for now. See discussion on [#61069].
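+                //
+                // A hypothetical illustration of such laundering (not taken
+                // from the linked discussion):
+                //
+                //     static mut SMUGGLED: *mut S = std::ptr::null_mut();
+                //     impl Drop for S {
+                //         fn drop(&mut self) {
+                //             unsafe { SMUGGLED = self; }
+                //         }
+                //     }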
+ //
+ // [#61069]: https://github.com/rust-lang/rust/pull/61069
+ self.trans.gen(dropped_place.local);
+ }
+
+ TerminatorKind::Abort
+ | TerminatorKind::Assert { .. }
+ | TerminatorKind::Call { .. }
+ | TerminatorKind::FalseEdge { .. }
+ | TerminatorKind::FalseUnwind { .. }
+ | TerminatorKind::GeneratorDrop
+ | TerminatorKind::Goto { .. }
+ | TerminatorKind::InlineAsm { .. }
+ | TerminatorKind::Resume
+ | TerminatorKind::Return
+ | TerminatorKind::SwitchInt { .. }
+ | TerminatorKind::Unreachable
+ | TerminatorKind::Yield { .. } => {}
+ }
+ }
+}
+
+/// The set of locals that are borrowed at some point in the MIR body.
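+///
+/// Unlike the dataflow analysis above, this is a flow-insensitive summary: borrow
+/// invalidation (`kill`) is ignored, so a local borrowed anywhere in the body appears in
+/// the returned set.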
+pub fn borrowed_locals(body: &Body<'_>) -> BitSet<Local> {
+ struct Borrowed(BitSet<Local>);
+
+ impl GenKill<Local> for Borrowed {
+ #[inline]
+ fn gen(&mut self, elem: Local) {
+ self.0.gen(elem)
+ }
+ #[inline]
+ fn kill(&mut self, _: Local) {
+ // Ignore borrow invalidation.
+ }
+ }
+
+ let mut borrowed = Borrowed(BitSet::new_empty(body.local_decls.len()));
+ TransferFunction { trans: &mut borrowed }.visit_body(body);
+ borrowed.0
+}
diff --git a/compiler/rustc_mir_dataflow/src/impls/init_locals.rs b/compiler/rustc_mir_dataflow/src/impls/init_locals.rs
new file mode 100644
index 000000000..83ce4c44b
--- /dev/null
+++ b/compiler/rustc_mir_dataflow/src/impls/init_locals.rs
@@ -0,0 +1,122 @@
+//! A less precise version of `MaybeInitializedPlaces` whose domain is entire locals.
+//!
+//! A local will be maybe initialized if *any* projections of that local might be initialized.
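+//!
+//! For example (illustrative), after `x.0 = 42;` the whole local `x` counts as
+//! maybe-initialized, even though only one of its fields has been written.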
+
+use crate::{CallReturnPlaces, GenKill};
+
+use rustc_index::bit_set::BitSet;
+use rustc_middle::mir::visit::{PlaceContext, Visitor};
+use rustc_middle::mir::{self, BasicBlock, Local, Location};
+
+pub struct MaybeInitializedLocals;
+
+impl<'tcx> crate::AnalysisDomain<'tcx> for MaybeInitializedLocals {
+ type Domain = BitSet<Local>;
+
+ const NAME: &'static str = "maybe_init_locals";
+
+ fn bottom_value(&self, body: &mir::Body<'tcx>) -> Self::Domain {
+ // bottom = uninit
+ BitSet::new_empty(body.local_decls.len())
+ }
+
+ fn initialize_start_block(&self, body: &mir::Body<'tcx>, entry_set: &mut Self::Domain) {
+ // Function arguments are initialized to begin with.
+ for arg in body.args_iter() {
+ entry_set.insert(arg);
+ }
+ }
+}
+
+impl<'tcx> crate::GenKillAnalysis<'tcx> for MaybeInitializedLocals {
+ type Idx = Local;
+
+ fn statement_effect(
+ &self,
+ trans: &mut impl GenKill<Self::Idx>,
+ statement: &mir::Statement<'tcx>,
+ loc: Location,
+ ) {
+ TransferFunction { trans }.visit_statement(statement, loc)
+ }
+
+ fn terminator_effect(
+ &self,
+ trans: &mut impl GenKill<Self::Idx>,
+ terminator: &mir::Terminator<'tcx>,
+ loc: Location,
+ ) {
+ TransferFunction { trans }.visit_terminator(terminator, loc)
+ }
+
+ fn call_return_effect(
+ &self,
+ trans: &mut impl GenKill<Self::Idx>,
+ _block: BasicBlock,
+ return_places: CallReturnPlaces<'_, 'tcx>,
+ ) {
+ return_places.for_each(|place| trans.gen(place.local));
+ }
+
+ /// See `Analysis::apply_yield_resume_effect`.
+ fn yield_resume_effect(
+ &self,
+ trans: &mut impl GenKill<Self::Idx>,
+ _resume_block: BasicBlock,
+ resume_place: mir::Place<'tcx>,
+ ) {
+ trans.gen(resume_place.local)
+ }
+}
+
+struct TransferFunction<'a, T> {
+ trans: &'a mut T,
+}
+
+impl<T> Visitor<'_> for TransferFunction<'_, T>
+where
+ T: GenKill<Local>,
+{
+ // FIXME: Using `visit_local` here is a bug. For example, on `move _5.field` we mark `_5` as
+ // deinitialized, although clearly it is only partially deinitialized. This analysis is not
+ // actually used anywhere at the moment, so this is not critical, but this does need to be fixed
+ // before it starts being used again.
+ fn visit_local(&mut self, local: Local, context: PlaceContext, _: Location) {
+ use rustc_middle::mir::visit::{MutatingUseContext, NonMutatingUseContext, NonUseContext};
+ match context {
+ // These are handled specially in `call_return_effect` and `yield_resume_effect`.
+ PlaceContext::MutatingUse(
+ MutatingUseContext::Call
+ | MutatingUseContext::AsmOutput
+ | MutatingUseContext::Yield,
+ ) => {}
+
+ // If it's deinitialized, it's no longer init
+ PlaceContext::MutatingUse(MutatingUseContext::Deinit) => self.trans.kill(local),
+
+ // Otherwise, when a place is mutated, we must consider it possibly initialized.
+ PlaceContext::MutatingUse(_) => self.trans.gen(local),
+
+ // If the local is moved out of, or if it gets marked `StorageDead`, consider it no
+ // longer initialized.
+ PlaceContext::NonUse(NonUseContext::StorageDead)
+ | PlaceContext::NonMutatingUse(NonMutatingUseContext::Move) => self.trans.kill(local),
+
+ // All other uses do not affect this analysis.
+ PlaceContext::NonUse(
+ NonUseContext::StorageLive
+ | NonUseContext::AscribeUserTy
+ | NonUseContext::VarDebugInfo,
+ )
+ | PlaceContext::NonMutatingUse(
+ NonMutatingUseContext::Inspect
+ | NonMutatingUseContext::Copy
+ | NonMutatingUseContext::SharedBorrow
+ | NonMutatingUseContext::ShallowBorrow
+ | NonMutatingUseContext::UniqueBorrow
+ | NonMutatingUseContext::AddressOf
+ | NonMutatingUseContext::Projection,
+ ) => {}
+ }
+ }
+}
diff --git a/compiler/rustc_mir_dataflow/src/impls/liveness.rs b/compiler/rustc_mir_dataflow/src/impls/liveness.rs
new file mode 100644
index 000000000..21132eb99
--- /dev/null
+++ b/compiler/rustc_mir_dataflow/src/impls/liveness.rs
@@ -0,0 +1,297 @@
+use rustc_index::bit_set::{BitSet, ChunkedBitSet};
+use rustc_middle::mir::visit::{MutatingUseContext, NonMutatingUseContext, PlaceContext, Visitor};
+use rustc_middle::mir::{self, Local, Location, Place, StatementKind};
+
+use crate::{Analysis, AnalysisDomain, Backward, CallReturnPlaces, GenKill, GenKillAnalysis};
+
+/// A [live-variable dataflow analysis][liveness].
+///
+/// This analysis considers references as being used only at the point of the
+/// borrow. In other words, it does not track uses that happen through references that
+/// already exist. See [this `mir-dataflow` test][flow-test] for an example. You almost
+/// never want to use this analysis without also looking at the results of
+/// [`MaybeBorrowedLocals`].
+///
+/// ## Field-(in)sensitivity
+///
+/// As the name suggests, this analysis is field insensitive. If a projection of a variable `x` is
+/// assigned to (e.g. `x.0 = 42`), it does not "define" `x` as far as liveness is concerned. In fact,
+/// such an assignment is currently marked as a "use" of `x` in an attempt to be maximally
+/// conservative.
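+///
+/// For example (an illustrative sketch), the right-hand comments show the set of
+/// maybe-live locals *before* each statement:
+///
+/// ```rust
+/// fn foo() -> i32 {
+///     let a = 1;     // {}    (the assignment kills `a`)
+///     let b = a + 1; // {a}   (use of `a`, def of `b`)
+///     b              // {b}
+/// }
+/// ```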
+///
+/// [`MaybeBorrowedLocals`]: super::MaybeBorrowedLocals
+/// [flow-test]: https://github.com/rust-lang/rust/blob/a08c47310c7d49cbdc5d7afb38408ba519967ecd/src/test/ui/mir-dataflow/liveness-ptr.rs
+/// [liveness]: https://en.wikipedia.org/wiki/Live_variable_analysis
+pub struct MaybeLiveLocals;
+
+impl MaybeLiveLocals {
+ fn transfer_function<'a, T>(&self, trans: &'a mut T) -> TransferFunction<'a, T> {
+ TransferFunction(trans)
+ }
+}
+
+impl<'tcx> AnalysisDomain<'tcx> for MaybeLiveLocals {
+ type Domain = ChunkedBitSet<Local>;
+ type Direction = Backward;
+
+ const NAME: &'static str = "liveness";
+
+ fn bottom_value(&self, body: &mir::Body<'tcx>) -> Self::Domain {
+ // bottom = not live
+ ChunkedBitSet::new_empty(body.local_decls.len())
+ }
+
+ fn initialize_start_block(&self, _: &mir::Body<'tcx>, _: &mut Self::Domain) {
+ // No variables are live until we observe a use
+ }
+}
+
+impl<'tcx> GenKillAnalysis<'tcx> for MaybeLiveLocals {
+ type Idx = Local;
+
+ fn statement_effect(
+ &self,
+ trans: &mut impl GenKill<Self::Idx>,
+ statement: &mir::Statement<'tcx>,
+ location: Location,
+ ) {
+ self.transfer_function(trans).visit_statement(statement, location);
+ }
+
+ fn terminator_effect(
+ &self,
+ trans: &mut impl GenKill<Self::Idx>,
+ terminator: &mir::Terminator<'tcx>,
+ location: Location,
+ ) {
+ self.transfer_function(trans).visit_terminator(terminator, location);
+ }
+
+ fn call_return_effect(
+ &self,
+ trans: &mut impl GenKill<Self::Idx>,
+ _block: mir::BasicBlock,
+ return_places: CallReturnPlaces<'_, 'tcx>,
+ ) {
+ return_places.for_each(|place| {
+ if let Some(local) = place.as_local() {
+ trans.kill(local);
+ }
+ });
+ }
+
+ fn yield_resume_effect(
+ &self,
+ trans: &mut impl GenKill<Self::Idx>,
+ _resume_block: mir::BasicBlock,
+ resume_place: mir::Place<'tcx>,
+ ) {
+ if let Some(local) = resume_place.as_local() {
+ trans.kill(local);
+ }
+ }
+}
+
+struct TransferFunction<'a, T>(&'a mut T);
+
+impl<'tcx, T> Visitor<'tcx> for TransferFunction<'_, T>
+where
+ T: GenKill<Local>,
+{
+ fn visit_place(&mut self, place: &mir::Place<'tcx>, context: PlaceContext, location: Location) {
+ let local = place.local;
+
+ // We purposefully do not call `super_place` here to avoid calling `visit_local` for this
+ // place with one of the `Projection` variants of `PlaceContext`.
+ self.visit_projection(place.as_ref(), context, location);
+
+ match DefUse::for_place(*place, context) {
+ Some(DefUse::Def) => self.0.kill(local),
+ Some(DefUse::Use) => self.0.gen(local),
+ None => {}
+ }
+ }
+
+ fn visit_local(&mut self, local: Local, context: PlaceContext, _: Location) {
+ // Because we do not call `super_place` above, `visit_local` is only called for locals that
+ // do not appear as part of a `Place` in the MIR. This handles cases like the implicit use
+ // of the return place in a `Return` terminator or the index in an `Index` projection.
+ match DefUse::for_place(local.into(), context) {
+ Some(DefUse::Def) => self.0.kill(local),
+ Some(DefUse::Use) => self.0.gen(local),
+ None => {}
+ }
+ }
+}
+
+#[derive(Eq, PartialEq, Clone)]
+enum DefUse {
+ Def,
+ Use,
+}
+
+impl DefUse {
+ fn for_place<'tcx>(place: Place<'tcx>, context: PlaceContext) -> Option<DefUse> {
+ match context {
+ PlaceContext::NonUse(_) => None,
+
+ PlaceContext::MutatingUse(MutatingUseContext::Store | MutatingUseContext::Deinit) => {
+ if place.is_indirect() {
+ // Treat derefs as a use of the base local. `*p = 4` is not a def of `p` but a
+ // use.
+ Some(DefUse::Use)
+ } else if place.projection.is_empty() {
+ Some(DefUse::Def)
+ } else {
+ None
+ }
+ }
+
+            // Setting the discriminant is not a use because it does no reading, but it is
+            // also not a def because it does not overwrite the whole place.
+ PlaceContext::MutatingUse(MutatingUseContext::SetDiscriminant) => {
+ place.is_indirect().then_some(DefUse::Use)
+ }
+
+ // For the associated terminators, this is only a `Def` when the terminator returns
+ // "successfully." As such, we handle this case separately in `call_return_effect`
+ // above. However, if the place looks like `*_5`, this is still unconditionally a use of
+ // `_5`.
+ PlaceContext::MutatingUse(
+ MutatingUseContext::Call
+ | MutatingUseContext::Yield
+ | MutatingUseContext::AsmOutput,
+ ) => place.is_indirect().then_some(DefUse::Use),
+
+ // All other contexts are uses...
+ PlaceContext::MutatingUse(
+ MutatingUseContext::AddressOf
+ | MutatingUseContext::Borrow
+ | MutatingUseContext::Drop
+ | MutatingUseContext::Retag,
+ )
+ | PlaceContext::NonMutatingUse(
+ NonMutatingUseContext::AddressOf
+ | NonMutatingUseContext::Copy
+ | NonMutatingUseContext::Inspect
+ | NonMutatingUseContext::Move
+ | NonMutatingUseContext::ShallowBorrow
+ | NonMutatingUseContext::SharedBorrow
+ | NonMutatingUseContext::UniqueBorrow,
+ ) => Some(DefUse::Use),
+
+ PlaceContext::MutatingUse(MutatingUseContext::Projection)
+ | PlaceContext::NonMutatingUse(NonMutatingUseContext::Projection) => {
+ unreachable!("A projection could be a def or a use and must be handled separately")
+ }
+ }
+ }
+}
+
+/// Like `MaybeLiveLocals`, but does not mark locals as live if they are used in a dead assignment.
+///
+/// This is basically written for dead store elimination and nothing else.
+///
+/// All of the caveats of `MaybeLiveLocals` apply.
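+///
+/// For example (an illustrative sketch):
+///
+/// ```rust
+/// let a = 1;
+/// let b = a; // `b` is never read again
+/// ```
+///
+/// Plain `MaybeLiveLocals` marks `a` live at `let b = a;`, but this analysis sees that
+/// the store to `b` is itself dead, so the use of `a` inside it does not make `a` live
+/// either.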
+pub struct MaybeTransitiveLiveLocals<'a> {
+ always_live: &'a BitSet<Local>,
+}
+
+impl<'a> MaybeTransitiveLiveLocals<'a> {
+    /// The `always_live` set is the set of locals to which all stores should unconditionally be
+ /// considered live.
+ ///
+ /// This should include at least all locals that are ever borrowed.
+ pub fn new(always_live: &'a BitSet<Local>) -> Self {
+ MaybeTransitiveLiveLocals { always_live }
+ }
+}
+
+impl<'a, 'tcx> AnalysisDomain<'tcx> for MaybeTransitiveLiveLocals<'a> {
+ type Domain = ChunkedBitSet<Local>;
+ type Direction = Backward;
+
+ const NAME: &'static str = "transitive liveness";
+
+ fn bottom_value(&self, body: &mir::Body<'tcx>) -> Self::Domain {
+ // bottom = not live
+ ChunkedBitSet::new_empty(body.local_decls.len())
+ }
+
+ fn initialize_start_block(&self, _: &mir::Body<'tcx>, _: &mut Self::Domain) {
+ // No variables are live until we observe a use
+ }
+}
+
+impl<'a, 'tcx> Analysis<'tcx> for MaybeTransitiveLiveLocals<'a> {
+ fn apply_statement_effect(
+ &self,
+ trans: &mut Self::Domain,
+ statement: &mir::Statement<'tcx>,
+ location: Location,
+ ) {
+ // Compute the place that we are storing to, if any
+ let destination = match &statement.kind {
+ StatementKind::Assign(assign) => {
+ if assign.1.is_safe_to_remove() {
+ Some(assign.0)
+ } else {
+ None
+ }
+ }
+ StatementKind::SetDiscriminant { place, .. } | StatementKind::Deinit(place) => {
+ Some(**place)
+ }
+ StatementKind::FakeRead(_)
+ | StatementKind::StorageLive(_)
+ | StatementKind::StorageDead(_)
+ | StatementKind::Retag(..)
+ | StatementKind::AscribeUserType(..)
+ | StatementKind::Coverage(..)
+ | StatementKind::CopyNonOverlapping(..)
+ | StatementKind::Nop => None,
+ };
+ if let Some(destination) = destination {
+ if !destination.is_indirect()
+ && !trans.contains(destination.local)
+ && !self.always_live.contains(destination.local)
+ {
+ // This store is dead
+ return;
+ }
+ }
+ TransferFunction(trans).visit_statement(statement, location);
+ }
+
+ fn apply_terminator_effect(
+ &self,
+ trans: &mut Self::Domain,
+ terminator: &mir::Terminator<'tcx>,
+ location: Location,
+ ) {
+ TransferFunction(trans).visit_terminator(terminator, location);
+ }
+
+ fn apply_call_return_effect(
+ &self,
+ trans: &mut Self::Domain,
+ _block: mir::BasicBlock,
+ return_places: CallReturnPlaces<'_, 'tcx>,
+ ) {
+ return_places.for_each(|place| {
+ if let Some(local) = place.as_local() {
+ trans.remove(local);
+ }
+ });
+ }
+
+ fn apply_yield_resume_effect(
+ &self,
+ trans: &mut Self::Domain,
+ _resume_block: mir::BasicBlock,
+ resume_place: mir::Place<'tcx>,
+ ) {
+ if let Some(local) = resume_place.as_local() {
+ trans.remove(local);
+ }
+ }
+}
diff --git a/compiler/rustc_mir_dataflow/src/impls/mod.rs b/compiler/rustc_mir_dataflow/src/impls/mod.rs
new file mode 100644
index 000000000..fd1e49277
--- /dev/null
+++ b/compiler/rustc_mir_dataflow/src/impls/mod.rs
@@ -0,0 +1,766 @@
+//! Dataflow analyses are built upon some interpretation of the
+//! bitvectors attached to each basic block. Each analysis in this module is a
+//! (usually zero-sized) structure that supplies that interpretation.
+
+use rustc_index::bit_set::{BitSet, ChunkedBitSet};
+use rustc_index::vec::Idx;
+use rustc_middle::mir::visit::{MirVisitable, Visitor};
+use rustc_middle::mir::{self, Body, Location};
+use rustc_middle::ty::{self, TyCtxt};
+
+use crate::drop_flag_effects_for_function_entry;
+use crate::drop_flag_effects_for_location;
+use crate::elaborate_drops::DropFlagState;
+use crate::framework::{CallReturnPlaces, SwitchIntEdgeEffects};
+use crate::move_paths::{HasMoveData, InitIndex, InitKind, LookupResult, MoveData, MovePathIndex};
+use crate::on_lookup_result_bits;
+use crate::MoveDataParamEnv;
+use crate::{drop_flag_effects, on_all_children_bits};
+use crate::{lattice, AnalysisDomain, GenKill, GenKillAnalysis};
+
+mod borrowed_locals;
+mod init_locals;
+mod liveness;
+mod storage_liveness;
+
+pub use self::borrowed_locals::borrowed_locals;
+pub use self::borrowed_locals::MaybeBorrowedLocals;
+pub use self::init_locals::MaybeInitializedLocals;
+pub use self::liveness::MaybeLiveLocals;
+pub use self::liveness::MaybeTransitiveLiveLocals;
+pub use self::storage_liveness::{MaybeRequiresStorage, MaybeStorageLive};
+
+/// `MaybeInitializedPlaces` tracks all places that might be
+/// initialized upon reaching a particular point in the control flow
+/// for a function.
+///
+/// For example, in code like the following, we have corresponding
+/// dataflow information shown in the right-hand comments.
+///
+/// ```rust
+/// struct S;
+/// fn foo(pred: bool) { // maybe-init:
+/// // {}
+/// let a = S; let mut b = S; let c; let d; // {a, b}
+///
+/// if pred {
+/// drop(a); // { b}
+/// b = S; // { b}
+///
+/// } else {
+/// drop(b); // {a}
+/// d = S; // {a, d}
+///
+/// } // {a, b, d}
+///
+/// c = S; // {a, b, c, d}
+/// }
+/// ```
+///
+/// To determine whether a place *must* be initialized at a
+/// particular control-flow point, one can take the set-difference
+/// between this data and the data from `MaybeUninitializedPlaces` at the
+/// corresponding control-flow point.
+///
+/// Similarly, at a given `drop` statement, the set-intersection
+/// between this data and `MaybeUninitializedPlaces` yields the set of
+/// places that would require a dynamic drop-flag at that statement.
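+///
+/// For example, at the join point after the `if`/`else` above, maybe-init is `{a, b, d}`
+/// and maybe-uninit (see below) is `{a, b, c, d}`: their difference is empty, so nothing
+/// is *must*-initialized there, while dropping `a`, `b`, or `d` at that point would
+/// require a dynamic drop-flag.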
+pub struct MaybeInitializedPlaces<'a, 'tcx> {
+ tcx: TyCtxt<'tcx>,
+ body: &'a Body<'tcx>,
+ mdpe: &'a MoveDataParamEnv<'tcx>,
+}
+
+impl<'a, 'tcx> MaybeInitializedPlaces<'a, 'tcx> {
+ pub fn new(tcx: TyCtxt<'tcx>, body: &'a Body<'tcx>, mdpe: &'a MoveDataParamEnv<'tcx>) -> Self {
+ MaybeInitializedPlaces { tcx, body, mdpe }
+ }
+}
+
+impl<'a, 'tcx> HasMoveData<'tcx> for MaybeInitializedPlaces<'a, 'tcx> {
+ fn move_data(&self) -> &MoveData<'tcx> {
+ &self.mdpe.move_data
+ }
+}
+
+/// `MaybeUninitializedPlaces` tracks all places that might be
+/// uninitialized upon reaching a particular point in the control flow
+/// for a function.
+///
+/// For example, in code like the following, we have corresponding
+/// dataflow information shown in the right-hand comments.
+///
+/// ```rust
+/// struct S;
+/// fn foo(pred: bool) { // maybe-uninit:
+/// // {a, b, c, d}
+/// let a = S; let mut b = S; let c; let d; // { c, d}
+///
+/// if pred {
+/// drop(a); // {a, c, d}
+/// b = S; // {a, c, d}
+///
+/// } else {
+/// drop(b); // { b, c, d}
+/// d = S; // { b, c }
+///
+/// } // {a, b, c, d}
+///
+/// c = S; // {a, b, d}
+/// }
+/// ```
+///
+/// To determine whether a place *must* be uninitialized at a
+/// particular control-flow point, one can take the set-difference
+/// between this data and the data from `MaybeInitializedPlaces` at the
+/// corresponding control-flow point.
+///
+/// Similarly, at a given `drop` statement, the set-intersection
+/// between this data and `MaybeInitializedPlaces` yields the set of
+/// places that would require a dynamic drop-flag at that statement.
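+///
+/// For example, at the join point after the `if`/`else` above, maybe-uninit is
+/// `{a, b, c, d}` and maybe-init (see above) is `{a, b, d}`, so `c` is the only place
+/// that *must* be uninitialized there.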
+pub struct MaybeUninitializedPlaces<'a, 'tcx> {
+ tcx: TyCtxt<'tcx>,
+ body: &'a Body<'tcx>,
+ mdpe: &'a MoveDataParamEnv<'tcx>,
+
+ mark_inactive_variants_as_uninit: bool,
+}
+
+impl<'a, 'tcx> MaybeUninitializedPlaces<'a, 'tcx> {
+ pub fn new(tcx: TyCtxt<'tcx>, body: &'a Body<'tcx>, mdpe: &'a MoveDataParamEnv<'tcx>) -> Self {
+ MaybeUninitializedPlaces { tcx, body, mdpe, mark_inactive_variants_as_uninit: false }
+ }
+
+ /// Causes inactive enum variants to be marked as "maybe uninitialized" after a switch on an
+ /// enum discriminant.
+ ///
+ /// This is correct in a vacuum but is not the default because it causes problems in the borrow
+    /// checker, where this information gets propagated along `FalseEdge`s.
+ pub fn mark_inactive_variants_as_uninit(mut self) -> Self {
+ self.mark_inactive_variants_as_uninit = true;
+ self
+ }
+}
+
+impl<'a, 'tcx> HasMoveData<'tcx> for MaybeUninitializedPlaces<'a, 'tcx> {
+ fn move_data(&self) -> &MoveData<'tcx> {
+ &self.mdpe.move_data
+ }
+}
+
+/// `DefinitelyInitializedPlaces` tracks all places that are definitely
+/// initialized upon reaching a particular point in the control flow
+/// for a function.
+///
+/// For example, in code like the following, we have corresponding
+/// dataflow information shown in the right-hand comments.
+///
+/// ```rust
+/// struct S;
+/// fn foo(pred: bool) { // definite-init:
+/// // { }
+/// let a = S; let mut b = S; let c; let d; // {a, b }
+///
+/// if pred {
+/// drop(a); // { b, }
+/// b = S; // { b, }
+///
+/// } else {
+/// drop(b); // {a, }
+/// d = S; // {a, d}
+///
+/// } // { }
+///
+/// c = S; // { c }
+/// }
+/// ```
+///
+/// To determine whether a place *may* be uninitialized at a
+/// particular control-flow point, one can take the set-complement
+/// of this data.
+///
+/// Similarly, at a given `drop` statement, subtracting this data from
+/// `MaybeInitializedPlaces` yields the set of places that would require a
+/// dynamic drop-flag at that statement.
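+///
+/// For example, at the join point after the `if`/`else` above, definite-init is `{}`,
+/// so its complement says that every place may be uninitialized there, matching the
+/// maybe-uninit set `{a, b, c, d}` computed above.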
+pub struct DefinitelyInitializedPlaces<'a, 'tcx> {
+ tcx: TyCtxt<'tcx>,
+ body: &'a Body<'tcx>,
+ mdpe: &'a MoveDataParamEnv<'tcx>,
+}
+
+impl<'a, 'tcx> DefinitelyInitializedPlaces<'a, 'tcx> {
+ pub fn new(tcx: TyCtxt<'tcx>, body: &'a Body<'tcx>, mdpe: &'a MoveDataParamEnv<'tcx>) -> Self {
+ DefinitelyInitializedPlaces { tcx, body, mdpe }
+ }
+}
+
+impl<'a, 'tcx> HasMoveData<'tcx> for DefinitelyInitializedPlaces<'a, 'tcx> {
+ fn move_data(&self) -> &MoveData<'tcx> {
+ &self.mdpe.move_data
+ }
+}
+
+/// `EverInitializedPlaces` tracks all places that might have ever been
+/// initialized upon reaching a particular point in the control flow
+/// for a function, without an intervening `StorageDead`.
+///
+/// This dataflow is used to determine if an immutable local variable may
+/// be assigned to.
+///
+/// For example, in code like the following, we have corresponding
+/// dataflow information shown in the right-hand comments.
+///
+/// ```rust
+/// struct S;
+/// fn foo(pred: bool) { // ever-init:
+/// // { }
+/// let a = S; let mut b = S; let c; let d; // {a, b }
+///
+/// if pred {
+/// drop(a); // {a, b, }
+/// b = S; // {a, b, }
+///
+/// } else {
+/// drop(b); // {a, b, }
+/// d = S; // {a, b, d }
+///
+/// } // {a, b, d }
+///
+/// c = S; // {a, b, c, d }
+/// }
+/// ```
+pub struct EverInitializedPlaces<'a, 'tcx> {
+ #[allow(dead_code)]
+ tcx: TyCtxt<'tcx>,
+ body: &'a Body<'tcx>,
+ mdpe: &'a MoveDataParamEnv<'tcx>,
+}
+
+impl<'a, 'tcx> EverInitializedPlaces<'a, 'tcx> {
+ pub fn new(tcx: TyCtxt<'tcx>, body: &'a Body<'tcx>, mdpe: &'a MoveDataParamEnv<'tcx>) -> Self {
+ EverInitializedPlaces { tcx, body, mdpe }
+ }
+}
+
+impl<'a, 'tcx> HasMoveData<'tcx> for EverInitializedPlaces<'a, 'tcx> {
+ fn move_data(&self) -> &MoveData<'tcx> {
+ &self.mdpe.move_data
+ }
+}
+
+impl<'a, 'tcx> MaybeInitializedPlaces<'a, 'tcx> {
+ fn update_bits(
+ trans: &mut impl GenKill<MovePathIndex>,
+ path: MovePathIndex,
+ state: DropFlagState,
+ ) {
+ match state {
+ DropFlagState::Absent => trans.kill(path),
+ DropFlagState::Present => trans.gen(path),
+ }
+ }
+}
+
+impl<'a, 'tcx> MaybeUninitializedPlaces<'a, 'tcx> {
+ fn update_bits(
+ trans: &mut impl GenKill<MovePathIndex>,
+ path: MovePathIndex,
+ state: DropFlagState,
+ ) {
+ match state {
+ DropFlagState::Absent => trans.gen(path),
+ DropFlagState::Present => trans.kill(path),
+ }
+ }
+}
+
+impl<'a, 'tcx> DefinitelyInitializedPlaces<'a, 'tcx> {
+ fn update_bits(
+ trans: &mut impl GenKill<MovePathIndex>,
+ path: MovePathIndex,
+ state: DropFlagState,
+ ) {
+ match state {
+ DropFlagState::Absent => trans.kill(path),
+ DropFlagState::Present => trans.gen(path),
+ }
+ }
+}
+
+impl<'tcx> AnalysisDomain<'tcx> for MaybeInitializedPlaces<'_, 'tcx> {
+ type Domain = ChunkedBitSet<MovePathIndex>;
+ const NAME: &'static str = "maybe_init";
+
+ fn bottom_value(&self, _: &mir::Body<'tcx>) -> Self::Domain {
+ // bottom = uninitialized
+ ChunkedBitSet::new_empty(self.move_data().move_paths.len())
+ }
+
+ fn initialize_start_block(&self, _: &mir::Body<'tcx>, state: &mut Self::Domain) {
+ drop_flag_effects_for_function_entry(self.tcx, self.body, self.mdpe, |path, s| {
+ assert!(s == DropFlagState::Present);
+ state.insert(path);
+ });
+ }
+}
+
+impl<'tcx> GenKillAnalysis<'tcx> for MaybeInitializedPlaces<'_, 'tcx> {
+ type Idx = MovePathIndex;
+
+ fn statement_effect(
+ &self,
+ trans: &mut impl GenKill<Self::Idx>,
+ statement: &mir::Statement<'tcx>,
+ location: Location,
+ ) {
+ drop_flag_effects_for_location(self.tcx, self.body, self.mdpe, location, |path, s| {
+ Self::update_bits(trans, path, s)
+ });
+
+ if !self.tcx.sess.opts.unstable_opts.precise_enum_drop_elaboration {
+ return;
+ }
+
+ // Mark all places as "maybe init" if they are mutably borrowed. See #90752.
+ for_each_mut_borrow(statement, location, |place| {
+ let LookupResult::Exact(mpi) = self.move_data().rev_lookup.find(place.as_ref()) else { return };
+ on_all_children_bits(self.tcx, self.body, self.move_data(), mpi, |child| {
+ trans.gen(child);
+ })
+ })
+ }
+
+ fn terminator_effect(
+ &self,
+ trans: &mut impl GenKill<Self::Idx>,
+ terminator: &mir::Terminator<'tcx>,
+ location: Location,
+ ) {
+ drop_flag_effects_for_location(self.tcx, self.body, self.mdpe, location, |path, s| {
+ Self::update_bits(trans, path, s)
+ });
+
+ if !self.tcx.sess.opts.unstable_opts.precise_enum_drop_elaboration {
+ return;
+ }
+
+ for_each_mut_borrow(terminator, location, |place| {
+ let LookupResult::Exact(mpi) = self.move_data().rev_lookup.find(place.as_ref()) else { return };
+ on_all_children_bits(self.tcx, self.body, self.move_data(), mpi, |child| {
+ trans.gen(child);
+ })
+ })
+ }
+
+ fn call_return_effect(
+ &self,
+ trans: &mut impl GenKill<Self::Idx>,
+ _block: mir::BasicBlock,
+ return_places: CallReturnPlaces<'_, 'tcx>,
+ ) {
+ return_places.for_each(|place| {
+            // When a call returns successfully, its destination place is
+            // initialized, so set the corresponding bits to 1.
+ on_lookup_result_bits(
+ self.tcx,
+ self.body,
+ self.move_data(),
+ self.move_data().rev_lookup.find(place.as_ref()),
+ |mpi| {
+ trans.gen(mpi);
+ },
+ );
+ });
+ }
+
+ fn switch_int_edge_effects<G: GenKill<Self::Idx>>(
+ &self,
+ block: mir::BasicBlock,
+ discr: &mir::Operand<'tcx>,
+ edge_effects: &mut impl SwitchIntEdgeEffects<G>,
+ ) {
+ if !self.tcx.sess.opts.unstable_opts.precise_enum_drop_elaboration {
+ return;
+ }
+
+ let enum_ = discr.place().and_then(|discr| {
+ switch_on_enum_discriminant(self.tcx, &self.body, &self.body[block], discr)
+ });
+
+ let Some((enum_place, enum_def)) = enum_ else {
+ return;
+ };
+
+ let mut discriminants = enum_def.discriminants(self.tcx);
+ edge_effects.apply(|trans, edge| {
+ let Some(value) = edge.value else {
+ return;
+ };
+
+ // MIR building adds discriminants to the `values` array in the same order as they
+ // are yielded by `AdtDef::discriminants`. We rely on this to match each
+ // discriminant in `values` to its corresponding variant in linear time.
+ let (variant, _) = discriminants
+ .find(|&(_, discr)| discr.val == value)
+ .expect("Order of `AdtDef::discriminants` differed from `SwitchInt::values`");
+
+ // Kill all move paths that correspond to variants we know to be inactive along this
+ // particular outgoing edge of a `SwitchInt`.
+ drop_flag_effects::on_all_inactive_variants(
+ self.tcx,
+ self.body,
+ self.move_data(),
+ enum_place,
+ variant,
+ |mpi| trans.kill(mpi),
+ );
+ });
+ }
+}
+
+impl<'tcx> AnalysisDomain<'tcx> for MaybeUninitializedPlaces<'_, 'tcx> {
+ type Domain = ChunkedBitSet<MovePathIndex>;
+
+ const NAME: &'static str = "maybe_uninit";
+
+ fn bottom_value(&self, _: &mir::Body<'tcx>) -> Self::Domain {
+        // bottom = initialized (`initialize_start_block` counteracts this at the outset)
+ ChunkedBitSet::new_empty(self.move_data().move_paths.len())
+ }
+
+ // sets on_entry bits for Arg places
+ fn initialize_start_block(&self, _: &mir::Body<'tcx>, state: &mut Self::Domain) {
+ // set all bits to 1 (uninit) before gathering counter-evidence
+ state.insert_all();
+
+ drop_flag_effects_for_function_entry(self.tcx, self.body, self.mdpe, |path, s| {
+ assert!(s == DropFlagState::Present);
+ state.remove(path);
+ });
+ }
+}
+
+impl<'tcx> GenKillAnalysis<'tcx> for MaybeUninitializedPlaces<'_, 'tcx> {
+ type Idx = MovePathIndex;
+
+ fn statement_effect(
+ &self,
+ trans: &mut impl GenKill<Self::Idx>,
+ _statement: &mir::Statement<'tcx>,
+ location: Location,
+ ) {
+ drop_flag_effects_for_location(self.tcx, self.body, self.mdpe, location, |path, s| {
+ Self::update_bits(trans, path, s)
+ });
+
+ // Unlike in `MaybeInitializedPlaces` above, we don't need to change the state when a
+ // mutable borrow occurs. Places cannot become uninitialized through a mutable reference.
+ }
+
+ fn terminator_effect(
+ &self,
+ trans: &mut impl GenKill<Self::Idx>,
+ _terminator: &mir::Terminator<'tcx>,
+ location: Location,
+ ) {
+ drop_flag_effects_for_location(self.tcx, self.body, self.mdpe, location, |path, s| {
+ Self::update_bits(trans, path, s)
+ });
+ }
+
+ fn call_return_effect(
+ &self,
+ trans: &mut impl GenKill<Self::Idx>,
+ _block: mir::BasicBlock,
+ return_places: CallReturnPlaces<'_, 'tcx>,
+ ) {
+ return_places.for_each(|place| {
+            // When a call returns successfully, its destination place is
+            // initialized, so clear the corresponding maybe-uninit bits.
+ on_lookup_result_bits(
+ self.tcx,
+ self.body,
+ self.move_data(),
+ self.move_data().rev_lookup.find(place.as_ref()),
+ |mpi| {
+ trans.kill(mpi);
+ },
+ );
+ });
+ }
+
+ fn switch_int_edge_effects<G: GenKill<Self::Idx>>(
+ &self,
+ block: mir::BasicBlock,
+ discr: &mir::Operand<'tcx>,
+ edge_effects: &mut impl SwitchIntEdgeEffects<G>,
+ ) {
+ if !self.tcx.sess.opts.unstable_opts.precise_enum_drop_elaboration {
+ return;
+ }
+
+ if !self.mark_inactive_variants_as_uninit {
+ return;
+ }
+
+ let enum_ = discr.place().and_then(|discr| {
+ switch_on_enum_discriminant(self.tcx, &self.body, &self.body[block], discr)
+ });
+
+ let Some((enum_place, enum_def)) = enum_ else {
+ return;
+ };
+
+ let mut discriminants = enum_def.discriminants(self.tcx);
+ edge_effects.apply(|trans, edge| {
+ let Some(value) = edge.value else {
+ return;
+ };
+
+ // MIR building adds discriminants to the `values` array in the same order as they
+ // are yielded by `AdtDef::discriminants`. We rely on this to match each
+ // discriminant in `values` to its corresponding variant in linear time.
+ let (variant, _) = discriminants
+ .find(|&(_, discr)| discr.val == value)
+ .expect("Order of `AdtDef::discriminants` differed from `SwitchInt::values`");
+
+ // Mark all move paths that correspond to variants other than this one as maybe
+ // uninitialized (in reality, they are *definitely* uninitialized).
+ drop_flag_effects::on_all_inactive_variants(
+ self.tcx,
+ self.body,
+ self.move_data(),
+ enum_place,
+ variant,
+ |mpi| trans.gen(mpi),
+ );
+ });
+ }
+}
+
+impl<'a, 'tcx> AnalysisDomain<'tcx> for DefinitelyInitializedPlaces<'a, 'tcx> {
+ /// Use set intersection as the join operator.
+ type Domain = lattice::Dual<BitSet<MovePathIndex>>;
+
+ const NAME: &'static str = "definite_init";
+
+ fn bottom_value(&self, _: &mir::Body<'tcx>) -> Self::Domain {
+        // bottom = initialized (`initialize_start_block` counteracts this at the outset)
+ lattice::Dual(BitSet::new_filled(self.move_data().move_paths.len()))
+ }
+
+ // sets on_entry bits for Arg places
+ fn initialize_start_block(&self, _: &mir::Body<'tcx>, state: &mut Self::Domain) {
+ state.0.clear();
+
+ drop_flag_effects_for_function_entry(self.tcx, self.body, self.mdpe, |path, s| {
+ assert!(s == DropFlagState::Present);
+ state.0.insert(path);
+ });
+ }
+}
+
+impl<'tcx> GenKillAnalysis<'tcx> for DefinitelyInitializedPlaces<'_, 'tcx> {
+ type Idx = MovePathIndex;
+
+ fn statement_effect(
+ &self,
+ trans: &mut impl GenKill<Self::Idx>,
+ _statement: &mir::Statement<'tcx>,
+ location: Location,
+ ) {
+ drop_flag_effects_for_location(self.tcx, self.body, self.mdpe, location, |path, s| {
+ Self::update_bits(trans, path, s)
+ })
+ }
+
+ fn terminator_effect(
+ &self,
+ trans: &mut impl GenKill<Self::Idx>,
+ _terminator: &mir::Terminator<'tcx>,
+ location: Location,
+ ) {
+ drop_flag_effects_for_location(self.tcx, self.body, self.mdpe, location, |path, s| {
+ Self::update_bits(trans, path, s)
+ })
+ }
+
+ fn call_return_effect(
+ &self,
+ trans: &mut impl GenKill<Self::Idx>,
+ _block: mir::BasicBlock,
+ return_places: CallReturnPlaces<'_, 'tcx>,
+ ) {
+ return_places.for_each(|place| {
+            // When a call returns successfully, its destination place is
+            // initialized, so set the corresponding bits to 1.
+ on_lookup_result_bits(
+ self.tcx,
+ self.body,
+ self.move_data(),
+ self.move_data().rev_lookup.find(place.as_ref()),
+ |mpi| {
+ trans.gen(mpi);
+ },
+ );
+ });
+ }
+}
+
+impl<'tcx> AnalysisDomain<'tcx> for EverInitializedPlaces<'_, 'tcx> {
+ type Domain = ChunkedBitSet<InitIndex>;
+
+ const NAME: &'static str = "ever_init";
+
+ fn bottom_value(&self, _: &mir::Body<'tcx>) -> Self::Domain {
+ // bottom = no initialized variables by default
+ ChunkedBitSet::new_empty(self.move_data().inits.len())
+ }
+
+ fn initialize_start_block(&self, body: &mir::Body<'tcx>, state: &mut Self::Domain) {
+ for arg_init in 0..body.arg_count {
+ state.insert(InitIndex::new(arg_init));
+ }
+ }
+}
+
+impl<'tcx> GenKillAnalysis<'tcx> for EverInitializedPlaces<'_, 'tcx> {
+ type Idx = InitIndex;
+
+ #[instrument(skip(self, trans), level = "debug")]
+ fn statement_effect(
+ &self,
+ trans: &mut impl GenKill<Self::Idx>,
+ stmt: &mir::Statement<'tcx>,
+ location: Location,
+ ) {
+ let move_data = self.move_data();
+ let init_path_map = &move_data.init_path_map;
+ let init_loc_map = &move_data.init_loc_map;
+ let rev_lookup = &move_data.rev_lookup;
+
+ debug!("initializes move_indexes {:?}", &init_loc_map[location]);
+ trans.gen_all(init_loc_map[location].iter().copied());
+
+ if let mir::StatementKind::StorageDead(local) = stmt.kind {
+ // End inits for StorageDead, so that an immutable variable can
+ // be reinitialized on the next iteration of the loop.
+ let move_path_index = rev_lookup.find_local(local);
+ debug!("clears the ever initialized status of {:?}", init_path_map[move_path_index]);
+ trans.kill_all(init_path_map[move_path_index].iter().copied());
+ }
+ }
+
+ #[instrument(skip(self, trans, _terminator), level = "debug")]
+ fn terminator_effect(
+ &self,
+ trans: &mut impl GenKill<Self::Idx>,
+ _terminator: &mir::Terminator<'tcx>,
+ location: Location,
+ ) {
+ let (body, move_data) = (self.body, self.move_data());
+ let term = body[location.block].terminator();
+ let init_loc_map = &move_data.init_loc_map;
+ debug!(?term);
+ debug!("initializes move_indexes {:?}", init_loc_map[location]);
+ trans.gen_all(
+ init_loc_map[location]
+ .iter()
+ .filter(|init_index| {
+ move_data.inits[**init_index].kind != InitKind::NonPanicPathOnly
+ })
+ .copied(),
+ );
+ }
+
+ fn call_return_effect(
+ &self,
+ trans: &mut impl GenKill<Self::Idx>,
+ block: mir::BasicBlock,
+ _return_places: CallReturnPlaces<'_, 'tcx>,
+ ) {
+ let move_data = self.move_data();
+ let init_loc_map = &move_data.init_loc_map;
+
+ let call_loc = self.body.terminator_loc(block);
+ for init_index in &init_loc_map[call_loc] {
+ trans.gen(*init_index);
+ }
+ }
+}
+
+/// Inspect a `SwitchInt`-terminated basic block to see if the condition of that `SwitchInt` is
+/// an enum discriminant.
+///
+/// We expect such blocks to have a call to `discriminant` as their last statement like so:
+///
+/// ```text
+/// ...
+/// _42 = discriminant(_1)
+/// SwitchInt(_42, ..)
+/// ```
+///
+/// If the basic block matches this pattern, this function returns the place corresponding to the
+/// enum (`_1` in the example above) as well as the `AdtDef` of that enum.
+fn switch_on_enum_discriminant<'mir, 'tcx>(
+ tcx: TyCtxt<'tcx>,
+ body: &'mir mir::Body<'tcx>,
+ block: &'mir mir::BasicBlockData<'tcx>,
+ switch_on: mir::Place<'tcx>,
+) -> Option<(mir::Place<'tcx>, ty::AdtDef<'tcx>)> {
+ for statement in block.statements.iter().rev() {
+ match &statement.kind {
+ mir::StatementKind::Assign(box (lhs, mir::Rvalue::Discriminant(discriminated)))
+ if *lhs == switch_on =>
+ {
+ match discriminated.ty(body, tcx).ty.kind() {
+ ty::Adt(def, _) => return Some((*discriminated, *def)),
+
+ // `Rvalue::Discriminant` is also used to get the active yield point for a
+ // generator, but we do not need edge-specific effects in that case. This may
+ // change in the future.
+ ty::Generator(..) => return None,
+
+ t => bug!("`discriminant` called on unexpected type {:?}", t),
+ }
+ }
+ mir::StatementKind::Coverage(_) => continue,
+ _ => return None,
+ }
+ }
+ None
+}
+
+struct OnMutBorrow<F>(F);
+
+impl<F> Visitor<'_> for OnMutBorrow<F>
+where
+ F: FnMut(&mir::Place<'_>),
+{
+ fn visit_rvalue(&mut self, rvalue: &mir::Rvalue<'_>, location: Location) {
+ // FIXME: Does `&raw const foo` allow mutation? See #90413.
+ match rvalue {
+ mir::Rvalue::Ref(_, mir::BorrowKind::Mut { .. }, place)
+ | mir::Rvalue::AddressOf(_, place) => (self.0)(place),
+
+ _ => {}
+ }
+
+ self.super_rvalue(rvalue, location)
+ }
+}
+
+/// Calls `f` for each mutable borrow or raw reference in the program.
+///
+/// This DOES NOT call `f` for a shared borrow of a type with interior mutability. That's okay for
+/// initializedness, because we cannot move from an `UnsafeCell` (outside of `core::cell`), but
+/// other analyses will likely need to check for `!Freeze`.
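+///
+/// For example (illustrative), `f` is called for the borrowed place in `_1 = &mut _2`
+/// and for the raw-pointer case `_3 = &raw mut _4` (per the FIXME above, also for
+/// `&raw const`), but not for a shared borrow such as `_5 = &_6`.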
+fn for_each_mut_borrow<'tcx>(
+ mir: &impl MirVisitable<'tcx>,
+ location: Location,
+ f: impl FnMut(&mir::Place<'_>),
+) {
+ let mut vis = OnMutBorrow(f);
+
+ mir.apply(location, &mut vis);
+}
diff --git a/compiler/rustc_mir_dataflow/src/impls/storage_liveness.rs b/compiler/rustc_mir_dataflow/src/impls/storage_liveness.rs
new file mode 100644
index 000000000..f6b5af90a
--- /dev/null
+++ b/compiler/rustc_mir_dataflow/src/impls/storage_liveness.rs
@@ -0,0 +1,300 @@
+pub use super::*;
+
+use crate::{CallReturnPlaces, GenKill, Results, ResultsRefCursor};
+use rustc_middle::mir::visit::{NonMutatingUseContext, PlaceContext, Visitor};
+use rustc_middle::mir::*;
+use std::cell::RefCell;
+
+#[derive(Clone)]
+pub struct MaybeStorageLive {
+ always_live_locals: BitSet<Local>,
+}
+
+impl MaybeStorageLive {
+ pub fn new(always_live_locals: BitSet<Local>) -> Self {
+ MaybeStorageLive { always_live_locals }
+ }
+}
+
+impl<'tcx> crate::AnalysisDomain<'tcx> for MaybeStorageLive {
+ type Domain = BitSet<Local>;
+
+ const NAME: &'static str = "maybe_storage_live";
+
+ fn bottom_value(&self, body: &mir::Body<'tcx>) -> Self::Domain {
+ // bottom = dead
+ BitSet::new_empty(body.local_decls.len())
+ }
+
+ fn initialize_start_block(&self, body: &mir::Body<'tcx>, on_entry: &mut Self::Domain) {
+ assert_eq!(body.local_decls.len(), self.always_live_locals.domain_size());
+ for local in self.always_live_locals.iter() {
+ on_entry.insert(local);
+ }
+
+ for arg in body.args_iter() {
+ on_entry.insert(arg);
+ }
+ }
+}
+
+impl<'tcx> crate::GenKillAnalysis<'tcx> for MaybeStorageLive {
+ type Idx = Local;
+
+ fn statement_effect(
+ &self,
+ trans: &mut impl GenKill<Self::Idx>,
+ stmt: &mir::Statement<'tcx>,
+ _: Location,
+ ) {
+ match stmt.kind {
+ StatementKind::StorageLive(l) => trans.gen(l),
+ StatementKind::StorageDead(l) => trans.kill(l),
+ _ => (),
+ }
+ }
+
+ fn terminator_effect(
+ &self,
+ _trans: &mut impl GenKill<Self::Idx>,
+ _: &mir::Terminator<'tcx>,
+ _: Location,
+ ) {
+ // Terminators have no effect
+ }
+
+ fn call_return_effect(
+ &self,
+ _trans: &mut impl GenKill<Self::Idx>,
+ _block: BasicBlock,
+ _return_places: CallReturnPlaces<'_, 'tcx>,
+ ) {
+ // Nothing to do when a call returns successfully
+ }
+}
+
+type BorrowedLocalsResults<'a, 'tcx> = ResultsRefCursor<'a, 'a, 'tcx, MaybeBorrowedLocals>;
+
+/// Dataflow analysis that determines whether each local requires storage at a
+/// given location; i.e. whether its storage can go away without being observed.
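+///
+/// For example (illustrative), a local that is borrowed before a `yield` and whose
+/// storage has not yet been marked dead still requires storage across the yield point,
+/// even if it is never mentioned again afterwards.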
+pub struct MaybeRequiresStorage<'mir, 'tcx> {
+ body: &'mir Body<'tcx>,
+ borrowed_locals: RefCell<BorrowedLocalsResults<'mir, 'tcx>>,
+}
+
+impl<'mir, 'tcx> MaybeRequiresStorage<'mir, 'tcx> {
+ pub fn new(
+ body: &'mir Body<'tcx>,
+ borrowed_locals: &'mir Results<'tcx, MaybeBorrowedLocals>,
+ ) -> Self {
+ MaybeRequiresStorage {
+ body,
+ borrowed_locals: RefCell::new(ResultsRefCursor::new(&body, borrowed_locals)),
+ }
+ }
+}
+
+impl<'mir, 'tcx> crate::AnalysisDomain<'tcx> for MaybeRequiresStorage<'mir, 'tcx> {
+ type Domain = BitSet<Local>;
+
+ const NAME: &'static str = "requires_storage";
+
+ fn bottom_value(&self, body: &mir::Body<'tcx>) -> Self::Domain {
+ // bottom = dead
+ BitSet::new_empty(body.local_decls.len())
+ }
+
+ fn initialize_start_block(&self, body: &mir::Body<'tcx>, on_entry: &mut Self::Domain) {
+ // The resume argument is live on function entry (we don't care about
+ // the `self` argument)
+ for arg in body.args_iter().skip(1) {
+ on_entry.insert(arg);
+ }
+ }
+}
+
+impl<'mir, 'tcx> crate::GenKillAnalysis<'tcx> for MaybeRequiresStorage<'mir, 'tcx> {
+ type Idx = Local;
+
+ fn before_statement_effect(
+ &self,
+ trans: &mut impl GenKill<Self::Idx>,
+ stmt: &mir::Statement<'tcx>,
+ loc: Location,
+ ) {
+ // If a place is borrowed in a statement, it needs storage for that statement.
+ self.borrowed_locals.borrow().analysis().statement_effect(trans, stmt, loc);
+
+ match &stmt.kind {
+ StatementKind::StorageDead(l) => trans.kill(*l),
+
+ // If a place is assigned to in a statement, it needs storage for that statement.
+ StatementKind::Assign(box (place, _))
+ | StatementKind::SetDiscriminant { box place, .. }
+ | StatementKind::Deinit(box place) => {
+ trans.gen(place.local);
+ }
+
+ // Nothing to do for these. Match exhaustively so this fails to compile when new
+ // variants are added.
+ StatementKind::AscribeUserType(..)
+ | StatementKind::Coverage(..)
+ | StatementKind::FakeRead(..)
+ | StatementKind::Nop
+ | StatementKind::Retag(..)
+ | StatementKind::CopyNonOverlapping(..)
+ | StatementKind::StorageLive(..) => {}
+ }
+ }
+
+ fn statement_effect(
+ &self,
+ trans: &mut impl GenKill<Self::Idx>,
+ _: &mir::Statement<'tcx>,
+ loc: Location,
+ ) {
+ // If we move from a place then it only stops needing storage *after*
+ // that statement.
+ self.check_for_move(trans, loc);
+ }
+
+ fn before_terminator_effect(
+ &self,
+ trans: &mut impl GenKill<Self::Idx>,
+ terminator: &mir::Terminator<'tcx>,
+ loc: Location,
+ ) {
+ // If a place is borrowed in a terminator, it needs storage for that terminator.
+ self.borrowed_locals.borrow().analysis().terminator_effect(trans, terminator, loc);
+
+ match &terminator.kind {
+ TerminatorKind::Call { destination, .. } => {
+ trans.gen(destination.local);
+ }
+
+ // Note that we do *not* gen the `resume_arg` of `Yield` terminators. The reason for
+ // that is that a `yield` will return from the function, and `resume_arg` is written
+ // only when the generator is later resumed. Unlike `Call`, this doesn't require the
+ // place to have storage *before* the yield, only after.
+ TerminatorKind::Yield { .. } => {}
+
+ TerminatorKind::InlineAsm { operands, .. } => {
+ for op in operands {
+ match op {
+ InlineAsmOperand::Out { place, .. }
+ | InlineAsmOperand::InOut { out_place: place, .. } => {
+ if let Some(place) = place {
+ trans.gen(place.local);
+ }
+ }
+ InlineAsmOperand::In { .. }
+ | InlineAsmOperand::Const { .. }
+ | InlineAsmOperand::SymFn { .. }
+ | InlineAsmOperand::SymStatic { .. } => {}
+ }
+ }
+ }
+
+ // Nothing to do for these. Match exhaustively so this fails to compile when new
+ // variants are added.
+ TerminatorKind::Abort
+ | TerminatorKind::Assert { .. }
+ | TerminatorKind::Drop { .. }
+ | TerminatorKind::DropAndReplace { .. }
+ | TerminatorKind::FalseEdge { .. }
+ | TerminatorKind::FalseUnwind { .. }
+ | TerminatorKind::GeneratorDrop
+ | TerminatorKind::Goto { .. }
+ | TerminatorKind::Resume
+ | TerminatorKind::Return
+ | TerminatorKind::SwitchInt { .. }
+ | TerminatorKind::Unreachable => {}
+ }
+ }
+
+ fn terminator_effect(
+ &self,
+ trans: &mut impl GenKill<Self::Idx>,
+ terminator: &mir::Terminator<'tcx>,
+ loc: Location,
+ ) {
+ match terminator.kind {
+ // For call terminators the destination requires storage for the call
+ // and after the call returns successfully, but not after a panic.
+ // Since `propagate_call_unwind` doesn't exist, we have to kill the
+ // destination here, and then gen it again in `call_return_effect`.
+ TerminatorKind::Call { destination, .. } => {
+ trans.kill(destination.local);
+ }
+
+ // The same applies to InlineAsm outputs.
+ TerminatorKind::InlineAsm { ref operands, .. } => {
+ CallReturnPlaces::InlineAsm(operands).for_each(|place| trans.kill(place.local));
+ }
+
+ // Nothing to do for these. Match exhaustively so this fails to compile when new
+ // variants are added.
+ TerminatorKind::Yield { .. }
+ | TerminatorKind::Abort
+ | TerminatorKind::Assert { .. }
+ | TerminatorKind::Drop { .. }
+ | TerminatorKind::DropAndReplace { .. }
+ | TerminatorKind::FalseEdge { .. }
+ | TerminatorKind::FalseUnwind { .. }
+ | TerminatorKind::GeneratorDrop
+ | TerminatorKind::Goto { .. }
+ | TerminatorKind::Resume
+ | TerminatorKind::Return
+ | TerminatorKind::SwitchInt { .. }
+ | TerminatorKind::Unreachable => {}
+ }
+
+ self.check_for_move(trans, loc);
+ }
+
+ fn call_return_effect(
+ &self,
+ trans: &mut impl GenKill<Self::Idx>,
+ _block: BasicBlock,
+ return_places: CallReturnPlaces<'_, 'tcx>,
+ ) {
+ return_places.for_each(|place| trans.gen(place.local));
+ }
+
+ fn yield_resume_effect(
+ &self,
+ trans: &mut impl GenKill<Self::Idx>,
+ _resume_block: BasicBlock,
+ resume_place: mir::Place<'tcx>,
+ ) {
+ trans.gen(resume_place.local);
+ }
+}
+
+impl<'mir, 'tcx> MaybeRequiresStorage<'mir, 'tcx> {
+ /// Kill locals that are fully moved and have not been borrowed.
+ fn check_for_move(&self, trans: &mut impl GenKill<Local>, loc: Location) {
+ let mut visitor = MoveVisitor { trans, borrowed_locals: &self.borrowed_locals };
+ visitor.visit_location(&self.body, loc);
+ }
+}
+
+struct MoveVisitor<'a, 'mir, 'tcx, T> {
+ borrowed_locals: &'a RefCell<BorrowedLocalsResults<'mir, 'tcx>>,
+ trans: &'a mut T,
+}
+
+impl<'a, 'mir, 'tcx, T> Visitor<'tcx> for MoveVisitor<'a, 'mir, 'tcx, T>
+where
+ T: GenKill<Local>,
+{
+ fn visit_local(&mut self, local: Local, context: PlaceContext, loc: Location) {
+ if PlaceContext::NonMutatingUse(NonMutatingUseContext::Move) == context {
+ let mut borrowed_locals = self.borrowed_locals.borrow_mut();
+ borrowed_locals.seek_before_primary_effect(loc);
+ if !borrowed_locals.contains(local) {
+ self.trans.kill(local);
+ }
+ }
+ }
+}