summaryrefslogtreecommitdiffstats
path: root/compiler/rustc_mir_transform
diff options
context:
space:
mode:
Diffstat (limited to 'compiler/rustc_mir_transform')
-rw-r--r--compiler/rustc_mir_transform/messages.ftl2
-rw-r--r--compiler/rustc_mir_transform/src/abort_unwinding_calls.rs2
-rw-r--r--compiler/rustc_mir_transform/src/add_call_guards.rs6
-rw-r--r--compiler/rustc_mir_transform/src/add_subtyping_projections.rs70
-rw-r--r--compiler/rustc_mir_transform/src/check_alignment.rs16
-rw-r--r--compiler/rustc_mir_transform/src/check_unsafety.rs12
-rw-r--r--compiler/rustc_mir_transform/src/cleanup_post_borrowck.rs6
-rw-r--r--compiler/rustc_mir_transform/src/const_debuginfo.rs4
-rw-r--r--compiler/rustc_mir_transform/src/const_goto.rs4
-rw-r--r--compiler/rustc_mir_transform/src/const_prop.rs204
-rw-r--r--compiler/rustc_mir_transform/src/const_prop_lint.rs72
-rw-r--r--compiler/rustc_mir_transform/src/copy_prop.rs2
-rw-r--r--compiler/rustc_mir_transform/src/coverage/counters.rs101
-rw-r--r--compiler/rustc_mir_transform/src/coverage/debug.rs802
-rw-r--r--compiler/rustc_mir_transform/src/coverage/graph.rs19
-rw-r--r--compiler/rustc_mir_transform/src/coverage/mod.rs158
-rw-r--r--compiler/rustc_mir_transform/src/coverage/query.rs123
-rw-r--r--compiler/rustc_mir_transform/src/coverage/spans.rs101
-rw-r--r--compiler/rustc_mir_transform/src/dataflow_const_prop.rs509
-rw-r--r--compiler/rustc_mir_transform/src/dead_store_elimination.rs6
-rw-r--r--compiler/rustc_mir_transform/src/deduplicate_blocks.rs8
-rw-r--r--compiler/rustc_mir_transform/src/dest_prop.rs4
-rw-r--r--compiler/rustc_mir_transform/src/elaborate_drops.rs19
-rw-r--r--compiler/rustc_mir_transform/src/errors.rs41
-rw-r--r--compiler/rustc_mir_transform/src/generator.rs145
-rw-r--r--compiler/rustc_mir_transform/src/gvn.rs539
-rw-r--r--compiler/rustc_mir_transform/src/inline.rs83
-rw-r--r--compiler/rustc_mir_transform/src/inline/cycle.rs2
-rw-r--r--compiler/rustc_mir_transform/src/instsimplify.rs6
-rw-r--r--compiler/rustc_mir_transform/src/large_enums.rs16
-rw-r--r--compiler/rustc_mir_transform/src/lib.rs28
-rw-r--r--compiler/rustc_mir_transform/src/lower_intrinsics.rs39
-rw-r--r--compiler/rustc_mir_transform/src/match_branches.rs12
-rw-r--r--compiler/rustc_mir_transform/src/normalize_array_len.rs4
-rw-r--r--compiler/rustc_mir_transform/src/pass_manager.rs11
-rw-r--r--compiler/rustc_mir_transform/src/ref_prop.rs2
-rw-r--r--compiler/rustc_mir_transform/src/remove_noop_landing_pads.rs6
-rw-r--r--compiler/rustc_mir_transform/src/remove_zsts.rs12
-rw-r--r--compiler/rustc_mir_transform/src/required_consts.rs18
-rw-r--r--compiler/rustc_mir_transform/src/reveal_all.rs40
-rw-r--r--compiler/rustc_mir_transform/src/separate_const_switch.rs8
-rw-r--r--compiler/rustc_mir_transform/src/shim.rs28
-rw-r--r--compiler/rustc_mir_transform/src/simplify.rs43
-rw-r--r--compiler/rustc_mir_transform/src/simplify_branches.rs4
-rw-r--r--compiler/rustc_mir_transform/src/simplify_comparison_integral.rs4
-rw-r--r--compiler/rustc_mir_transform/src/sroa.rs93
-rw-r--r--compiler/rustc_mir_transform/src/ssa.rs52
-rw-r--r--compiler/rustc_mir_transform/src/unreachable_prop.rs6
48 files changed, 1635 insertions, 1857 deletions
diff --git a/compiler/rustc_mir_transform/messages.ftl b/compiler/rustc_mir_transform/messages.ftl
index 2598eb2ed..5a99afc45 100644
--- a/compiler/rustc_mir_transform/messages.ftl
+++ b/compiler/rustc_mir_transform/messages.ftl
@@ -42,8 +42,6 @@ mir_transform_requires_unsafe = {$details} is unsafe and requires unsafe {$op_in
}
.not_inherited = items do not inherit unsafety from separate enclosing items
-mir_transform_simd_shuffle_last_const = last argument of `simd_shuffle` is required to be a `const` item
-
mir_transform_target_feature_call_label = call to function with `#[target_feature]`
mir_transform_target_feature_call_note = can only be called if the required target features are available
diff --git a/compiler/rustc_mir_transform/src/abort_unwinding_calls.rs b/compiler/rustc_mir_transform/src/abort_unwinding_calls.rs
index 5aed89139..4500bb7ff 100644
--- a/compiler/rustc_mir_transform/src/abort_unwinding_calls.rs
+++ b/compiler/rustc_mir_transform/src/abort_unwinding_calls.rs
@@ -104,7 +104,7 @@ impl<'tcx> MirPass<'tcx> for AbortUnwindingCalls {
for id in calls_to_terminate {
let cleanup = body.basic_blocks_mut()[id].terminator_mut().unwind_mut().unwrap();
- *cleanup = UnwindAction::Terminate;
+ *cleanup = UnwindAction::Terminate(UnwindTerminateReason::Abi);
}
for id in cleanups_to_remove {
diff --git a/compiler/rustc_mir_transform/src/add_call_guards.rs b/compiler/rustc_mir_transform/src/add_call_guards.rs
index fb4705e07..b814fbf32 100644
--- a/compiler/rustc_mir_transform/src/add_call_guards.rs
+++ b/compiler/rustc_mir_transform/src/add_call_guards.rs
@@ -53,8 +53,10 @@ impl AddCallGuards {
kind: TerminatorKind::Call { target: Some(ref mut destination), unwind, .. },
source_info,
}) if pred_count[*destination] > 1
- && (matches!(unwind, UnwindAction::Cleanup(_) | UnwindAction::Terminate)
- || self == &AllCallEdges) =>
+ && (matches!(
+ unwind,
+ UnwindAction::Cleanup(_) | UnwindAction::Terminate(_)
+ ) || self == &AllCallEdges) =>
{
// It's a critical edge, break it
let call_guard = BasicBlockData {
diff --git a/compiler/rustc_mir_transform/src/add_subtyping_projections.rs b/compiler/rustc_mir_transform/src/add_subtyping_projections.rs
new file mode 100644
index 000000000..e5be7c0ca
--- /dev/null
+++ b/compiler/rustc_mir_transform/src/add_subtyping_projections.rs
@@ -0,0 +1,70 @@
+use crate::MirPass;
+use rustc_index::IndexVec;
+use rustc_middle::mir::patch::MirPatch;
+use rustc_middle::mir::visit::MutVisitor;
+use rustc_middle::mir::*;
+use rustc_middle::ty::TyCtxt;
+
+pub struct Subtyper;
+
+pub struct SubTypeChecker<'a, 'tcx> {
+ tcx: TyCtxt<'tcx>,
+ patcher: MirPatch<'tcx>,
+ local_decls: &'a IndexVec<Local, LocalDecl<'tcx>>,
+}
+
+impl<'a, 'tcx> MutVisitor<'tcx> for SubTypeChecker<'a, 'tcx> {
+ fn tcx(&self) -> TyCtxt<'tcx> {
+ self.tcx
+ }
+
+ fn visit_assign(
+ &mut self,
+ place: &mut Place<'tcx>,
+ rvalue: &mut Rvalue<'tcx>,
+ location: Location,
+ ) {
+ // We don't need to do anything for deref temps as they are
+ // not part of the source code, but used for desugaring purposes.
+ if self.local_decls[place.local].is_deref_temp() {
+ return;
+ }
+ let mut place_ty = place.ty(self.local_decls, self.tcx).ty;
+ let mut rval_ty = rvalue.ty(self.local_decls, self.tcx);
+ // Not erasing this causes `Free Regions` errors in validator,
+ // when rval is `ReStatic`.
+ rval_ty = self.tcx.erase_regions_ty(rval_ty);
+ place_ty = self.tcx.erase_regions(place_ty);
+ if place_ty != rval_ty {
+ let temp = self
+ .patcher
+ .new_temp(rval_ty, self.local_decls[place.as_ref().local].source_info.span);
+ let new_place = Place::from(temp);
+ self.patcher.add_assign(location, new_place, rvalue.clone());
+ let subtyped = new_place.project_deeper(&[ProjectionElem::Subtype(place_ty)], self.tcx);
+ *rvalue = Rvalue::Use(Operand::Move(subtyped));
+ }
+ }
+}
+
+// Aim here is to do this kind of transformation:
+//
+// let place: place_ty = rval;
+// // gets transformed to
+// let temp: rval_ty = rval;
+// let place: place_ty = temp as place_ty;
+pub fn subtype_finder<'tcx>(tcx: TyCtxt<'tcx>, body: &mut Body<'tcx>) {
+ let patch = MirPatch::new(body);
+ let mut checker = SubTypeChecker { tcx, patcher: patch, local_decls: &body.local_decls };
+
+ for (bb, data) in body.basic_blocks.as_mut_preserves_cfg().iter_enumerated_mut() {
+ checker.visit_basic_block_data(bb, data);
+ }
+ checker.patcher.apply(body);
+}
+
+impl<'tcx> MirPass<'tcx> for Subtyper {
+ fn run_pass(&self, tcx: TyCtxt<'tcx>, body: &mut Body<'tcx>) {
+ subtype_finder(tcx, body);
+ }
+}
diff --git a/compiler/rustc_mir_transform/src/check_alignment.rs b/compiler/rustc_mir_transform/src/check_alignment.rs
index 4892ace53..28765af20 100644
--- a/compiler/rustc_mir_transform/src/check_alignment.rs
+++ b/compiler/rustc_mir_transform/src/check_alignment.rs
@@ -4,7 +4,7 @@ use rustc_hir::lang_items::LangItem;
use rustc_index::IndexVec;
use rustc_middle::mir::*;
use rustc_middle::mir::{
- interpret::{ConstValue, Scalar},
+ interpret::Scalar,
visit::{PlaceContext, Visitor},
};
use rustc_middle::ty::{Ty, TyCtxt, TypeAndMut};
@@ -181,13 +181,10 @@ fn insert_alignment_check<'tcx>(
// Subtract 1 from the alignment to get the alignment mask
let alignment_mask =
local_decls.push(LocalDecl::with_source_info(tcx.types.usize, source_info)).into();
- let one = Operand::Constant(Box::new(Constant {
+ let one = Operand::Constant(Box::new(ConstOperand {
span: source_info.span,
user_ty: None,
- literal: ConstantKind::Val(
- ConstValue::Scalar(Scalar::from_target_usize(1, &tcx)),
- tcx.types.usize,
- ),
+ const_: Const::Val(ConstValue::Scalar(Scalar::from_target_usize(1, &tcx)), tcx.types.usize),
}));
block_data.statements.push(Statement {
source_info,
@@ -213,13 +210,10 @@ fn insert_alignment_check<'tcx>(
// Check if the alignment bits are all zero
let is_ok = local_decls.push(LocalDecl::with_source_info(tcx.types.bool, source_info)).into();
- let zero = Operand::Constant(Box::new(Constant {
+ let zero = Operand::Constant(Box::new(ConstOperand {
span: source_info.span,
user_ty: None,
- literal: ConstantKind::Val(
- ConstValue::Scalar(Scalar::from_target_usize(0, &tcx)),
- tcx.types.usize,
- ),
+ const_: Const::Val(ConstValue::Scalar(Scalar::from_target_usize(0, &tcx)), tcx.types.usize),
}));
block_data.statements.push(Statement {
source_info,
diff --git a/compiler/rustc_mir_transform/src/check_unsafety.rs b/compiler/rustc_mir_transform/src/check_unsafety.rs
index 58e9786ec..bacabc62e 100644
--- a/compiler/rustc_mir_transform/src/check_unsafety.rs
+++ b/compiler/rustc_mir_transform/src/check_unsafety.rs
@@ -57,8 +57,8 @@ impl<'tcx> Visitor<'tcx> for UnsafetyChecker<'_, 'tcx> {
| TerminatorKind::Yield { .. }
| TerminatorKind::Assert { .. }
| TerminatorKind::GeneratorDrop
- | TerminatorKind::Resume
- | TerminatorKind::Terminate
+ | TerminatorKind::UnwindResume
+ | TerminatorKind::UnwindTerminate(_)
| TerminatorKind::Return
| TerminatorKind::Unreachable
| TerminatorKind::FalseEdge { .. }
@@ -142,9 +142,9 @@ impl<'tcx> Visitor<'tcx> for UnsafetyChecker<'_, 'tcx> {
fn visit_operand(&mut self, op: &Operand<'tcx>, location: Location) {
if let Operand::Constant(constant) = op {
- let maybe_uneval = match constant.literal {
- ConstantKind::Val(..) | ConstantKind::Ty(_) => None,
- ConstantKind::Unevaluated(uv, _) => Some(uv),
+ let maybe_uneval = match constant.const_ {
+ Const::Val(..) | Const::Ty(_) => None,
+ Const::Unevaluated(uv, _) => Some(uv),
};
if let Some(uv) = maybe_uneval {
@@ -483,7 +483,7 @@ fn unsafety_check_result(tcx: TyCtxt<'_>, def: LocalDefId) -> &UnsafetyCheckResu
// `mir_built` force this.
let body = &tcx.mir_built(def).borrow();
- if body.is_custom_mir() {
+ if body.is_custom_mir() || body.tainted_by_errors.is_some() {
return tcx.arena.alloc(UnsafetyCheckResult {
violations: Vec::new(),
used_unsafe_blocks: Default::default(),
diff --git a/compiler/rustc_mir_transform/src/cleanup_post_borrowck.rs b/compiler/rustc_mir_transform/src/cleanup_post_borrowck.rs
index d435d3ee6..5b4bc4fa1 100644
--- a/compiler/rustc_mir_transform/src/cleanup_post_borrowck.rs
+++ b/compiler/rustc_mir_transform/src/cleanup_post_borrowck.rs
@@ -4,13 +4,13 @@
//!
//! - [`AscribeUserType`]
//! - [`FakeRead`]
-//! - [`Assign`] statements with a [`Shallow`] borrow
+//! - [`Assign`] statements with a [`Fake`] borrow
//!
//! [`AscribeUserType`]: rustc_middle::mir::StatementKind::AscribeUserType
//! [`Assign`]: rustc_middle::mir::StatementKind::Assign
//! [`FakeRead`]: rustc_middle::mir::StatementKind::FakeRead
//! [`Nop`]: rustc_middle::mir::StatementKind::Nop
-//! [`Shallow`]: rustc_middle::mir::BorrowKind::Shallow
+//! [`Fake`]: rustc_middle::mir::BorrowKind::Fake
use crate::MirPass;
use rustc_middle::mir::{Body, BorrowKind, Rvalue, StatementKind, TerminatorKind};
@@ -24,7 +24,7 @@ impl<'tcx> MirPass<'tcx> for CleanupPostBorrowck {
for statement in basic_block.statements.iter_mut() {
match statement.kind {
StatementKind::AscribeUserType(..)
- | StatementKind::Assign(box (_, Rvalue::Ref(_, BorrowKind::Shallow, _)))
+ | StatementKind::Assign(box (_, Rvalue::Ref(_, BorrowKind::Fake, _)))
| StatementKind::FakeRead(..) => statement.make_nop(),
_ => (),
}
diff --git a/compiler/rustc_mir_transform/src/const_debuginfo.rs b/compiler/rustc_mir_transform/src/const_debuginfo.rs
index f662ce645..40cd28254 100644
--- a/compiler/rustc_mir_transform/src/const_debuginfo.rs
+++ b/compiler/rustc_mir_transform/src/const_debuginfo.rs
@@ -4,7 +4,7 @@
use rustc_middle::{
mir::{
visit::{PlaceContext, Visitor},
- Body, Constant, Local, Location, Operand, Rvalue, StatementKind, VarDebugInfoContents,
+ Body, ConstOperand, Local, Location, Operand, Rvalue, StatementKind, VarDebugInfoContents,
},
ty::TyCtxt,
};
@@ -45,7 +45,7 @@ struct LocalUseVisitor {
local_assignment_locations: IndexVec<Local, Option<Location>>,
}
-fn find_optimization_opportunities<'tcx>(body: &Body<'tcx>) -> Vec<(Local, Constant<'tcx>)> {
+fn find_optimization_opportunities<'tcx>(body: &Body<'tcx>) -> Vec<(Local, ConstOperand<'tcx>)> {
let mut visitor = LocalUseVisitor {
local_mutating_uses: IndexVec::from_elem(0, &body.local_decls),
local_assignment_locations: IndexVec::from_elem(None, &body.local_decls),
diff --git a/compiler/rustc_mir_transform/src/const_goto.rs b/compiler/rustc_mir_transform/src/const_goto.rs
index e175f22d7..fd2d37dbe 100644
--- a/compiler/rustc_mir_transform/src/const_goto.rs
+++ b/compiler/rustc_mir_transform/src/const_goto.rs
@@ -96,10 +96,10 @@ impl<'tcx> Visitor<'tcx> for ConstGotoOptimizationFinder<'_, 'tcx> {
let (discr, targets) = target_bb_terminator.kind.as_switch()?;
if discr.place() == Some(*place) {
let switch_ty = place.ty(self.body.local_decls(), self.tcx).ty;
+ debug_assert_eq!(switch_ty, _const.ty());
// We now know that the Switch matches on the const place, and it is statementless
// Now find which value in the Switch matches the const value.
- let const_value =
- _const.literal.try_eval_bits(self.tcx, self.param_env, switch_ty)?;
+ let const_value = _const.const_.try_eval_bits(self.tcx, self.param_env)?;
let target_to_use_in_goto = targets.target_for_value(const_value);
self.optimizations.push(OptimizationToApply {
bb_with_goto: location.block,
diff --git a/compiler/rustc_mir_transform/src/const_prop.rs b/compiler/rustc_mir_transform/src/const_prop.rs
index 7529ed818..50443e739 100644
--- a/compiler/rustc_mir_transform/src/const_prop.rs
+++ b/compiler/rustc_mir_transform/src/const_prop.rs
@@ -15,15 +15,15 @@ use rustc_middle::mir::visit::{
use rustc_middle::mir::*;
use rustc_middle::ty::layout::{LayoutError, LayoutOf, LayoutOfHelpers, TyAndLayout};
use rustc_middle::ty::{self, GenericArgs, Instance, ParamEnv, Ty, TyCtxt, TypeVisitableExt};
-use rustc_span::{def_id::DefId, Span, DUMMY_SP};
+use rustc_span::{def_id::DefId, Span};
use rustc_target::abi::{self, Align, HasDataLayout, Size, TargetDataLayout};
use rustc_target::spec::abi::Abi as CallAbi;
+use crate::dataflow_const_prop::Patch;
use crate::MirPass;
use rustc_const_eval::interpret::{
- self, compile_time_machine, AllocId, ConstAllocation, ConstValue, FnArg, Frame, ImmTy,
- Immediate, InterpCx, InterpResult, LocalValue, MemoryKind, OpTy, PlaceTy, Pointer, Scalar,
- StackPopCleanup,
+ self, compile_time_machine, AllocId, ConstAllocation, FnArg, Frame, ImmTy, Immediate, InterpCx,
+ InterpResult, MemoryKind, OpTy, PlaceTy, Pointer, Scalar, StackPopCleanup,
};
/// The maximum number of bytes that we'll allocate space for a local or the return value.
@@ -33,32 +33,30 @@ const MAX_ALLOC_LIMIT: u64 = 1024;
/// Macro for machine-specific `InterpError` without allocation.
/// (These will never be shown to the user, but they help diagnose ICEs.)
-macro_rules! throw_machine_stop_str {
- ($($tt:tt)*) => {{
- // We make a new local type for it. The type itself does not carry any information,
- // but its vtable (for the `MachineStopType` trait) does.
- #[derive(Debug)]
- struct Zst;
- // Printing this type shows the desired string.
- impl std::fmt::Display for Zst {
- fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
- write!(f, $($tt)*)
- }
+pub(crate) macro throw_machine_stop_str($($tt:tt)*) {{
+ // We make a new local type for it. The type itself does not carry any information,
+ // but its vtable (for the `MachineStopType` trait) does.
+ #[derive(Debug)]
+ struct Zst;
+ // Printing this type shows the desired string.
+ impl std::fmt::Display for Zst {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ write!(f, $($tt)*)
}
+ }
- impl rustc_middle::mir::interpret::MachineStopType for Zst {
- fn diagnostic_message(&self) -> rustc_errors::DiagnosticMessage {
- self.to_string().into()
- }
-
- fn add_args(
- self: Box<Self>,
- _: &mut dyn FnMut(std::borrow::Cow<'static, str>, rustc_errors::DiagnosticArgValue<'static>),
- ) {}
+ impl rustc_middle::mir::interpret::MachineStopType for Zst {
+ fn diagnostic_message(&self) -> rustc_errors::DiagnosticMessage {
+ self.to_string().into()
}
- throw_machine_stop!(Zst)
- }};
-}
+
+ fn add_args(
+ self: Box<Self>,
+ _: &mut dyn FnMut(std::borrow::Cow<'static, str>, rustc_errors::DiagnosticArgValue<'static>),
+ ) {}
+ }
+ throw_machine_stop!(Zst)
+}}
pub struct ConstProp;
@@ -86,9 +84,9 @@ impl<'tcx> MirPass<'tcx> for ConstProp {
return;
}
- let is_generator = tcx.type_of(def_id.to_def_id()).instantiate_identity().is_generator();
// FIXME(welseywiser) const prop doesn't work on generators because of query cycles
// computing their layout.
+ let is_generator = def_kind == DefKind::Generator;
if is_generator {
trace!("ConstProp skipped for generator {:?}", def_id);
return;
@@ -96,33 +94,22 @@ impl<'tcx> MirPass<'tcx> for ConstProp {
trace!("ConstProp starting for {:?}", def_id);
- let dummy_body = &Body::new(
- body.source,
- (*body.basic_blocks).to_owned(),
- body.source_scopes.clone(),
- body.local_decls.clone(),
- Default::default(),
- body.arg_count,
- Default::default(),
- body.span,
- body.generator_kind(),
- body.tainted_by_errors,
- );
-
// FIXME(oli-obk, eddyb) Optimize locals (or even local paths) to hold
// constants, instead of just checking for const-folding succeeding.
// That would require a uniform one-def no-mutation analysis
// and RPO (or recursing when needing the value of a local).
- let mut optimization_finder = ConstPropagator::new(body, dummy_body, tcx);
+ let mut optimization_finder = ConstPropagator::new(body, tcx);
// Traverse the body in reverse post-order, to ensure that `FullConstProp` locals are
// assigned before being read.
- let rpo = body.basic_blocks.reverse_postorder().to_vec();
- for bb in rpo {
- let data = &mut body.basic_blocks.as_mut_preserves_cfg()[bb];
+ for &bb in body.basic_blocks.reverse_postorder() {
+ let data = &body.basic_blocks[bb];
optimization_finder.visit_basic_block_data(bb, data);
}
+ let mut patch = optimization_finder.patch;
+ patch.visit_body_preserves_cfg(body);
+
trace!("ConstProp done for {:?}", def_id);
}
}
@@ -146,14 +133,17 @@ impl ConstPropMachine<'_, '_> {
impl<'mir, 'tcx> interpret::Machine<'mir, 'tcx> for ConstPropMachine<'mir, 'tcx> {
compile_time_machine!(<'mir, 'tcx>);
+
const PANIC_ON_ALLOC_FAIL: bool = true; // all allocations are small (see `MAX_ALLOC_LIMIT`)
+ const POST_MONO_CHECKS: bool = false; // this MIR is still generic!
+
type MemoryKind = !;
#[inline(always)]
fn enforce_alignment(_ecx: &InterpCx<'mir, 'tcx, Self>) -> CheckAlignment {
// We do not check for alignment to avoid having to carry an `Align`
- // in `ConstValue::ByRef`.
+ // in `ConstValue::Indirect`.
CheckAlignment::No
}
@@ -180,6 +170,10 @@ impl<'mir, 'tcx> interpret::Machine<'mir, 'tcx> for ConstPropMachine<'mir, 'tcx>
throw_machine_stop_str!("calling functions isn't supported in ConstProp")
}
+ fn panic_nounwind(_ecx: &mut InterpCx<'mir, 'tcx, Self>, _msg: &str) -> InterpResult<'tcx> {
+ throw_machine_stop_str!("panicking isn't supported in ConstProp")
+ }
+
fn find_mir_or_eval_fn(
_ecx: &mut InterpCx<'mir, 'tcx, Self>,
_instance: ty::Instance<'tcx>,
@@ -216,16 +210,16 @@ impl<'mir, 'tcx> interpret::Machine<'mir, 'tcx> for ConstPropMachine<'mir, 'tcx>
_bin_op: BinOp,
_left: &ImmTy<'tcx>,
_right: &ImmTy<'tcx>,
- ) -> InterpResult<'tcx, (Scalar, bool, Ty<'tcx>)> {
+ ) -> InterpResult<'tcx, (ImmTy<'tcx>, bool)> {
// We can't do this because aliasing of memory can differ between const eval and llvm
throw_machine_stop_str!("pointer arithmetic or comparisons aren't supported in ConstProp")
}
- fn access_local_mut<'a>(
+ fn before_access_local_mut<'a>(
ecx: &'a mut InterpCx<'mir, 'tcx, Self>,
frame: usize,
local: Local,
- ) -> InterpResult<'tcx, &'a mut interpret::Operand<Self::Provenance>> {
+ ) -> InterpResult<'tcx> {
assert_eq!(frame, 0);
match ecx.machine.can_const_prop[local] {
ConstPropMode::NoPropagation => {
@@ -238,7 +232,7 @@ impl<'mir, 'tcx> interpret::Machine<'mir, 'tcx> for ConstPropMachine<'mir, 'tcx>
}
ConstPropMode::FullConstProp => {}
}
- ecx.machine.stack[frame].locals[local].access_mut()
+ Ok(())
}
fn before_access_global(
@@ -298,6 +292,7 @@ struct ConstPropagator<'mir, 'tcx> {
tcx: TyCtxt<'tcx>,
param_env: ParamEnv<'tcx>,
local_decls: &'mir IndexSlice<Local, LocalDecl<'tcx>>,
+ patch: Patch<'tcx>,
}
impl<'tcx> LayoutOfHelpers<'tcx> for ConstPropagator<'_, 'tcx> {
@@ -331,11 +326,7 @@ impl<'tcx> ty::layout::HasParamEnv<'tcx> for ConstPropagator<'_, 'tcx> {
}
impl<'mir, 'tcx> ConstPropagator<'mir, 'tcx> {
- fn new(
- body: &Body<'tcx>,
- dummy_body: &'mir Body<'tcx>,
- tcx: TyCtxt<'tcx>,
- ) -> ConstPropagator<'mir, 'tcx> {
+ fn new(body: &'mir Body<'tcx>, tcx: TyCtxt<'tcx>) -> ConstPropagator<'mir, 'tcx> {
let def_id = body.source.def_id();
let args = &GenericArgs::identity_for_item(tcx, def_id);
let param_env = tcx.param_env_reveal_all_normalized(def_id);
@@ -366,19 +357,33 @@ impl<'mir, 'tcx> ConstPropagator<'mir, 'tcx> {
ecx.push_stack_frame(
Instance::new(def_id, args),
- dummy_body,
+ body,
&ret,
StackPopCleanup::Root { cleanup: false },
)
.expect("failed to push initial stack frame");
- ConstPropagator { ecx, tcx, param_env, local_decls: &dummy_body.local_decls }
+ for local in body.local_decls.indices() {
+ // Mark everything initially live.
+ // This is somewhat dicey since some of them might be unsized and it is incoherent to
+ // mark those as live... We rely on `local_to_place`/`local_to_op` in the interpreter
+ // stopping us before those unsized immediates can cause issues deeper in the
+ // interpreter.
+ ecx.frame_mut().locals[local].make_live_uninit();
+ }
+
+ let patch = Patch::new(tcx);
+ ConstPropagator { ecx, tcx, param_env, local_decls: &body.local_decls, patch }
}
fn get_const(&self, place: Place<'tcx>) -> Option<OpTy<'tcx>> {
let op = match self.ecx.eval_place_to_op(place, None) {
Ok(op) => {
- if matches!(*op, interpret::Operand::Immediate(Immediate::Uninit)) {
+ if op
+ .as_mplace_or_imm()
+ .right()
+ .is_some_and(|imm| matches!(*imm, Immediate::Uninit))
+ {
// Make sure nobody accidentally uses this value.
return None;
}
@@ -401,17 +406,10 @@ impl<'mir, 'tcx> ConstPropagator<'mir, 'tcx> {
/// Remove `local` from the pool of `Locals`. Allows writing to them,
/// but not reading from them anymore.
fn remove_const(ecx: &mut InterpCx<'mir, 'tcx, ConstPropMachine<'mir, 'tcx>>, local: Local) {
- ecx.frame_mut().locals[local].value =
- LocalValue::Live(interpret::Operand::Immediate(interpret::Immediate::Uninit));
+ ecx.frame_mut().locals[local].make_live_uninit();
ecx.machine.written_only_inside_own_block_locals.remove(&local);
}
- fn propagate_operand(&mut self, operand: &mut Operand<'tcx>) {
- if let Some(place) = operand.place() && let Some(op) = self.replace_with_const(place) {
- *operand = op;
- }
- }
-
fn check_rvalue(&mut self, rvalue: &Rvalue<'tcx>) -> Option<()> {
// Perform any special handling for specific Rvalue types.
// Generally, checks here fall into one of two categories:
@@ -527,16 +525,7 @@ impl<'mir, 'tcx> ConstPropagator<'mir, 'tcx> {
}
}
- /// Creates a new `Operand::Constant` from a `Scalar` value
- fn operand_from_scalar(&self, scalar: Scalar, ty: Ty<'tcx>) -> Operand<'tcx> {
- Operand::Constant(Box::new(Constant {
- span: DUMMY_SP,
- user_ty: None,
- literal: ConstantKind::from_scalar(self.tcx, scalar, ty),
- }))
- }
-
- fn replace_with_const(&mut self, place: Place<'tcx>) -> Option<Operand<'tcx>> {
+ fn replace_with_const(&mut self, place: Place<'tcx>) -> Option<Const<'tcx>> {
// This will return None if the above `const_prop` invocation only "wrote" a
// type whose creation requires no write. E.g. a generator whose initial state
// consists solely of uninitialized memory (so it doesn't capture any locals).
@@ -546,31 +535,26 @@ impl<'mir, 'tcx> ConstPropagator<'mir, 'tcx> {
}
trace!("replacing {:?} with {:?}", place, value);
- // FIXME> figure out what to do when read_immediate_raw fails
+ // FIXME: figure out what to do when read_immediate_raw fails
let imm = self.ecx.read_immediate_raw(&value).ok()?;
let Right(imm) = imm else { return None };
match *imm {
Immediate::Scalar(scalar) if scalar.try_to_int().is_ok() => {
- Some(self.operand_from_scalar(scalar, value.layout.ty))
+ Some(Const::from_scalar(self.tcx, scalar, value.layout.ty))
}
Immediate::ScalarPair(l, r) if l.try_to_int().is_ok() && r.try_to_int().is_ok() => {
- let alloc = self
+ let alloc_id = self
.ecx
.intern_with_temp_alloc(value.layout, |ecx, dest| {
ecx.write_immediate(*imm, dest)
})
.ok()?;
- let literal = ConstantKind::Val(
- ConstValue::ByRef { alloc, offset: Size::ZERO },
+ Some(Const::Val(
+ ConstValue::Indirect { alloc_id, offset: Size::ZERO },
value.layout.ty,
- );
- Some(Operand::Constant(Box::new(Constant {
- span: DUMMY_SP,
- user_ty: None,
- literal,
- })))
+ ))
}
// Scalars or scalar pairs that contain undef values are assumed to not have
// successfully evaluated and are thus not propagated.
@@ -699,7 +683,7 @@ impl<'tcx> Visitor<'tcx> for CanConstProp {
// These can't ever be propagated under any scheme, as we can't reason about indirect
// mutation.
| NonMutatingUse(NonMutatingUseContext::SharedBorrow)
- | NonMutatingUse(NonMutatingUseContext::ShallowBorrow)
+ | NonMutatingUse(NonMutatingUseContext::FakeBorrow)
| NonMutatingUse(NonMutatingUseContext::AddressOf)
| MutatingUse(MutatingUseContext::Borrow)
| MutatingUse(MutatingUseContext::AddressOf) => {
@@ -712,39 +696,29 @@ impl<'tcx> Visitor<'tcx> for CanConstProp {
}
}
-impl<'tcx> MutVisitor<'tcx> for ConstPropagator<'_, 'tcx> {
- fn tcx(&self) -> TyCtxt<'tcx> {
- self.tcx
- }
-
- fn visit_operand(&mut self, operand: &mut Operand<'tcx>, location: Location) {
+impl<'tcx> Visitor<'tcx> for ConstPropagator<'_, 'tcx> {
+ fn visit_operand(&mut self, operand: &Operand<'tcx>, location: Location) {
self.super_operand(operand, location);
- self.propagate_operand(operand)
+ if let Some(place) = operand.place() && let Some(value) = self.replace_with_const(place) {
+ self.patch.before_effect.insert((location, place), value);
+ }
}
- fn process_projection_elem(
+ fn visit_projection_elem(
&mut self,
+ _: PlaceRef<'tcx>,
elem: PlaceElem<'tcx>,
- _: Location,
- ) -> Option<PlaceElem<'tcx>> {
+ _: PlaceContext,
+ location: Location,
+ ) {
if let PlaceElem::Index(local) = elem
- && let Some(value) = self.get_const(local.into())
- && let interpret::Operand::Immediate(Immediate::Scalar(scalar)) = *value
- && let Ok(offset) = scalar.to_target_usize(&self.tcx)
- && let Some(min_length) = offset.checked_add(1)
+ && let Some(value) = self.replace_with_const(local.into())
{
- Some(PlaceElem::ConstantIndex { offset, min_length, from_end: false })
- } else {
- None
+ self.patch.before_effect.insert((location, local.into()), value);
}
}
- fn visit_assign(
- &mut self,
- place: &mut Place<'tcx>,
- rvalue: &mut Rvalue<'tcx>,
- location: Location,
- ) {
+ fn visit_assign(&mut self, place: &Place<'tcx>, rvalue: &Rvalue<'tcx>, location: Location) {
self.super_assign(place, rvalue, location);
let Some(()) = self.check_rvalue(rvalue) else { return };
@@ -757,11 +731,11 @@ impl<'tcx> MutVisitor<'tcx> for ConstPropagator<'_, 'tcx> {
if let Some(()) = self.eval_rvalue_with_identities(rvalue, *place) {
// If this was already an evaluated constant, keep it.
if let Rvalue::Use(Operand::Constant(c)) = rvalue
- && let ConstantKind::Val(..) = c.literal
+ && let Const::Val(..) = c.const_
{
trace!("skipping replace of Rvalue::Use({:?} because it is already a const", c);
} else if let Some(operand) = self.replace_with_const(*place) {
- *rvalue = Rvalue::Use(operand);
+ self.patch.assignments.insert(location, operand);
}
} else {
// Const prop failed, so erase the destination, ensuring that whatever happens
@@ -785,7 +759,7 @@ impl<'tcx> MutVisitor<'tcx> for ConstPropagator<'_, 'tcx> {
}
}
- fn visit_statement(&mut self, statement: &mut Statement<'tcx>, location: Location) {
+ fn visit_statement(&mut self, statement: &Statement<'tcx>, location: Location) {
trace!("visit_statement: {:?}", statement);
// We want to evaluate operands before any change to the assigned-to value,
@@ -829,7 +803,7 @@ impl<'tcx> MutVisitor<'tcx> for ConstPropagator<'_, 'tcx> {
}
}
- fn visit_basic_block_data(&mut self, block: BasicBlock, data: &mut BasicBlockData<'tcx>) {
+ fn visit_basic_block_data(&mut self, block: BasicBlock, data: &BasicBlockData<'tcx>) {
self.super_basic_block_data(block, data);
// We remove all Locals which are restricted in propagation to their containing blocks and
diff --git a/compiler/rustc_mir_transform/src/const_prop_lint.rs b/compiler/rustc_mir_transform/src/const_prop_lint.rs
index ac07c2576..64e262c6c 100644
--- a/compiler/rustc_mir_transform/src/const_prop_lint.rs
+++ b/compiler/rustc_mir_transform/src/const_prop_lint.rs
@@ -7,7 +7,7 @@ use either::Left;
use rustc_const_eval::interpret::Immediate;
use rustc_const_eval::interpret::{
- self, InterpCx, InterpResult, LocalValue, MemoryKind, OpTy, Scalar, StackPopCleanup,
+ InterpCx, InterpResult, MemoryKind, OpTy, Scalar, StackPopCleanup,
};
use rustc_const_eval::ReportErrorExt;
use rustc_hir::def::DefKind;
@@ -39,6 +39,10 @@ pub struct ConstProp;
impl<'tcx> MirLint<'tcx> for ConstProp {
fn run_lint(&self, tcx: TyCtxt<'tcx>, body: &Body<'tcx>) {
+ if body.tainted_by_errors.is_some() {
+ return;
+ }
+
// will be evaluated by miri and produce its errors there
if body.source.promoted.is_some() {
return;
@@ -101,25 +105,12 @@ impl<'tcx> MirLint<'tcx> for ConstProp {
trace!("ConstProp starting for {:?}", def_id);
- let dummy_body = &Body::new(
- body.source,
- (*body.basic_blocks).to_owned(),
- body.source_scopes.clone(),
- body.local_decls.clone(),
- Default::default(),
- body.arg_count,
- Default::default(),
- body.span,
- body.generator_kind(),
- body.tainted_by_errors,
- );
-
// FIXME(oli-obk, eddyb) Optimize locals (or even local paths) to hold
// constants, instead of just checking for const-folding succeeding.
// That would require a uniform one-def no-mutation analysis
// and RPO (or recursing when needing the value of a local).
- let mut optimization_finder = ConstPropagator::new(body, dummy_body, tcx);
- optimization_finder.visit_body(body);
+ let mut linter = ConstPropagator::new(body, tcx);
+ linter.visit_body(body);
trace!("ConstProp done for {:?}", def_id);
}
@@ -165,11 +156,7 @@ impl<'tcx> ty::layout::HasParamEnv<'tcx> for ConstPropagator<'_, 'tcx> {
}
impl<'mir, 'tcx> ConstPropagator<'mir, 'tcx> {
- fn new(
- body: &Body<'tcx>,
- dummy_body: &'mir Body<'tcx>,
- tcx: TyCtxt<'tcx>,
- ) -> ConstPropagator<'mir, 'tcx> {
+ fn new(body: &'mir Body<'tcx>, tcx: TyCtxt<'tcx>) -> ConstPropagator<'mir, 'tcx> {
let def_id = body.source.def_id();
let args = &GenericArgs::identity_for_item(tcx, def_id);
let param_env = tcx.param_env_reveal_all_normalized(def_id);
@@ -200,12 +187,21 @@ impl<'mir, 'tcx> ConstPropagator<'mir, 'tcx> {
ecx.push_stack_frame(
Instance::new(def_id, args),
- dummy_body,
+ body,
&ret,
StackPopCleanup::Root { cleanup: false },
)
.expect("failed to push initial stack frame");
+ for local in body.local_decls.indices() {
+ // Mark everything initially live.
+ // This is somewhat dicey since some of them might be unsized and it is incoherent to
+ // mark those as live... We rely on `local_to_place`/`local_to_op` in the interpreter
+ // stopping us before those unsized immediates can cause issues deeper in the
+ // interpreter.
+ ecx.frame_mut().locals[local].make_live_uninit();
+ }
+
ConstPropagator {
ecx,
tcx,
@@ -226,7 +222,11 @@ impl<'mir, 'tcx> ConstPropagator<'mir, 'tcx> {
fn get_const(&self, place: Place<'tcx>) -> Option<OpTy<'tcx>> {
let op = match self.ecx.eval_place_to_op(place, None) {
Ok(op) => {
- if matches!(*op, interpret::Operand::Immediate(Immediate::Uninit)) {
+ if op
+ .as_mplace_or_imm()
+ .right()
+ .is_some_and(|imm| matches!(*imm, Immediate::Uninit))
+ {
// Make sure nobody accidentally uses this value.
return None;
}
@@ -249,8 +249,7 @@ impl<'mir, 'tcx> ConstPropagator<'mir, 'tcx> {
/// Remove `local` from the pool of `Locals`. Allows writing to them,
/// but not reading from them anymore.
fn remove_const(ecx: &mut InterpCx<'mir, 'tcx, ConstPropMachine<'mir, 'tcx>>, local: Local) {
- ecx.frame_mut().locals[local].value =
- LocalValue::Live(interpret::Operand::Immediate(interpret::Immediate::Uninit));
+ ecx.frame_mut().locals[local].make_live_uninit();
ecx.machine.written_only_inside_own_block_locals.remove(&local);
}
@@ -273,7 +272,8 @@ impl<'mir, 'tcx> ConstPropagator<'mir, 'tcx> {
// dedicated error variants should be introduced instead.
assert!(
!error.kind().formatted_string(),
- "const-prop encountered formatting error: {error:?}",
+ "const-prop encountered formatting error: {}",
+ self.ecx.format_error(error),
);
None
}
@@ -281,7 +281,7 @@ impl<'mir, 'tcx> ConstPropagator<'mir, 'tcx> {
}
/// Returns the value, if any, of evaluating `c`.
- fn eval_constant(&mut self, c: &Constant<'tcx>, location: Location) -> Option<OpTy<'tcx>> {
+ fn eval_constant(&mut self, c: &ConstOperand<'tcx>, location: Location) -> Option<OpTy<'tcx>> {
// FIXME we need to revisit this for #67176
if c.has_param() {
return None;
@@ -293,7 +293,7 @@ impl<'mir, 'tcx> ConstPropagator<'mir, 'tcx> {
// that the `RevealAll` pass has happened and that the body's consts
// are normalized, so any call to resolve before that needs to be
// manually normalized.
- let val = self.tcx.try_normalize_erasing_regions(self.param_env, c.literal).ok()?;
+ let val = self.tcx.try_normalize_erasing_regions(self.param_env, c.const_).ok()?;
self.use_ecx(location, |this| this.ecx.eval_mir_constant(&val, Some(c.span), None))
}
@@ -322,7 +322,7 @@ impl<'mir, 'tcx> ConstPropagator<'mir, 'tcx> {
fn check_unary_op(&mut self, op: UnOp, arg: &Operand<'tcx>, location: Location) -> Option<()> {
if let (val, true) = self.use_ecx(location, |this| {
let val = this.ecx.read_immediate(&this.ecx.eval_operand(arg, None)?)?;
- let (_res, overflow, _ty) = this.ecx.overflowing_unary_op(op, &val)?;
+ let (_res, overflow) = this.ecx.overflowing_unary_op(op, &val)?;
Ok((val, overflow))
})? {
// `AssertKind` only has an `OverflowNeg` variant, so make sure that is
@@ -390,7 +390,7 @@ impl<'mir, 'tcx> ConstPropagator<'mir, 'tcx> {
if let (Some(l), Some(r)) = (l, r) {
// The remaining operators are handled through `overflowing_binary_op`.
if self.use_ecx(location, |this| {
- let (_res, overflow, _ty) = this.ecx.overflowing_binary_op(op, &l, &r)?;
+ let (_res, overflow) = this.ecx.overflowing_binary_op(op, &l, &r)?;
Ok(overflow)
})? {
let source_info = self.body().source_info(location);
@@ -580,7 +580,7 @@ impl<'tcx> Visitor<'tcx> for ConstPropagator<'_, 'tcx> {
self.super_operand(operand, location);
}
- fn visit_constant(&mut self, constant: &Constant<'tcx>, location: Location) {
+ fn visit_constant(&mut self, constant: &ConstOperand<'tcx>, location: Location) {
trace!("visit_constant: {:?}", constant);
self.super_constant(constant, location);
self.eval_constant(constant, location);
@@ -645,12 +645,12 @@ impl<'tcx> Visitor<'tcx> for ConstPropagator<'_, 'tcx> {
}
StatementKind::StorageLive(local) => {
let frame = self.ecx.frame_mut();
- frame.locals[local].value =
- LocalValue::Live(interpret::Operand::Immediate(interpret::Immediate::Uninit));
+ frame.locals[local].make_live_uninit();
}
StatementKind::StorageDead(local) => {
let frame = self.ecx.frame_mut();
- frame.locals[local].value = LocalValue::Dead;
+ // We don't actually track liveness, so the local remains live. But forget its value.
+ frame.locals[local].make_live_uninit();
}
_ => {}
}
@@ -678,8 +678,8 @@ impl<'tcx> Visitor<'tcx> for ConstPropagator<'_, 'tcx> {
}
// None of these have Operands to const-propagate.
TerminatorKind::Goto { .. }
- | TerminatorKind::Resume
- | TerminatorKind::Terminate
+ | TerminatorKind::UnwindResume
+ | TerminatorKind::UnwindTerminate(_)
| TerminatorKind::Return
| TerminatorKind::Unreachable
| TerminatorKind::Drop { .. }
diff --git a/compiler/rustc_mir_transform/src/copy_prop.rs b/compiler/rustc_mir_transform/src/copy_prop.rs
index 9a3798eea..9c38a6f81 100644
--- a/compiler/rustc_mir_transform/src/copy_prop.rs
+++ b/compiler/rustc_mir_transform/src/copy_prop.rs
@@ -131,7 +131,7 @@ impl<'tcx> MutVisitor<'tcx> for Replacer<'_, 'tcx> {
let observes_address = match ctxt {
PlaceContext::NonMutatingUse(
NonMutatingUseContext::SharedBorrow
- | NonMutatingUseContext::ShallowBorrow
+ | NonMutatingUseContext::FakeBorrow
| NonMutatingUseContext::AddressOf,
) => true,
// For debuginfo, merging locals is ok.
diff --git a/compiler/rustc_mir_transform/src/coverage/counters.rs b/compiler/rustc_mir_transform/src/coverage/counters.rs
index 3d442e5dc..d56d4ad4f 100644
--- a/compiler/rustc_mir_transform/src/coverage/counters.rs
+++ b/compiler/rustc_mir_transform/src/coverage/counters.rs
@@ -1,10 +1,8 @@
use super::Error;
-use super::debug;
use super::graph;
use super::spans;
-use debug::{DebugCounters, NESTED_INDENT};
use graph::{BasicCoverageBlock, BcbBranch, CoverageGraph, TraverseCoverageGraphWithLoops};
use spans::CoverageSpan;
@@ -16,6 +14,8 @@ use rustc_middle::mir::coverage::*;
use std::fmt::{self, Debug};
+const NESTED_INDENT: &str = " ";
+
/// The coverage counter or counter expression associated with a particular
/// BCB node or BCB edge.
#[derive(Clone)]
@@ -75,8 +75,6 @@ pub(super) struct CoverageCounters {
/// BCB/edge, but are needed as operands to more complex expressions.
/// These are always [`BcbCounter::Expression`].
pub(super) intermediate_expressions: Vec<BcbCounter>,
-
- pub debug_counters: DebugCounters,
}
impl CoverageCounters {
@@ -91,17 +89,9 @@ impl CoverageCounters {
bcb_edge_counters: FxHashMap::default(),
bcb_has_incoming_edge_counters: BitSet::new_empty(num_bcbs),
intermediate_expressions: Vec::new(),
-
- debug_counters: DebugCounters::new(),
}
}
- /// Activate the `DebugCounters` data structures, to provide additional debug formatting
- /// features when formatting [`BcbCounter`] (counter) values.
- pub fn enable_debug(&mut self) {
- self.debug_counters.enable();
- }
-
/// Makes [`BcbCounter`] `Counter`s and `Expressions` for the `BasicCoverageBlock`s directly or
/// indirectly associated with `CoverageSpans`, and accumulates additional `Expression`s
/// representing intermediate values.
@@ -113,44 +103,18 @@ impl CoverageCounters {
MakeBcbCounters::new(self, basic_coverage_blocks).make_bcb_counters(coverage_spans)
}
- fn make_counter<F>(&mut self, debug_block_label_fn: F) -> BcbCounter
- where
- F: Fn() -> Option<String>,
- {
- let counter = BcbCounter::Counter { id: self.next_counter() };
- if self.debug_counters.is_enabled() {
- self.debug_counters.add_counter(&counter, (debug_block_label_fn)());
- }
- counter
+ fn make_counter(&mut self) -> BcbCounter {
+ let id = self.next_counter();
+ BcbCounter::Counter { id }
}
- fn make_expression<F>(
- &mut self,
- lhs: Operand,
- op: Op,
- rhs: Operand,
- debug_block_label_fn: F,
- ) -> BcbCounter
- where
- F: Fn() -> Option<String>,
- {
+ fn make_expression(&mut self, lhs: Operand, op: Op, rhs: Operand) -> BcbCounter {
let id = self.next_expression();
- let expression = BcbCounter::Expression { id, lhs, op, rhs };
- if self.debug_counters.is_enabled() {
- self.debug_counters.add_counter(&expression, (debug_block_label_fn)());
- }
- expression
+ BcbCounter::Expression { id, lhs, op, rhs }
}
pub fn make_identity_counter(&mut self, counter_operand: Operand) -> BcbCounter {
- let some_debug_block_label = if self.debug_counters.is_enabled() {
- self.debug_counters.some_block_label(counter_operand).cloned()
- } else {
- None
- };
- self.make_expression(counter_operand, Op::Add, Operand::Zero, || {
- some_debug_block_label.clone()
- })
+ self.make_expression(counter_operand, Op::Add, Operand::Zero)
}
/// Counter IDs start from one and go up.
@@ -367,12 +331,8 @@ impl<'a> MakeBcbCounters<'a> {
branch_counter_operand,
Op::Add,
sumup_counter_operand,
- || None,
- );
- debug!(
- " [new intermediate expression: {}]",
- self.format_counter(&intermediate_expression)
);
+ debug!(" [new intermediate expression: {:?}]", intermediate_expression);
let intermediate_expression_operand = intermediate_expression.as_operand();
self.coverage_counters.intermediate_expressions.push(intermediate_expression);
some_sumup_counter_operand.replace(intermediate_expression_operand);
@@ -394,9 +354,8 @@ impl<'a> MakeBcbCounters<'a> {
branching_counter_operand,
Op::Subtract,
sumup_counter_operand,
- || Some(format!("{expression_branch:?}")),
);
- debug!("{:?} gets an expression: {}", expression_branch, self.format_counter(&expression));
+ debug!("{:?} gets an expression: {:?}", expression_branch, expression);
let bcb = expression_branch.target_bcb;
if expression_branch.is_only_path_to_target() {
self.coverage_counters.set_bcb_counter(bcb, expression)?;
@@ -418,10 +377,10 @@ impl<'a> MakeBcbCounters<'a> {
// If the BCB already has a counter, return it.
if let Some(counter_kind) = &self.coverage_counters.bcb_counters[bcb] {
debug!(
- "{}{:?} already has a counter: {}",
+ "{}{:?} already has a counter: {:?}",
NESTED_INDENT.repeat(debug_indent_level),
bcb,
- self.format_counter(counter_kind),
+ counter_kind,
);
return Ok(counter_kind.as_operand());
}
@@ -431,22 +390,22 @@ impl<'a> MakeBcbCounters<'a> {
// program results in a tight infinite loop, but it should still compile.
let one_path_to_target = self.bcb_has_one_path_to_target(bcb);
if one_path_to_target || self.bcb_predecessors(bcb).contains(&bcb) {
- let counter_kind = self.coverage_counters.make_counter(|| Some(format!("{bcb:?}")));
+ let counter_kind = self.coverage_counters.make_counter();
if one_path_to_target {
debug!(
- "{}{:?} gets a new counter: {}",
+ "{}{:?} gets a new counter: {:?}",
NESTED_INDENT.repeat(debug_indent_level),
bcb,
- self.format_counter(&counter_kind),
+ counter_kind,
);
} else {
debug!(
"{}{:?} has itself as its own predecessor. It can't be part of its own \
- Expression sum, so it will get its own new counter: {}. (Note, the compiled \
+ Expression sum, so it will get its own new counter: {:?}. (Note, the compiled \
code will generate an infinite loop.)",
NESTED_INDENT.repeat(debug_indent_level),
bcb,
- self.format_counter(&counter_kind),
+ counter_kind,
);
}
return self.coverage_counters.set_bcb_counter(bcb, counter_kind);
@@ -481,12 +440,11 @@ impl<'a> MakeBcbCounters<'a> {
sumup_edge_counter_operand,
Op::Add,
edge_counter_operand,
- || None,
);
debug!(
- "{}new intermediate expression: {}",
+ "{}new intermediate expression: {:?}",
NESTED_INDENT.repeat(debug_indent_level),
- self.format_counter(&intermediate_expression)
+ intermediate_expression
);
let intermediate_expression_operand = intermediate_expression.as_operand();
self.coverage_counters.intermediate_expressions.push(intermediate_expression);
@@ -497,13 +455,12 @@ impl<'a> MakeBcbCounters<'a> {
first_edge_counter_operand,
Op::Add,
some_sumup_edge_counter_operand.unwrap(),
- || Some(format!("{bcb:?}")),
);
debug!(
- "{}{:?} gets a new counter (sum of predecessor counters): {}",
+ "{}{:?} gets a new counter (sum of predecessor counters): {:?}",
NESTED_INDENT.repeat(debug_indent_level),
bcb,
- self.format_counter(&counter_kind)
+ counter_kind
);
self.coverage_counters.set_bcb_counter(bcb, counter_kind)
}
@@ -534,24 +491,23 @@ impl<'a> MakeBcbCounters<'a> {
self.coverage_counters.bcb_edge_counters.get(&(from_bcb, to_bcb))
{
debug!(
- "{}Edge {:?}->{:?} already has a counter: {}",
+ "{}Edge {:?}->{:?} already has a counter: {:?}",
NESTED_INDENT.repeat(debug_indent_level),
from_bcb,
to_bcb,
- self.format_counter(counter_kind)
+ counter_kind
);
return Ok(counter_kind.as_operand());
}
// Make a new counter to count this edge.
- let counter_kind =
- self.coverage_counters.make_counter(|| Some(format!("{from_bcb:?}->{to_bcb:?}")));
+ let counter_kind = self.coverage_counters.make_counter();
debug!(
- "{}Edge {:?}->{:?} gets a new counter: {}",
+ "{}Edge {:?}->{:?} gets a new counter: {:?}",
NESTED_INDENT.repeat(debug_indent_level),
from_bcb,
to_bcb,
- self.format_counter(&counter_kind)
+ counter_kind
);
self.coverage_counters.set_bcb_edge_counter(from_bcb, to_bcb, counter_kind)
}
@@ -710,9 +666,4 @@ impl<'a> MakeBcbCounters<'a> {
fn bcb_dominates(&self, dom: BasicCoverageBlock, node: BasicCoverageBlock) -> bool {
self.basic_coverage_blocks.dominates(dom, node)
}
-
- #[inline]
- fn format_counter(&self, counter_kind: &BcbCounter) -> String {
- self.coverage_counters.debug_counters.format_counter(counter_kind)
- }
}
diff --git a/compiler/rustc_mir_transform/src/coverage/debug.rs b/compiler/rustc_mir_transform/src/coverage/debug.rs
deleted file mode 100644
index af616c498..000000000
--- a/compiler/rustc_mir_transform/src/coverage/debug.rs
+++ /dev/null
@@ -1,802 +0,0 @@
-//! The `InstrumentCoverage` MIR pass implementation includes debugging tools and options
-//! to help developers understand and/or improve the analysis and instrumentation of a MIR.
-//!
-//! To enable coverage, include the rustc command line option:
-//!
-//! * `-C instrument-coverage`
-//!
-//! MIR Dump Files, with additional `CoverageGraph` graphviz and `CoverageSpan` spanview
-//! ------------------------------------------------------------------------------------
-//!
-//! Additional debugging options include:
-//!
-//! * `-Z dump-mir=InstrumentCoverage` - Generate `.mir` files showing the state of the MIR,
-//! before and after the `InstrumentCoverage` pass, for each compiled function.
-//!
-//! * `-Z dump-mir-graphviz` - If `-Z dump-mir` is also enabled for the current MIR node path,
-//! each MIR dump is accompanied by a before-and-after graphical view of the MIR, in Graphviz
-//! `.dot` file format (which can be visually rendered as a graph using any of a number of free
-//! Graphviz viewers and IDE extensions).
-//!
-//! For the `InstrumentCoverage` pass, this option also enables generation of an additional
-//! Graphviz `.dot` file for each function, rendering the `CoverageGraph`: the control flow
-//! graph (CFG) of `BasicCoverageBlocks` (BCBs), as nodes, internally labeled to show the
-//! `CoverageSpan`-based MIR elements each BCB represents (`BasicBlock`s, `Statement`s and
-//! `Terminator`s), assigned coverage counters and/or expressions, and edge counters, as needed.
-//!
-//! (Note the additional option, `-Z graphviz-dark-mode`, can be added, to change the rendered
-//! output from its default black-on-white background to a dark color theme, if desired.)
-//!
-//! * `-Z dump-mir-spanview` - If `-Z dump-mir` is also enabled for the current MIR node path,
-//! each MIR dump is accompanied by a before-and-after `.html` document showing the function's
-//! original source code, highlighted by it's MIR spans, at the `statement`-level (by default),
-//! `terminator` only, or encompassing span for the `Terminator` plus all `Statement`s, in each
-//! `block` (`BasicBlock`).
-//!
-//! For the `InstrumentCoverage` pass, this option also enables generation of an additional
-//! spanview `.html` file for each function, showing the aggregated `CoverageSpan`s that will
-//! require counters (or counter expressions) for accurate coverage analysis.
-//!
-//! Debug Logging
-//! -------------
-//!
-//! The `InstrumentCoverage` pass includes debug logging messages at various phases and decision
-//! points, which can be enabled via environment variable:
-//!
-//! ```shell
-//! RUSTC_LOG=rustc_mir_transform::transform::coverage=debug
-//! ```
-//!
-//! Other module paths with coverage-related debug logs may also be of interest, particularly for
-//! debugging the coverage map data, injected as global variables in the LLVM IR (during rustc's
-//! code generation pass). For example:
-//!
-//! ```shell
-//! RUSTC_LOG=rustc_mir_transform::transform::coverage,rustc_codegen_ssa::coverageinfo,rustc_codegen_llvm::coverageinfo=debug
-//! ```
-//!
-//! Coverage Debug Options
-//! ---------------------------------
-//!
-//! Additional debugging options can be enabled using the environment variable:
-//!
-//! ```shell
-//! RUSTC_COVERAGE_DEBUG_OPTIONS=<options>
-//! ```
-//!
-//! These options are comma-separated, and specified in the format `option-name=value`. For example:
-//!
-//! ```shell
-//! $ RUSTC_COVERAGE_DEBUG_OPTIONS=counter-format=id+operation,allow-unused-expressions=yes cargo build
-//! ```
-//!
-//! Coverage debug options include:
-//!
-//! * `allow-unused-expressions=yes` or `no` (default: `no`)
-//!
-//! The `InstrumentCoverage` algorithms _should_ only create and assign expressions to a
-//! `BasicCoverageBlock`, or an incoming edge, if that expression is either (a) required to
-//! count a `CoverageSpan`, or (b) a dependency of some other required counter expression.
-//!
-//! If an expression is generated that does not map to a `CoverageSpan` or dependency, this
-//! probably indicates there was a bug in the algorithm that creates and assigns counters
-//! and expressions.
-//!
-//! When this kind of bug is encountered, the rustc compiler will panic by default. Setting:
-//! `allow-unused-expressions=yes` will log a warning message instead of panicking (effectively
-//! ignoring the unused expressions), which may be helpful when debugging the root cause of
-//! the problem.
-//!
-//! * `counter-format=<choices>`, where `<choices>` can be any plus-separated combination of `id`,
-//! `block`, and/or `operation` (default: `block+operation`)
-//!
-//! This option effects both the `CoverageGraph` (graphviz `.dot` files) and debug logging, when
-//! generating labels for counters and expressions.
-//!
-//! Depending on the values and combinations, counters can be labeled by:
-//!
-//! * `id` - counter or expression ID (ascending counter IDs, starting at 1, or descending
-//! expression IDs, starting at `u32:MAX`)
-//! * `block` - the `BasicCoverageBlock` label (for example, `bcb0`) or edge label (for
-//! example `bcb0->bcb1`), for counters or expressions assigned to count a
-//! `BasicCoverageBlock` or edge. Intermediate expressions (not directly associated with
-//! a BCB or edge) will be labeled by their expression ID, unless `operation` is also
-//! specified.
-//! * `operation` - applied to expressions only, labels include the left-hand-side counter
-//! or expression label (lhs operand), the operator (`+` or `-`), and the right-hand-side
-//! counter or expression (rhs operand). Expression operand labels are generated
-//! recursively, generating labels with nested operations, enclosed in parentheses
-//! (for example: `bcb2 + (bcb0 - bcb1)`).
-
-use super::counters::{BcbCounter, CoverageCounters};
-use super::graph::{BasicCoverageBlock, BasicCoverageBlockData, CoverageGraph};
-use super::spans::CoverageSpan;
-
-use itertools::Itertools;
-use rustc_middle::mir::create_dump_file;
-use rustc_middle::mir::generic_graphviz::GraphvizWriter;
-use rustc_middle::mir::spanview::{self, SpanViewable};
-
-use rustc_data_structures::fx::FxHashMap;
-use rustc_middle::mir::coverage::*;
-use rustc_middle::mir::{self, BasicBlock};
-use rustc_middle::ty::TyCtxt;
-use rustc_span::Span;
-
-use std::iter;
-use std::ops::Deref;
-use std::sync::OnceLock;
-
-pub const NESTED_INDENT: &str = " ";
-
-const RUSTC_COVERAGE_DEBUG_OPTIONS: &str = "RUSTC_COVERAGE_DEBUG_OPTIONS";
-
-pub(super) fn debug_options<'a>() -> &'a DebugOptions {
- static DEBUG_OPTIONS: OnceLock<DebugOptions> = OnceLock::new();
-
- &DEBUG_OPTIONS.get_or_init(DebugOptions::from_env)
-}
-
-/// Parses and maintains coverage-specific debug options captured from the environment variable
-/// "RUSTC_COVERAGE_DEBUG_OPTIONS", if set.
-#[derive(Debug, Clone)]
-pub(super) struct DebugOptions {
- pub allow_unused_expressions: bool,
- counter_format: ExpressionFormat,
-}
-
-impl DebugOptions {
- fn from_env() -> Self {
- let mut allow_unused_expressions = true;
- let mut counter_format = ExpressionFormat::default();
-
- if let Ok(env_debug_options) = std::env::var(RUSTC_COVERAGE_DEBUG_OPTIONS) {
- for setting_str in env_debug_options.replace(' ', "").replace('-', "_").split(',') {
- let (option, value) = match setting_str.split_once('=') {
- None => (setting_str, None),
- Some((k, v)) => (k, Some(v)),
- };
- match option {
- "allow_unused_expressions" => {
- allow_unused_expressions = bool_option_val(option, value);
- debug!(
- "{} env option `allow_unused_expressions` is set to {}",
- RUSTC_COVERAGE_DEBUG_OPTIONS, allow_unused_expressions
- );
- }
- "counter_format" => {
- match value {
- None => {
- bug!(
- "`{}` option in environment variable {} requires one or more \
- plus-separated choices (a non-empty subset of \
- `id+block+operation`)",
- option,
- RUSTC_COVERAGE_DEBUG_OPTIONS
- );
- }
- Some(val) => {
- counter_format = counter_format_option_val(val);
- debug!(
- "{} env option `counter_format` is set to {:?}",
- RUSTC_COVERAGE_DEBUG_OPTIONS, counter_format
- );
- }
- };
- }
- _ => bug!(
- "Unsupported setting `{}` in environment variable {}",
- option,
- RUSTC_COVERAGE_DEBUG_OPTIONS
- ),
- };
- }
- }
-
- Self { allow_unused_expressions, counter_format }
- }
-}
-
-fn bool_option_val(option: &str, some_strval: Option<&str>) -> bool {
- if let Some(val) = some_strval {
- if ["yes", "y", "on", "true"].contains(&val) {
- true
- } else if ["no", "n", "off", "false"].contains(&val) {
- false
- } else {
- bug!(
- "Unsupported value `{}` for option `{}` in environment variable {}",
- option,
- val,
- RUSTC_COVERAGE_DEBUG_OPTIONS
- )
- }
- } else {
- true
- }
-}
-
-fn counter_format_option_val(strval: &str) -> ExpressionFormat {
- let mut counter_format = ExpressionFormat { id: false, block: false, operation: false };
- let components = strval.splitn(3, '+');
- for component in components {
- match component {
- "id" => counter_format.id = true,
- "block" => counter_format.block = true,
- "operation" => counter_format.operation = true,
- _ => bug!(
- "Unsupported counter_format choice `{}` in environment variable {}",
- component,
- RUSTC_COVERAGE_DEBUG_OPTIONS
- ),
- }
- }
- counter_format
-}
-
-#[derive(Debug, Clone)]
-struct ExpressionFormat {
- id: bool,
- block: bool,
- operation: bool,
-}
-
-impl Default for ExpressionFormat {
- fn default() -> Self {
- Self { id: false, block: true, operation: true }
- }
-}
-
-/// If enabled, this struct maintains a map from `BcbCounter` IDs (as `Operand`) to
-/// the `BcbCounter` data and optional label (normally, the counter's associated
-/// `BasicCoverageBlock` format string, if any).
-///
-/// Use `format_counter` to convert one of these `BcbCounter` counters to a debug output string,
-/// as directed by the `DebugOptions`. This allows the format of counter labels in logs and dump
-/// files (including the `CoverageGraph` graphviz file) to be changed at runtime, via environment
-/// variable.
-///
-/// `DebugCounters` supports a recursive rendering of `Expression` counters, so they can be
-/// presented as nested expressions such as `(bcb3 - (bcb0 + bcb1))`.
-pub(super) struct DebugCounters {
- some_counters: Option<FxHashMap<Operand, DebugCounter>>,
-}
-
-impl DebugCounters {
- pub fn new() -> Self {
- Self { some_counters: None }
- }
-
- pub fn enable(&mut self) {
- debug_assert!(!self.is_enabled());
- self.some_counters.replace(FxHashMap::default());
- }
-
- pub fn is_enabled(&self) -> bool {
- self.some_counters.is_some()
- }
-
- pub fn add_counter(&mut self, counter_kind: &BcbCounter, some_block_label: Option<String>) {
- if let Some(counters) = &mut self.some_counters {
- let id = counter_kind.as_operand();
- counters
- .try_insert(id, DebugCounter::new(counter_kind.clone(), some_block_label))
- .expect("attempt to add the same counter_kind to DebugCounters more than once");
- }
- }
-
- pub fn some_block_label(&self, operand: Operand) -> Option<&String> {
- self.some_counters.as_ref().and_then(|counters| {
- counters.get(&operand).and_then(|debug_counter| debug_counter.some_block_label.as_ref())
- })
- }
-
- pub fn format_counter(&self, counter_kind: &BcbCounter) -> String {
- match *counter_kind {
- BcbCounter::Counter { .. } => {
- format!("Counter({})", self.format_counter_kind(counter_kind))
- }
- BcbCounter::Expression { .. } => {
- format!("Expression({})", self.format_counter_kind(counter_kind))
- }
- }
- }
-
- fn format_counter_kind(&self, counter_kind: &BcbCounter) -> String {
- let counter_format = &debug_options().counter_format;
- if let BcbCounter::Expression { id, lhs, op, rhs } = *counter_kind {
- if counter_format.operation {
- return format!(
- "{}{} {} {}",
- if counter_format.id || self.some_counters.is_none() {
- format!("#{} = ", id.index())
- } else {
- String::new()
- },
- self.format_operand(lhs),
- match op {
- Op::Add => "+",
- Op::Subtract => "-",
- },
- self.format_operand(rhs),
- );
- }
- }
-
- let id = counter_kind.as_operand();
- if self.some_counters.is_some() && (counter_format.block || !counter_format.id) {
- let counters = self.some_counters.as_ref().unwrap();
- if let Some(DebugCounter { some_block_label: Some(block_label), .. }) =
- counters.get(&id)
- {
- return if counter_format.id {
- format!("{}#{:?}", block_label, id)
- } else {
- block_label.to_string()
- };
- }
- }
- format!("#{:?}", id)
- }
-
- fn format_operand(&self, operand: Operand) -> String {
- if matches!(operand, Operand::Zero) {
- return String::from("0");
- }
- if let Some(counters) = &self.some_counters {
- if let Some(DebugCounter { counter_kind, some_block_label }) = counters.get(&operand) {
- if let BcbCounter::Expression { .. } = counter_kind {
- if let Some(label) = some_block_label && debug_options().counter_format.block {
- return format!(
- "{}:({})",
- label,
- self.format_counter_kind(counter_kind)
- );
- }
- return format!("({})", self.format_counter_kind(counter_kind));
- }
- return self.format_counter_kind(counter_kind);
- }
- }
- format!("#{:?}", operand)
- }
-}
-
-/// A non-public support class to `DebugCounters`.
-#[derive(Debug)]
-struct DebugCounter {
- counter_kind: BcbCounter,
- some_block_label: Option<String>,
-}
-
-impl DebugCounter {
- fn new(counter_kind: BcbCounter, some_block_label: Option<String>) -> Self {
- Self { counter_kind, some_block_label }
- }
-}
-
-/// If enabled, this data structure captures additional debugging information used when generating
-/// a Graphviz (.dot file) representation of the `CoverageGraph`, for debugging purposes.
-pub(super) struct GraphvizData {
- some_bcb_to_coverage_spans_with_counters:
- Option<FxHashMap<BasicCoverageBlock, Vec<(CoverageSpan, BcbCounter)>>>,
- some_bcb_to_dependency_counters: Option<FxHashMap<BasicCoverageBlock, Vec<BcbCounter>>>,
- some_edge_to_counter: Option<FxHashMap<(BasicCoverageBlock, BasicBlock), BcbCounter>>,
-}
-
-impl GraphvizData {
- pub fn new() -> Self {
- Self {
- some_bcb_to_coverage_spans_with_counters: None,
- some_bcb_to_dependency_counters: None,
- some_edge_to_counter: None,
- }
- }
-
- pub fn enable(&mut self) {
- debug_assert!(!self.is_enabled());
- self.some_bcb_to_coverage_spans_with_counters = Some(FxHashMap::default());
- self.some_bcb_to_dependency_counters = Some(FxHashMap::default());
- self.some_edge_to_counter = Some(FxHashMap::default());
- }
-
- pub fn is_enabled(&self) -> bool {
- self.some_bcb_to_coverage_spans_with_counters.is_some()
- }
-
- pub fn add_bcb_coverage_span_with_counter(
- &mut self,
- bcb: BasicCoverageBlock,
- coverage_span: &CoverageSpan,
- counter_kind: &BcbCounter,
- ) {
- if let Some(bcb_to_coverage_spans_with_counters) =
- self.some_bcb_to_coverage_spans_with_counters.as_mut()
- {
- bcb_to_coverage_spans_with_counters
- .entry(bcb)
- .or_insert_with(Vec::new)
- .push((coverage_span.clone(), counter_kind.clone()));
- }
- }
-
- pub fn get_bcb_coverage_spans_with_counters(
- &self,
- bcb: BasicCoverageBlock,
- ) -> Option<&[(CoverageSpan, BcbCounter)]> {
- if let Some(bcb_to_coverage_spans_with_counters) =
- self.some_bcb_to_coverage_spans_with_counters.as_ref()
- {
- bcb_to_coverage_spans_with_counters.get(&bcb).map(Deref::deref)
- } else {
- None
- }
- }
-
- pub fn add_bcb_dependency_counter(
- &mut self,
- bcb: BasicCoverageBlock,
- counter_kind: &BcbCounter,
- ) {
- if let Some(bcb_to_dependency_counters) = self.some_bcb_to_dependency_counters.as_mut() {
- bcb_to_dependency_counters
- .entry(bcb)
- .or_insert_with(Vec::new)
- .push(counter_kind.clone());
- }
- }
-
- pub fn get_bcb_dependency_counters(&self, bcb: BasicCoverageBlock) -> Option<&[BcbCounter]> {
- if let Some(bcb_to_dependency_counters) = self.some_bcb_to_dependency_counters.as_ref() {
- bcb_to_dependency_counters.get(&bcb).map(Deref::deref)
- } else {
- None
- }
- }
-
- pub fn set_edge_counter(
- &mut self,
- from_bcb: BasicCoverageBlock,
- to_bb: BasicBlock,
- counter_kind: &BcbCounter,
- ) {
- if let Some(edge_to_counter) = self.some_edge_to_counter.as_mut() {
- edge_to_counter
- .try_insert((from_bcb, to_bb), counter_kind.clone())
- .expect("invalid attempt to insert more than one edge counter for the same edge");
- }
- }
-
- pub fn get_edge_counter(
- &self,
- from_bcb: BasicCoverageBlock,
- to_bb: BasicBlock,
- ) -> Option<&BcbCounter> {
- if let Some(edge_to_counter) = self.some_edge_to_counter.as_ref() {
- edge_to_counter.get(&(from_bcb, to_bb))
- } else {
- None
- }
- }
-}
-
-/// If enabled, this struct captures additional data used to track whether expressions were used,
-/// directly or indirectly, to compute the coverage counts for all `CoverageSpan`s, and any that are
-/// _not_ used are retained in the `unused_expressions` Vec, to be included in debug output (logs
-/// and/or a `CoverageGraph` graphviz output).
-pub(super) struct UsedExpressions {
- some_used_expression_operands: Option<FxHashMap<Operand, Vec<ExpressionId>>>,
- some_unused_expressions:
- Option<Vec<(BcbCounter, Option<BasicCoverageBlock>, BasicCoverageBlock)>>,
-}
-
-impl UsedExpressions {
- pub fn new() -> Self {
- Self { some_used_expression_operands: None, some_unused_expressions: None }
- }
-
- pub fn enable(&mut self) {
- debug_assert!(!self.is_enabled());
- self.some_used_expression_operands = Some(FxHashMap::default());
- self.some_unused_expressions = Some(Vec::new());
- }
-
- pub fn is_enabled(&self) -> bool {
- self.some_used_expression_operands.is_some()
- }
-
- pub fn add_expression_operands(&mut self, expression: &BcbCounter) {
- if let Some(used_expression_operands) = self.some_used_expression_operands.as_mut() {
- if let BcbCounter::Expression { id, lhs, rhs, .. } = *expression {
- used_expression_operands.entry(lhs).or_insert_with(Vec::new).push(id);
- used_expression_operands.entry(rhs).or_insert_with(Vec::new).push(id);
- }
- }
- }
-
- pub fn expression_is_used(&self, expression: &BcbCounter) -> bool {
- if let Some(used_expression_operands) = self.some_used_expression_operands.as_ref() {
- used_expression_operands.contains_key(&expression.as_operand())
- } else {
- false
- }
- }
-
- pub fn add_unused_expression_if_not_found(
- &mut self,
- expression: &BcbCounter,
- edge_from_bcb: Option<BasicCoverageBlock>,
- target_bcb: BasicCoverageBlock,
- ) {
- if let Some(used_expression_operands) = self.some_used_expression_operands.as_ref() {
- if !used_expression_operands.contains_key(&expression.as_operand()) {
- self.some_unused_expressions.as_mut().unwrap().push((
- expression.clone(),
- edge_from_bcb,
- target_bcb,
- ));
- }
- }
- }
-
- /// Return the list of unused counters (if any) as a tuple with the counter (`BcbCounter`),
- /// optional `from_bcb` (if it was an edge counter), and `target_bcb`.
- pub fn get_unused_expressions(
- &self,
- ) -> Vec<(BcbCounter, Option<BasicCoverageBlock>, BasicCoverageBlock)> {
- if let Some(unused_expressions) = self.some_unused_expressions.as_ref() {
- unused_expressions.clone()
- } else {
- Vec::new()
- }
- }
-
- /// If enabled, validate that every BCB or edge counter not directly associated with a coverage
- /// span is at least indirectly associated (it is a dependency of a BCB counter that _is_
- /// associated with a coverage span).
- pub fn validate(
- &mut self,
- bcb_counters_without_direct_coverage_spans: &[(
- Option<BasicCoverageBlock>,
- BasicCoverageBlock,
- BcbCounter,
- )],
- ) {
- if self.is_enabled() {
- let mut not_validated = bcb_counters_without_direct_coverage_spans
- .iter()
- .map(|(_, _, counter_kind)| counter_kind)
- .collect::<Vec<_>>();
- let mut validating_count = 0;
- while not_validated.len() != validating_count {
- let to_validate = not_validated.split_off(0);
- validating_count = to_validate.len();
- for counter_kind in to_validate {
- if self.expression_is_used(counter_kind) {
- self.add_expression_operands(counter_kind);
- } else {
- not_validated.push(counter_kind);
- }
- }
- }
- }
- }
-
- pub fn alert_on_unused_expressions(&self, debug_counters: &DebugCounters) {
- if let Some(unused_expressions) = self.some_unused_expressions.as_ref() {
- for (counter_kind, edge_from_bcb, target_bcb) in unused_expressions {
- let unused_counter_message = if let Some(from_bcb) = edge_from_bcb.as_ref() {
- format!(
- "non-coverage edge counter found without a dependent expression, in \
- {:?}->{:?}; counter={}",
- from_bcb,
- target_bcb,
- debug_counters.format_counter(&counter_kind),
- )
- } else {
- format!(
- "non-coverage counter found without a dependent expression, in {:?}; \
- counter={}",
- target_bcb,
- debug_counters.format_counter(&counter_kind),
- )
- };
-
- if debug_options().allow_unused_expressions {
- debug!("WARNING: {}", unused_counter_message);
- } else {
- bug!("{}", unused_counter_message);
- }
- }
- }
- }
-}
-
-/// Generates the MIR pass `CoverageSpan`-specific spanview dump file.
-pub(super) fn dump_coverage_spanview<'tcx>(
- tcx: TyCtxt<'tcx>,
- mir_body: &mir::Body<'tcx>,
- basic_coverage_blocks: &CoverageGraph,
- pass_name: &str,
- body_span: Span,
- coverage_spans: &[CoverageSpan],
-) {
- let mir_source = mir_body.source;
- let def_id = mir_source.def_id();
-
- let span_viewables = span_viewables(tcx, mir_body, basic_coverage_blocks, &coverage_spans);
- let mut file = create_dump_file(tcx, "html", false, pass_name, &0i32, mir_body)
- .expect("Unexpected error creating MIR spanview HTML file");
- let crate_name = tcx.crate_name(def_id.krate);
- let item_name = tcx.def_path(def_id).to_filename_friendly_no_crate();
- let title = format!("{crate_name}.{item_name} - Coverage Spans");
- spanview::write_document(tcx, body_span, span_viewables, &title, &mut file)
- .expect("Unexpected IO error dumping coverage spans as HTML");
-}
-
-/// Converts the computed `BasicCoverageBlockData`s into `SpanViewable`s.
-fn span_viewables<'tcx>(
- tcx: TyCtxt<'tcx>,
- mir_body: &mir::Body<'tcx>,
- basic_coverage_blocks: &CoverageGraph,
- coverage_spans: &[CoverageSpan],
-) -> Vec<SpanViewable> {
- let mut span_viewables = Vec::new();
- for coverage_span in coverage_spans {
- let tooltip = coverage_span.format_coverage_statements(tcx, mir_body);
- let CoverageSpan { span, bcb, .. } = coverage_span;
- let bcb_data = &basic_coverage_blocks[*bcb];
- let id = bcb_data.id();
- let leader_bb = bcb_data.leader_bb();
- span_viewables.push(SpanViewable { bb: leader_bb, span: *span, id, tooltip });
- }
- span_viewables
-}
-
-/// Generates the MIR pass coverage-specific graphviz dump file.
-pub(super) fn dump_coverage_graphviz<'tcx>(
- tcx: TyCtxt<'tcx>,
- mir_body: &mir::Body<'tcx>,
- pass_name: &str,
- basic_coverage_blocks: &CoverageGraph,
- coverage_counters: &CoverageCounters,
- graphviz_data: &GraphvizData,
- intermediate_expressions: &[BcbCounter],
- debug_used_expressions: &UsedExpressions,
-) {
- let debug_counters = &coverage_counters.debug_counters;
-
- let mir_source = mir_body.source;
- let def_id = mir_source.def_id();
- let node_content = |bcb| {
- bcb_to_string_sections(
- tcx,
- mir_body,
- coverage_counters,
- bcb,
- &basic_coverage_blocks[bcb],
- graphviz_data.get_bcb_coverage_spans_with_counters(bcb),
- graphviz_data.get_bcb_dependency_counters(bcb),
- // intermediate_expressions are injected into the mir::START_BLOCK, so
- // include them in the first BCB.
- if bcb.index() == 0 { Some(&intermediate_expressions) } else { None },
- )
- };
- let edge_labels = |from_bcb| {
- let from_bcb_data = &basic_coverage_blocks[from_bcb];
- let from_terminator = from_bcb_data.terminator(mir_body);
- let mut edge_labels = from_terminator.kind.fmt_successor_labels();
- edge_labels.retain(|label| label != "unreachable");
- let edge_counters = from_terminator
- .successors()
- .map(|successor_bb| graphviz_data.get_edge_counter(from_bcb, successor_bb));
- iter::zip(&edge_labels, edge_counters)
- .map(|(label, some_counter)| {
- if let Some(counter) = some_counter {
- format!("{}\n{}", label, debug_counters.format_counter(counter))
- } else {
- label.to_string()
- }
- })
- .collect::<Vec<_>>()
- };
- let graphviz_name = format!("Cov_{}_{}", def_id.krate.index(), def_id.index.index());
- let mut graphviz_writer =
- GraphvizWriter::new(basic_coverage_blocks, &graphviz_name, node_content, edge_labels);
- let unused_expressions = debug_used_expressions.get_unused_expressions();
- if unused_expressions.len() > 0 {
- graphviz_writer.set_graph_label(&format!(
- "Unused expressions:\n {}",
- unused_expressions
- .as_slice()
- .iter()
- .map(|(counter_kind, edge_from_bcb, target_bcb)| {
- if let Some(from_bcb) = edge_from_bcb.as_ref() {
- format!(
- "{:?}->{:?}: {}",
- from_bcb,
- target_bcb,
- debug_counters.format_counter(&counter_kind),
- )
- } else {
- format!(
- "{:?}: {}",
- target_bcb,
- debug_counters.format_counter(&counter_kind),
- )
- }
- })
- .join("\n ")
- ));
- }
- let mut file = create_dump_file(tcx, "dot", false, pass_name, &0i32, mir_body)
- .expect("Unexpected error creating BasicCoverageBlock graphviz DOT file");
- graphviz_writer
- .write_graphviz(tcx, &mut file)
- .expect("Unexpected error writing BasicCoverageBlock graphviz DOT file");
-}
-
-fn bcb_to_string_sections<'tcx>(
- tcx: TyCtxt<'tcx>,
- mir_body: &mir::Body<'tcx>,
- coverage_counters: &CoverageCounters,
- bcb: BasicCoverageBlock,
- bcb_data: &BasicCoverageBlockData,
- some_coverage_spans_with_counters: Option<&[(CoverageSpan, BcbCounter)]>,
- some_dependency_counters: Option<&[BcbCounter]>,
- some_intermediate_expressions: Option<&[BcbCounter]>,
-) -> Vec<String> {
- let debug_counters = &coverage_counters.debug_counters;
-
- let len = bcb_data.basic_blocks.len();
- let mut sections = Vec::new();
- if let Some(collect_intermediate_expressions) = some_intermediate_expressions {
- sections.push(
- collect_intermediate_expressions
- .iter()
- .map(|expression| {
- format!("Intermediate {}", debug_counters.format_counter(expression))
- })
- .join("\n"),
- );
- }
- if let Some(coverage_spans_with_counters) = some_coverage_spans_with_counters {
- sections.push(
- coverage_spans_with_counters
- .iter()
- .map(|(covspan, counter)| {
- format!(
- "{} at {}",
- debug_counters.format_counter(counter),
- covspan.format(tcx, mir_body)
- )
- })
- .join("\n"),
- );
- }
- if let Some(dependency_counters) = some_dependency_counters {
- sections.push(format!(
- "Non-coverage counters:\n {}",
- dependency_counters
- .iter()
- .map(|counter| debug_counters.format_counter(counter))
- .join(" \n"),
- ));
- }
- if let Some(counter_kind) = coverage_counters.bcb_counter(bcb) {
- sections.push(format!("{counter_kind:?}"));
- }
- let non_term_blocks = bcb_data.basic_blocks[0..len - 1]
- .iter()
- .map(|&bb| format!("{:?}: {}", bb, mir_body[bb].terminator().kind.name()))
- .collect::<Vec<_>>();
- if non_term_blocks.len() > 0 {
- sections.push(non_term_blocks.join("\n"));
- }
- sections.push(format!(
- "{:?}: {}",
- bcb_data.basic_blocks.last().unwrap(),
- bcb_data.terminator(mir_body).kind.name(),
- ));
- sections
-}
diff --git a/compiler/rustc_mir_transform/src/coverage/graph.rs b/compiler/rustc_mir_transform/src/coverage/graph.rs
index 59b01ffec..ff2254d69 100644
--- a/compiler/rustc_mir_transform/src/coverage/graph.rs
+++ b/compiler/rustc_mir_transform/src/coverage/graph.rs
@@ -1,4 +1,3 @@
-use itertools::Itertools;
use rustc_data_structures::graph::dominators::{self, Dominators};
use rustc_data_structures::graph::{self, GraphSuccessors, WithNumNodes, WithStartNode};
use rustc_index::bit_set::BitSet;
@@ -8,8 +7,6 @@ use rustc_middle::mir::{self, BasicBlock, BasicBlockData, Terminator, Terminator
use std::cmp::Ordering;
use std::ops::{Index, IndexMut};
-const ID_SEPARATOR: &str = ",";
-
/// A coverage-specific simplification of the MIR control flow graph (CFG). The `CoverageGraph`s
/// nodes are `BasicCoverageBlock`s, which encompass one or more MIR `BasicBlock`s.
#[derive(Debug)]
@@ -116,7 +113,7 @@ impl CoverageGraph {
match term.kind {
TerminatorKind::Return { .. }
- | TerminatorKind::Terminate
+ | TerminatorKind::UnwindTerminate(_)
| TerminatorKind::Yield { .. }
| TerminatorKind::SwitchInt { .. } => {
// The `bb` has more than one _outgoing_ edge, or exits the function. Save the
@@ -146,7 +143,7 @@ impl CoverageGraph {
// is as intended. (See Issue #78544 for a possible future option to support
// coverage in test programs that panic.)
TerminatorKind::Goto { .. }
- | TerminatorKind::Resume
+ | TerminatorKind::UnwindResume
| TerminatorKind::Unreachable
| TerminatorKind::Drop { .. }
| TerminatorKind::Call { .. }
@@ -199,12 +196,8 @@ impl CoverageGraph {
}
#[inline(always)]
- pub fn rank_partial_cmp(
- &self,
- a: BasicCoverageBlock,
- b: BasicCoverageBlock,
- ) -> Option<Ordering> {
- self.dominators.as_ref().unwrap().rank_partial_cmp(a, b)
+ pub fn cmp_in_dominator_order(&self, a: BasicCoverageBlock, b: BasicCoverageBlock) -> Ordering {
+ self.dominators.as_ref().unwrap().cmp_in_dominator_order(a, b)
}
}
@@ -328,10 +321,6 @@ impl BasicCoverageBlockData {
pub fn terminator<'a, 'tcx>(&self, mir_body: &'a mir::Body<'tcx>) -> &'a Terminator<'tcx> {
&mir_body[self.last_bb()].terminator()
}
-
- pub fn id(&self) -> String {
- format!("@{}", self.basic_blocks.iter().map(|bb| bb.index().to_string()).join(ID_SEPARATOR))
- }
}
/// Represents a successor from a branching BasicCoverageBlock (such as the arms of a `SwitchInt`)
diff --git a/compiler/rustc_mir_transform/src/coverage/mod.rs b/compiler/rustc_mir_transform/src/coverage/mod.rs
index 8c9eae508..c75d33eeb 100644
--- a/compiler/rustc_mir_transform/src/coverage/mod.rs
+++ b/compiler/rustc_mir_transform/src/coverage/mod.rs
@@ -1,7 +1,6 @@
pub mod query;
mod counters;
-mod debug;
mod graph;
mod spans;
@@ -20,7 +19,6 @@ use rustc_index::IndexVec;
use rustc_middle::hir;
use rustc_middle::middle::codegen_fn_attrs::CodegenFnAttrFlags;
use rustc_middle::mir::coverage::*;
-use rustc_middle::mir::dump_enabled;
use rustc_middle::mir::{
self, BasicBlock, BasicBlockData, Coverage, SourceInfo, Statement, StatementKind, Terminator,
TerminatorKind,
@@ -28,7 +26,7 @@ use rustc_middle::mir::{
use rustc_middle::ty::TyCtxt;
use rustc_span::def_id::DefId;
use rustc_span::source_map::SourceMap;
-use rustc_span::{CharPos, ExpnKind, Pos, SourceFile, Span, Symbol};
+use rustc_span::{ExpnKind, SourceFile, Span, Symbol};
/// A simple error message wrapper for `coverage::Error`s.
#[derive(Debug)]
@@ -94,13 +92,12 @@ impl<'tcx> MirPass<'tcx> for InstrumentCoverage {
}
trace!("InstrumentCoverage starting for {:?}", mir_source.def_id());
- Instrumentor::new(&self.name(), tcx, mir_body).inject_counters();
+ Instrumentor::new(tcx, mir_body).inject_counters();
trace!("InstrumentCoverage done for {:?}", mir_source.def_id());
}
}
struct Instrumentor<'a, 'tcx> {
- pass_name: &'a str,
tcx: TyCtxt<'tcx>,
mir_body: &'a mut mir::Body<'tcx>,
source_file: Lrc<SourceFile>,
@@ -112,7 +109,7 @@ struct Instrumentor<'a, 'tcx> {
}
impl<'a, 'tcx> Instrumentor<'a, 'tcx> {
- fn new(pass_name: &'a str, tcx: TyCtxt<'tcx>, mir_body: &'a mut mir::Body<'tcx>) -> Self {
+ fn new(tcx: TyCtxt<'tcx>, mir_body: &'a mut mir::Body<'tcx>) -> Self {
let source_map = tcx.sess.source_map();
let def_id = mir_body.source.def_id();
let (some_fn_sig, hir_body) = fn_sig_and_body(tcx, def_id);
@@ -141,7 +138,6 @@ impl<'a, 'tcx> Instrumentor<'a, 'tcx> {
let coverage_counters = CoverageCounters::new(&basic_coverage_blocks);
Self {
- pass_name,
tcx,
mir_body,
source_file,
@@ -154,28 +150,9 @@ impl<'a, 'tcx> Instrumentor<'a, 'tcx> {
}
fn inject_counters(&'a mut self) {
- let tcx = self.tcx;
- let mir_source = self.mir_body.source;
- let def_id = mir_source.def_id();
let fn_sig_span = self.fn_sig_span;
let body_span = self.body_span;
- let mut graphviz_data = debug::GraphvizData::new();
- let mut debug_used_expressions = debug::UsedExpressions::new();
-
- let dump_mir = dump_enabled(tcx, self.pass_name, def_id);
- let dump_graphviz = dump_mir && tcx.sess.opts.unstable_opts.dump_mir_graphviz;
- let dump_spanview = dump_mir && tcx.sess.opts.unstable_opts.dump_mir_spanview.is_some();
-
- if dump_graphviz {
- graphviz_data.enable();
- self.coverage_counters.enable_debug();
- }
-
- if dump_graphviz || level_enabled!(tracing::Level::DEBUG) {
- debug_used_expressions.enable();
- }
-
////////////////////////////////////////////////////
// Compute `CoverageSpan`s from the `CoverageGraph`.
let coverage_spans = CoverageSpans::generate_coverage_spans(
@@ -185,17 +162,6 @@ impl<'a, 'tcx> Instrumentor<'a, 'tcx> {
&self.basic_coverage_blocks,
);
- if dump_spanview {
- debug::dump_coverage_spanview(
- tcx,
- self.mir_body,
- &self.basic_coverage_blocks,
- self.pass_name,
- body_span,
- &coverage_spans,
- );
- }
-
////////////////////////////////////////////////////
// Create an optimized mix of `Counter`s and `Expression`s for the `CoverageGraph`. Ensure
// every `CoverageSpan` has a `Counter` or `Expression` assigned to its `BasicCoverageBlock`
@@ -209,14 +175,6 @@ impl<'a, 'tcx> Instrumentor<'a, 'tcx> {
.make_bcb_counters(&mut self.basic_coverage_blocks, &coverage_spans);
if let Ok(()) = result {
- // If debugging, add any intermediate expressions (which are not associated with any
- // BCB) to the `debug_used_expressions` map.
- if debug_used_expressions.is_enabled() {
- for intermediate_expression in &self.coverage_counters.intermediate_expressions {
- debug_used_expressions.add_expression_operands(intermediate_expression);
- }
- }
-
////////////////////////////////////////////////////
// Remove the counter or edge counter from of each `CoverageSpan`s associated
// `BasicCoverageBlock`, and inject a `Coverage` statement into the MIR.
@@ -227,11 +185,7 @@ impl<'a, 'tcx> Instrumentor<'a, 'tcx> {
// These `CoverageSpan`-associated counters are removed from their associated
// `BasicCoverageBlock`s so that the only remaining counters in the `CoverageGraph`
// are indirect counters (to be injected next, without associated code regions).
- self.inject_coverage_span_counters(
- coverage_spans,
- &mut graphviz_data,
- &mut debug_used_expressions,
- );
+ self.inject_coverage_span_counters(coverage_spans);
////////////////////////////////////////////////////
// For any remaining `BasicCoverageBlock` counters (that were not associated with
@@ -239,37 +193,17 @@ impl<'a, 'tcx> Instrumentor<'a, 'tcx> {
// to ensure `BasicCoverageBlock` counters that other `Expression`s may depend on
// are in fact counted, even though they don't directly contribute to counting
// their own independent code region's coverage.
- self.inject_indirect_counters(&mut graphviz_data, &mut debug_used_expressions);
+ self.inject_indirect_counters();
// Intermediate expressions will be injected as the final step, after generating
// debug output, if any.
////////////////////////////////////////////////////
};
- if graphviz_data.is_enabled() {
- // Even if there was an error, a partial CoverageGraph can still generate a useful
- // graphviz output.
- debug::dump_coverage_graphviz(
- tcx,
- self.mir_body,
- self.pass_name,
- &self.basic_coverage_blocks,
- &self.coverage_counters,
- &graphviz_data,
- &self.coverage_counters.intermediate_expressions,
- &debug_used_expressions,
- );
- }
-
if let Err(e) = result {
bug!("Error processing: {:?}: {:?}", self.mir_body.source.def_id(), e.message)
};
- // Depending on current `debug_options()`, `alert_on_unused_expressions()` could panic, so
- // this check is performed as late as possible, to allow other debug output (logs and dump
- // files), which might be helpful in analyzing unused expressions, to still be generated.
- debug_used_expressions.alert_on_unused_expressions(&self.coverage_counters.debug_counters);
-
////////////////////////////////////////////////////
// Finally, inject the intermediate expressions collected along the way.
for intermediate_expression in &self.coverage_counters.intermediate_expressions {
@@ -285,15 +219,7 @@ impl<'a, 'tcx> Instrumentor<'a, 'tcx> {
/// `bcb` to its `Counter`, when injected. Subsequent `CoverageSpan`s for a BCB that already has
/// a `Counter` will inject an `Expression` instead, and compute its value by adding `ZERO` to
/// the BCB `Counter` value.
- ///
- /// If debugging, add every BCB `Expression` associated with a `CoverageSpan`s to the
- /// `used_expression_operands` map.
- fn inject_coverage_span_counters(
- &mut self,
- coverage_spans: Vec<CoverageSpan>,
- graphviz_data: &mut debug::GraphvizData,
- debug_used_expressions: &mut debug::UsedExpressions,
- ) {
+ fn inject_coverage_span_counters(&mut self, coverage_spans: Vec<CoverageSpan>) {
let tcx = self.tcx;
let source_map = tcx.sess.source_map();
let body_span = self.body_span;
@@ -307,15 +233,12 @@ impl<'a, 'tcx> Instrumentor<'a, 'tcx> {
self.coverage_counters.make_identity_counter(counter_operand)
} else if let Some(counter_kind) = self.coverage_counters.take_bcb_counter(bcb) {
bcb_counters[bcb] = Some(counter_kind.as_operand());
- debug_used_expressions.add_expression_operands(&counter_kind);
counter_kind
} else {
bug!("Every BasicCoverageBlock should have a Counter or Expression");
};
- graphviz_data.add_bcb_coverage_span_with_counter(bcb, &covspan, &counter_kind);
- let code_region =
- make_code_region(source_map, file_name, &self.source_file, span, body_span);
+ let code_region = make_code_region(source_map, file_name, span, body_span);
inject_statement(
self.mir_body,
@@ -334,11 +257,7 @@ impl<'a, 'tcx> Instrumentor<'a, 'tcx> {
/// associated with a `CoverageSpan`, should only exist if the counter is an `Expression`
/// dependency (one of the expression operands). Collect them, and inject the additional
/// counters into the MIR, without a reportable coverage span.
- fn inject_indirect_counters(
- &mut self,
- graphviz_data: &mut debug::GraphvizData,
- debug_used_expressions: &mut debug::UsedExpressions,
- ) {
+ fn inject_indirect_counters(&mut self) {
let mut bcb_counters_without_direct_coverage_spans = Vec::new();
for (target_bcb, counter_kind) in self.coverage_counters.drain_bcb_counters() {
bcb_counters_without_direct_coverage_spans.push((None, target_bcb, counter_kind));
@@ -353,19 +272,8 @@ impl<'a, 'tcx> Instrumentor<'a, 'tcx> {
));
}
- // If debug is enabled, validate that every BCB or edge counter not directly associated
- // with a coverage span is at least indirectly associated (it is a dependency of a BCB
- // counter that _is_ associated with a coverage span).
- debug_used_expressions.validate(&bcb_counters_without_direct_coverage_spans);
-
for (edge_from_bcb, target_bcb, counter_kind) in bcb_counters_without_direct_coverage_spans
{
- debug_used_expressions.add_unused_expression_if_not_found(
- &counter_kind,
- edge_from_bcb,
- target_bcb,
- );
-
match counter_kind {
BcbCounter::Counter { .. } => {
let inject_to_bb = if let Some(from_bcb) = edge_from_bcb {
@@ -376,26 +284,17 @@ impl<'a, 'tcx> Instrumentor<'a, 'tcx> {
let to_bb = self.bcb_leader_bb(target_bcb);
let new_bb = inject_edge_counter_basic_block(self.mir_body, from_bb, to_bb);
- graphviz_data.set_edge_counter(from_bcb, new_bb, &counter_kind);
debug!(
"Edge {:?} (last {:?}) -> {:?} (leader {:?}) requires a new MIR \
- BasicBlock {:?}, for unclaimed edge counter {}",
- edge_from_bcb,
- from_bb,
- target_bcb,
- to_bb,
- new_bb,
- self.format_counter(&counter_kind),
+ BasicBlock {:?}, for unclaimed edge counter {:?}",
+ edge_from_bcb, from_bb, target_bcb, to_bb, new_bb, counter_kind,
);
new_bb
} else {
let target_bb = self.bcb_last_bb(target_bcb);
- graphviz_data.add_bcb_dependency_counter(target_bcb, &counter_kind);
debug!(
- "{:?} ({:?}) gets a new Coverage statement for unclaimed counter {}",
- target_bcb,
- target_bb,
- self.format_counter(&counter_kind),
+ "{:?} ({:?}) gets a new Coverage statement for unclaimed counter {:?}",
+ target_bcb, target_bb, counter_kind,
);
target_bb
};
@@ -430,11 +329,6 @@ impl<'a, 'tcx> Instrumentor<'a, 'tcx> {
&self.basic_coverage_blocks[bcb]
}
- #[inline]
- fn format_counter(&self, counter_kind: &BcbCounter) -> String {
- self.coverage_counters.debug_counters.format_counter(counter_kind)
- }
-
fn make_mir_coverage_kind(&self, counter_kind: &BcbCounter) -> CoverageKind {
match *counter_kind {
BcbCounter::Counter { id } => {
@@ -510,40 +404,36 @@ fn inject_intermediate_expression(mir_body: &mut mir::Body<'_>, expression: Cove
fn make_code_region(
source_map: &SourceMap,
file_name: Symbol,
- source_file: &Lrc<SourceFile>,
span: Span,
body_span: Span,
) -> CodeRegion {
debug!(
- "Called make_code_region(file_name={}, source_file={:?}, span={}, body_span={})",
+ "Called make_code_region(file_name={}, span={}, body_span={})",
file_name,
- source_file,
source_map.span_to_diagnostic_string(span),
source_map.span_to_diagnostic_string(body_span)
);
- let (start_line, mut start_col) = source_file.lookup_file_pos(span.lo());
- let (end_line, end_col) = if span.hi() == span.lo() {
- let (end_line, mut end_col) = (start_line, start_col);
+ let (file, mut start_line, mut start_col, mut end_line, mut end_col) =
+ source_map.span_to_location_info(span);
+ if span.hi() == span.lo() {
// Extend an empty span by one character so the region will be counted.
- let CharPos(char_pos) = start_col;
if span.hi() == body_span.hi() {
- start_col = CharPos(char_pos.saturating_sub(1));
+ start_col = start_col.saturating_sub(1);
} else {
- end_col = CharPos(char_pos + 1);
+ end_col = start_col + 1;
}
- (end_line, end_col)
- } else {
- source_file.lookup_file_pos(span.hi())
};
- let start_line = source_map.doctest_offset_line(&source_file.name, start_line);
- let end_line = source_map.doctest_offset_line(&source_file.name, end_line);
+ if let Some(file) = file {
+ start_line = source_map.doctest_offset_line(&file.name, start_line);
+ end_line = source_map.doctest_offset_line(&file.name, end_line);
+ }
CodeRegion {
file_name,
start_line: start_line as u32,
- start_col: start_col.to_u32() + 1,
+ start_col: start_col as u32,
end_line: end_line as u32,
- end_col: end_col.to_u32() + 1,
+ end_col: end_col as u32,
}
}
diff --git a/compiler/rustc_mir_transform/src/coverage/query.rs b/compiler/rustc_mir_transform/src/coverage/query.rs
index aa205655f..56365c5d4 100644
--- a/compiler/rustc_mir_transform/src/coverage/query.rs
+++ b/compiler/rustc_mir_transform/src/coverage/query.rs
@@ -1,5 +1,6 @@
use super::*;
+use rustc_data_structures::captures::Captures;
use rustc_middle::mir::coverage::*;
use rustc_middle::mir::{self, Body, Coverage, CoverageInfo};
use rustc_middle::query::Providers;
@@ -12,15 +13,10 @@ pub(crate) fn provide(providers: &mut Providers) {
providers.covered_code_regions = |tcx, def_id| covered_code_regions(tcx, def_id);
}
-/// The `num_counters` argument to `llvm.instrprof.increment` is the max counter_id + 1, or in
-/// other words, the number of counter value references injected into the MIR (plus 1 for the
-/// reserved `ZERO` counter, which uses counter ID `0` when included in an expression). Injected
-/// counters have a counter ID from `1..num_counters-1`.
-///
-/// `num_expressions` is the number of counter expressions added to the MIR body.
-///
-/// Both `num_counters` and `num_expressions` are used to initialize new vectors, during backend
-/// code generate, to lookup counters and expressions by simple u32 indexes.
+/// Coverage codegen needs to know the total number of counter IDs and expression IDs that have
+/// been used by a function's coverage mappings. These totals are used to create vectors to hold
+/// the relevant counter and expression data, and the maximum counter ID (+ 1) is also needed by
+/// the `llvm.instrprof.increment` intrinsic.
///
/// MIR optimization may split and duplicate some BasicBlock sequences, or optimize out some code
/// including injected counters. (It is OK if some counters are optimized out, but those counters
@@ -28,71 +24,51 @@ pub(crate) fn provide(providers: &mut Providers) {
/// calls may not work; but computing the number of counters or expressions by adding `1` to the
/// highest ID (for a given instrumented function) is valid.
///
-/// This visitor runs twice, first with `add_missing_operands` set to `false`, to find the maximum
-/// counter ID and maximum expression ID based on their enum variant `id` fields; then, as a
-/// safeguard, with `add_missing_operands` set to `true`, to find any other counter or expression
-/// IDs referenced by expression operands, if not already seen.
-///
-/// Ideally, each operand ID in a MIR `CoverageKind::Expression` will have a separate MIR `Coverage`
-/// statement for the `Counter` or `Expression` with the referenced ID. but since current or future
-/// MIR optimizations can theoretically optimize out segments of a MIR, it may not be possible to
-/// guarantee this, so the second pass ensures the `CoverageInfo` counts include all referenced IDs.
+/// It's possible for a coverage expression to remain in MIR while one or both of its operands
+/// have been optimized away. To avoid problems in codegen, we include those operands' IDs when
+/// determining the maximum counter/expression ID, even if the underlying counter/expression is
+/// no longer present.
struct CoverageVisitor {
- info: CoverageInfo,
- add_missing_operands: bool,
+ max_counter_id: CounterId,
+ max_expression_id: ExpressionId,
}
impl CoverageVisitor {
- /// Updates `num_counters` to the maximum encountered counter ID plus 1.
+ /// Updates `max_counter_id` to the maximum encountered counter ID.
#[inline(always)]
- fn update_num_counters(&mut self, counter_id: CounterId) {
- let counter_id = counter_id.as_u32();
- self.info.num_counters = std::cmp::max(self.info.num_counters, counter_id + 1);
+ fn update_max_counter_id(&mut self, counter_id: CounterId) {
+ self.max_counter_id = self.max_counter_id.max(counter_id);
}
- /// Updates `num_expressions` to the maximum encountered expression ID plus 1.
+ /// Updates `max_expression_id` to the maximum encountered expression ID.
#[inline(always)]
- fn update_num_expressions(&mut self, expression_id: ExpressionId) {
- let expression_id = expression_id.as_u32();
- self.info.num_expressions = std::cmp::max(self.info.num_expressions, expression_id + 1);
+ fn update_max_expression_id(&mut self, expression_id: ExpressionId) {
+ self.max_expression_id = self.max_expression_id.max(expression_id);
}
fn update_from_expression_operand(&mut self, operand: Operand) {
match operand {
- Operand::Counter(id) => self.update_num_counters(id),
- Operand::Expression(id) => self.update_num_expressions(id),
+ Operand::Counter(id) => self.update_max_counter_id(id),
+ Operand::Expression(id) => self.update_max_expression_id(id),
Operand::Zero => {}
}
}
fn visit_body(&mut self, body: &Body<'_>) {
- for bb_data in body.basic_blocks.iter() {
- for statement in bb_data.statements.iter() {
- if let StatementKind::Coverage(box ref coverage) = statement.kind {
- if is_inlined(body, statement) {
- continue;
- }
- self.visit_coverage(coverage);
- }
- }
+ for coverage in all_coverage_in_mir_body(body) {
+ self.visit_coverage(coverage);
}
}
fn visit_coverage(&mut self, coverage: &Coverage) {
- if self.add_missing_operands {
- match coverage.kind {
- CoverageKind::Expression { lhs, rhs, .. } => {
- self.update_from_expression_operand(lhs);
- self.update_from_expression_operand(rhs);
- }
- _ => {}
- }
- } else {
- match coverage.kind {
- CoverageKind::Counter { id, .. } => self.update_num_counters(id),
- CoverageKind::Expression { id, .. } => self.update_num_expressions(id),
- _ => {}
+ match coverage.kind {
+ CoverageKind::Counter { id, .. } => self.update_max_counter_id(id),
+ CoverageKind::Expression { id, lhs, rhs, .. } => {
+ self.update_max_expression_id(id);
+ self.update_from_expression_operand(lhs);
+ self.update_from_expression_operand(rhs);
}
+ CoverageKind::Unreachable => {}
}
}
}
@@ -101,37 +77,40 @@ fn coverageinfo<'tcx>(tcx: TyCtxt<'tcx>, instance_def: ty::InstanceDef<'tcx>) ->
let mir_body = tcx.instance_mir(instance_def);
let mut coverage_visitor = CoverageVisitor {
- info: CoverageInfo { num_counters: 0, num_expressions: 0 },
- add_missing_operands: false,
+ max_counter_id: CounterId::START,
+ max_expression_id: ExpressionId::START,
};
coverage_visitor.visit_body(mir_body);
- coverage_visitor.add_missing_operands = true;
- coverage_visitor.visit_body(mir_body);
-
- coverage_visitor.info
+ // Add 1 to the highest IDs to get the total number of IDs.
+ CoverageInfo {
+ num_counters: (coverage_visitor.max_counter_id + 1).as_u32(),
+ num_expressions: (coverage_visitor.max_expression_id + 1).as_u32(),
+ }
}
fn covered_code_regions(tcx: TyCtxt<'_>, def_id: DefId) -> Vec<&CodeRegion> {
let body = mir_body(tcx, def_id);
- body.basic_blocks
- .iter()
- .flat_map(|data| {
- data.statements.iter().filter_map(|statement| match statement.kind {
- StatementKind::Coverage(box ref coverage) => {
- if is_inlined(body, statement) {
- None
- } else {
- coverage.code_region.as_ref() // may be None
- }
- }
- _ => None,
- })
- })
+ all_coverage_in_mir_body(body)
+ // Not all coverage statements have an attached code region.
+ .filter_map(|coverage| coverage.code_region.as_ref())
.collect()
}
+fn all_coverage_in_mir_body<'a, 'tcx>(
+ body: &'a Body<'tcx>,
+) -> impl Iterator<Item = &'a Coverage> + Captures<'tcx> {
+ body.basic_blocks.iter().flat_map(|bb_data| &bb_data.statements).filter_map(|statement| {
+ match statement.kind {
+ StatementKind::Coverage(box ref coverage) if !is_inlined(body, statement) => {
+ Some(coverage)
+ }
+ _ => None,
+ }
+ })
+}
+
fn is_inlined(body: &Body<'_>, statement: &Statement<'_>) -> bool {
let scope_data = &body.source_scopes[statement.source_info.scope];
scope_data.inlined.is_some() || scope_data.inlined_parent_scope.is_some()
diff --git a/compiler/rustc_mir_transform/src/coverage/spans.rs b/compiler/rustc_mir_transform/src/coverage/spans.rs
index deebf5345..ed0e104d6 100644
--- a/compiler/rustc_mir_transform/src/coverage/spans.rs
+++ b/compiler/rustc_mir_transform/src/coverage/spans.rs
@@ -1,18 +1,14 @@
use super::graph::{BasicCoverageBlock, BasicCoverageBlockData, CoverageGraph, START_BCB};
-use itertools::Itertools;
use rustc_data_structures::graph::WithNumNodes;
-use rustc_middle::mir::spanview::source_range_no_file;
use rustc_middle::mir::{
self, AggregateKind, BasicBlock, FakeReadCause, Rvalue, Statement, StatementKind, Terminator,
TerminatorKind,
};
-use rustc_middle::ty::TyCtxt;
use rustc_span::source_map::original_sp;
use rustc_span::{BytePos, ExpnKind, MacroKind, Span, Symbol};
use std::cell::OnceCell;
-use std::cmp::Ordering;
#[derive(Debug, Copy, Clone)]
pub(super) enum CoverageStatement {
@@ -21,31 +17,6 @@ pub(super) enum CoverageStatement {
}
impl CoverageStatement {
- pub fn format<'tcx>(&self, tcx: TyCtxt<'tcx>, mir_body: &mir::Body<'tcx>) -> String {
- match *self {
- Self::Statement(bb, span, stmt_index) => {
- let stmt = &mir_body[bb].statements[stmt_index];
- format!(
- "{}: @{}[{}]: {:?}",
- source_range_no_file(tcx, span),
- bb.index(),
- stmt_index,
- stmt
- )
- }
- Self::Terminator(bb, span) => {
- let term = mir_body[bb].terminator();
- format!(
- "{}: @{}.{}: {:?}",
- source_range_no_file(tcx, span),
- bb.index(),
- term.kind.name(),
- term.kind
- )
- }
- }
- }
-
pub fn span(&self) -> Span {
match self {
Self::Statement(_, span, _) | Self::Terminator(_, span) => *span,
@@ -151,27 +122,6 @@ impl CoverageSpan {
self.bcb == other.bcb
}
- pub fn format<'tcx>(&self, tcx: TyCtxt<'tcx>, mir_body: &mir::Body<'tcx>) -> String {
- format!(
- "{}\n {}",
- source_range_no_file(tcx, self.span),
- self.format_coverage_statements(tcx, mir_body).replace('\n', "\n "),
- )
- }
-
- pub fn format_coverage_statements<'tcx>(
- &self,
- tcx: TyCtxt<'tcx>,
- mir_body: &mir::Body<'tcx>,
- ) -> String {
- let mut sorted_coverage_statements = self.coverage_statements.clone();
- sorted_coverage_statements.sort_unstable_by_key(|covstmt| match *covstmt {
- CoverageStatement::Statement(bb, _, index) => (bb, index),
- CoverageStatement::Terminator(bb, _) => (bb, usize::MAX),
- });
- sorted_coverage_statements.iter().map(|covstmt| covstmt.format(tcx, mir_body)).join("\n")
- }
-
/// If the span is part of a macro, returns the macro name symbol.
pub fn current_macro(&self) -> Option<Symbol> {
self.current_macro_or_none
@@ -333,30 +283,21 @@ impl<'a, 'tcx> CoverageSpans<'a, 'tcx> {
initial_spans.push(CoverageSpan::for_fn_sig(self.fn_sig_span));
- initial_spans.sort_unstable_by(|a, b| {
- if a.span.lo() == b.span.lo() {
- if a.span.hi() == b.span.hi() {
- if a.is_in_same_bcb(b) {
- Some(Ordering::Equal)
- } else {
- // Sort equal spans by dominator relationship (so dominators always come
- // before the dominated equal spans). When later comparing two spans in
- // order, the first will either dominate the second, or they will have no
- // dominator relationship.
- self.basic_coverage_blocks.rank_partial_cmp(a.bcb, b.bcb)
- }
- } else {
- // Sort hi() in reverse order so shorter spans are attempted after longer spans.
- // This guarantees that, if a `prev` span overlaps, and is not equal to, a
- // `curr` span, the prev span either extends further left of the curr span, or
- // they start at the same position and the prev span extends further right of
- // the end of the curr span.
- b.span.hi().partial_cmp(&a.span.hi())
- }
- } else {
- a.span.lo().partial_cmp(&b.span.lo())
- }
- .unwrap()
+ initial_spans.sort_by(|a, b| {
+ // First sort by span start.
+ Ord::cmp(&a.span.lo(), &b.span.lo())
+ // If span starts are the same, sort by span end in reverse order.
+ // This ensures that if spans A and B are adjacent in the list,
+ // and they overlap but are not equal, then either:
+ // - Span A extends further left, or
+ // - Both have the same start and span A extends further right
+ .then_with(|| Ord::cmp(&a.span.hi(), &b.span.hi()).reverse())
+ // If both spans are equal, sort the BCBs in dominator order,
+ // so that dominating BCBs come before other BCBs they dominate.
+ .then_with(|| self.basic_coverage_blocks.cmp_in_dominator_order(a.bcb, b.bcb))
+ // If two spans are otherwise identical, put closure spans first,
+ // as this seems to be what the refinement step expects.
+ .then_with(|| Ord::cmp(&a.is_closure, &b.is_closure).reverse())
});
initial_spans
@@ -486,6 +427,12 @@ impl<'a, 'tcx> CoverageSpans<'a, 'tcx> {
let merged_prefix_len = self.curr_original_span.lo() - self.curr().span.lo();
let after_macro_bang =
merged_prefix_len + BytePos(visible_macro.as_str().len() as u32 + 1);
+ if self.curr().span.lo() + after_macro_bang > self.curr().span.hi() {
+ // Something is wrong with the macro name span;
+ // return now to avoid emitting malformed mappings.
+ // FIXME(#117788): Track down why this happens.
+ return;
+ }
let mut macro_name_cov = self.curr().clone();
self.curr_mut().span =
self.curr().span.with_lo(self.curr().span.lo() + after_macro_bang);
@@ -822,7 +769,7 @@ pub(super) fn filtered_statement_span(statement: &Statement<'_>) -> Option<Span>
// and `_1` is the `Place` for `somenum`.
//
// If and when the Issue is resolved, remove this special case match pattern:
- StatementKind::FakeRead(box (cause, _)) if cause == FakeReadCause::ForGuardBinding => None,
+ StatementKind::FakeRead(box (FakeReadCause::ForGuardBinding, _)) => None,
// Retain spans from all other statements
StatementKind::FakeRead(box (_, _)) // Not including `ForGuardBinding`
@@ -867,8 +814,8 @@ pub(super) fn filtered_terminator_span(terminator: &Terminator<'_>) -> Option<Sp
}
// Retain spans from all other terminators
- TerminatorKind::Resume
- | TerminatorKind::Terminate
+ TerminatorKind::UnwindResume
+ | TerminatorKind::UnwindTerminate(_)
| TerminatorKind::Return
| TerminatorKind::Yield { .. }
| TerminatorKind::GeneratorDrop
diff --git a/compiler/rustc_mir_transform/src/dataflow_const_prop.rs b/compiler/rustc_mir_transform/src/dataflow_const_prop.rs
index 8f4dc9f69..7b14fef61 100644
--- a/compiler/rustc_mir_transform/src/dataflow_const_prop.rs
+++ b/compiler/rustc_mir_transform/src/dataflow_const_prop.rs
@@ -3,17 +3,19 @@
//! Currently, this pass only propagates scalar values.
use rustc_const_eval::const_eval::CheckAlignment;
-use rustc_const_eval::interpret::{ConstValue, ImmTy, Immediate, InterpCx, Scalar};
+use rustc_const_eval::interpret::{ImmTy, Immediate, InterpCx, OpTy, Projectable};
use rustc_data_structures::fx::FxHashMap;
use rustc_hir::def::DefKind;
-use rustc_middle::mir::visit::{MutVisitor, Visitor};
+use rustc_middle::mir::interpret::{AllocId, ConstAllocation, InterpResult, Scalar};
+use rustc_middle::mir::visit::{MutVisitor, PlaceContext, Visitor};
use rustc_middle::mir::*;
use rustc_middle::ty::layout::TyAndLayout;
use rustc_middle::ty::{self, Ty, TyCtxt};
use rustc_mir_dataflow::value_analysis::{
- Map, State, TrackElem, ValueAnalysis, ValueAnalysisWrapper, ValueOrPlace,
+ Map, PlaceIndex, State, TrackElem, ValueAnalysis, ValueAnalysisWrapper, ValueOrPlace,
};
use rustc_mir_dataflow::{lattice::FlatSet, Analysis, Results, ResultsVisitor};
+use rustc_span::def_id::DefId;
use rustc_span::DUMMY_SP;
use rustc_target::abi::{Align, FieldIdx, VariantIdx};
@@ -50,7 +52,7 @@ impl<'tcx> MirPass<'tcx> for DataflowConstProp {
let place_limit = if tcx.sess.mir_opt_level() < 4 { Some(PLACE_LIMIT) } else { None };
// Decide which places to track during the analysis.
- let map = Map::from_filter(tcx, body, Ty::is_scalar, place_limit);
+ let map = Map::new(tcx, body, place_limit);
// Perform the actual dataflow analysis.
let analysis = ConstAnalysis::new(tcx, body, map);
@@ -58,9 +60,10 @@ impl<'tcx> MirPass<'tcx> for DataflowConstProp {
.in_scope(|| analysis.wrap().into_engine(tcx, body).iterate_to_fixpoint());
// Collect results and patch the body afterwards.
- let mut visitor = CollectAndPatch::new(tcx);
+ let mut visitor = Collector::new(tcx, &body.local_decls);
debug_span!("collect").in_scope(|| results.visit_reachable_with(body, &mut visitor));
- debug_span!("patch").in_scope(|| visitor.visit_body(body));
+ let mut patch = visitor.patch;
+ debug_span!("patch").in_scope(|| patch.visit_body_preserves_cfg(body));
}
}
@@ -73,7 +76,7 @@ struct ConstAnalysis<'a, 'tcx> {
}
impl<'tcx> ValueAnalysis<'tcx> for ConstAnalysis<'_, 'tcx> {
- type Value = FlatSet<ScalarTy<'tcx>>;
+ type Value = FlatSet<Scalar>;
const NAME: &'static str = "ConstAnalysis";
@@ -107,6 +110,18 @@ impl<'tcx> ValueAnalysis<'tcx> for ConstAnalysis<'_, 'tcx> {
state: &mut State<Self::Value>,
) {
match rvalue {
+ Rvalue::Use(operand) => {
+ state.flood(target.as_ref(), self.map());
+ if let Some(target) = self.map.find(target.as_ref()) {
+ self.assign_operand(state, target, operand);
+ }
+ }
+ Rvalue::CopyForDeref(rhs) => {
+ state.flood(target.as_ref(), self.map());
+ if let Some(target) = self.map.find(target.as_ref()) {
+ self.assign_operand(state, target, &Operand::Copy(*rhs));
+ }
+ }
Rvalue::Aggregate(kind, operands) => {
// If we assign `target = Enum::Variant#0(operand)`,
// we must make sure that all `target as Variant#i` are `Top`.
@@ -134,8 +149,7 @@ impl<'tcx> ValueAnalysis<'tcx> for ConstAnalysis<'_, 'tcx> {
variant_target_idx,
TrackElem::Field(FieldIdx::from_usize(field_index)),
) {
- let result = self.handle_operand(operand, state);
- state.insert_idx(field, result, self.map());
+ self.assign_operand(state, field, operand);
}
}
}
@@ -172,9 +186,7 @@ impl<'tcx> ValueAnalysis<'tcx> for ConstAnalysis<'_, 'tcx> {
if let Some(overflow_target) = overflow_target {
let overflow = match overflow {
FlatSet::Top => FlatSet::Top,
- FlatSet::Elem(overflow) => {
- self.wrap_scalar(Scalar::from_bool(overflow), self.tcx.types.bool)
- }
+ FlatSet::Elem(overflow) => FlatSet::Elem(Scalar::from_bool(overflow)),
FlatSet::Bottom => FlatSet::Bottom,
};
// We have flooded `target` earlier.
@@ -182,6 +194,23 @@ impl<'tcx> ValueAnalysis<'tcx> for ConstAnalysis<'_, 'tcx> {
}
}
}
+ Rvalue::Cast(
+ CastKind::PointerCoercion(ty::adjustment::PointerCoercion::Unsize),
+ operand,
+ _,
+ ) => {
+ let pointer = self.handle_operand(operand, state);
+ state.assign(target.as_ref(), pointer, self.map());
+
+ if let Some(target_len) = self.map().find_len(target.as_ref())
+ && let operand_ty = operand.ty(self.local_decls, self.tcx)
+ && let Some(operand_ty) = operand_ty.builtin_deref(true)
+ && let ty::Array(_, len) = operand_ty.ty.kind()
+ && let Some(len) = Const::Ty(*len).try_eval_scalar_int(self.tcx, self.param_env)
+ {
+ state.insert_value_idx(target_len, FlatSet::Elem(len.into()), self.map());
+ }
+ }
_ => self.super_assign(target, rvalue, state),
}
}
@@ -191,60 +220,94 @@ impl<'tcx> ValueAnalysis<'tcx> for ConstAnalysis<'_, 'tcx> {
rvalue: &Rvalue<'tcx>,
state: &mut State<Self::Value>,
) -> ValueOrPlace<Self::Value> {
- match rvalue {
- Rvalue::Cast(
- kind @ (CastKind::IntToInt
- | CastKind::FloatToInt
- | CastKind::FloatToFloat
- | CastKind::IntToFloat),
- operand,
- ty,
- ) => match self.eval_operand(operand, state) {
- FlatSet::Elem(op) => match kind {
- CastKind::IntToInt | CastKind::IntToFloat => {
- self.ecx.int_to_int_or_float(&op, *ty)
- }
- CastKind::FloatToInt | CastKind::FloatToFloat => {
- self.ecx.float_to_float_or_int(&op, *ty)
- }
- _ => unreachable!(),
+ let val = match rvalue {
+ Rvalue::Len(place) => {
+ let place_ty = place.ty(self.local_decls, self.tcx);
+ if let ty::Array(_, len) = place_ty.ty.kind() {
+ Const::Ty(*len)
+ .try_eval_scalar(self.tcx, self.param_env)
+ .map_or(FlatSet::Top, FlatSet::Elem)
+ } else if let [ProjectionElem::Deref] = place.projection[..] {
+ state.get_len(place.local.into(), self.map())
+ } else {
+ FlatSet::Top
}
- .map(|result| ValueOrPlace::Value(self.wrap_immediate(result, *ty)))
- .unwrap_or(ValueOrPlace::TOP),
- _ => ValueOrPlace::TOP,
- },
+ }
+ Rvalue::Cast(CastKind::IntToInt | CastKind::IntToFloat, operand, ty) => {
+ let Ok(layout) = self.tcx.layout_of(self.param_env.and(*ty)) else {
+ return ValueOrPlace::Value(FlatSet::Top);
+ };
+ match self.eval_operand(operand, state) {
+ FlatSet::Elem(op) => self
+ .ecx
+ .int_to_int_or_float(&op, layout)
+ .map_or(FlatSet::Top, |result| self.wrap_immediate(*result)),
+ FlatSet::Bottom => FlatSet::Bottom,
+ FlatSet::Top => FlatSet::Top,
+ }
+ }
+ Rvalue::Cast(CastKind::FloatToInt | CastKind::FloatToFloat, operand, ty) => {
+ let Ok(layout) = self.tcx.layout_of(self.param_env.and(*ty)) else {
+ return ValueOrPlace::Value(FlatSet::Top);
+ };
+ match self.eval_operand(operand, state) {
+ FlatSet::Elem(op) => self
+ .ecx
+ .float_to_float_or_int(&op, layout)
+ .map_or(FlatSet::Top, |result| self.wrap_immediate(*result)),
+ FlatSet::Bottom => FlatSet::Bottom,
+ FlatSet::Top => FlatSet::Top,
+ }
+ }
+ Rvalue::Cast(CastKind::Transmute, operand, _) => {
+ match self.eval_operand(operand, state) {
+ FlatSet::Elem(op) => self.wrap_immediate(*op),
+ FlatSet::Bottom => FlatSet::Bottom,
+ FlatSet::Top => FlatSet::Top,
+ }
+ }
Rvalue::BinaryOp(op, box (left, right)) => {
// Overflows must be ignored here.
let (val, _overflow) = self.binary_op(state, *op, left, right);
- ValueOrPlace::Value(val)
+ val
}
Rvalue::UnaryOp(op, operand) => match self.eval_operand(operand, state) {
FlatSet::Elem(value) => self
.ecx
- .unary_op(*op, &value)
- .map(|val| ValueOrPlace::Value(self.wrap_immty(val)))
- .unwrap_or(ValueOrPlace::Value(FlatSet::Top)),
- FlatSet::Bottom => ValueOrPlace::Value(FlatSet::Bottom),
- FlatSet::Top => ValueOrPlace::Value(FlatSet::Top),
+ .wrapping_unary_op(*op, &value)
+ .map_or(FlatSet::Top, |val| self.wrap_immediate(*val)),
+ FlatSet::Bottom => FlatSet::Bottom,
+ FlatSet::Top => FlatSet::Top,
},
- Rvalue::Discriminant(place) => {
- ValueOrPlace::Value(state.get_discr(place.as_ref(), self.map()))
+ Rvalue::NullaryOp(null_op, ty) => {
+ let Ok(layout) = self.tcx.layout_of(self.param_env.and(*ty)) else {
+ return ValueOrPlace::Value(FlatSet::Top);
+ };
+ let val = match null_op {
+ NullOp::SizeOf if layout.is_sized() => layout.size.bytes(),
+ NullOp::AlignOf if layout.is_sized() => layout.align.abi.bytes(),
+ NullOp::OffsetOf(fields) => layout
+ .offset_of_subfield(&self.ecx, fields.iter().map(|f| f.index()))
+ .bytes(),
+ _ => return ValueOrPlace::Value(FlatSet::Top),
+ };
+ FlatSet::Elem(Scalar::from_target_usize(val, &self.tcx))
}
- _ => self.super_rvalue(rvalue, state),
- }
+ Rvalue::Discriminant(place) => state.get_discr(place.as_ref(), self.map()),
+ _ => return self.super_rvalue(rvalue, state),
+ };
+ ValueOrPlace::Value(val)
}
fn handle_constant(
&self,
- constant: &Constant<'tcx>,
+ constant: &ConstOperand<'tcx>,
_state: &mut State<Self::Value>,
) -> Self::Value {
constant
- .literal
- .eval(self.tcx, self.param_env)
- .try_to_scalar()
- .map(|value| FlatSet::Elem(ScalarTy(value, constant.ty())))
- .unwrap_or(FlatSet::Top)
+ .const_
+ .try_eval_scalar(self.tcx, self.param_env)
+ .map_or(FlatSet::Top, FlatSet::Elem)
}
fn handle_switch_int<'mir>(
@@ -261,9 +324,8 @@ impl<'tcx> ValueAnalysis<'tcx> for ConstAnalysis<'_, 'tcx> {
// We are branching on uninitialized data, this is UB, treat it as unreachable.
// This allows the set of visited edges to grow monotonically with the lattice.
FlatSet::Bottom => TerminatorEdges::None,
- FlatSet::Elem(ScalarTy(scalar, _)) => {
- let int = scalar.assert_int();
- let choice = int.assert_bits(int.size());
+ FlatSet::Elem(scalar) => {
+ let choice = scalar.assert_bits(scalar.size());
TerminatorEdges::Single(targets.target_for_value(choice))
}
FlatSet::Top => TerminatorEdges::SwitchInt { discr, targets },
@@ -271,16 +333,6 @@ impl<'tcx> ValueAnalysis<'tcx> for ConstAnalysis<'_, 'tcx> {
}
}
-#[derive(Clone, PartialEq, Eq)]
-struct ScalarTy<'tcx>(Scalar, Ty<'tcx>);
-
-impl<'tcx> std::fmt::Debug for ScalarTy<'tcx> {
- fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
- // This is used for dataflow visualization, so we return something more concise.
- std::fmt::Display::fmt(&ConstantKind::Val(ConstValue::Scalar(self.0), self.1), f)
- }
-}
-
impl<'a, 'tcx> ConstAnalysis<'a, 'tcx> {
pub fn new(tcx: TyCtxt<'tcx>, body: &'a Body<'tcx>, map: Map) -> Self {
let param_env = tcx.param_env_reveal_all_normalized(body.source.def_id());
@@ -293,34 +345,146 @@ impl<'a, 'tcx> ConstAnalysis<'a, 'tcx> {
}
}
+ /// The caller must have flooded `place`.
+ fn assign_operand(
+ &self,
+ state: &mut State<FlatSet<Scalar>>,
+ place: PlaceIndex,
+ operand: &Operand<'tcx>,
+ ) {
+ match operand {
+ Operand::Copy(rhs) | Operand::Move(rhs) => {
+ if let Some(rhs) = self.map.find(rhs.as_ref()) {
+ state.insert_place_idx(place, rhs, &self.map);
+ } else if rhs.projection.first() == Some(&PlaceElem::Deref)
+ && let FlatSet::Elem(pointer) = state.get(rhs.local.into(), &self.map)
+ && let rhs_ty = self.local_decls[rhs.local].ty
+ && let Ok(rhs_layout) = self.tcx.layout_of(self.param_env.and(rhs_ty))
+ {
+ let op = ImmTy::from_scalar(pointer, rhs_layout).into();
+ self.assign_constant(state, place, op, &rhs.projection);
+ }
+ }
+ Operand::Constant(box constant) => {
+ if let Ok(constant) = self.ecx.eval_mir_constant(&constant.const_, None, None) {
+ self.assign_constant(state, place, constant, &[]);
+ }
+ }
+ }
+ }
+
+ /// The caller must have flooded `place`.
+ ///
+ /// Perform: `place = operand.projection`.
+ #[instrument(level = "trace", skip(self, state))]
+ fn assign_constant(
+ &self,
+ state: &mut State<FlatSet<Scalar>>,
+ place: PlaceIndex,
+ mut operand: OpTy<'tcx>,
+ projection: &[PlaceElem<'tcx>],
+ ) -> Option<!> {
+ for &(mut proj_elem) in projection {
+ if let PlaceElem::Index(index) = proj_elem {
+ if let FlatSet::Elem(index) = state.get(index.into(), &self.map)
+ && let Ok(offset) = index.to_target_usize(&self.tcx)
+ && let Some(min_length) = offset.checked_add(1)
+ {
+ proj_elem = PlaceElem::ConstantIndex { offset, min_length, from_end: false };
+ } else {
+ return None;
+ }
+ }
+ operand = self.ecx.project(&operand, proj_elem).ok()?;
+ }
+
+ self.map.for_each_projection_value(
+ place,
+ operand,
+ &mut |elem, op| match elem {
+ TrackElem::Field(idx) => self.ecx.project_field(op, idx.as_usize()).ok(),
+ TrackElem::Variant(idx) => self.ecx.project_downcast(op, idx).ok(),
+ TrackElem::Discriminant => {
+ let variant = self.ecx.read_discriminant(op).ok()?;
+ let discr_value = self.ecx.discriminant_for_variant(op.layout, variant).ok()?;
+ Some(discr_value.into())
+ }
+ TrackElem::DerefLen => {
+ let op: OpTy<'_> = self.ecx.deref_pointer(op).ok()?.into();
+ let len_usize = op.len(&self.ecx).ok()?;
+ let layout =
+ self.tcx.layout_of(self.param_env.and(self.tcx.types.usize)).unwrap();
+ Some(ImmTy::from_uint(len_usize, layout).into())
+ }
+ },
+ &mut |place, op| {
+ if let Ok(imm) = self.ecx.read_immediate_raw(op)
+ && let Some(imm) = imm.right()
+ {
+ let elem = self.wrap_immediate(*imm);
+ state.insert_value_idx(place, elem, &self.map);
+ }
+ },
+ );
+
+ None
+ }
+
fn binary_op(
&self,
- state: &mut State<FlatSet<ScalarTy<'tcx>>>,
+ state: &mut State<FlatSet<Scalar>>,
op: BinOp,
left: &Operand<'tcx>,
right: &Operand<'tcx>,
- ) -> (FlatSet<ScalarTy<'tcx>>, FlatSet<bool>) {
+ ) -> (FlatSet<Scalar>, FlatSet<bool>) {
let left = self.eval_operand(left, state);
let right = self.eval_operand(right, state);
+
match (left, right) {
+ (FlatSet::Bottom, _) | (_, FlatSet::Bottom) => (FlatSet::Bottom, FlatSet::Bottom),
+ // Both sides are known, do the actual computation.
(FlatSet::Elem(left), FlatSet::Elem(right)) => {
match self.ecx.overflowing_binary_op(op, &left, &right) {
- Ok((val, overflow, ty)) => (self.wrap_scalar(val, ty), FlatSet::Elem(overflow)),
+ Ok((val, overflow)) => {
+ (FlatSet::Elem(val.to_scalar()), FlatSet::Elem(overflow))
+ }
_ => (FlatSet::Top, FlatSet::Top),
}
}
- (FlatSet::Bottom, _) | (_, FlatSet::Bottom) => (FlatSet::Bottom, FlatSet::Bottom),
- (_, _) => {
- // Could attempt some algebraic simplifications here.
- (FlatSet::Top, FlatSet::Top)
+ // Exactly one side is known, attempt some algebraic simplifications.
+ (FlatSet::Elem(const_arg), _) | (_, FlatSet::Elem(const_arg)) => {
+ let layout = const_arg.layout;
+ if !matches!(layout.abi, rustc_target::abi::Abi::Scalar(..)) {
+ return (FlatSet::Top, FlatSet::Top);
+ }
+
+ let arg_scalar = const_arg.to_scalar();
+ let Ok(arg_value) = arg_scalar.to_bits(layout.size) else {
+ return (FlatSet::Top, FlatSet::Top);
+ };
+
+ match op {
+ BinOp::BitAnd if arg_value == 0 => (FlatSet::Elem(arg_scalar), FlatSet::Bottom),
+ BinOp::BitOr
+ if arg_value == layout.size.truncate(u128::MAX)
+ || (layout.ty.is_bool() && arg_value == 1) =>
+ {
+ (FlatSet::Elem(arg_scalar), FlatSet::Bottom)
+ }
+ BinOp::Mul if layout.ty.is_integral() && arg_value == 0 => {
+ (FlatSet::Elem(arg_scalar), FlatSet::Elem(false))
+ }
+ _ => (FlatSet::Top, FlatSet::Top),
+ }
}
+ (FlatSet::Top, FlatSet::Top) => (FlatSet::Top, FlatSet::Top),
}
}
fn eval_operand(
&self,
op: &Operand<'tcx>,
- state: &mut State<FlatSet<ScalarTy<'tcx>>>,
+ state: &mut State<FlatSet<Scalar>>,
) -> FlatSet<ImmTy<'tcx>> {
let value = match self.handle_operand(op, state) {
ValueOrPlace::Value(value) => value,
@@ -328,80 +492,89 @@ impl<'a, 'tcx> ConstAnalysis<'a, 'tcx> {
};
match value {
FlatSet::Top => FlatSet::Top,
- FlatSet::Elem(ScalarTy(scalar, ty)) => self
- .tcx
- .layout_of(self.param_env.and(ty))
- .map(|layout| FlatSet::Elem(ImmTy::from_scalar(scalar, layout)))
- .unwrap_or(FlatSet::Top),
+ FlatSet::Elem(scalar) => {
+ let ty = op.ty(self.local_decls, self.tcx);
+ self.tcx.layout_of(self.param_env.and(ty)).map_or(FlatSet::Top, |layout| {
+ FlatSet::Elem(ImmTy::from_scalar(scalar.into(), layout))
+ })
+ }
FlatSet::Bottom => FlatSet::Bottom,
}
}
- fn eval_discriminant(
- &self,
- enum_ty: Ty<'tcx>,
- variant_index: VariantIdx,
- ) -> Option<ScalarTy<'tcx>> {
+ fn eval_discriminant(&self, enum_ty: Ty<'tcx>, variant_index: VariantIdx) -> Option<Scalar> {
if !enum_ty.is_enum() {
return None;
}
- let discr = enum_ty.discriminant_for_variant(self.tcx, variant_index)?;
- let discr_layout = self.tcx.layout_of(self.param_env.and(discr.ty)).ok()?;
- let discr_value = Scalar::try_from_uint(discr.val, discr_layout.size)?;
- Some(ScalarTy(discr_value, discr.ty))
- }
-
- fn wrap_scalar(&self, scalar: Scalar, ty: Ty<'tcx>) -> FlatSet<ScalarTy<'tcx>> {
- FlatSet::Elem(ScalarTy(scalar, ty))
+ let enum_ty_layout = self.tcx.layout_of(self.param_env.and(enum_ty)).ok()?;
+ let discr_value = self.ecx.discriminant_for_variant(enum_ty_layout, variant_index).ok()?;
+ Some(discr_value.to_scalar())
}
- fn wrap_immediate(&self, imm: Immediate, ty: Ty<'tcx>) -> FlatSet<ScalarTy<'tcx>> {
+ fn wrap_immediate(&self, imm: Immediate) -> FlatSet<Scalar> {
match imm {
- Immediate::Scalar(scalar) => self.wrap_scalar(scalar, ty),
+ Immediate::Scalar(scalar) => FlatSet::Elem(scalar),
+ Immediate::Uninit => FlatSet::Bottom,
_ => FlatSet::Top,
}
}
-
- fn wrap_immty(&self, val: ImmTy<'tcx>) -> FlatSet<ScalarTy<'tcx>> {
- self.wrap_immediate(*val, val.layout.ty)
- }
}
-struct CollectAndPatch<'tcx> {
+pub(crate) struct Patch<'tcx> {
tcx: TyCtxt<'tcx>,
/// For a given MIR location, this stores the values of the operands used by that location. In
/// particular, this is before the effect, such that the operands of `_1 = _1 + _2` are
/// properly captured. (This may become UB soon, but it is currently emitted even by safe code.)
- before_effect: FxHashMap<(Location, Place<'tcx>), ScalarTy<'tcx>>,
+ pub(crate) before_effect: FxHashMap<(Location, Place<'tcx>), Const<'tcx>>,
/// Stores the assigned values for assignments where the Rvalue is constant.
- assignments: FxHashMap<Location, ScalarTy<'tcx>>,
+ pub(crate) assignments: FxHashMap<Location, Const<'tcx>>,
}
-impl<'tcx> CollectAndPatch<'tcx> {
- fn new(tcx: TyCtxt<'tcx>) -> Self {
+impl<'tcx> Patch<'tcx> {
+ pub(crate) fn new(tcx: TyCtxt<'tcx>) -> Self {
Self { tcx, before_effect: FxHashMap::default(), assignments: FxHashMap::default() }
}
- fn make_operand(&self, scalar: ScalarTy<'tcx>) -> Operand<'tcx> {
- Operand::Constant(Box::new(Constant {
- span: DUMMY_SP,
- user_ty: None,
- literal: ConstantKind::Val(ConstValue::Scalar(scalar.0), scalar.1),
- }))
+ fn make_operand(&self, const_: Const<'tcx>) -> Operand<'tcx> {
+ Operand::Constant(Box::new(ConstOperand { span: DUMMY_SP, user_ty: None, const_ }))
+ }
+}
+
+struct Collector<'tcx, 'locals> {
+ patch: Patch<'tcx>,
+ local_decls: &'locals LocalDecls<'tcx>,
+}
+
+impl<'tcx, 'locals> Collector<'tcx, 'locals> {
+ pub(crate) fn new(tcx: TyCtxt<'tcx>, local_decls: &'locals LocalDecls<'tcx>) -> Self {
+ Self { patch: Patch::new(tcx), local_decls }
+ }
+
+ fn try_make_constant(
+ &self,
+ place: Place<'tcx>,
+ state: &State<FlatSet<Scalar>>,
+ map: &Map,
+ ) -> Option<Const<'tcx>> {
+ let FlatSet::Elem(Scalar::Int(value)) = state.get(place.as_ref(), &map) else {
+ return None;
+ };
+ let ty = place.ty(self.local_decls, self.patch.tcx).ty;
+ Some(Const::Val(ConstValue::Scalar(value.into()), ty))
}
}
impl<'mir, 'tcx>
ResultsVisitor<'mir, 'tcx, Results<'tcx, ValueAnalysisWrapper<ConstAnalysis<'_, 'tcx>>>>
- for CollectAndPatch<'tcx>
+ for Collector<'tcx, '_>
{
- type FlowState = State<FlatSet<ScalarTy<'tcx>>>;
+ type FlowState = State<FlatSet<Scalar>>;
fn visit_statement_before_primary_effect(
&mut self,
- results: &Results<'tcx, ValueAnalysisWrapper<ConstAnalysis<'_, 'tcx>>>,
+ results: &mut Results<'tcx, ValueAnalysisWrapper<ConstAnalysis<'_, 'tcx>>>,
state: &Self::FlowState,
statement: &'mir Statement<'tcx>,
location: Location,
@@ -417,7 +590,7 @@ impl<'mir, 'tcx>
fn visit_statement_after_primary_effect(
&mut self,
- results: &Results<'tcx, ValueAnalysisWrapper<ConstAnalysis<'_, 'tcx>>>,
+ results: &mut Results<'tcx, ValueAnalysisWrapper<ConstAnalysis<'_, 'tcx>>>,
state: &Self::FlowState,
statement: &'mir Statement<'tcx>,
location: Location,
@@ -427,14 +600,8 @@ impl<'mir, 'tcx>
// Don't overwrite the assignment if it already uses a constant (to keep the span).
}
StatementKind::Assign(box (place, _)) => {
- match state.get(place.as_ref(), &results.analysis.0.map) {
- FlatSet::Top => (),
- FlatSet::Elem(value) => {
- self.assignments.insert(location, value);
- }
- FlatSet::Bottom => {
- // This assignment is either unreachable, or an uninitialized value is assigned.
- }
+ if let Some(value) = self.try_make_constant(place, state, &results.analysis.0.map) {
+ self.patch.assignments.insert(location, value);
}
}
_ => (),
@@ -443,7 +610,7 @@ impl<'mir, 'tcx>
fn visit_terminator_before_primary_effect(
&mut self,
- results: &Results<'tcx, ValueAnalysisWrapper<ConstAnalysis<'_, 'tcx>>>,
+ results: &mut Results<'tcx, ValueAnalysisWrapper<ConstAnalysis<'_, 'tcx>>>,
state: &Self::FlowState,
terminator: &'mir Terminator<'tcx>,
location: Location,
@@ -453,8 +620,8 @@ impl<'mir, 'tcx>
}
}
-impl<'tcx> MutVisitor<'tcx> for CollectAndPatch<'tcx> {
- fn tcx<'a>(&'a self) -> TyCtxt<'tcx> {
+impl<'tcx> MutVisitor<'tcx> for Patch<'tcx> {
+ fn tcx(&self) -> TyCtxt<'tcx> {
self.tcx
}
@@ -462,7 +629,7 @@ impl<'tcx> MutVisitor<'tcx> for CollectAndPatch<'tcx> {
if let Some(value) = self.assignments.get(&location) {
match &mut statement.kind {
StatementKind::Assign(box (_, rvalue)) => {
- *rvalue = Rvalue::Use(self.make_operand(value.clone()));
+ *rvalue = Rvalue::Use(self.make_operand(*value));
}
_ => bug!("found assignment info for non-assign statement"),
}
@@ -475,33 +642,61 @@ impl<'tcx> MutVisitor<'tcx> for CollectAndPatch<'tcx> {
match operand {
Operand::Copy(place) | Operand::Move(place) => {
if let Some(value) = self.before_effect.get(&(location, *place)) {
- *operand = self.make_operand(value.clone());
+ *operand = self.make_operand(*value);
+ } else if !place.projection.is_empty() {
+ self.super_operand(operand, location)
}
}
- _ => (),
+ Operand::Constant(_) => {}
+ }
+ }
+
+ fn process_projection_elem(
+ &mut self,
+ elem: PlaceElem<'tcx>,
+ location: Location,
+ ) -> Option<PlaceElem<'tcx>> {
+ if let PlaceElem::Index(local) = elem {
+ let offset = self.before_effect.get(&(location, local.into()))?;
+ let offset = offset.try_to_scalar()?;
+ let offset = offset.to_target_usize(&self.tcx).ok()?;
+ let min_length = offset.checked_add(1)?;
+ Some(PlaceElem::ConstantIndex { offset, min_length, from_end: false })
+ } else {
+ None
}
}
}
-struct OperandCollector<'tcx, 'map, 'a> {
- state: &'a State<FlatSet<ScalarTy<'tcx>>>,
- visitor: &'a mut CollectAndPatch<'tcx>,
+struct OperandCollector<'tcx, 'map, 'locals, 'a> {
+ state: &'a State<FlatSet<Scalar>>,
+ visitor: &'a mut Collector<'tcx, 'locals>,
map: &'map Map,
}
-impl<'tcx, 'map, 'a> Visitor<'tcx> for OperandCollector<'tcx, 'map, 'a> {
+impl<'tcx> Visitor<'tcx> for OperandCollector<'tcx, '_, '_, '_> {
+ fn visit_projection_elem(
+ &mut self,
+ _: PlaceRef<'tcx>,
+ elem: PlaceElem<'tcx>,
+ _: PlaceContext,
+ location: Location,
+ ) {
+ if let PlaceElem::Index(local) = elem
+ && let Some(value) = self.visitor.try_make_constant(local.into(), self.state, self.map)
+ {
+ self.visitor.patch.before_effect.insert((location, local.into()), value);
+ }
+ }
+
fn visit_operand(&mut self, operand: &Operand<'tcx>, location: Location) {
- match operand {
- Operand::Copy(place) | Operand::Move(place) => {
- match self.state.get(place.as_ref(), self.map) {
- FlatSet::Top => (),
- FlatSet::Elem(value) => {
- self.visitor.before_effect.insert((location, *place), value);
- }
- FlatSet::Bottom => (),
- }
+ if let Some(place) = operand.place() {
+ if let Some(value) = self.visitor.try_make_constant(place, self.state, self.map) {
+ self.visitor.patch.before_effect.insert((location, place), value);
+ } else if !place.projection.is_empty() {
+ // Try to propagate into `Index` projections.
+ self.super_operand(operand, location)
}
- _ => (),
}
}
}
@@ -513,8 +708,11 @@ impl<'mir, 'tcx: 'mir> rustc_const_eval::interpret::Machine<'mir, 'tcx> for Dumm
type MemoryKind = !;
const PANIC_ON_ALLOC_FAIL: bool = true;
+ #[inline(always)]
fn enforce_alignment(_ecx: &InterpCx<'mir, 'tcx, Self>) -> CheckAlignment {
- unimplemented!()
+ // We do not check for alignment to avoid having to carry an `Align`
+ // in `ConstValue::ByRef`.
+ CheckAlignment::No
}
fn enforce_validity(_ecx: &InterpCx<'mir, 'tcx, Self>, _layout: TyAndLayout<'tcx>) -> bool {
@@ -529,6 +727,27 @@ impl<'mir, 'tcx: 'mir> rustc_const_eval::interpret::Machine<'mir, 'tcx> for Dumm
unimplemented!()
}
+ fn before_access_global(
+ _tcx: TyCtxt<'tcx>,
+ _machine: &Self,
+ _alloc_id: AllocId,
+ alloc: ConstAllocation<'tcx>,
+ _static_def_id: Option<DefId>,
+ is_write: bool,
+ ) -> InterpResult<'tcx> {
+ if is_write {
+ crate::const_prop::throw_machine_stop_str!("can't write to global");
+ }
+
+ // If the static allocation is mutable, then we can't const prop it as its content
+ // might be different at runtime.
+ if alloc.inner().mutability.is_mut() {
+ crate::const_prop::throw_machine_stop_str!("can't access mutable globals in ConstProp");
+ }
+
+ Ok(())
+ }
+
fn find_mir_or_eval_fn(
_ecx: &mut InterpCx<'mir, 'tcx, Self>,
_instance: ty::Instance<'tcx>,
@@ -541,6 +760,13 @@ impl<'mir, 'tcx: 'mir> rustc_const_eval::interpret::Machine<'mir, 'tcx> for Dumm
unimplemented!()
}
+ fn panic_nounwind(
+ _ecx: &mut InterpCx<'mir, 'tcx, Self>,
+ _msg: &str,
+ ) -> interpret::InterpResult<'tcx> {
+ unimplemented!()
+ }
+
fn call_intrinsic(
_ecx: &mut InterpCx<'mir, 'tcx, Self>,
_instance: ty::Instance<'tcx>,
@@ -565,8 +791,8 @@ impl<'mir, 'tcx: 'mir> rustc_const_eval::interpret::Machine<'mir, 'tcx> for Dumm
_bin_op: BinOp,
_left: &rustc_const_eval::interpret::ImmTy<'tcx, Self::Provenance>,
_right: &rustc_const_eval::interpret::ImmTy<'tcx, Self::Provenance>,
- ) -> interpret::InterpResult<'tcx, (interpret::Scalar<Self::Provenance>, bool, Ty<'tcx>)> {
- throw_unsup!(Unsupported("".into()))
+ ) -> interpret::InterpResult<'tcx, (ImmTy<'tcx, Self::Provenance>, bool)> {
+ crate::const_prop::throw_machine_stop_str!("can't do pointer arithmetic");
}
fn expose_ptr(
@@ -590,7 +816,8 @@ impl<'mir, 'tcx: 'mir> rustc_const_eval::interpret::Machine<'mir, 'tcx> for Dumm
_ecx: &'a InterpCx<'mir, 'tcx, Self>,
) -> &'a [rustc_const_eval::interpret::Frame<'mir, 'tcx, Self::Provenance, Self::FrameExtra>]
{
- unimplemented!()
+ // Return an empty stack instead of panicking, as `cur_span` uses it to evaluate constants.
+ &[]
}
fn stack_mut<'a>(
diff --git a/compiler/rustc_mir_transform/src/dead_store_elimination.rs b/compiler/rustc_mir_transform/src/dead_store_elimination.rs
index 3f988930b..ef1410504 100644
--- a/compiler/rustc_mir_transform/src/dead_store_elimination.rs
+++ b/compiler/rustc_mir_transform/src/dead_store_elimination.rs
@@ -12,6 +12,7 @@
//! will still not cause any further changes.
//!
+use crate::util::is_within_packed;
use rustc_index::bit_set::BitSet;
use rustc_middle::mir::visit::Visitor;
use rustc_middle::mir::*;
@@ -49,6 +50,11 @@ pub fn eliminate<'tcx>(tcx: TyCtxt<'tcx>, body: &mut Body<'tcx>, borrowed: &BitS
&& !place.is_indirect()
&& !borrowed.contains(place.local)
&& !state.contains(place.local)
+ // If `place` is a projection of a disaligned field in a packed ADT,
+ // the move may be codegened as a pointer to that field.
+ // Using that disaligned pointer may trigger UB in the callee,
+ // so do nothing.
+ && is_within_packed(tcx, body, place).is_none()
{
call_operands_to_move.push((bb, index));
}
diff --git a/compiler/rustc_mir_transform/src/deduplicate_blocks.rs b/compiler/rustc_mir_transform/src/deduplicate_blocks.rs
index 909116a77..666293cbc 100644
--- a/compiler/rustc_mir_transform/src/deduplicate_blocks.rs
+++ b/compiler/rustc_mir_transform/src/deduplicate_blocks.rs
@@ -150,7 +150,7 @@ fn rvalue_hash<H: Hasher>(hasher: &mut H, rvalue: &Rvalue<'_>) {
fn operand_hash<H: Hasher>(hasher: &mut H, operand: &Operand<'_>) {
match operand {
- Operand::Constant(box Constant { user_ty: _, literal, span: _ }) => literal.hash(hasher),
+ Operand::Constant(box ConstOperand { user_ty: _, const_, span: _ }) => const_.hash(hasher),
x => x.hash(hasher),
};
}
@@ -179,9 +179,9 @@ fn rvalue_eq<'tcx>(lhs: &Rvalue<'tcx>, rhs: &Rvalue<'tcx>) -> bool {
fn operand_eq<'tcx>(lhs: &Operand<'tcx>, rhs: &Operand<'tcx>) -> bool {
let res = match (lhs, rhs) {
(
- Operand::Constant(box Constant { user_ty: _, literal, span: _ }),
- Operand::Constant(box Constant { user_ty: _, literal: literal2, span: _ }),
- ) => literal == literal2,
+ Operand::Constant(box ConstOperand { user_ty: _, const_, span: _ }),
+ Operand::Constant(box ConstOperand { user_ty: _, const_: const2, span: _ }),
+ ) => const_ == const2,
(x, y) => x == y,
};
debug!("operand_eq lhs: `{:?}` rhs: `{:?}` result: {:?}", lhs, rhs, res);
diff --git a/compiler/rustc_mir_transform/src/dest_prop.rs b/compiler/rustc_mir_transform/src/dest_prop.rs
index b73b72c31..d9a132e5c 100644
--- a/compiler/rustc_mir_transform/src/dest_prop.rs
+++ b/compiler/rustc_mir_transform/src/dest_prop.rs
@@ -647,8 +647,8 @@ impl WriteInfo {
}
}
TerminatorKind::Goto { .. }
- | TerminatorKind::Resume
- | TerminatorKind::Terminate
+ | TerminatorKind::UnwindResume
+ | TerminatorKind::UnwindTerminate(_)
| TerminatorKind::Return
| TerminatorKind::Unreachable { .. } => (),
TerminatorKind::Drop { .. } => {
diff --git a/compiler/rustc_mir_transform/src/elaborate_drops.rs b/compiler/rustc_mir_transform/src/elaborate_drops.rs
index b6b1ae6d3..b62d7da2a 100644
--- a/compiler/rustc_mir_transform/src/elaborate_drops.rs
+++ b/compiler/rustc_mir_transform/src/elaborate_drops.rs
@@ -170,6 +170,7 @@ impl<'a, 'tcx> DropElaborator<'a, 'tcx> for Elaborator<'a, '_, 'tcx> {
self.ctxt.param_env()
}
+ #[instrument(level = "debug", skip(self), ret)]
fn drop_style(&self, path: Self::Path, mode: DropFlagMode) -> DropStyle {
let ((maybe_live, maybe_dead), multipart) = match mode {
DropFlagMode::Shallow => (self.ctxt.init_data.maybe_live_dead(path), false),
@@ -362,8 +363,13 @@ impl<'b, 'tcx> ElaborateDropsCtxt<'b, 'tcx> {
UnwindAction::Unreachable => {
Unwind::To(self.patch.unreachable_cleanup_block())
}
- UnwindAction::Terminate => {
- Unwind::To(self.patch.terminate_block())
+ UnwindAction::Terminate(reason) => {
+ debug_assert_ne!(
+ reason,
+ UnwindTerminateReason::InCleanup,
+ "we are not in a cleanup block, InCleanup reason should be impossible"
+ );
+ Unwind::To(self.patch.terminate_block(reason))
}
}
};
@@ -397,10 +403,10 @@ impl<'b, 'tcx> ElaborateDropsCtxt<'b, 'tcx> {
}
fn constant_bool(&self, span: Span, val: bool) -> Rvalue<'tcx> {
- Rvalue::Use(Operand::Constant(Box::new(Constant {
+ Rvalue::Use(Operand::Constant(Box::new(ConstOperand {
span,
user_ty: None,
- literal: ConstantKind::from_bool(self.tcx, val),
+ const_: Const::from_bool(self.tcx, val),
})))
}
@@ -470,7 +476,7 @@ impl<'b, 'tcx> ElaborateDropsCtxt<'b, 'tcx> {
// drop elaboration should handle that by itself
continue;
}
- TerminatorKind::Resume => {
+ TerminatorKind::UnwindResume => {
// It is possible for `Resume` to be patched
// (in particular it can be patched to be replaced with
// a Goto; see `MirPatch::new`).
@@ -496,7 +502,8 @@ impl<'b, 'tcx> ElaborateDropsCtxt<'b, 'tcx> {
if let TerminatorKind::Call {
destination,
target: Some(_),
- unwind: UnwindAction::Continue | UnwindAction::Unreachable | UnwindAction::Terminate,
+ unwind:
+ UnwindAction::Continue | UnwindAction::Unreachable | UnwindAction::Terminate(_),
..
} = data.terminator().kind
{
diff --git a/compiler/rustc_mir_transform/src/errors.rs b/compiler/rustc_mir_transform/src/errors.rs
index 4b796d79e..5879a8039 100644
--- a/compiler/rustc_mir_transform/src/errors.rs
+++ b/compiler/rustc_mir_transform/src/errors.rs
@@ -4,7 +4,9 @@ use rustc_errors::{
};
use rustc_macros::{Diagnostic, LintDiagnostic, Subdiagnostic};
use rustc_middle::mir::{AssertKind, UnsafetyViolationDetails};
+use rustc_middle::ty::TyCtxt;
use rustc_session::lint::{self, Lint};
+use rustc_span::def_id::DefId;
use rustc_span::Span;
#[derive(LintDiagnostic)]
@@ -237,20 +239,38 @@ pub(crate) struct FnItemRef {
pub ident: String,
}
-#[derive(LintDiagnostic)]
-#[diag(mir_transform_must_not_suspend)]
-pub(crate) struct MustNotSupend<'a> {
- #[label]
+pub(crate) struct MustNotSupend<'tcx, 'a> {
+ pub tcx: TyCtxt<'tcx>,
pub yield_sp: Span,
- #[subdiagnostic]
pub reason: Option<MustNotSuspendReason>,
- #[help]
pub src_sp: Span,
pub pre: &'a str,
- pub def_path: String,
+ pub def_id: DefId,
pub post: &'a str,
}
+// Needed for def_path_str
+impl<'a> DecorateLint<'a, ()> for MustNotSupend<'_, '_> {
+ fn decorate_lint<'b>(
+ self,
+ diag: &'b mut rustc_errors::DiagnosticBuilder<'a, ()>,
+ ) -> &'b mut rustc_errors::DiagnosticBuilder<'a, ()> {
+ diag.span_label(self.yield_sp, crate::fluent_generated::_subdiag::label);
+ if let Some(reason) = self.reason {
+ diag.subdiagnostic(reason);
+ }
+ diag.span_help(self.src_sp, crate::fluent_generated::_subdiag::help);
+ diag.set_arg("pre", self.pre);
+ diag.set_arg("def_path", self.tcx.def_path_str(self.def_id));
+ diag.set_arg("post", self.post);
+ diag
+ }
+
+ fn msg(&self) -> rustc_errors::DiagnosticMessage {
+ crate::fluent_generated::mir_transform_must_not_suspend
+ }
+}
+
#[derive(Subdiagnostic)]
#[note(mir_transform_note)]
pub(crate) struct MustNotSuspendReason {
@@ -258,10 +278,3 @@ pub(crate) struct MustNotSuspendReason {
pub span: Span,
pub reason: String,
}
-
-#[derive(Diagnostic)]
-#[diag(mir_transform_simd_shuffle_last_const)]
-pub(crate) struct SimdShuffleLastConst {
- #[primary_span]
- pub span: Span,
-}
diff --git a/compiler/rustc_mir_transform/src/generator.rs b/compiler/rustc_mir_transform/src/generator.rs
index ff4822f33..e261b8ac2 100644
--- a/compiler/rustc_mir_transform/src/generator.rs
+++ b/compiler/rustc_mir_transform/src/generator.rs
@@ -583,6 +583,14 @@ struct LivenessInfo {
storage_liveness: IndexVec<BasicBlock, Option<BitSet<Local>>>,
}
+/// Computes which locals have to be stored in the state-machine for the
+/// given coroutine.
+///
+/// The basic idea is as follows:
+/// - a local is live until we encounter a `StorageDead` statement. In
+/// case none exist, the local is considered to be always live.
+/// - a local has to be stored if it is either directly used after
+/// the suspend point, or if it is live and has been previously borrowed.
fn locals_live_across_suspend_points<'tcx>(
tcx: TyCtxt<'tcx>,
body: &Body<'tcx>,
@@ -814,7 +822,7 @@ impl<'mir, 'tcx, R> rustc_mir_dataflow::ResultsVisitor<'mir, 'tcx, R>
fn visit_statement_before_primary_effect(
&mut self,
- _results: &R,
+ _results: &mut R,
state: &Self::FlowState,
_statement: &'mir Statement<'tcx>,
loc: Location,
@@ -824,7 +832,7 @@ impl<'mir, 'tcx, R> rustc_mir_dataflow::ResultsVisitor<'mir, 'tcx, R>
fn visit_terminator_before_primary_effect(
&mut self,
- _results: &R,
+ _results: &mut R,
state: &Self::FlowState,
_terminator: &'mir Terminator<'tcx>,
loc: Location,
@@ -853,60 +861,7 @@ impl StorageConflictVisitor<'_, '_, '_> {
}
}
-/// Validates the typeck view of the generator against the actual set of types saved between
-/// yield points.
-fn sanitize_witness<'tcx>(
- tcx: TyCtxt<'tcx>,
- body: &Body<'tcx>,
- witness: Ty<'tcx>,
- upvars: &'tcx ty::List<Ty<'tcx>>,
- layout: &GeneratorLayout<'tcx>,
-) {
- let did = body.source.def_id();
- let param_env = tcx.param_env(did);
-
- let allowed_upvars = tcx.normalize_erasing_regions(param_env, upvars);
- let allowed = match witness.kind() {
- &ty::GeneratorWitness(interior_tys) => {
- tcx.normalize_erasing_late_bound_regions(param_env, interior_tys)
- }
- _ => {
- tcx.sess.delay_span_bug(
- body.span,
- format!("unexpected generator witness type {:?}", witness.kind()),
- );
- return;
- }
- };
-
- let mut mismatches = Vec::new();
- for fty in &layout.field_tys {
- if fty.ignore_for_traits {
- continue;
- }
- let decl_ty = tcx.normalize_erasing_regions(param_env, fty.ty);
-
- // Sanity check that typeck knows about the type of locals which are
- // live across a suspension point
- if !allowed.contains(&decl_ty) && !allowed_upvars.contains(&decl_ty) {
- mismatches.push(decl_ty);
- }
- }
-
- if !mismatches.is_empty() {
- span_bug!(
- body.span,
- "Broken MIR: generator contains type {:?} in MIR, \
- but typeck only knows about {} and {:?}",
- mismatches,
- allowed,
- allowed_upvars
- );
- }
-}
-
fn compute_layout<'tcx>(
- tcx: TyCtxt<'tcx>,
liveness: LivenessInfo,
body: &Body<'tcx>,
) -> (
@@ -932,27 +887,20 @@ fn compute_layout<'tcx>(
let decl = &body.local_decls[local];
debug!(?decl);
- let ignore_for_traits = if tcx.sess.opts.unstable_opts.drop_tracking_mir {
- // Do not `assert_crate_local` here, as post-borrowck cleanup may have already cleared
- // the information. This is alright, since `ignore_for_traits` is only relevant when
- // this code runs on pre-cleanup MIR, and `ignore_for_traits = false` is the safer
- // default.
- match decl.local_info {
- // Do not include raw pointers created from accessing `static` items, as those could
- // well be re-created by another access to the same static.
- ClearCrossCrate::Set(box LocalInfo::StaticRef { is_thread_local, .. }) => {
- !is_thread_local
- }
- // Fake borrows are only read by fake reads, so do not have any reality in
- // post-analysis MIR.
- ClearCrossCrate::Set(box LocalInfo::FakeBorrow) => true,
- _ => false,
+ // Do not `assert_crate_local` here, as post-borrowck cleanup may have already cleared
+ // the information. This is alright, since `ignore_for_traits` is only relevant when
+ // this code runs on pre-cleanup MIR, and `ignore_for_traits = false` is the safer
+ // default.
+ let ignore_for_traits = match decl.local_info {
+ // Do not include raw pointers created from accessing `static` items, as those could
+ // well be re-created by another access to the same static.
+ ClearCrossCrate::Set(box LocalInfo::StaticRef { is_thread_local, .. }) => {
+ !is_thread_local
}
- } else {
- // FIXME(#105084) HIR-based drop tracking does not account for all the temporaries that
- // MIR building may introduce. This leads to wrongly ignored types, but this is
- // necessary for internal consistency and to avoid ICEs.
- decl.internal
+ // Fake borrows are only read by fake reads, so do not have any reality in
+ // post-analysis MIR.
+ ClearCrossCrate::Set(box LocalInfo::FakeBorrow) => true,
+ _ => false,
};
let decl =
GeneratorSavedTy { ty: decl.ty, source_info: decl.source_info, ignore_for_traits };
@@ -1091,7 +1039,7 @@ fn elaborate_generator_drops<'tcx>(tcx: TyCtxt<'tcx>, body: &mut Body<'tcx>) {
UnwindAction::Cleanup(tgt) => tgt,
UnwindAction::Continue => elaborator.patch.resume_block(),
UnwindAction::Unreachable => elaborator.patch.unreachable_cleanup_block(),
- UnwindAction::Terminate => elaborator.patch.terminate_block(),
+ UnwindAction::Terminate(reason) => elaborator.patch.terminate_block(reason),
})
};
elaborate_drop(
@@ -1189,10 +1137,10 @@ fn insert_panic_block<'tcx>(
) -> BasicBlock {
let assert_block = BasicBlock::new(body.basic_blocks.len());
let term = TerminatorKind::Assert {
- cond: Operand::Constant(Box::new(Constant {
+ cond: Operand::Constant(Box::new(ConstOperand {
span: body.span,
user_ty: None,
- literal: ConstantKind::from_bool(tcx, false),
+ const_: Const::from_bool(tcx, false),
})),
expected: true,
msg: Box::new(message),
@@ -1239,7 +1187,7 @@ fn can_unwind<'tcx>(tcx: TyCtxt<'tcx>, body: &Body<'tcx>) -> bool {
// These never unwind.
TerminatorKind::Goto { .. }
| TerminatorKind::SwitchInt { .. }
- | TerminatorKind::Terminate
+ | TerminatorKind::UnwindTerminate(_)
| TerminatorKind::Return
| TerminatorKind::Unreachable
| TerminatorKind::GeneratorDrop
@@ -1248,7 +1196,7 @@ fn can_unwind<'tcx>(tcx: TyCtxt<'tcx>, body: &Body<'tcx>) -> bool {
// Resume will *continue* unwinding, but if there's no other unwinding terminator it
// will never be reached.
- TerminatorKind::Resume => {}
+ TerminatorKind::UnwindResume => {}
TerminatorKind::Yield { .. } => {
unreachable!("`can_unwind` called before generator transform")
@@ -1279,14 +1227,14 @@ fn create_generator_resume_function<'tcx>(
let source_info = SourceInfo::outermost(body.span);
let poison_block = body.basic_blocks_mut().push(BasicBlockData {
statements: vec![transform.set_discr(VariantIdx::new(POISONED), source_info)],
- terminator: Some(Terminator { source_info, kind: TerminatorKind::Resume }),
+ terminator: Some(Terminator { source_info, kind: TerminatorKind::UnwindResume }),
is_cleanup: true,
});
for (idx, block) in body.basic_blocks_mut().iter_enumerated_mut() {
let source_info = block.terminator().source_info;
- if let TerminatorKind::Resume = block.terminator().kind {
+ if let TerminatorKind::UnwindResume = block.terminator().kind {
// An existing `Resume` terminator is redirected to jump to our dedicated
// "poisoning block" above.
if idx != poison_block {
@@ -1445,8 +1393,6 @@ pub(crate) fn mir_generator_witnesses<'tcx>(
tcx: TyCtxt<'tcx>,
def_id: LocalDefId,
) -> Option<GeneratorLayout<'tcx>> {
- assert!(tcx.sess.opts.unstable_opts.drop_tracking_mir);
-
let (body, _) = tcx.mir_promoted(def_id);
let body = body.borrow();
let body = &*body;
@@ -1454,22 +1400,21 @@ pub(crate) fn mir_generator_witnesses<'tcx>(
// The first argument is the generator type passed by value
let gen_ty = body.local_decls[ty::CAPTURE_STRUCT_LOCAL].ty;
- // Get the interior types and args which typeck computed
let movable = match *gen_ty.kind() {
ty::Generator(_, _, movability) => movability == hir::Movability::Movable,
ty::Error(_) => return None,
_ => span_bug!(body.span, "unexpected generator type {}", gen_ty),
};
- // When first entering the generator, move the resume argument into its new local.
- let always_live_locals = always_storage_live_locals(&body);
+ // The witness simply contains all locals live across suspend points.
+ let always_live_locals = always_storage_live_locals(&body);
let liveness_info = locals_live_across_suspend_points(tcx, body, &always_live_locals, movable);
// Extract locals which are live across suspension point into `layout`
// `remap` gives a mapping from local indices onto generator struct indices
// `storage_liveness` tells us which locals have live storage at suspension points
- let (_, generator_layout, _) = compute_layout(tcx, liveness_info, body);
+ let (_, generator_layout, _) = compute_layout(liveness_info, body);
check_suspend_tys(tcx, &generator_layout, &body);
@@ -1489,15 +1434,10 @@ impl<'tcx> MirPass<'tcx> for StateTransform {
let gen_ty = body.local_decls.raw[1].ty;
// Get the discriminant type and args which typeck computed
- let (discr_ty, upvars, interior, movable) = match *gen_ty.kind() {
+ let (discr_ty, movable) = match *gen_ty.kind() {
ty::Generator(_, args, movability) => {
let args = args.as_generator();
- (
- args.discr_ty(tcx),
- args.upvar_tys(),
- args.witness(),
- movability == hir::Movability::Movable,
- )
+ (args.discr_ty(tcx), movability == hir::Movability::Movable)
}
_ => {
tcx.sess.delay_span_bug(body.span, format!("unexpected generator type {gen_ty}"));
@@ -1574,13 +1514,7 @@ impl<'tcx> MirPass<'tcx> for StateTransform {
// Extract locals which are live across suspension point into `layout`
// `remap` gives a mapping from local indices onto generator struct indices
// `storage_liveness` tells us which locals have live storage at suspension points
- let (remap, layout, storage_liveness) = compute_layout(tcx, liveness_info, body);
-
- if tcx.sess.opts.unstable_opts.validate_mir
- && !tcx.sess.opts.unstable_opts.drop_tracking_mir
- {
- sanitize_witness(tcx, body, interior, upvars, &layout);
- }
+ let (remap, layout, storage_liveness) = compute_layout(liveness_info, body);
let can_return = can_return(tcx, body, tcx.param_env(body.source.def_id()));
@@ -1758,8 +1692,8 @@ impl<'tcx> Visitor<'tcx> for EnsureGeneratorFieldAssignmentsNeverAlias<'_> {
TerminatorKind::Call { .. }
| TerminatorKind::Goto { .. }
| TerminatorKind::SwitchInt { .. }
- | TerminatorKind::Resume
- | TerminatorKind::Terminate
+ | TerminatorKind::UnwindResume
+ | TerminatorKind::UnwindTerminate(_)
| TerminatorKind::Return
| TerminatorKind::Unreachable
| TerminatorKind::Drop { .. }
@@ -1954,11 +1888,12 @@ fn check_must_not_suspend_def(
hir_id,
data.source_span,
errors::MustNotSupend {
+ tcx,
yield_sp: data.yield_span,
reason,
src_sp: data.source_span,
pre: data.descr_pre,
- def_path: tcx.def_path_str(def_id),
+ def_id,
post: data.descr_post,
},
);
diff --git a/compiler/rustc_mir_transform/src/gvn.rs b/compiler/rustc_mir_transform/src/gvn.rs
new file mode 100644
index 000000000..56bdc5a17
--- /dev/null
+++ b/compiler/rustc_mir_transform/src/gvn.rs
@@ -0,0 +1,539 @@
+//! Global value numbering.
+//!
+//! MIR may contain repeated and/or redundant computations. The objective of this pass is to detect
+//! such redundancies and re-use the already-computed result when possible.
+//!
+//! In a first pass, we compute a symbolic representation of values that are assigned to SSA
+//! locals. This symbolic representation is defined by the `Value` enum. Each produced instance of
+//! `Value` is interned as a `VnIndex`, which allows us to cheaply compute identical values.
+//!
+//! From those assignments, we construct a mapping `VnIndex -> Vec<(Local, Location)>` of available
+//! values, the locals in which they are stored, and the assignment location.
+//!
+//! In a second pass, we traverse all (non SSA) assignments `x = rvalue` and operands. For each
+//! one, we compute the `VnIndex` of the rvalue. If this `VnIndex` is associated to a constant, we
+//! replace the rvalue/operand by that constant. Otherwise, if there is an SSA local `y`
+//! associated to this `VnIndex`, and if its definition location strictly dominates the assignment
+//! to `x`, we replace the assignment by `x = y`.
+//!
+//! By opportunity, this pass simplifies some `Rvalue`s based on the accumulated knowledge.
+//!
+//! # Operational semantic
+//!
+//! Operationally, this pass attempts to prove bitwise equality between locals. Given this MIR:
+//! ```ignore (MIR)
+//! _a = some value // has VnIndex i
+//! // some MIR
+//! _b = some other value // also has VnIndex i
+//! ```
+//!
+//! We consider it to be replaceable by:
+//! ```ignore (MIR)
+//! _a = some value // has VnIndex i
+//! // some MIR
+//! _c = some other value // also has VnIndex i
+//! assume(_a bitwise equal to _c) // follows from having the same VnIndex
+//! _b = _a // follows from the `assume`
+//! ```
+//!
+//! Which is simplifiable to:
+//! ```ignore (MIR)
+//! _a = some value // has VnIndex i
+//! // some MIR
+//! _b = _a
+//! ```
+//!
+//! # Handling of references
+//!
+//! We handle references by assigning a different "provenance" index to each Ref/AddressOf rvalue.
+//! This ensures that we do not spuriously merge borrows that should not be merged. Meanwhile, we
+//! consider all the derefs of an immutable reference to a freeze type to give the same value:
+//! ```ignore (MIR)
+//! _a = *_b // _b is &Freeze
+//! _c = *_b // replaced by _c = _a
+//! ```
+
+use rustc_data_structures::fx::{FxHashMap, FxIndexSet};
+use rustc_data_structures::graph::dominators::Dominators;
+use rustc_index::bit_set::BitSet;
+use rustc_index::IndexVec;
+use rustc_macros::newtype_index;
+use rustc_middle::mir::visit::*;
+use rustc_middle::mir::*;
+use rustc_middle::ty::{self, Ty, TyCtxt};
+use rustc_target::abi::{VariantIdx, FIRST_VARIANT};
+
+use crate::ssa::SsaLocals;
+use crate::MirPass;
+
+pub struct GVN;
+
+impl<'tcx> MirPass<'tcx> for GVN {
+ fn is_enabled(&self, sess: &rustc_session::Session) -> bool {
+ sess.mir_opt_level() >= 4
+ }
+
+ #[instrument(level = "trace", skip(self, tcx, body))]
+ fn run_pass(&self, tcx: TyCtxt<'tcx>, body: &mut Body<'tcx>) {
+ debug!(def_id = ?body.source.def_id());
+ propagate_ssa(tcx, body);
+ }
+}
+
+fn propagate_ssa<'tcx>(tcx: TyCtxt<'tcx>, body: &mut Body<'tcx>) {
+ let param_env = tcx.param_env_reveal_all_normalized(body.source.def_id());
+ let ssa = SsaLocals::new(body);
+ // Clone dominators as we need them while mutating the body.
+ let dominators = body.basic_blocks.dominators().clone();
+
+ let mut state = VnState::new(tcx, param_env, &ssa, &dominators, &body.local_decls);
+ for arg in body.args_iter() {
+ if ssa.is_ssa(arg) {
+ let value = state.new_opaque().unwrap();
+ state.assign(arg, value);
+ }
+ }
+
+ ssa.for_each_assignment_mut(&mut body.basic_blocks, |local, rvalue, location| {
+ let value = state.simplify_rvalue(rvalue, location).or_else(|| state.new_opaque()).unwrap();
+ // FIXME(#112651) `rvalue` may have a subtype to `local`. We can only mark `local` as
+ // reusable if we have an exact type match.
+ if state.local_decls[local].ty == rvalue.ty(state.local_decls, tcx) {
+ state.assign(local, value);
+ }
+ });
+
+ // Stop creating opaques during replacement as it is useless.
+ state.next_opaque = None;
+
+ let reverse_postorder = body.basic_blocks.reverse_postorder().to_vec();
+ for bb in reverse_postorder {
+ let data = &mut body.basic_blocks.as_mut_preserves_cfg()[bb];
+ state.visit_basic_block_data(bb, data);
+ }
+ let any_replacement = state.any_replacement;
+
+    // For each local that is reused (`y` above), we remove its storage statements to avoid any
+ // difficulty. Those locals are SSA, so should be easy to optimize by LLVM without storage
+ // statements.
+ StorageRemover { tcx, reused_locals: state.reused_locals }.visit_body_preserves_cfg(body);
+
+ if any_replacement {
+ crate::simplify::remove_unused_definitions(body);
+ }
+}
+
+newtype_index! {
+ struct VnIndex {}
+}
+
+#[derive(Debug, PartialEq, Eq, Hash)]
+enum Value<'tcx> {
+ // Root values.
+ /// Used to represent values we know nothing about.
+ /// The `usize` is a counter incremented by `new_opaque`.
+ Opaque(usize),
+ /// Evaluated or unevaluated constant value.
+ Constant(Const<'tcx>),
+ /// An aggregate value, either tuple/closure/struct/enum.
+ /// This does not contain unions, as we cannot reason with the value.
+ Aggregate(Ty<'tcx>, VariantIdx, Vec<VnIndex>),
+ /// This corresponds to a `[value; count]` expression.
+ Repeat(VnIndex, ty::Const<'tcx>),
+ /// The address of a place.
+ Address {
+ place: Place<'tcx>,
+ /// Give each borrow and pointer a different provenance, so we don't merge them.
+ provenance: usize,
+ },
+
+ // Extractions.
+ /// This is the *value* obtained by projecting another value.
+ Projection(VnIndex, ProjectionElem<VnIndex, Ty<'tcx>>),
+ /// Discriminant of the given value.
+ Discriminant(VnIndex),
+ /// Length of an array or slice.
+ Len(VnIndex),
+
+ // Operations.
+ NullaryOp(NullOp<'tcx>, Ty<'tcx>),
+ UnaryOp(UnOp, VnIndex),
+ BinaryOp(BinOp, VnIndex, VnIndex),
+ CheckedBinaryOp(BinOp, VnIndex, VnIndex),
+ Cast {
+ kind: CastKind,
+ value: VnIndex,
+ from: Ty<'tcx>,
+ to: Ty<'tcx>,
+ },
+}
+
+struct VnState<'body, 'tcx> {
+ tcx: TyCtxt<'tcx>,
+ param_env: ty::ParamEnv<'tcx>,
+ local_decls: &'body LocalDecls<'tcx>,
+ /// Value stored in each local.
+ locals: IndexVec<Local, Option<VnIndex>>,
+ /// First local to be assigned that value.
+ rev_locals: FxHashMap<VnIndex, Vec<Local>>,
+ values: FxIndexSet<Value<'tcx>>,
+ /// Counter to generate different values.
+ /// This is an option to stop creating opaques during replacement.
+ next_opaque: Option<usize>,
+ ssa: &'body SsaLocals,
+ dominators: &'body Dominators<BasicBlock>,
+ reused_locals: BitSet<Local>,
+ any_replacement: bool,
+}
+
+impl<'body, 'tcx> VnState<'body, 'tcx> {
+ fn new(
+ tcx: TyCtxt<'tcx>,
+ param_env: ty::ParamEnv<'tcx>,
+ ssa: &'body SsaLocals,
+ dominators: &'body Dominators<BasicBlock>,
+ local_decls: &'body LocalDecls<'tcx>,
+ ) -> Self {
+ VnState {
+ tcx,
+ param_env,
+ local_decls,
+ locals: IndexVec::from_elem(None, local_decls),
+ rev_locals: FxHashMap::default(),
+ values: FxIndexSet::default(),
+ next_opaque: Some(0),
+ ssa,
+ dominators,
+ reused_locals: BitSet::new_empty(local_decls.len()),
+ any_replacement: false,
+ }
+ }
+
+ #[instrument(level = "trace", skip(self), ret)]
+ fn insert(&mut self, value: Value<'tcx>) -> VnIndex {
+ let (index, _) = self.values.insert_full(value);
+ VnIndex::from_usize(index)
+ }
+
+ /// Create a new `Value` for which we have no information at all, except that it is distinct
+ /// from all the others.
+ #[instrument(level = "trace", skip(self), ret)]
+ fn new_opaque(&mut self) -> Option<VnIndex> {
+ let next_opaque = self.next_opaque.as_mut()?;
+ let value = Value::Opaque(*next_opaque);
+ *next_opaque += 1;
+ Some(self.insert(value))
+ }
+
+ /// Create a new `Value::Address` distinct from all the others.
+ #[instrument(level = "trace", skip(self), ret)]
+ fn new_pointer(&mut self, place: Place<'tcx>) -> Option<VnIndex> {
+ let next_opaque = self.next_opaque.as_mut()?;
+ let value = Value::Address { place, provenance: *next_opaque };
+ *next_opaque += 1;
+ Some(self.insert(value))
+ }
+
+ fn get(&self, index: VnIndex) -> &Value<'tcx> {
+ self.values.get_index(index.as_usize()).unwrap()
+ }
+
+ /// Record that `local` is assigned `value`. `local` must be SSA.
+ #[instrument(level = "trace", skip(self))]
+ fn assign(&mut self, local: Local, value: VnIndex) {
+ self.locals[local] = Some(value);
+
+ // Only register the value if its type is `Sized`, as we will emit copies of it.
+ let is_sized = !self.tcx.features().unsized_locals
+ || self.local_decls[local].ty.is_sized(self.tcx, self.param_env);
+ if is_sized {
+ self.rev_locals.entry(value).or_default().push(local);
+ }
+ }
+
+ /// Represent the *value* which would be read from `place`, and point `place` to a preexisting
+ /// place with the same value (if that already exists).
+ #[instrument(level = "trace", skip(self), ret)]
+ fn simplify_place_value(
+ &mut self,
+ place: &mut Place<'tcx>,
+ location: Location,
+ ) -> Option<VnIndex> {
+ // Invariant: `place` and `place_ref` point to the same value, even if they point to
+ // different memory locations.
+ let mut place_ref = place.as_ref();
+
+ // Invariant: `value` holds the value up-to the `index`th projection excluded.
+ let mut value = self.locals[place.local]?;
+ for (index, proj) in place.projection.iter().enumerate() {
+ if let Some(local) = self.try_as_local(value, location) {
+ // Both `local` and `Place { local: place.local, projection: projection[..index] }`
+ // hold the same value. Therefore, following place holds the value in the original
+ // `place`.
+ place_ref = PlaceRef { local, projection: &place.projection[index..] };
+ }
+
+ let proj = match proj {
+ ProjectionElem::Deref => {
+ let ty = Place::ty_from(
+ place.local,
+ &place.projection[..index],
+ self.local_decls,
+ self.tcx,
+ )
+ .ty;
+ if let Some(Mutability::Not) = ty.ref_mutability()
+ && let Some(pointee_ty) = ty.builtin_deref(true)
+ && pointee_ty.ty.is_freeze(self.tcx, self.param_env)
+ {
+ // An immutable borrow `_x` always points to the same value for the
+ // lifetime of the borrow, so we can merge all instances of `*_x`.
+ ProjectionElem::Deref
+ } else {
+ return None;
+ }
+ }
+ ProjectionElem::Field(f, ty) => ProjectionElem::Field(f, ty),
+ ProjectionElem::Index(idx) => {
+ let idx = self.locals[idx]?;
+ ProjectionElem::Index(idx)
+ }
+ ProjectionElem::ConstantIndex { offset, min_length, from_end } => {
+ ProjectionElem::ConstantIndex { offset, min_length, from_end }
+ }
+ ProjectionElem::Subslice { from, to, from_end } => {
+ ProjectionElem::Subslice { from, to, from_end }
+ }
+ ProjectionElem::Downcast(name, index) => ProjectionElem::Downcast(name, index),
+ ProjectionElem::OpaqueCast(ty) => ProjectionElem::OpaqueCast(ty),
+ ProjectionElem::Subtype(ty) => ProjectionElem::Subtype(ty),
+ };
+ value = self.insert(Value::Projection(value, proj));
+ }
+
+ if let Some(local) = self.try_as_local(value, location)
+ && local != place.local // in case we had no projection to begin with.
+ {
+ *place = local.into();
+ self.reused_locals.insert(local);
+ self.any_replacement = true;
+ } else if place_ref.local != place.local
+ || place_ref.projection.len() < place.projection.len()
+ {
+ // By the invariant on `place_ref`.
+ *place = place_ref.project_deeper(&[], self.tcx);
+ self.reused_locals.insert(place_ref.local);
+ self.any_replacement = true;
+ }
+
+ Some(value)
+ }
+
+ #[instrument(level = "trace", skip(self), ret)]
+ fn simplify_operand(
+ &mut self,
+ operand: &mut Operand<'tcx>,
+ location: Location,
+ ) -> Option<VnIndex> {
+ match *operand {
+ Operand::Constant(ref constant) => Some(self.insert(Value::Constant(constant.const_))),
+ Operand::Copy(ref mut place) | Operand::Move(ref mut place) => {
+ let value = self.simplify_place_value(place, location)?;
+ if let Some(const_) = self.try_as_constant(value) {
+ *operand = Operand::Constant(Box::new(const_));
+ self.any_replacement = true;
+ }
+ Some(value)
+ }
+ }
+ }
+
+ #[instrument(level = "trace", skip(self), ret)]
+ fn simplify_rvalue(
+ &mut self,
+ rvalue: &mut Rvalue<'tcx>,
+ location: Location,
+ ) -> Option<VnIndex> {
+ let value = match *rvalue {
+ // Forward values.
+ Rvalue::Use(ref mut operand) => return self.simplify_operand(operand, location),
+ Rvalue::CopyForDeref(place) => {
+ let mut operand = Operand::Copy(place);
+ let val = self.simplify_operand(&mut operand, location);
+ *rvalue = Rvalue::Use(operand);
+ return val;
+ }
+
+ // Roots.
+ Rvalue::Repeat(ref mut op, amount) => {
+ let op = self.simplify_operand(op, location)?;
+ Value::Repeat(op, amount)
+ }
+ Rvalue::NullaryOp(op, ty) => Value::NullaryOp(op, ty),
+ Rvalue::Aggregate(box ref kind, ref mut fields) => {
+ let variant_index = match *kind {
+ AggregateKind::Array(..)
+ | AggregateKind::Tuple
+ | AggregateKind::Closure(..)
+ | AggregateKind::Generator(..) => FIRST_VARIANT,
+ AggregateKind::Adt(_, variant_index, _, _, None) => variant_index,
+ // Do not track unions.
+ AggregateKind::Adt(_, _, _, _, Some(_)) => return None,
+ };
+ let fields: Option<Vec<_>> = fields
+ .iter_mut()
+ .map(|op| self.simplify_operand(op, location).or_else(|| self.new_opaque()))
+ .collect();
+ let ty = rvalue.ty(self.local_decls, self.tcx);
+ Value::Aggregate(ty, variant_index, fields?)
+ }
+ Rvalue::Ref(.., place) | Rvalue::AddressOf(_, place) => return self.new_pointer(place),
+
+ // Operations.
+ Rvalue::Len(ref mut place) => {
+ let place = self.simplify_place_value(place, location)?;
+ Value::Len(place)
+ }
+ Rvalue::Cast(kind, ref mut value, to) => {
+ let from = value.ty(self.local_decls, self.tcx);
+ let value = self.simplify_operand(value, location)?;
+ Value::Cast { kind, value, from, to }
+ }
+ Rvalue::BinaryOp(op, box (ref mut lhs, ref mut rhs)) => {
+ let lhs = self.simplify_operand(lhs, location);
+ let rhs = self.simplify_operand(rhs, location);
+ Value::BinaryOp(op, lhs?, rhs?)
+ }
+ Rvalue::CheckedBinaryOp(op, box (ref mut lhs, ref mut rhs)) => {
+ let lhs = self.simplify_operand(lhs, location);
+ let rhs = self.simplify_operand(rhs, location);
+ Value::CheckedBinaryOp(op, lhs?, rhs?)
+ }
+ Rvalue::UnaryOp(op, ref mut arg) => {
+ let arg = self.simplify_operand(arg, location)?;
+ Value::UnaryOp(op, arg)
+ }
+ Rvalue::Discriminant(ref mut place) => {
+ let place = self.simplify_place_value(place, location)?;
+ Value::Discriminant(place)
+ }
+
+ // Unsupported values.
+ Rvalue::ThreadLocalRef(..) | Rvalue::ShallowInitBox(..) => return None,
+ };
+ debug!(?value);
+ Some(self.insert(value))
+ }
+}
+
+impl<'tcx> VnState<'_, 'tcx> {
+ /// If `index` is a `Value::Constant`, return the `Constant` to be put in the MIR.
+ fn try_as_constant(&mut self, index: VnIndex) -> Option<ConstOperand<'tcx>> {
+ if let Value::Constant(const_) = *self.get(index) {
+ // Some constants may contain pointers. We need to preserve the provenance of these
+ // pointers, but not all constants guarantee this:
+ // - valtrees purposefully do not;
+ // - ConstValue::Slice does not either.
+ match const_ {
+ Const::Ty(c) => match c.kind() {
+ ty::ConstKind::Value(valtree) => match valtree {
+ // This is just an integer, keep it.
+ ty::ValTree::Leaf(_) => {}
+ ty::ValTree::Branch(_) => return None,
+ },
+ ty::ConstKind::Param(..)
+ | ty::ConstKind::Unevaluated(..)
+ | ty::ConstKind::Expr(..) => {}
+ // Should not appear in runtime MIR.
+ ty::ConstKind::Infer(..)
+ | ty::ConstKind::Bound(..)
+ | ty::ConstKind::Placeholder(..)
+ | ty::ConstKind::Error(..) => bug!(),
+ },
+ Const::Unevaluated(..) => {}
+ // If the same slice appears twice in the MIR, we cannot guarantee that we will
+ // give the same `AllocId` to the data.
+ Const::Val(ConstValue::Slice { .. }, _) => return None,
+ Const::Val(
+ ConstValue::ZeroSized | ConstValue::Scalar(_) | ConstValue::Indirect { .. },
+ _,
+ ) => {}
+ }
+ Some(ConstOperand { span: rustc_span::DUMMY_SP, user_ty: None, const_ })
+ } else {
+ None
+ }
+ }
+
+ /// If there is a local which is assigned `index`, and its assignment strictly dominates `loc`,
+ /// return it.
+ fn try_as_local(&mut self, index: VnIndex, loc: Location) -> Option<Local> {
+ let other = self.rev_locals.get(&index)?;
+ other
+ .iter()
+ .copied()
+ .find(|&other| self.ssa.assignment_dominates(self.dominators, other, loc))
+ }
+}
+
+impl<'tcx> MutVisitor<'tcx> for VnState<'_, 'tcx> {
+ fn tcx(&self) -> TyCtxt<'tcx> {
+ self.tcx
+ }
+
+ fn visit_operand(&mut self, operand: &mut Operand<'tcx>, location: Location) {
+ self.simplify_operand(operand, location);
+ }
+
+ fn visit_statement(&mut self, stmt: &mut Statement<'tcx>, location: Location) {
+ self.super_statement(stmt, location);
+ if let StatementKind::Assign(box (_, ref mut rvalue)) = stmt.kind
+ // Do not try to simplify a constant, it's already in canonical shape.
+ && !matches!(rvalue, Rvalue::Use(Operand::Constant(_)))
+ && let Some(value) = self.simplify_rvalue(rvalue, location)
+ {
+ if let Some(const_) = self.try_as_constant(value) {
+ *rvalue = Rvalue::Use(Operand::Constant(Box::new(const_)));
+ self.any_replacement = true;
+ } else if let Some(local) = self.try_as_local(value, location)
+ && *rvalue != Rvalue::Use(Operand::Move(local.into()))
+ {
+ *rvalue = Rvalue::Use(Operand::Copy(local.into()));
+ self.reused_locals.insert(local);
+ self.any_replacement = true;
+ }
+ }
+ }
+}
+
+struct StorageRemover<'tcx> {
+ tcx: TyCtxt<'tcx>,
+ reused_locals: BitSet<Local>,
+}
+
+impl<'tcx> MutVisitor<'tcx> for StorageRemover<'tcx> {
+ fn tcx(&self) -> TyCtxt<'tcx> {
+ self.tcx
+ }
+
+ fn visit_operand(&mut self, operand: &mut Operand<'tcx>, _: Location) {
+ if let Operand::Move(place) = *operand
+ && let Some(local) = place.as_local()
+ && self.reused_locals.contains(local)
+ {
+ *operand = Operand::Copy(place);
+ }
+ }
+
+ fn visit_statement(&mut self, stmt: &mut Statement<'tcx>, loc: Location) {
+ match stmt.kind {
+ // When removing storage statements, we need to remove both (#107511).
+ StatementKind::StorageLive(l) | StatementKind::StorageDead(l)
+ if self.reused_locals.contains(l) =>
+ {
+ stmt.make_nop()
+ }
+ _ => self.super_statement(stmt, loc),
+ }
+ }
+}
diff --git a/compiler/rustc_mir_transform/src/inline.rs b/compiler/rustc_mir_transform/src/inline.rs
index fc9e18378..b53e0852c 100644
--- a/compiler/rustc_mir_transform/src/inline.rs
+++ b/compiler/rustc_mir_transform/src/inline.rs
@@ -193,7 +193,7 @@ impl<'tcx> Inliner<'tcx> {
return Err("optimization fuel exhausted");
}
- let Ok(callee_body) = callsite.callee.try_subst_mir_and_normalize_erasing_regions(
+ let Ok(callee_body) = callsite.callee.try_instantiate_mir_and_normalize_erasing_regions(
self.tcx,
self.param_env,
ty::EarlyBinder::bind(callee_body.clone()),
@@ -218,7 +218,13 @@ impl<'tcx> Inliner<'tcx> {
// Normally, this shouldn't be required, but trait normalization failure can create a
// validation ICE.
let output_type = callee_body.return_ty();
- if !util::is_subtype(self.tcx, self.param_env, output_type, destination_ty) {
+ if !util::relate_types(
+ self.tcx,
+ self.param_env,
+ ty::Variance::Covariant,
+ output_type,
+ destination_ty,
+ ) {
trace!(?output_type, ?destination_ty);
return Err("failed to normalize return type");
}
@@ -248,7 +254,13 @@ impl<'tcx> Inliner<'tcx> {
self_arg_ty.into_iter().chain(arg_tuple_tys).zip(callee_body.args_iter())
{
let input_type = callee_body.local_decls[input].ty;
- if !util::is_subtype(self.tcx, self.param_env, input_type, arg_ty) {
+ if !util::relate_types(
+ self.tcx,
+ self.param_env,
+ ty::Variance::Covariant,
+ input_type,
+ arg_ty,
+ ) {
trace!(?arg_ty, ?input_type);
return Err("failed to normalize tuple argument type");
}
@@ -257,7 +269,13 @@ impl<'tcx> Inliner<'tcx> {
for (arg, input) in args.iter().zip(callee_body.args_iter()) {
let input_type = callee_body.local_decls[input].ty;
let arg_ty = arg.ty(&caller_body.local_decls, self.tcx);
- if !util::is_subtype(self.tcx, self.param_env, input_type, arg_ty) {
+ if !util::relate_types(
+ self.tcx,
+ self.param_env,
+ ty::Variance::Covariant,
+ input_type,
+ arg_ty,
+ ) {
trace!(?arg_ty, ?input_type);
return Err("failed to normalize argument type");
}
@@ -388,14 +406,16 @@ impl<'tcx> Inliner<'tcx> {
return Err("never inline hint");
}
- // Only inline local functions if they would be eligible for cross-crate
- // inlining. This is to ensure that the final crate doesn't have MIR that
- // reference unexported symbols
- if callsite.callee.def_id().is_local() {
- let is_generic = callsite.callee.args.non_erasable_generics().next().is_some();
- if !is_generic && !callee_attrs.requests_inline() {
- return Err("not exported");
- }
+ // The reachability pass defines which functions are eligible for inlining. Inlining
+ // any other function is generally incorrect, because it could reference unexported symbols.
+ let is_generic = callsite
+ .callee
+ .args
+ .non_erasable_generics(self.tcx, callsite.callee.def_id())
+ .next()
+ .is_some();
+ if !is_generic && !callee_attrs.requests_inline() {
+ return Err("not exported");
}
if callsite.fn_sig.c_variadic() {
@@ -479,9 +499,10 @@ impl<'tcx> Inliner<'tcx> {
work_list.push(target);
// If the place doesn't actually need dropping, treat it like a regular goto.
- let ty = callsite
- .callee
- .subst_mir(self.tcx, ty::EarlyBinder::bind(&place.ty(callee_body, tcx).ty));
+ let ty = callsite.callee.instantiate_mir(
+ self.tcx,
+ ty::EarlyBinder::bind(&place.ty(callee_body, tcx).ty),
+ );
if ty.needs_drop(tcx, self.param_env) && let UnwindAction::Cleanup(unwind) = unwind {
work_list.push(unwind);
}
@@ -648,13 +669,13 @@ impl<'tcx> Inliner<'tcx> {
// Copy only unevaluated constants from the callee_body into the caller_body.
// Although we are only pushing `ConstKind::Unevaluated` consts to
// `required_consts`, here we may not only have `ConstKind::Unevaluated`
- // because we are calling `subst_and_normalize_erasing_regions`.
+ // because we are calling `instantiate_and_normalize_erasing_regions`.
caller_body.required_consts.extend(
- callee_body.required_consts.iter().copied().filter(|&ct| match ct.literal {
- ConstantKind::Ty(_) => {
+ callee_body.required_consts.iter().copied().filter(|&ct| match ct.const_ {
+ Const::Ty(_) => {
bug!("should never encounter ty::UnevaluatedConst in `required_consts`")
}
- ConstantKind::Val(..) | ConstantKind::Unevaluated(..) => true,
+ Const::Val(..) | Const::Unevaluated(..) => true,
}),
);
}
@@ -809,9 +830,10 @@ impl<'tcx> Visitor<'tcx> for CostChecker<'_, 'tcx> {
match terminator.kind {
TerminatorKind::Drop { ref place, unwind, .. } => {
// If the place doesn't actually need dropping, treat it like a regular goto.
- let ty = self
- .instance
- .subst_mir(tcx, ty::EarlyBinder::bind(&place.ty(self.callee_body, tcx).ty));
+ let ty = self.instance.instantiate_mir(
+ tcx,
+ ty::EarlyBinder::bind(&place.ty(self.callee_body, tcx).ty),
+ );
if ty.needs_drop(tcx, self.param_env) {
self.cost += CALL_PENALTY;
if let UnwindAction::Cleanup(_) = unwind {
@@ -822,7 +844,8 @@ impl<'tcx> Visitor<'tcx> for CostChecker<'_, 'tcx> {
}
}
TerminatorKind::Call { func: Operand::Constant(ref f), unwind, .. } => {
- let fn_ty = self.instance.subst_mir(tcx, ty::EarlyBinder::bind(&f.literal.ty()));
+ let fn_ty =
+ self.instance.instantiate_mir(tcx, ty::EarlyBinder::bind(&f.const_.ty()));
self.cost += if let ty::FnDef(def_id, _) = *fn_ty.kind() && tcx.is_intrinsic(def_id) {
// Don't give intrinsics the extra penalty for calls
INSTR_COST
@@ -839,7 +862,7 @@ impl<'tcx> Visitor<'tcx> for CostChecker<'_, 'tcx> {
self.cost += LANDINGPAD_PENALTY;
}
}
- TerminatorKind::Resume => self.cost += RESUME_PENALTY,
+ TerminatorKind::UnwindResume => self.cost += RESUME_PENALTY,
TerminatorKind::InlineAsm { unwind, .. } => {
self.cost += INSTR_COST;
if let UnwindAction::Cleanup(_) = unwind {
@@ -906,12 +929,12 @@ impl Integrator<'_, '_> {
UnwindAction::Cleanup(_) | UnwindAction::Continue => {
bug!("cleanup on cleanup block");
}
- UnwindAction::Unreachable | UnwindAction::Terminate => return unwind,
+ UnwindAction::Unreachable | UnwindAction::Terminate(_) => return unwind,
}
}
match unwind {
- UnwindAction::Unreachable | UnwindAction::Terminate => unwind,
+ UnwindAction::Unreachable | UnwindAction::Terminate(_) => unwind,
UnwindAction::Cleanup(target) => UnwindAction::Cleanup(self.map_block(target)),
// Add an unwind edge to the original call's cleanup block
UnwindAction::Continue => self.cleanup_block,
@@ -1017,15 +1040,15 @@ impl<'tcx> MutVisitor<'tcx> for Integrator<'_, 'tcx> {
TerminatorKind::Unreachable
}
}
- TerminatorKind::Resume => {
+ TerminatorKind::UnwindResume => {
terminator.kind = match self.cleanup_block {
UnwindAction::Cleanup(tgt) => TerminatorKind::Goto { target: tgt },
- UnwindAction::Continue => TerminatorKind::Resume,
+ UnwindAction::Continue => TerminatorKind::UnwindResume,
UnwindAction::Unreachable => TerminatorKind::Unreachable,
- UnwindAction::Terminate => TerminatorKind::Terminate,
+ UnwindAction::Terminate(reason) => TerminatorKind::UnwindTerminate(reason),
};
}
- TerminatorKind::Terminate => {}
+ TerminatorKind::UnwindTerminate(_) => {}
TerminatorKind::Unreachable => {}
TerminatorKind::FalseEdge { ref mut real_target, ref mut imaginary_target } => {
*real_target = self.map_block(*real_target);
diff --git a/compiler/rustc_mir_transform/src/inline/cycle.rs b/compiler/rustc_mir_transform/src/inline/cycle.rs
index 822634129..d30e0bad8 100644
--- a/compiler/rustc_mir_transform/src/inline/cycle.rs
+++ b/compiler/rustc_mir_transform/src/inline/cycle.rs
@@ -44,7 +44,7 @@ pub(crate) fn mir_callgraph_reachable<'tcx>(
) -> bool {
trace!(%caller);
for &(callee, args) in tcx.mir_inliner_callees(caller.def) {
- let Ok(args) = caller.try_subst_mir_and_normalize_erasing_regions(
+ let Ok(args) = caller.try_instantiate_mir_and_normalize_erasing_regions(
tcx,
param_env,
ty::EarlyBinder::bind(args),
diff --git a/compiler/rustc_mir_transform/src/instsimplify.rs b/compiler/rustc_mir_transform/src/instsimplify.rs
index 8b0a0903d..a6ef2e11a 100644
--- a/compiler/rustc_mir_transform/src/instsimplify.rs
+++ b/compiler/rustc_mir_transform/src/instsimplify.rs
@@ -104,7 +104,7 @@ impl<'tcx> InstSimplifyContext<'tcx, '_> {
fn try_eval_bool(&self, a: &Operand<'_>) -> Option<bool> {
let a = a.constant()?;
- if a.literal.ty().is_bool() { a.literal.try_to_bool() } else { None }
+ if a.const_.ty().is_bool() { a.const_.try_to_bool() } else { None }
}
/// Transform "&(*a)" ==> "a".
@@ -136,8 +136,8 @@ impl<'tcx> InstSimplifyContext<'tcx, '_> {
return;
}
- let literal = ConstantKind::from_const(len, self.tcx);
- let constant = Constant { span: source_info.span, literal, user_ty: None };
+ let const_ = Const::from_ty_const(len, self.tcx);
+ let constant = ConstOperand { span: source_info.span, const_, user_ty: None };
*rvalue = Rvalue::Use(Operand::Constant(Box::new(constant)));
}
}
diff --git a/compiler/rustc_mir_transform/src/large_enums.rs b/compiler/rustc_mir_transform/src/large_enums.rs
index 19108dabd..886ff7604 100644
--- a/compiler/rustc_mir_transform/src/large_enums.rs
+++ b/compiler/rustc_mir_transform/src/large_enums.rs
@@ -54,11 +54,8 @@ impl EnumSizeOpt {
let layout = tcx.layout_of(param_env.and(ty)).ok()?;
let variants = match &layout.variants {
Variants::Single { .. } => return None,
- Variants::Multiple { tag_encoding, .. }
- if matches!(tag_encoding, TagEncoding::Niche { .. }) =>
- {
- return None;
- }
+ Variants::Multiple { tag_encoding: TagEncoding::Niche { .. }, .. } => return None,
+
Variants::Multiple { variants, .. } if variants.len() <= 1 => return None,
Variants::Multiple { variants, .. } => variants,
};
@@ -114,7 +111,7 @@ impl EnumSizeOpt {
tcx.data_layout.ptr_sized_integer().align(&tcx.data_layout).abi,
Mutability::Not,
);
- let alloc = tcx.create_memory_alloc(tcx.mk_const_alloc(alloc));
+ let alloc = tcx.reserve_and_set_memory_alloc(tcx.mk_const_alloc(alloc));
Some((*adt_def, num_discrs, *alloc_cache.entry(ty).or_insert(alloc)))
}
fn optim<'tcx>(&self, tcx: TyCtxt<'tcx>, body: &mut Body<'tcx>) {
@@ -139,7 +136,6 @@ impl EnumSizeOpt {
let (adt_def, num_variants, alloc_id) =
self.candidate(tcx, param_env, ty, &mut alloc_cache)?;
- let alloc = tcx.global_alloc(alloc_id).unwrap_memory();
let tmp_ty = Ty::new_array(tcx, tcx.types.usize, num_variants as u64);
@@ -150,11 +146,11 @@ impl EnumSizeOpt {
};
let place = Place::from(size_array_local);
- let constant_vals = Constant {
+ let constant_vals = ConstOperand {
span,
user_ty: None,
- literal: ConstantKind::Val(
- interpret::ConstValue::ByRef { alloc, offset: Size::ZERO },
+ const_: Const::Val(
+ ConstValue::Indirect { alloc_id, offset: Size::ZERO },
tmp_ty,
),
};
diff --git a/compiler/rustc_mir_transform/src/lib.rs b/compiler/rustc_mir_transform/src/lib.rs
index bf798adee..c0a09b7a7 100644
--- a/compiler/rustc_mir_transform/src/lib.rs
+++ b/compiler/rustc_mir_transform/src/lib.rs
@@ -2,6 +2,7 @@
#![deny(rustc::untranslatable_diagnostic)]
#![deny(rustc::diagnostic_outside_of_impl)]
#![feature(box_patterns)]
+#![feature(decl_macro)]
#![feature(is_sorted)]
#![feature(let_chains)]
#![feature(map_try_insert)]
@@ -30,9 +31,9 @@ use rustc_hir::intravisit::{self, Visitor};
use rustc_index::IndexVec;
use rustc_middle::mir::visit::Visitor as _;
use rustc_middle::mir::{
- traversal, AnalysisPhase, Body, CallSource, ClearCrossCrate, ConstQualifs, Constant, LocalDecl,
- MirPass, MirPhase, Operand, Place, ProjectionElem, Promoted, RuntimePhase, Rvalue, SourceInfo,
- Statement, StatementKind, TerminatorKind, START_BLOCK,
+ traversal, AnalysisPhase, Body, CallSource, ClearCrossCrate, ConstOperand, ConstQualifs,
+ LocalDecl, MirPass, MirPhase, Operand, Place, ProjectionElem, Promoted, RuntimePhase, Rvalue,
+ SourceInfo, Statement, StatementKind, TerminatorKind, START_BLOCK,
};
use rustc_middle::query::Providers;
use rustc_middle::ty::{self, TyCtxt, TypeVisitableExt};
@@ -53,6 +54,7 @@ mod check_packed_ref;
pub mod check_unsafety;
mod remove_place_mention;
// This pass is public to allow external drivers to perform MIR cleanup
+mod add_subtyping_projections;
pub mod cleanup_post_borrowck;
mod const_debuginfo;
mod const_goto;
@@ -75,6 +77,7 @@ mod errors;
mod ffi_unwind_calls;
mod function_item_references;
mod generator;
+mod gvn;
pub mod inline;
mod instsimplify;
mod large_enums;
@@ -148,14 +151,14 @@ fn remap_mir_for_const_eval_select<'tcx>(
let terminator = bb.terminator.as_mut().expect("invalid terminator");
match terminator.kind {
TerminatorKind::Call {
- func: Operand::Constant(box Constant { ref literal, .. }),
+ func: Operand::Constant(box ConstOperand { ref const_, .. }),
ref mut args,
destination,
target,
unwind,
fn_span,
..
- } if let ty::FnDef(def_id, _) = *literal.ty().kind()
+ } if let ty::FnDef(def_id, _) = *const_.ty().kind()
&& tcx.item_name(def_id) == sym::const_eval_select
&& tcx.is_intrinsic(def_id) =>
{
@@ -342,7 +345,7 @@ fn inner_mir_for_ctfe(tcx: TyCtxt<'_>, def: LocalDefId) -> Body<'_> {
let body = match tcx.hir().body_const_context(def) {
// consts and statics do not have `optimized_mir`, so we can steal the body instead of
// cloning it.
- Some(hir::ConstContext::Const | hir::ConstContext::Static(_)) => body.steal(),
+ Some(hir::ConstContext::Const { .. } | hir::ConstContext::Static(_)) => body.steal(),
Some(hir::ConstContext::ConstFn) => body.borrow().clone(),
None => bug!("`mir_for_ctfe` called on non-const {def:?}"),
};
@@ -357,9 +360,7 @@ fn inner_mir_for_ctfe(tcx: TyCtxt<'_>, def: LocalDefId) -> Body<'_> {
/// mir borrowck *before* doing so in order to ensure that borrowck can be run and doesn't
/// end up missing the source MIR due to stealing happening.
fn mir_drops_elaborated_and_const_checked(tcx: TyCtxt<'_>, def: LocalDefId) -> &Steal<Body<'_>> {
- if tcx.sess.opts.unstable_opts.drop_tracking_mir
- && let DefKind::Generator = tcx.def_kind(def)
- {
+ if let DefKind::Generator = tcx.def_kind(def) {
tcx.ensure_with_value().mir_generator_witnesses(def);
}
let mir_borrowck = tcx.mir_borrowck(def);
@@ -480,6 +481,8 @@ fn run_runtime_lowering_passes<'tcx>(tcx: TyCtxt<'tcx>, body: &mut Body<'tcx>) {
let passes: &[&dyn MirPass<'tcx>] = &[
// These next passes must be executed together
&add_call_guards::CriticalCallEdges,
+ &reveal_all::RevealAll, // has to be done before drop elaboration, since we need to drop opaque types, too.
+ &add_subtyping_projections::Subtyper, // calling this after reveal_all ensures that we don't deal with opaque types
&elaborate_drops::ElaborateDrops,
// This will remove extraneous landing pads which are no longer
// necessary as well as well as forcing any call in a non-unwinding
@@ -526,7 +529,6 @@ fn run_optimization_passes<'tcx>(tcx: TyCtxt<'tcx>, body: &mut Body<'tcx>) {
body,
&[
&check_alignment::CheckAlignment,
- &reveal_all::RevealAll, // has to be done before inlining, since inlined code is in RevealAll mode.
&lower_slice_len::LowerSliceLenCalls, // has to be done before inlining, otherwise actual call will be almost always inlined. Also simple, so can just do first
&unreachable_prop::UnreachablePropagation,
&uninhabited_enum_branching::UninhabitedEnumBranching,
@@ -550,6 +552,7 @@ fn run_optimization_passes<'tcx>(tcx: TyCtxt<'tcx>, body: &mut Body<'tcx>) {
// latter pass will leverage the created opportunities.
&separate_const_switch::SeparateConstSwitch,
&const_prop::ConstProp,
+ &gvn::GVN,
&dataflow_const_prop::DataflowConstProp,
//
// Const-prop runs unconditionally, but doesn't mutate the MIR at mir-opt-level=0.
@@ -605,6 +608,11 @@ fn inner_optimized_mir(tcx: TyCtxt<'_>, did: LocalDefId) -> Body<'_> {
let body = tcx.mir_drops_elaborated_and_const_checked(did).steal();
let mut body = remap_mir_for_const_eval_select(tcx, body, hir::Constness::NotConst);
debug!("body: {:#?}", body);
+
+ if body.tainted_by_errors.is_some() {
+ return body;
+ }
+
run_optimization_passes(tcx, &mut body);
body
diff --git a/compiler/rustc_mir_transform/src/lower_intrinsics.rs b/compiler/rustc_mir_transform/src/lower_intrinsics.rs
index fc36c6e41..0d2d764c4 100644
--- a/compiler/rustc_mir_transform/src/lower_intrinsics.rs
+++ b/compiler/rustc_mir_transform/src/lower_intrinsics.rs
@@ -1,11 +1,10 @@
//! Lowers intrinsic calls
-use crate::{errors, MirPass};
+use crate::MirPass;
use rustc_middle::mir::*;
use rustc_middle::ty::GenericArgsRef;
use rustc_middle::ty::{self, Ty, TyCtxt};
use rustc_span::symbol::{sym, Symbol};
-use rustc_span::Span;
use rustc_target::abi::{FieldIdx, VariantIdx};
pub struct LowerIntrinsics;
@@ -33,10 +32,10 @@ impl<'tcx> MirPass<'tcx> for LowerIntrinsics {
source_info: terminator.source_info,
kind: StatementKind::Assign(Box::new((
*destination,
- Rvalue::Use(Operand::Constant(Box::new(Constant {
+ Rvalue::Use(Operand::Constant(Box::new(ConstOperand {
span: terminator.source_info.span,
user_ty: None,
- literal: ConstantKind::zero_sized(tcx.types.unit),
+ const_: Const::zero_sized(tcx.types.unit),
}))),
))),
});
@@ -176,23 +175,22 @@ impl<'tcx> MirPass<'tcx> for LowerIntrinsics {
} else {
span_bug!(terminator.source_info.span, "Only passing a local is supported");
};
+ // Add new statement at the end of the block that does the read, and patch
+ // up the terminator.
+ block.statements.push(Statement {
+ source_info: terminator.source_info,
+ kind: StatementKind::Assign(Box::new((
+ *destination,
+ Rvalue::Use(Operand::Copy(derefed_place)),
+ ))),
+ });
terminator.kind = match *target {
None => {
// No target means this read something uninhabited,
- // so it must be unreachable, and we don't need to
- // preserve the assignment either.
+ // so it must be unreachable.
TerminatorKind::Unreachable
}
- Some(target) => {
- block.statements.push(Statement {
- source_info: terminator.source_info,
- kind: StatementKind::Assign(Box::new((
- *destination,
- Rvalue::Use(Operand::Copy(derefed_place)),
- ))),
- });
- TerminatorKind::Goto { target }
- }
+ Some(target) => TerminatorKind::Goto { target },
}
}
sym::write_via_move => {
@@ -305,9 +303,6 @@ impl<'tcx> MirPass<'tcx> for LowerIntrinsics {
terminator.kind = TerminatorKind::Unreachable;
}
}
- sym::simd_shuffle => {
- validate_simd_shuffle(tcx, args, terminator.source_info.span);
- }
_ => {}
}
}
@@ -326,9 +321,3 @@ fn resolve_rust_intrinsic<'tcx>(
}
None
}
-
-fn validate_simd_shuffle<'tcx>(tcx: TyCtxt<'tcx>, args: &[Operand<'tcx>], span: Span) {
- if !matches!(args[2], Operand::Constant(_)) {
- tcx.sess.emit_err(errors::SimdShuffleLastConst { span });
- }
-}
diff --git a/compiler/rustc_mir_transform/src/match_branches.rs b/compiler/rustc_mir_transform/src/match_branches.rs
index bc29fb8de..3dc627b61 100644
--- a/compiler/rustc_mir_transform/src/match_branches.rs
+++ b/compiler/rustc_mir_transform/src/match_branches.rs
@@ -98,10 +98,10 @@ impl<'tcx> MirPass<'tcx> for MatchBranchSimplification {
StatementKind::Assign(box (lhs_f, Rvalue::Use(Operand::Constant(f_c)))),
StatementKind::Assign(box (lhs_s, Rvalue::Use(Operand::Constant(s_c)))),
) if lhs_f == lhs_s
- && f_c.literal.ty().is_bool()
- && s_c.literal.ty().is_bool()
- && f_c.literal.try_eval_bool(tcx, param_env).is_some()
- && s_c.literal.try_eval_bool(tcx, param_env).is_some() => {}
+ && f_c.const_.ty().is_bool()
+ && s_c.const_.ty().is_bool()
+ && f_c.const_.try_eval_bool(tcx, param_env).is_some()
+ && s_c.const_.try_eval_bool(tcx, param_env).is_some() => {}
// Otherwise we cannot optimize. Try another block.
_ => continue 'outer,
@@ -128,8 +128,8 @@ impl<'tcx> MirPass<'tcx> for MatchBranchSimplification {
StatementKind::Assign(box (_, Rvalue::Use(Operand::Constant(s_c)))),
) => {
// From earlier loop we know that we are dealing with bool constants only:
- let f_b = f_c.literal.try_eval_bool(tcx, param_env).unwrap();
- let s_b = s_c.literal.try_eval_bool(tcx, param_env).unwrap();
+ let f_b = f_c.const_.try_eval_bool(tcx, param_env).unwrap();
+ let s_b = s_c.const_.try_eval_bool(tcx, param_env).unwrap();
if f_b == s_b {
// Same value in both blocks. Use statement as is.
(*f).clone()
diff --git a/compiler/rustc_mir_transform/src/normalize_array_len.rs b/compiler/rustc_mir_transform/src/normalize_array_len.rs
index 6c3b7c58f..d1a4b26a0 100644
--- a/compiler/rustc_mir_transform/src/normalize_array_len.rs
+++ b/compiler/rustc_mir_transform/src/normalize_array_len.rs
@@ -90,10 +90,10 @@ impl<'tcx> MutVisitor<'tcx> for Replacer<'tcx> {
&& let [PlaceElem::Deref] = &place.projection[..]
&& let Some(len) = self.slice_lengths[place.local]
{
- *rvalue = Rvalue::Use(Operand::Constant(Box::new(Constant {
+ *rvalue = Rvalue::Use(Operand::Constant(Box::new(ConstOperand {
span: rustc_span::DUMMY_SP,
user_ty: None,
- literal: ConstantKind::from_const(len, self.tcx),
+ const_: Const::from_ty_const(len, self.tcx),
})));
}
self.super_rvalue(rvalue, loc);
diff --git a/compiler/rustc_mir_transform/src/pass_manager.rs b/compiler/rustc_mir_transform/src/pass_manager.rs
index 057f5fe82..5abb2f3d0 100644
--- a/compiler/rustc_mir_transform/src/pass_manager.rs
+++ b/compiler/rustc_mir_transform/src/pass_manager.rs
@@ -94,6 +94,8 @@ fn run_passes_inner<'tcx>(
let overridden_passes = &tcx.sess.opts.unstable_opts.mir_enable_passes;
trace!(?overridden_passes);
+ let prof_arg = tcx.sess.prof.enabled().then(|| format!("{:?}", body.source.def_id()));
+
if !body.should_skip() {
for pass in passes {
let name = pass.name();
@@ -121,7 +123,14 @@ fn run_passes_inner<'tcx>(
validate_body(tcx, body, format!("before pass {name}"));
}
- tcx.sess.time(name, || pass.run_pass(tcx, body));
+ if let Some(prof_arg) = &prof_arg {
+ tcx.sess
+ .prof
+ .generic_activity_with_arg(pass.profiler_name(), &**prof_arg)
+ .run(|| pass.run_pass(tcx, body));
+ } else {
+ pass.run_pass(tcx, body);
+ }
if dump_enabled {
dump_mir_for_pass(tcx, body, &name, true);
diff --git a/compiler/rustc_mir_transform/src/ref_prop.rs b/compiler/rustc_mir_transform/src/ref_prop.rs
index 49a940b57..67941cf43 100644
--- a/compiler/rustc_mir_transform/src/ref_prop.rs
+++ b/compiler/rustc_mir_transform/src/ref_prop.rs
@@ -108,7 +108,7 @@ enum Value<'tcx> {
}
/// For each local, save the place corresponding to `*local`.
-#[instrument(level = "trace", skip(tcx, body))]
+#[instrument(level = "trace", skip(tcx, body, ssa))]
fn compute_replacement<'tcx>(
tcx: TyCtxt<'tcx>,
body: &Body<'tcx>,
diff --git a/compiler/rustc_mir_transform/src/remove_noop_landing_pads.rs b/compiler/rustc_mir_transform/src/remove_noop_landing_pads.rs
index 4e85c76fb..8c48a6677 100644
--- a/compiler/rustc_mir_transform/src/remove_noop_landing_pads.rs
+++ b/compiler/rustc_mir_transform/src/remove_noop_landing_pads.rs
@@ -63,7 +63,7 @@ impl RemoveNoopLandingPads {
let terminator = body[bb].terminator();
match terminator.kind {
TerminatorKind::Goto { .. }
- | TerminatorKind::Resume
+ | TerminatorKind::UnwindResume
| TerminatorKind::SwitchInt { .. }
| TerminatorKind::FalseEdge { .. }
| TerminatorKind::FalseUnwind { .. } => {
@@ -72,7 +72,7 @@ impl RemoveNoopLandingPads {
TerminatorKind::GeneratorDrop
| TerminatorKind::Yield { .. }
| TerminatorKind::Return
- | TerminatorKind::Terminate
+ | TerminatorKind::UnwindTerminate(_)
| TerminatorKind::Unreachable
| TerminatorKind::Call { .. }
| TerminatorKind::Assert { .. }
@@ -88,7 +88,7 @@ impl RemoveNoopLandingPads {
let has_resume = body
.basic_blocks
.iter_enumerated()
- .any(|(_bb, block)| matches!(block.terminator().kind, TerminatorKind::Resume));
+ .any(|(_bb, block)| matches!(block.terminator().kind, TerminatorKind::UnwindResume));
if !has_resume {
debug!("remove_noop_landing_pads: no resume block in MIR");
return;
diff --git a/compiler/rustc_mir_transform/src/remove_zsts.rs b/compiler/rustc_mir_transform/src/remove_zsts.rs
index 9c6c55b08..a34d4b027 100644
--- a/compiler/rustc_mir_transform/src/remove_zsts.rs
+++ b/compiler/rustc_mir_transform/src/remove_zsts.rs
@@ -1,7 +1,6 @@
//! Removes operations on ZST places, and convert ZST operands to constants.
use crate::MirPass;
-use rustc_middle::mir::interpret::ConstValue;
use rustc_middle::mir::visit::*;
use rustc_middle::mir::*;
use rustc_middle::ty::{self, Ty, TyCtxt};
@@ -63,12 +62,12 @@ impl<'tcx> Replacer<'_, 'tcx> {
layout.is_zst()
}
- fn make_zst(&self, ty: Ty<'tcx>) -> Constant<'tcx> {
+ fn make_zst(&self, ty: Ty<'tcx>) -> ConstOperand<'tcx> {
debug_assert!(self.known_to_be_zst(ty));
- Constant {
+ ConstOperand {
span: rustc_span::DUMMY_SP,
user_ty: None,
- literal: ConstantKind::Val(ConstValue::ZeroSized, ty),
+ const_: Const::Val(ConstValue::ZeroSized, ty),
}
}
}
@@ -87,11 +86,6 @@ impl<'tcx> MutVisitor<'tcx> for Replacer<'_, 'tcx> {
var_debug_info.value = VarDebugInfoContents::Const(self.make_zst(place_ty))
}
}
- VarDebugInfoContents::Composite { ty, fragments: _ } => {
- if self.known_to_be_zst(ty) {
- var_debug_info.value = VarDebugInfoContents::Const(self.make_zst(ty))
- }
- }
}
}
diff --git a/compiler/rustc_mir_transform/src/required_consts.rs b/compiler/rustc_mir_transform/src/required_consts.rs
index 243cb4635..abde6a47e 100644
--- a/compiler/rustc_mir_transform/src/required_consts.rs
+++ b/compiler/rustc_mir_transform/src/required_consts.rs
@@ -1,27 +1,27 @@
use rustc_middle::mir::visit::Visitor;
-use rustc_middle::mir::{Constant, ConstantKind, Location};
+use rustc_middle::mir::{Const, ConstOperand, Location};
use rustc_middle::ty::ConstKind;
pub struct RequiredConstsVisitor<'a, 'tcx> {
- required_consts: &'a mut Vec<Constant<'tcx>>,
+ required_consts: &'a mut Vec<ConstOperand<'tcx>>,
}
impl<'a, 'tcx> RequiredConstsVisitor<'a, 'tcx> {
- pub fn new(required_consts: &'a mut Vec<Constant<'tcx>>) -> Self {
+ pub fn new(required_consts: &'a mut Vec<ConstOperand<'tcx>>) -> Self {
RequiredConstsVisitor { required_consts }
}
}
impl<'tcx> Visitor<'tcx> for RequiredConstsVisitor<'_, 'tcx> {
- fn visit_constant(&mut self, constant: &Constant<'tcx>, _: Location) {
- let literal = constant.literal;
- match literal {
- ConstantKind::Ty(c) => match c.kind() {
+ fn visit_constant(&mut self, constant: &ConstOperand<'tcx>, _: Location) {
+ let const_ = constant.const_;
+ match const_ {
+ Const::Ty(c) => match c.kind() {
ConstKind::Param(_) | ConstKind::Error(_) | ConstKind::Value(_) => {}
_ => bug!("only ConstKind::Param/Value should be encountered here, got {:#?}", c),
},
- ConstantKind::Unevaluated(..) => self.required_consts.push(*constant),
- ConstantKind::Val(..) => {}
+ Const::Unevaluated(..) => self.required_consts.push(*constant),
+ Const::Val(..) => {}
}
}
}
diff --git a/compiler/rustc_mir_transform/src/reveal_all.rs b/compiler/rustc_mir_transform/src/reveal_all.rs
index 23442f8b9..1626cf3c0 100644
--- a/compiler/rustc_mir_transform/src/reveal_all.rs
+++ b/compiler/rustc_mir_transform/src/reveal_all.rs
@@ -8,16 +8,7 @@ use rustc_middle::ty::{self, Ty, TyCtxt};
pub struct RevealAll;
impl<'tcx> MirPass<'tcx> for RevealAll {
- fn is_enabled(&self, sess: &rustc_session::Session) -> bool {
- sess.mir_opt_level() >= 3 || super::inline::Inline.is_enabled(sess)
- }
-
fn run_pass(&self, tcx: TyCtxt<'tcx>, body: &mut Body<'tcx>) {
- // Do not apply this transformation to generators.
- if body.generator.is_some() {
- return;
- }
-
let param_env = tcx.param_env_reveal_all_normalized(body.source.def_id());
RevealAllVisitor { tcx, param_env }.visit_body_preserves_cfg(body);
}
@@ -35,13 +26,38 @@ impl<'tcx> MutVisitor<'tcx> for RevealAllVisitor<'tcx> {
}
#[inline]
- fn visit_constant(&mut self, constant: &mut Constant<'tcx>, _: Location) {
+ fn visit_place(
+ &mut self,
+ place: &mut Place<'tcx>,
+ _context: PlaceContext,
+ _location: Location,
+ ) {
+ // Performance optimization: don't reintern if there is no `OpaqueCast` to remove.
+ if place.projection.iter().all(|elem| !matches!(elem, ProjectionElem::OpaqueCast(_))) {
+ return;
+ }
+ // `OpaqueCast` projections are only needed if there are opaque types on which projections are performed.
+ // After the `RevealAll` pass, all opaque types are replaced with their hidden types, so we don't need these
+ // projections anymore.
+ place.projection = self.tcx.mk_place_elems(
+ &place
+ .projection
+ .into_iter()
+ .filter(|elem| !matches!(elem, ProjectionElem::OpaqueCast(_)))
+ .collect::<Vec<_>>(),
+ );
+ self.super_place(place, _context, _location);
+ }
+
+ #[inline]
+ fn visit_constant(&mut self, constant: &mut ConstOperand<'tcx>, location: Location) {
// We have to use `try_normalize_erasing_regions` here, since it's
// possible that we visit impossible-to-satisfy where clauses here,
// see #91745
- if let Ok(c) = self.tcx.try_normalize_erasing_regions(self.param_env, constant.literal) {
- constant.literal = c;
+ if let Ok(c) = self.tcx.try_normalize_erasing_regions(self.param_env, constant.const_) {
+ constant.const_ = c;
}
+ self.super_constant(constant, location);
}
#[inline]
diff --git a/compiler/rustc_mir_transform/src/separate_const_switch.rs b/compiler/rustc_mir_transform/src/separate_const_switch.rs
index 1d8e54cdc..e1e4acccc 100644
--- a/compiler/rustc_mir_transform/src/separate_const_switch.rs
+++ b/compiler/rustc_mir_transform/src/separate_const_switch.rs
@@ -108,13 +108,13 @@ pub fn separate_const_switch(body: &mut Body<'_>) -> usize {
}
// The following terminators are not allowed
- TerminatorKind::Resume
+ TerminatorKind::UnwindResume
| TerminatorKind::Drop { .. }
| TerminatorKind::Call { .. }
| TerminatorKind::Assert { .. }
| TerminatorKind::FalseUnwind { .. }
| TerminatorKind::Yield { .. }
- | TerminatorKind::Terminate
+ | TerminatorKind::UnwindTerminate(_)
| TerminatorKind::Return
| TerminatorKind::Unreachable
| TerminatorKind::InlineAsm { .. }
@@ -165,8 +165,8 @@ pub fn separate_const_switch(body: &mut Body<'_>) -> usize {
});
}
- TerminatorKind::Resume
- | TerminatorKind::Terminate
+ TerminatorKind::UnwindResume
+ | TerminatorKind::UnwindTerminate(_)
| TerminatorKind::Return
| TerminatorKind::Unreachable
| TerminatorKind::GeneratorDrop
diff --git a/compiler/rustc_mir_transform/src/shim.rs b/compiler/rustc_mir_transform/src/shim.rs
index 223dc59c6..e9895d97d 100644
--- a/compiler/rustc_mir_transform/src/shim.rs
+++ b/compiler/rustc_mir_transform/src/shim.rs
@@ -99,7 +99,11 @@ fn make_shim<'tcx>(tcx: TyCtxt<'tcx>, instance: ty::InstanceDef<'tcx>) -> Body<'
};
debug!("make_shim({:?}) = untransformed {:?}", instance, result);
- pm::run_passes(
+ // We don't validate MIR here because the shims may generate code that's
+ // only valid in a reveal-all param-env. However, initial validation is
+ // performed in the MirBuilt phase, which uses a user-facing param-env;
+ // validating the shims there would cause errors when TAITs are involved.
+ pm::run_passes_no_validate(
tcx,
&mut result,
&[
@@ -493,10 +497,10 @@ impl<'tcx> CloneShimBuilder<'tcx> {
// `func == Clone::clone(&ty) -> ty`
let func_ty = Ty::new_fn_def(tcx, self.def_id, [ty]);
- let func = Operand::Constant(Box::new(Constant {
+ let func = Operand::Constant(Box::new(ConstOperand {
span: self.span,
user_ty: None,
- literal: ConstantKind::zero_sized(func_ty),
+ const_: Const::zero_sized(func_ty),
}));
let ref_loc = self.make_place(
@@ -566,10 +570,10 @@ impl<'tcx> CloneShimBuilder<'tcx> {
TerminatorKind::Drop {
place: dest_field,
target: unwind,
- unwind: UnwindAction::Terminate,
+ unwind: UnwindAction::Terminate(UnwindTerminateReason::InCleanup),
replace: false,
},
- true,
+ /* is_cleanup */ true,
);
unwind = next_unwind;
}
@@ -583,7 +587,7 @@ impl<'tcx> CloneShimBuilder<'tcx> {
I: IntoIterator<Item = Ty<'tcx>>,
{
self.block(vec![], TerminatorKind::Goto { target: self.block_index_offset(3) }, false);
- let unwind = self.block(vec![], TerminatorKind::Resume, true);
+ let unwind = self.block(vec![], TerminatorKind::UnwindResume, true);
let target = self.block(vec![], TerminatorKind::Return, false);
let _final_cleanup_block = self.clone_fields(dest, src, target, unwind, tys);
@@ -597,7 +601,7 @@ impl<'tcx> CloneShimBuilder<'tcx> {
args: GeneratorArgs<'tcx>,
) {
self.block(vec![], TerminatorKind::Goto { target: self.block_index_offset(3) }, false);
- let unwind = self.block(vec![], TerminatorKind::Resume, true);
+ let unwind = self.block(vec![], TerminatorKind::UnwindResume, true);
// This will get overwritten with a switch once we know the target blocks
let switch = self.block(vec![], TerminatorKind::Unreachable, false);
let unwind = self.clone_fields(dest, src, switch, unwind, args.upvar_tys());
@@ -760,10 +764,10 @@ fn build_call_shim<'tcx>(
CallKind::Direct(def_id) => {
let ty = tcx.type_of(def_id).instantiate_identity();
(
- Operand::Constant(Box::new(Constant {
+ Operand::Constant(Box::new(ConstOperand {
span,
user_ty: None,
- literal: ConstantKind::zero_sized(ty),
+ const_: Const::zero_sized(ty),
})),
rcvr.into_iter().collect::<Vec<_>>(),
)
@@ -847,14 +851,14 @@ fn build_call_shim<'tcx>(
TerminatorKind::Drop {
place: rcvr_place(),
target: BasicBlock::new(4),
- unwind: UnwindAction::Terminate,
+ unwind: UnwindAction::Terminate(UnwindTerminateReason::InCleanup),
replace: false,
},
- true,
+ /* is_cleanup */ true,
);
// BB #4 - resume
- block(&mut blocks, vec![], TerminatorKind::Resume, true);
+ block(&mut blocks, vec![], TerminatorKind::UnwindResume, true);
}
let mut body =
diff --git a/compiler/rustc_mir_transform/src/simplify.rs b/compiler/rustc_mir_transform/src/simplify.rs
index b7a51cfd6..2795cf157 100644
--- a/compiler/rustc_mir_transform/src/simplify.rs
+++ b/compiler/rustc_mir_transform/src/simplify.rs
@@ -29,6 +29,7 @@
use crate::MirPass;
use rustc_data_structures::fx::{FxHashSet, FxIndexSet};
+use rustc_index::bit_set::BitSet;
use rustc_index::{Idx, IndexSlice, IndexVec};
use rustc_middle::mir::coverage::*;
use rustc_middle::mir::visit::{MutVisitor, MutatingUseContext, PlaceContext, Visitor};
@@ -345,24 +346,22 @@ pub fn remove_dead_blocks<'tcx>(tcx: TyCtxt<'tcx>, body: &mut Body<'tcx>) {
let basic_blocks = body.basic_blocks.as_mut();
let source_scopes = &body.source_scopes;
- let mut replacements: Vec<_> = (0..num_blocks).map(BasicBlock::new).collect();
- let mut used_blocks = 0;
- for alive_index in reachable.iter() {
- let alive_index = alive_index.index();
- replacements[alive_index] = BasicBlock::new(used_blocks);
- if alive_index != used_blocks {
- // Swap the next alive block data with the current available slot. Since
- // alive_index is non-decreasing this is a valid operation.
- basic_blocks.raw.swap(alive_index, used_blocks);
- }
- used_blocks += 1;
- }
-
if tcx.sess.instrument_coverage() {
- save_unreachable_coverage(basic_blocks, source_scopes, used_blocks);
+ save_unreachable_coverage(basic_blocks, source_scopes, &reachable);
}
- basic_blocks.raw.truncate(used_blocks);
+ let mut replacements: Vec<_> = (0..num_blocks).map(BasicBlock::new).collect();
+ let mut orig_index = 0;
+ let mut used_index = 0;
+ basic_blocks.raw.retain(|_| {
+ let keep = reachable.contains(BasicBlock::new(orig_index));
+ if keep {
+ replacements[orig_index] = BasicBlock::new(used_index);
+ used_index += 1;
+ }
+ orig_index += 1;
+ keep
+ });
for block in basic_blocks {
for target in block.terminator_mut().successors_mut() {
@@ -404,11 +403,12 @@ pub fn remove_dead_blocks<'tcx>(tcx: TyCtxt<'tcx>, body: &mut Body<'tcx>) {
fn save_unreachable_coverage(
basic_blocks: &mut IndexSlice<BasicBlock, BasicBlockData<'_>>,
source_scopes: &IndexSlice<SourceScope, SourceScopeData<'_>>,
- first_dead_block: usize,
+ reachable: &BitSet<BasicBlock>,
) {
// Identify instances that still have some live coverage counters left.
let mut live = FxHashSet::default();
- for basic_block in &basic_blocks.raw[0..first_dead_block] {
+ for bb in reachable.iter() {
+ let basic_block = &basic_blocks[bb];
for statement in &basic_block.statements {
let StatementKind::Coverage(coverage) = &statement.kind else { continue };
let CoverageKind::Counter { .. } = coverage.kind else { continue };
@@ -417,7 +417,8 @@ fn save_unreachable_coverage(
}
}
- for block in &mut basic_blocks.raw[..first_dead_block] {
+ for bb in reachable.iter() {
+ let block = &mut basic_blocks[bb];
for statement in &mut block.statements {
let StatementKind::Coverage(_) = &statement.kind else { continue };
let instance = statement.source_info.scope.inlined_instance(source_scopes);
@@ -433,7 +434,11 @@ fn save_unreachable_coverage(
// Retain coverage for instances that still have some live counters left.
let mut retained_coverage = Vec::new();
- for dead_block in &basic_blocks.raw[first_dead_block..] {
+ for dead_block in basic_blocks.indices() {
+ if reachable.contains(dead_block) {
+ continue;
+ }
+ let dead_block = &basic_blocks[dead_block];
for statement in &dead_block.statements {
let StatementKind::Coverage(coverage) = &statement.kind else { continue };
let Some(code_region) = &coverage.code_region else { continue };
diff --git a/compiler/rustc_mir_transform/src/simplify_branches.rs b/compiler/rustc_mir_transform/src/simplify_branches.rs
index 1ff488169..b508cd1c9 100644
--- a/compiler/rustc_mir_transform/src/simplify_branches.rs
+++ b/compiler/rustc_mir_transform/src/simplify_branches.rs
@@ -23,7 +23,7 @@ impl<'tcx> MirPass<'tcx> for SimplifyConstCondition {
TerminatorKind::SwitchInt {
discr: Operand::Constant(ref c), ref targets, ..
} => {
- let constant = c.literal.try_eval_bits(tcx, param_env, c.ty());
+ let constant = c.const_.try_eval_bits(tcx, param_env);
if let Some(constant) = constant {
let target = targets.target_for_value(constant);
TerminatorKind::Goto { target }
@@ -33,7 +33,7 @@ impl<'tcx> MirPass<'tcx> for SimplifyConstCondition {
}
TerminatorKind::Assert {
target, cond: Operand::Constant(ref c), expected, ..
- } => match c.literal.try_eval_bool(tcx, param_env) {
+ } => match c.const_.try_eval_bool(tcx, param_env) {
Some(v) if v == expected => TerminatorKind::Goto { target },
_ => continue,
},
diff --git a/compiler/rustc_mir_transform/src/simplify_comparison_integral.rs b/compiler/rustc_mir_transform/src/simplify_comparison_integral.rs
index 113ca2fc5..1a8cfc411 100644
--- a/compiler/rustc_mir_transform/src/simplify_comparison_integral.rs
+++ b/compiler/rustc_mir_transform/src/simplify_comparison_integral.rs
@@ -206,12 +206,12 @@ fn find_branch_value_info<'tcx>(
match (left, right) {
(Constant(branch_value), Copy(to_switch_on) | Move(to_switch_on))
| (Copy(to_switch_on) | Move(to_switch_on), Constant(branch_value)) => {
- let branch_value_ty = branch_value.literal.ty();
+ let branch_value_ty = branch_value.const_.ty();
// we only want to apply this optimization if we are matching on integrals (and chars), as it is not possible to switch on floats
if !branch_value_ty.is_integral() && !branch_value_ty.is_char() {
return None;
};
- let branch_value_scalar = branch_value.literal.try_to_scalar()?;
+ let branch_value_scalar = branch_value.const_.try_to_scalar()?;
Some((branch_value_scalar, branch_value_ty, *to_switch_on))
}
_ => None,
diff --git a/compiler/rustc_mir_transform/src/sroa.rs b/compiler/rustc_mir_transform/src/sroa.rs
index e66ae8ff8..c21b1724c 100644
--- a/compiler/rustc_mir_transform/src/sroa.rs
+++ b/compiler/rustc_mir_transform/src/sroa.rs
@@ -1,4 +1,5 @@
use crate::MirPass;
+use rustc_data_structures::flat_map_in_place::FlatMapInPlace;
use rustc_index::bit_set::{BitSet, GrowableBitSet};
use rustc_index::IndexVec;
use rustc_middle::mir::patch::MirPatch;
@@ -147,7 +148,7 @@ fn escaping_locals<'tcx>(
}
// We ignore anything that happens in debuginfo, since we expand it using
- // `VarDebugInfoContents::Composite`.
+ // `VarDebugInfoFragment`.
fn visit_var_debug_info(&mut self, _: &VarDebugInfo<'tcx>) {}
}
}
@@ -246,9 +247,7 @@ fn replace_flattened_locals<'tcx>(
for (index, annotation) in body.user_type_annotations.iter_enumerated_mut() {
visitor.visit_user_type_annotation(index, annotation);
}
- for var_debug_info in &mut body.var_debug_info {
- visitor.visit_var_debug_info(var_debug_info);
- }
+ visitor.expand_var_debug_info(&mut body.var_debug_info);
let ReplacementVisitor { patch, all_dead_locals, .. } = visitor;
patch.apply(body);
all_dead_locals
@@ -256,7 +255,7 @@ fn replace_flattened_locals<'tcx>(
struct ReplacementVisitor<'tcx, 'll> {
tcx: TyCtxt<'tcx>,
- /// This is only used to compute the type for `VarDebugInfoContents::Composite`.
+ /// This is only used to compute the type for `VarDebugInfoFragment`.
local_decls: &'ll LocalDecls<'tcx>,
/// Work to do.
replacements: &'ll ReplacementMap<'tcx>,
@@ -266,16 +265,38 @@ struct ReplacementVisitor<'tcx, 'll> {
}
impl<'tcx> ReplacementVisitor<'tcx, '_> {
- fn gather_debug_info_fragments(&self, local: Local) -> Option<Vec<VarDebugInfoFragment<'tcx>>> {
- let mut fragments = Vec::new();
- let parts = self.replacements.place_fragments(local.into())?;
- for (field, ty, replacement_local) in parts {
- fragments.push(VarDebugInfoFragment {
- projection: vec![PlaceElem::Field(field, ty)],
- contents: Place::from(replacement_local),
- });
- }
- Some(fragments)
+ #[instrument(level = "trace", skip(self))]
+ fn expand_var_debug_info(&mut self, var_debug_info: &mut Vec<VarDebugInfo<'tcx>>) {
+ var_debug_info.flat_map_in_place(|mut var_debug_info| {
+ let place = match var_debug_info.value {
+ VarDebugInfoContents::Const(_) => return vec![var_debug_info],
+ VarDebugInfoContents::Place(ref mut place) => place,
+ };
+
+ if let Some(repl) = self.replacements.replace_place(self.tcx, place.as_ref()) {
+ *place = repl;
+ return vec![var_debug_info];
+ }
+
+ let Some(parts) = self.replacements.place_fragments(*place) else {
+ return vec![var_debug_info];
+ };
+
+ let ty = place.ty(self.local_decls, self.tcx).ty;
+
+ parts
+ .map(|(field, field_ty, replacement_local)| {
+ let mut var_debug_info = var_debug_info.clone();
+ let composite = var_debug_info.composite.get_or_insert_with(|| {
+ Box::new(VarDebugInfoFragment { ty, projection: Vec::new() })
+ });
+ composite.projection.push(PlaceElem::Field(field, field_ty));
+
+ var_debug_info.value = VarDebugInfoContents::Place(replacement_local.into());
+ var_debug_info
+ })
+ .collect()
+ });
}
}
@@ -422,48 +443,6 @@ impl<'tcx, 'll> MutVisitor<'tcx> for ReplacementVisitor<'tcx, 'll> {
self.super_statement(statement, location)
}
- #[instrument(level = "trace", skip(self))]
- fn visit_var_debug_info(&mut self, var_debug_info: &mut VarDebugInfo<'tcx>) {
- match &mut var_debug_info.value {
- VarDebugInfoContents::Place(ref mut place) => {
- if let Some(repl) = self.replacements.replace_place(self.tcx, place.as_ref()) {
- *place = repl;
- } else if let Some(local) = place.as_local()
- && let Some(fragments) = self.gather_debug_info_fragments(local)
- {
- let ty = place.ty(self.local_decls, self.tcx).ty;
- var_debug_info.value = VarDebugInfoContents::Composite { ty, fragments };
- }
- }
- VarDebugInfoContents::Composite { ty: _, ref mut fragments } => {
- let mut new_fragments = Vec::new();
- debug!(?fragments);
- fragments.retain_mut(|fragment| {
- if let Some(repl) =
- self.replacements.replace_place(self.tcx, fragment.contents.as_ref())
- {
- fragment.contents = repl;
- true
- } else if let Some(local) = fragment.contents.as_local()
- && let Some(frg) = self.gather_debug_info_fragments(local)
- {
- new_fragments.extend(frg.into_iter().map(|mut f| {
- f.projection.splice(0..0, fragment.projection.iter().copied());
- f
- }));
- false
- } else {
- true
- }
- });
- debug!(?fragments);
- debug!(?new_fragments);
- fragments.extend(new_fragments);
- }
- VarDebugInfoContents::Const(_) => {}
- }
- }
-
fn visit_local(&mut self, local: &mut Local, _: PlaceContext, _: Location) {
assert!(!self.all_dead_locals.contains(*local));
}
diff --git a/compiler/rustc_mir_transform/src/ssa.rs b/compiler/rustc_mir_transform/src/ssa.rs
index 04bc461c8..43fc1b7b9 100644
--- a/compiler/rustc_mir_transform/src/ssa.rs
+++ b/compiler/rustc_mir_transform/src/ssa.rs
@@ -13,7 +13,6 @@ use rustc_middle::middle::resolve_bound_vars::Set1;
use rustc_middle::mir::visit::*;
use rustc_middle::mir::*;
-#[derive(Debug)]
pub struct SsaLocals {
/// Assignments to each local. This defines whether the local is SSA.
assignments: IndexVec<Local, Set1<LocationExtended>>,
@@ -79,14 +78,10 @@ impl SsaLocals {
visitor.assignments[local] = Set1::One(LocationExtended::Arg);
}
- if body.basic_blocks.len() > 2 {
- for (bb, data) in traversal::reverse_postorder(body) {
- visitor.visit_basic_block_data(bb, data);
- }
- } else {
- for (bb, data) in body.basic_blocks.iter_enumerated() {
- visitor.visit_basic_block_data(bb, data);
- }
+ // For SSA assignments, a RPO visit will see the assignment before it sees any use.
+ // We only visit reachable nodes: computing `dominates` on an unreachable node ICEs.
+ for (bb, data) in traversal::reverse_postorder(body) {
+ visitor.visit_basic_block_data(bb, data);
}
for var_debug_info in &body.var_debug_info {
@@ -129,6 +124,25 @@ impl SsaLocals {
self.direct_uses[local]
}
+ pub fn assignment_dominates(
+ &self,
+ dominators: &Dominators<BasicBlock>,
+ local: Local,
+ location: Location,
+ ) -> bool {
+ match self.assignments[local] {
+ Set1::One(LocationExtended::Arg) => true,
+ Set1::One(LocationExtended::Plain(ass)) => {
+ if ass.block == location.block {
+ ass.statement_index < location.statement_index
+ } else {
+ dominators.dominates(ass.block, location.block)
+ }
+ }
+ _ => false,
+ }
+ }
+
pub fn assignments<'a, 'tcx>(
&'a self,
body: &'a Body<'tcx>,
@@ -146,6 +160,24 @@ impl SsaLocals {
})
}
+ pub fn for_each_assignment_mut<'tcx>(
+ &self,
+ basic_blocks: &mut BasicBlocks<'tcx>,
+ mut f: impl FnMut(Local, &mut Rvalue<'tcx>, Location),
+ ) {
+ for &local in &self.assignment_order {
+ if let Set1::One(LocationExtended::Plain(loc)) = self.assignments[local] {
+ // `loc` must point to a direct assignment to `local`.
+ let bbs = basic_blocks.as_mut_preserves_cfg();
+ let bb = &mut bbs[loc.block];
+ let stmt = &mut bb.statements[loc.statement_index];
+ let StatementKind::Assign(box (target, ref mut rvalue)) = stmt.kind else { bug!() };
+ assert_eq!(target.as_local(), Some(local));
+ f(local, rvalue, loc)
+ }
+ }
+ }
+
/// Compute the equivalence classes for locals, based on copy statements.
///
/// The returned vector maps each local to the one it copies. In the following case:
@@ -215,7 +247,7 @@ impl<'tcx> Visitor<'tcx> for SsaVisitor<'_> {
// so we have to remove them too.
PlaceContext::NonMutatingUse(
NonMutatingUseContext::SharedBorrow
- | NonMutatingUseContext::ShallowBorrow
+ | NonMutatingUseContext::FakeBorrow
| NonMutatingUseContext::AddressOf,
)
| PlaceContext::MutatingUse(_) => {
diff --git a/compiler/rustc_mir_transform/src/unreachable_prop.rs b/compiler/rustc_mir_transform/src/unreachable_prop.rs
index bd1724bf8..0b9311a20 100644
--- a/compiler/rustc_mir_transform/src/unreachable_prop.rs
+++ b/compiler/rustc_mir_transform/src/unreachable_prop.rs
@@ -13,7 +13,11 @@ pub struct UnreachablePropagation;
impl MirPass<'_> for UnreachablePropagation {
fn is_enabled(&self, sess: &rustc_session::Session) -> bool {
// Enable only under -Zmir-opt-level=2 as this can make programs less debuggable.
- sess.mir_opt_level() >= 2
+
+ // FIXME(#116171) Coverage gets confused by MIR passes that can remove all
+ // coverage statements from an instrumented function. This pass can be
+ // re-enabled when coverage codegen is robust against that happening.
+ sess.mir_opt_level() >= 2 && !sess.instrument_coverage()
}
fn run_pass<'tcx>(&self, tcx: TyCtxt<'tcx>, body: &mut Body<'tcx>) {