author     Daniel Baumann <daniel.baumann@progress-linux.org>   2024-04-17 12:20:29 +0000
committer  Daniel Baumann <daniel.baumann@progress-linux.org>   2024-04-17 12:20:29 +0000
commit     631cd5845e8de329d0e227aaa707d7ea228b8f8f (patch)
tree       a1b87c8f8cad01cf18f7c5f57a08f102771ed303 /compiler/rustc_mir_transform
parent     Adding debian version 1.69.0+dfsg1-1. (diff)
Merging upstream version 1.70.0+dfsg1.
Signed-off-by: Daniel Baumann <daniel.baumann@progress-linux.org>
Diffstat (limited to 'compiler/rustc_mir_transform')
43 files changed, 1383 insertions, 913 deletions
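The largest single addition in the diff below is the new `check_alignment.rs` pass, which only runs when debug assertions are enabled and lowers a pointer-alignment check into MIR one statement at a time (take `AlignOf` of the pointee type, subtract one to form a mask, `BitAnd` it with the pointer's address, and assert that the result is zero). As a plain-Rust reference for what that chain of MIR statements computes — a sketch only; the `is_aligned_for` helper below is illustrative and not part of the pass or of rustc:

```rust
use std::mem;

/// Illustrative helper (not rustc API): a pointer is suitably aligned for `T`
/// iff `addr & (align_of::<T>() - 1) == 0`, which is the AlignOf / Sub /
/// BitAnd / Eq sequence the pass inserts ahead of a dereference.
fn is_aligned_for<T>(ptr: *const T) -> bool {
    // Equivalent of the pass's cast-to-usize step (`ptr.addr()`).
    let addr = ptr as usize;
    // Alignments are powers of two, so `align - 1` masks the low-order bits.
    let mask = mem::align_of::<T>() - 1;
    // The dereference is aligned only if all masked bits are zero.
    addr & mask == 0
}

fn main() {
    let x: u32 = 7;
    let p = &x as *const u32;
    assert!(is_aligned_for::<u32>(p));

    // One byte past a 4-byte-aligned address can never be 4-byte aligned.
    let q = unsafe { p.cast::<u8>().add(1) }.cast::<u32>();
    assert!(!is_aligned_for::<u32>(q));
    println!("alignment check sketch passed");
}
```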
diff --git a/compiler/rustc_mir_transform/src/abort_unwinding_calls.rs b/compiler/rustc_mir_transform/src/abort_unwinding_calls.rs
index 9b4b72070..5aed89139 100644
--- a/compiler/rustc_mir_transform/src/abort_unwinding_calls.rs
+++ b/compiler/rustc_mir_transform/src/abort_unwinding_calls.rs
@@ -34,11 +34,6 @@ impl<'tcx> MirPass<'tcx> for AbortUnwindingCalls {
             return;
         }

-        // This pass only runs on functions which themselves cannot unwind,
-        // forcibly changing the body of the function to structurally provide
-        // this guarantee by aborting on an unwind. If this function can unwind,
-        // then there's nothing to do because it already should work correctly.
-        //
         // Here we test for this function itself whether its ABI allows
         // unwinding or not.
         let body_ty = tcx.type_of(def_id).skip_binder();
@@ -74,7 +69,7 @@ impl<'tcx> MirPass<'tcx> for AbortUnwindingCalls {
                     };
                     layout::fn_can_unwind(tcx, fn_def_id, sig.abi())
                 }
-                TerminatorKind::Drop { .. } | TerminatorKind::DropAndReplace { .. } => {
+                TerminatorKind::Drop { .. } => {
                     tcx.sess.opts.unstable_opts.panic_in_drop == PanicStrategy::Unwind
                         && layout::fn_can_unwind(tcx, None, Abi::Rust)
                 }
@@ -107,31 +102,14 @@ impl<'tcx> MirPass<'tcx> for AbortUnwindingCalls {
             }
         }

-        // For call instructions which need to be terminated, we insert a
-        // singular basic block which simply terminates, and then configure the
-        // `cleanup` attribute for all calls we found to this basic block we
-        // insert which means that any unwinding that happens in the functions
-        // will force an abort of the process.
-        if !calls_to_terminate.is_empty() {
-            let bb = BasicBlockData {
-                statements: Vec::new(),
-                is_cleanup: true,
-                terminator: Some(Terminator {
-                    source_info: SourceInfo::outermost(body.span),
-                    kind: TerminatorKind::Abort,
-                }),
-            };
-            let abort_bb = body.basic_blocks_mut().push(bb);
-
-            for bb in calls_to_terminate {
-                let cleanup = body.basic_blocks_mut()[bb].terminator_mut().unwind_mut().unwrap();
-                *cleanup = Some(abort_bb);
-            }
+        for id in calls_to_terminate {
+            let cleanup = body.basic_blocks_mut()[id].terminator_mut().unwind_mut().unwrap();
+            *cleanup = UnwindAction::Terminate;
+        }

         for id in cleanups_to_remove {
             let cleanup = body.basic_blocks_mut()[id].terminator_mut().unwind_mut().unwrap();
-            *cleanup = None;
+            *cleanup = UnwindAction::Unreachable;
         }

         // We may have invalidated some `cleanup` blocks so clean those up now.
diff --git a/compiler/rustc_mir_transform/src/add_call_guards.rs b/compiler/rustc_mir_transform/src/add_call_guards.rs
index 30966d22e..e1e354efa 100644
--- a/compiler/rustc_mir_transform/src/add_call_guards.rs
+++ b/compiler/rustc_mir_transform/src/add_call_guards.rs
@@ -50,10 +50,11 @@ impl AddCallGuards {
         for block in body.basic_blocks_mut() {
             match block.terminator {
                 Some(Terminator {
-                    kind: TerminatorKind::Call { target: Some(ref mut destination), cleanup, .. },
+                    kind: TerminatorKind::Call { target: Some(ref mut destination), unwind, .. },
                     source_info,
                 }) if pred_count[*destination] > 1
-                    && (cleanup.is_some() || self == &AllCallEdges) =>
+                    && (matches!(unwind, UnwindAction::Cleanup(_) | UnwindAction::Terminate)
+                        || self == &AllCallEdges) =>
                 {
                     // It's a critical edge, break it
                     let call_guard = BasicBlockData {
diff --git a/compiler/rustc_mir_transform/src/add_moves_for_packed_drops.rs b/compiler/rustc_mir_transform/src/add_moves_for_packed_drops.rs
index 9b2260f68..896fcd9cd 100644
--- a/compiler/rustc_mir_transform/src/add_moves_for_packed_drops.rs
+++ b/compiler/rustc_mir_transform/src/add_moves_for_packed_drops.rs
@@ -64,9 +64,6 @@ fn add_moves_for_packed_drops_patch<'tcx>(tcx: TyCtxt<'tcx>, body: &Body<'tcx>)
             {
                 add_move_for_packed_drop(tcx, body, &mut patch, terminator, loc, data.is_cleanup);
             }
-            TerminatorKind::DropAndReplace { .. } => {
-                span_bug!(terminator.source_info.span, "replace in AddMovesForPackedDrops");
-            }
             _ => {}
         }
     }
diff --git a/compiler/rustc_mir_transform/src/add_retag.rs b/compiler/rustc_mir_transform/src/add_retag.rs
index 7d2146214..916f2904d 100644
--- a/compiler/rustc_mir_transform/src/add_retag.rs
+++ b/compiler/rustc_mir_transform/src/add_retag.rs
@@ -100,7 +100,7 @@ impl<'tcx> MirPass<'tcx> for AddRetag {
                     }
                     // `Drop` is also a call, but it doesn't return anything so we are good.
-                    TerminatorKind::Drop { .. } | TerminatorKind::DropAndReplace { .. } => None,
+                    TerminatorKind::Drop { .. } => None,
                     // Not a block ending in a Call -> ignore.
                     _ => None,
                 }
diff --git a/compiler/rustc_mir_transform/src/check_alignment.rs b/compiler/rustc_mir_transform/src/check_alignment.rs
new file mode 100644
index 000000000..9311666c9
--- /dev/null
+++ b/compiler/rustc_mir_transform/src/check_alignment.rs
@@ -0,0 +1,242 @@
+use crate::MirPass;
+use rustc_hir::def_id::DefId;
+use rustc_hir::lang_items::LangItem;
+use rustc_index::vec::IndexVec;
+use rustc_middle::mir::*;
+use rustc_middle::mir::{
+    interpret::{ConstValue, Scalar},
+    visit::{PlaceContext, Visitor},
+};
+use rustc_middle::ty::{Ty, TyCtxt, TypeAndMut};
+use rustc_session::Session;
+
+pub struct CheckAlignment;
+
+impl<'tcx> MirPass<'tcx> for CheckAlignment {
+    fn is_enabled(&self, sess: &Session) -> bool {
+        sess.opts.debug_assertions
+    }
+
+    fn run_pass(&self, tcx: TyCtxt<'tcx>, body: &mut Body<'tcx>) {
+        // This pass emits new panics. If for whatever reason we do not have a panic
+        // implementation, running this pass may cause otherwise-valid code to not compile.
+ if tcx.lang_items().get(LangItem::PanicImpl).is_none() { + return; + } + + let basic_blocks = body.basic_blocks.as_mut(); + let local_decls = &mut body.local_decls; + + for block in (0..basic_blocks.len()).rev() { + let block = block.into(); + for statement_index in (0..basic_blocks[block].statements.len()).rev() { + let location = Location { block, statement_index }; + let statement = &basic_blocks[block].statements[statement_index]; + let source_info = statement.source_info; + + let mut finder = PointerFinder { + local_decls, + tcx, + pointers: Vec::new(), + def_id: body.source.def_id(), + }; + for (pointer, pointee_ty) in finder.find_pointers(statement) { + debug!("Inserting alignment check for {:?}", pointer.ty(&*local_decls, tcx).ty); + + let new_block = split_block(basic_blocks, location); + insert_alignment_check( + tcx, + local_decls, + &mut basic_blocks[block], + pointer, + pointee_ty, + source_info, + new_block, + ); + } + } + } + } +} + +impl<'tcx, 'a> PointerFinder<'tcx, 'a> { + fn find_pointers(&mut self, statement: &Statement<'tcx>) -> Vec<(Place<'tcx>, Ty<'tcx>)> { + self.pointers.clear(); + self.visit_statement(statement, Location::START); + core::mem::take(&mut self.pointers) + } +} + +struct PointerFinder<'tcx, 'a> { + local_decls: &'a mut LocalDecls<'tcx>, + tcx: TyCtxt<'tcx>, + def_id: DefId, + pointers: Vec<(Place<'tcx>, Ty<'tcx>)>, +} + +impl<'tcx, 'a> Visitor<'tcx> for PointerFinder<'tcx, 'a> { + fn visit_rvalue(&mut self, rvalue: &Rvalue<'tcx>, location: Location) { + if let Rvalue::AddressOf(..) = rvalue { + // Ignore dereferences inside of an AddressOf + return; + } + self.super_rvalue(rvalue, location); + } + + fn visit_place(&mut self, place: &Place<'tcx>, context: PlaceContext, _location: Location) { + if let PlaceContext::NonUse(_) = context { + return; + } + if !place.is_indirect() { + return; + } + + let pointer = Place::from(place.local); + let pointer_ty = pointer.ty(&*self.local_decls, self.tcx).ty; + + // We only want to check unsafe pointers + if !pointer_ty.is_unsafe_ptr() { + trace!("Indirect, but not an unsafe ptr, not checking {:?}", pointer_ty); + return; + } + + let Some(pointee) = pointer_ty.builtin_deref(true) else { + debug!("Indirect but no builtin deref: {:?}", pointer_ty); + return; + }; + let mut pointee_ty = pointee.ty; + if pointee_ty.is_array() || pointee_ty.is_slice() || pointee_ty.is_str() { + pointee_ty = pointee_ty.sequence_element_type(self.tcx); + } + + if !pointee_ty.is_sized(self.tcx, self.tcx.param_env_reveal_all_normalized(self.def_id)) { + debug!("Unsafe pointer, but unsized: {:?}", pointer_ty); + return; + } + + if [self.tcx.types.bool, self.tcx.types.i8, self.tcx.types.u8, self.tcx.types.str_] + .contains(&pointee_ty) + { + debug!("Trivially aligned pointee type: {:?}", pointer_ty); + return; + } + + self.pointers.push((pointer, pointee_ty)) + } +} + +fn split_block( + basic_blocks: &mut IndexVec<BasicBlock, BasicBlockData<'_>>, + location: Location, +) -> BasicBlock { + let block_data = &mut basic_blocks[location.block]; + + // Drain every statement after this one and move the current terminator to a new basic block + let new_block = BasicBlockData { + statements: block_data.statements.split_off(location.statement_index), + terminator: block_data.terminator.take(), + is_cleanup: block_data.is_cleanup, + }; + + basic_blocks.push(new_block) +} + +fn insert_alignment_check<'tcx>( + tcx: TyCtxt<'tcx>, + local_decls: &mut IndexVec<Local, LocalDecl<'tcx>>, + block_data: &mut BasicBlockData<'tcx>, + pointer: Place<'tcx>, + 
pointee_ty: Ty<'tcx>, + source_info: SourceInfo, + new_block: BasicBlock, +) { + // Cast the pointer to a *const () + let const_raw_ptr = tcx.mk_ptr(TypeAndMut { ty: tcx.types.unit, mutbl: Mutability::Not }); + let rvalue = Rvalue::Cast(CastKind::PtrToPtr, Operand::Copy(pointer), const_raw_ptr); + let thin_ptr = local_decls.push(LocalDecl::with_source_info(const_raw_ptr, source_info)).into(); + block_data + .statements + .push(Statement { source_info, kind: StatementKind::Assign(Box::new((thin_ptr, rvalue))) }); + + // Transmute the pointer to a usize (equivalent to `ptr.addr()`) + let rvalue = Rvalue::Cast(CastKind::Transmute, Operand::Copy(thin_ptr), tcx.types.usize); + let addr = local_decls.push(LocalDecl::with_source_info(tcx.types.usize, source_info)).into(); + block_data + .statements + .push(Statement { source_info, kind: StatementKind::Assign(Box::new((addr, rvalue))) }); + + // Get the alignment of the pointee + let alignment = + local_decls.push(LocalDecl::with_source_info(tcx.types.usize, source_info)).into(); + let rvalue = Rvalue::NullaryOp(NullOp::AlignOf, pointee_ty); + block_data.statements.push(Statement { + source_info, + kind: StatementKind::Assign(Box::new((alignment, rvalue))), + }); + + // Subtract 1 from the alignment to get the alignment mask + let alignment_mask = + local_decls.push(LocalDecl::with_source_info(tcx.types.usize, source_info)).into(); + let one = Operand::Constant(Box::new(Constant { + span: source_info.span, + user_ty: None, + literal: ConstantKind::Val( + ConstValue::Scalar(Scalar::from_target_usize(1, &tcx)), + tcx.types.usize, + ), + })); + block_data.statements.push(Statement { + source_info, + kind: StatementKind::Assign(Box::new(( + alignment_mask, + Rvalue::BinaryOp(BinOp::Sub, Box::new((Operand::Copy(alignment), one))), + ))), + }); + + // BitAnd the alignment mask with the pointer + let alignment_bits = + local_decls.push(LocalDecl::with_source_info(tcx.types.usize, source_info)).into(); + block_data.statements.push(Statement { + source_info, + kind: StatementKind::Assign(Box::new(( + alignment_bits, + Rvalue::BinaryOp( + BinOp::BitAnd, + Box::new((Operand::Copy(addr), Operand::Copy(alignment_mask))), + ), + ))), + }); + + // Check if the alignment bits are all zero + let is_ok = local_decls.push(LocalDecl::with_source_info(tcx.types.bool, source_info)).into(); + let zero = Operand::Constant(Box::new(Constant { + span: source_info.span, + user_ty: None, + literal: ConstantKind::Val( + ConstValue::Scalar(Scalar::from_target_usize(0, &tcx)), + tcx.types.usize, + ), + })); + block_data.statements.push(Statement { + source_info, + kind: StatementKind::Assign(Box::new(( + is_ok, + Rvalue::BinaryOp(BinOp::Eq, Box::new((Operand::Copy(alignment_bits), zero.clone()))), + ))), + }); + + // Set this block's terminator to our assert, continuing to new_block if we pass + block_data.terminator = Some(Terminator { + source_info, + kind: TerminatorKind::Assert { + cond: Operand::Copy(is_ok), + expected: true, + target: new_block, + msg: AssertKind::MisalignedPointerDereference { + required: Operand::Copy(alignment), + found: Operand::Copy(addr), + }, + unwind: UnwindAction::Terminate, + }, + }); +} diff --git a/compiler/rustc_mir_transform/src/check_const_item_mutation.rs b/compiler/rustc_mir_transform/src/check_const_item_mutation.rs index 536745d2c..3d32c5865 100644 --- a/compiler/rustc_mir_transform/src/check_const_item_mutation.rs +++ b/compiler/rustc_mir_transform/src/check_const_item_mutation.rs @@ -24,7 +24,7 @@ struct ConstMutationChecker<'a, 'tcx> 
{ impl<'tcx> ConstMutationChecker<'_, 'tcx> { fn is_const_item(&self, local: Local) -> Option<DefId> { - if let Some(box LocalInfo::ConstRef { def_id }) = self.body.local_decls[local].local_info { + if let LocalInfo::ConstRef { def_id } = *self.body.local_decls[local].local_info() { Some(def_id) } else { None diff --git a/compiler/rustc_mir_transform/src/check_unsafety.rs b/compiler/rustc_mir_transform/src/check_unsafety.rs index d00ee1f4b..d908f6b3a 100644 --- a/compiler/rustc_mir_transform/src/check_unsafety.rs +++ b/compiler/rustc_mir_transform/src/check_unsafety.rs @@ -1,4 +1,4 @@ -use rustc_data_structures::fx::FxHashSet; +use rustc_data_structures::unord::{UnordItems, UnordSet}; use rustc_errors::struct_span_err; use rustc_hir as hir; use rustc_hir::def::DefKind; @@ -24,7 +24,7 @@ pub struct UnsafetyChecker<'a, 'tcx> { param_env: ty::ParamEnv<'tcx>, /// Used `unsafe` blocks in this function. This is used for the "unused_unsafe" lint. - used_unsafe_blocks: FxHashSet<HirId>, + used_unsafe_blocks: UnordSet<HirId>, } impl<'a, 'tcx> UnsafetyChecker<'a, 'tcx> { @@ -55,10 +55,9 @@ impl<'tcx> Visitor<'tcx> for UnsafetyChecker<'_, 'tcx> { | TerminatorKind::Drop { .. } | TerminatorKind::Yield { .. } | TerminatorKind::Assert { .. } - | TerminatorKind::DropAndReplace { .. } | TerminatorKind::GeneratorDrop | TerminatorKind::Resume - | TerminatorKind::Abort + | TerminatorKind::Terminate | TerminatorKind::Return | TerminatorKind::Unreachable | TerminatorKind::FalseEdge { .. } @@ -101,13 +100,16 @@ impl<'tcx> Visitor<'tcx> for UnsafetyChecker<'_, 'tcx> { | StatementKind::StorageLive(..) | StatementKind::StorageDead(..) | StatementKind::Retag { .. } - | StatementKind::AscribeUserType(..) + | StatementKind::PlaceMention(..) | StatementKind::Coverage(..) | StatementKind::Intrinsic(..) | StatementKind::ConstEvalCounter | StatementKind::Nop => { // safe (at least as emitted during MIR construction) } + // `AscribeUserType` just exists to help MIR borrowck. + // It has no semantics, and everything is already reported by `PlaceMention`. + StatementKind::AscribeUserType(..) => return, } self.super_statement(statement, location); } @@ -129,7 +131,7 @@ impl<'tcx> Visitor<'tcx> for UnsafetyChecker<'_, 'tcx> { let def_id = def_id.expect_local(); let UnsafetyCheckResult { violations, used_unsafe_blocks, .. } = self.tcx.unsafety_check_result(def_id); - self.register_violations(violations, used_unsafe_blocks.iter().copied()); + self.register_violations(violations, used_unsafe_blocks.items().copied()); } }, _ => {} @@ -151,7 +153,7 @@ impl<'tcx> Visitor<'tcx> for UnsafetyChecker<'_, 'tcx> { let local_def_id = def_id.expect_local(); let UnsafetyCheckResult { violations, used_unsafe_blocks, .. } = self.tcx.unsafety_check_result(local_def_id); - self.register_violations(violations, used_unsafe_blocks.iter().copied()); + self.register_violations(violations, used_unsafe_blocks.items().copied()); } } } @@ -180,7 +182,7 @@ impl<'tcx> Visitor<'tcx> for UnsafetyChecker<'_, 'tcx> { // If the projection root is an artificial local that we introduced when // desugaring `static`, give a more specific error message // (avoid the general "raw pointer" clause below, that would only be confusing). - if let Some(box LocalInfo::StaticRef { def_id, .. }) = decl.local_info { + if let LocalInfo::StaticRef { def_id, .. 
} = *decl.local_info() { if self.tcx.is_mutable_static(def_id) { self.require_unsafe( UnsafetyViolationKind::General, @@ -268,14 +270,14 @@ impl<'tcx> UnsafetyChecker<'_, 'tcx> { .lint_root; self.register_violations( [&UnsafetyViolation { source_info, lint_root, kind, details }], - [], + UnordItems::empty(), ); } fn register_violations<'a>( &mut self, violations: impl IntoIterator<Item = &'a UnsafetyViolation>, - new_used_unsafe_blocks: impl IntoIterator<Item = HirId>, + new_used_unsafe_blocks: UnordItems<HirId, impl Iterator<Item = HirId>>, ) { let safety = self.body.source_scopes[self.source_info.scope] .local_data @@ -308,9 +310,7 @@ impl<'tcx> UnsafetyChecker<'_, 'tcx> { }), }; - new_used_unsafe_blocks.into_iter().for_each(|hir_id| { - self.used_unsafe_blocks.insert(hir_id); - }); + self.used_unsafe_blocks.extend_unord(new_used_unsafe_blocks); } fn check_mut_borrowing_layout_constrained_field( &mut self, @@ -407,7 +407,7 @@ enum Context { struct UnusedUnsafeVisitor<'a, 'tcx> { tcx: TyCtxt<'tcx>, - used_unsafe_blocks: &'a FxHashSet<HirId>, + used_unsafe_blocks: &'a UnordSet<HirId>, context: Context, unused_unsafes: &'a mut Vec<(HirId, UnusedUnsafe)>, } @@ -458,7 +458,7 @@ impl<'tcx> intravisit::Visitor<'tcx> for UnusedUnsafeVisitor<'_, 'tcx> { fn check_unused_unsafe( tcx: TyCtxt<'_>, def_id: LocalDefId, - used_unsafe_blocks: &FxHashSet<HirId>, + used_unsafe_blocks: &UnordSet<HirId>, ) -> Vec<(HirId, UnusedUnsafe)> { let body_id = tcx.hir().maybe_body_owned_by(def_id); @@ -505,7 +505,7 @@ fn unsafety_check_result( if body.is_custom_mir() { return tcx.arena.alloc(UnsafetyCheckResult { violations: Vec::new(), - used_unsafe_blocks: FxHashSet::default(), + used_unsafe_blocks: Default::default(), unused_unsafes: Some(Vec::new()), }); } diff --git a/compiler/rustc_mir_transform/src/cleanup_post_borrowck.rs b/compiler/rustc_mir_transform/src/cleanup_post_borrowck.rs index d435d3ee6..0923824db 100644 --- a/compiler/rustc_mir_transform/src/cleanup_post_borrowck.rs +++ b/compiler/rustc_mir_transform/src/cleanup_post_borrowck.rs @@ -24,6 +24,7 @@ impl<'tcx> MirPass<'tcx> for CleanupPostBorrowck { for statement in basic_block.statements.iter_mut() { match statement.kind { StatementKind::AscribeUserType(..) + | StatementKind::PlaceMention(..) | StatementKind::Assign(box (_, Rvalue::Ref(_, BorrowKind::Shallow, _))) | StatementKind::FakeRead(..) => statement.make_nop(), _ => (), diff --git a/compiler/rustc_mir_transform/src/const_prop.rs b/compiler/rustc_mir_transform/src/const_prop.rs index 6b2eefce2..1bb45341e 100644 --- a/compiler/rustc_mir_transform/src/const_prop.rs +++ b/compiler/rustc_mir_transform/src/const_prop.rs @@ -1,15 +1,13 @@ //! Propagates constants for early reporting of statically known //! 
assertion failures -use std::cell::Cell; - use either::Right; use rustc_const_eval::const_eval::CheckAlignment; use rustc_data_structures::fx::FxHashSet; use rustc_hir::def::DefKind; use rustc_index::bit_set::BitSet; -use rustc_index::vec::IndexVec; +use rustc_index::vec::{IndexSlice, IndexVec}; use rustc_middle::mir::visit::{ MutVisitor, MutatingUseContext, NonMutatingUseContext, PlaceContext, Visitor, }; @@ -17,7 +15,7 @@ use rustc_middle::mir::*; use rustc_middle::ty::layout::{LayoutError, LayoutOf, LayoutOfHelpers, TyAndLayout}; use rustc_middle::ty::InternalSubsts; use rustc_middle::ty::{self, ConstKind, Instance, ParamEnv, Ty, TyCtxt, TypeVisitableExt}; -use rustc_span::{def_id::DefId, Span}; +use rustc_span::{def_id::DefId, Span, DUMMY_SP}; use rustc_target::abi::{self, Align, HasDataLayout, Size, TargetDataLayout}; use rustc_target::spec::abi::Abi as CallAbi; use rustc_trait_selection::traits; @@ -25,8 +23,8 @@ use rustc_trait_selection::traits; use crate::MirPass; use rustc_const_eval::interpret::{ self, compile_time_machine, AllocId, ConstAllocation, ConstValue, CtfeValidationMode, Frame, - ImmTy, Immediate, InterpCx, InterpResult, LocalState, LocalValue, MemoryKind, OpTy, PlaceTy, - Pointer, Scalar, StackPopCleanup, StackPopUnwind, + ImmTy, Immediate, InterpCx, InterpResult, LocalValue, MemoryKind, OpTy, PlaceTy, Pointer, + Scalar, StackPopCleanup, }; /// The maximum number of bytes that we'll allocate space for a local or the return value. @@ -56,7 +54,7 @@ pub struct ConstProp; impl<'tcx> MirPass<'tcx> for ConstProp { fn is_enabled(&self, sess: &rustc_session::Session) -> bool { - sess.mir_opt_level() >= 1 + sess.mir_opt_level() >= 2 } #[instrument(skip(self, tcx), level = "debug")] @@ -117,10 +115,7 @@ impl<'tcx> MirPass<'tcx> for ConstProp { .predicates .iter() .filter_map(|(p, _)| if p.is_global() { Some(*p) } else { None }); - if traits::impossible_predicates( - tcx, - traits::elaborate_predicates(tcx, predicates).map(|o| o.predicate).collect(), - ) { + if traits::impossible_predicates(tcx, traits::elaborate(tcx, predicates).collect()) { trace!("ConstProp skipped for {:?}: found unsatisfiable predicates", def_id); return; } @@ -129,7 +124,7 @@ impl<'tcx> MirPass<'tcx> for ConstProp { let dummy_body = &Body::new( body.source, - (*body.basic_blocks).clone(), + (*body.basic_blocks).to_owned(), body.source_scopes.clone(), body.local_decls.clone(), Default::default(), @@ -154,22 +149,15 @@ impl<'tcx> MirPass<'tcx> for ConstProp { pub struct ConstPropMachine<'mir, 'tcx> { /// The virtual call stack. stack: Vec<Frame<'mir, 'tcx>>, - /// `OnlyInsideOwnBlock` locals that were written in the current block get erased at the end. pub written_only_inside_own_block_locals: FxHashSet<Local>, - /// Locals that need to be cleared after every block terminates. 
- pub only_propagate_inside_block_locals: BitSet<Local>, pub can_const_prop: IndexVec<Local, ConstPropMode>, } impl ConstPropMachine<'_, '_> { - pub fn new( - only_propagate_inside_block_locals: BitSet<Local>, - can_const_prop: IndexVec<Local, ConstPropMode>, - ) -> Self { + pub fn new(can_const_prop: IndexVec<Local, ConstPropMode>) -> Self { Self { stack: Vec::new(), written_only_inside_own_block_locals: Default::default(), - only_propagate_inside_block_locals, can_const_prop, } } @@ -189,7 +177,7 @@ impl<'mir, 'tcx> interpret::Machine<'mir, 'tcx> for ConstPropMachine<'mir, 'tcx> } #[inline(always)] - fn enforce_validity(_ecx: &InterpCx<'mir, 'tcx, Self>) -> bool { + fn enforce_validity(_ecx: &InterpCx<'mir, 'tcx, Self>, _layout: TyAndLayout<'tcx>) -> bool { false // for now, we don't enforce validity } fn alignment_check_failed( @@ -218,7 +206,7 @@ impl<'mir, 'tcx> interpret::Machine<'mir, 'tcx> for ConstPropMachine<'mir, 'tcx> _args: &[OpTy<'tcx>], _destination: &PlaceTy<'tcx>, _target: Option<BasicBlock>, - _unwind: StackPopUnwind, + _unwind: UnwindAction, ) -> InterpResult<'tcx, Option<(&'mir Body<'tcx>, ty::Instance<'tcx>)>> { Ok(None) } @@ -229,7 +217,7 @@ impl<'mir, 'tcx> interpret::Machine<'mir, 'tcx> for ConstPropMachine<'mir, 'tcx> _args: &[OpTy<'tcx>], _destination: &PlaceTy<'tcx>, _target: Option<BasicBlock>, - _unwind: StackPopUnwind, + _unwind: UnwindAction, ) -> InterpResult<'tcx> { throw_machine_stop_str!("calling intrinsics isn't supported in ConstProp") } @@ -237,7 +225,7 @@ impl<'mir, 'tcx> interpret::Machine<'mir, 'tcx> for ConstPropMachine<'mir, 'tcx> fn assert_panic( _ecx: &mut InterpCx<'mir, 'tcx, Self>, _msg: &rustc_middle::mir::AssertMessage<'tcx>, - _unwind: Option<rustc_middle::mir::BasicBlock>, + _unwind: rustc_middle::mir::UnwindAction, ) -> InterpResult<'tcx> { bug!("panics terminators are not evaluated in ConstProp") } @@ -257,16 +245,17 @@ impl<'mir, 'tcx> interpret::Machine<'mir, 'tcx> for ConstPropMachine<'mir, 'tcx> frame: usize, local: Local, ) -> InterpResult<'tcx, &'a mut interpret::Operand<Self::Provenance>> { - if ecx.machine.can_const_prop[local] == ConstPropMode::NoPropagation { - throw_machine_stop_str!("tried to write to a local that is marked as not propagatable") - } - if frame == 0 && ecx.machine.only_propagate_inside_block_locals.contains(local) { - trace!( - "mutating local {:?} which is restricted to its block. \ - Will remove it from const-prop after block is finished.", - local - ); - ecx.machine.written_only_inside_own_block_locals.insert(local); + assert_eq!(frame, 0); + match ecx.machine.can_const_prop[local] { + ConstPropMode::NoPropagation => { + throw_machine_stop_str!( + "tried to write to a local that is marked as not propagatable" + ) + } + ConstPropMode::OnlyInsideOwnBlock => { + ecx.machine.written_only_inside_own_block_locals.insert(local); + } + ConstPropMode::FullConstProp => {} } ecx.machine.stack[frame].locals[local].access_mut() } @@ -327,10 +316,7 @@ struct ConstPropagator<'mir, 'tcx> { ecx: InterpCx<'mir, 'tcx, ConstPropMachine<'mir, 'tcx>>, tcx: TyCtxt<'tcx>, param_env: ParamEnv<'tcx>, - local_decls: &'mir IndexVec<Local, LocalDecl<'tcx>>, - // Because we have `MutVisitor` we can't obtain the `SourceInfo` from a `Location`. So we store - // the last known `SourceInfo` here and just keep revisiting it. 
- source_info: Option<SourceInfo>, + local_decls: &'mir IndexSlice<Local, LocalDecl<'tcx>>, } impl<'tcx> LayoutOfHelpers<'tcx> for ConstPropagator<'_, 'tcx> { @@ -374,17 +360,11 @@ impl<'mir, 'tcx> ConstPropagator<'mir, 'tcx> { let param_env = tcx.param_env_reveal_all_normalized(def_id); let can_const_prop = CanConstProp::check(tcx, param_env, body); - let mut only_propagate_inside_block_locals = BitSet::new_empty(can_const_prop.len()); - for (l, mode) in can_const_prop.iter_enumerated() { - if *mode == ConstPropMode::OnlyInsideOwnBlock { - only_propagate_inside_block_locals.insert(l); - } - } let mut ecx = InterpCx::new( tcx, tcx.def_span(def_id), param_env, - ConstPropMachine::new(only_propagate_inside_block_locals, can_const_prop), + ConstPropMachine::new(can_const_prop), ); let ret_layout = ecx @@ -411,13 +391,7 @@ impl<'mir, 'tcx> ConstPropagator<'mir, 'tcx> { ) .expect("failed to push initial stack frame"); - ConstPropagator { - ecx, - tcx, - param_env, - local_decls: &dummy_body.local_decls, - source_info: None, - } + ConstPropagator { ecx, tcx, param_env, local_decls: &dummy_body.local_decls } } fn get_const(&self, place: Place<'tcx>) -> Option<OpTy<'tcx>> { @@ -446,10 +420,9 @@ impl<'mir, 'tcx> ConstPropagator<'mir, 'tcx> { /// Remove `local` from the pool of `Locals`. Allows writing to them, /// but not reading from them anymore. fn remove_const(ecx: &mut InterpCx<'mir, 'tcx, ConstPropMachine<'mir, 'tcx>>, local: Local) { - ecx.frame_mut().locals[local] = LocalState { - value: LocalValue::Live(interpret::Operand::Immediate(interpret::Immediate::Uninit)), - layout: Cell::new(None), - }; + ecx.frame_mut().locals[local].value = + LocalValue::Live(interpret::Operand::Immediate(interpret::Immediate::Uninit)); + ecx.machine.written_only_inside_own_block_locals.remove(&local); } /// Returns the value, if any, of evaluating `c`. @@ -492,11 +465,7 @@ impl<'mir, 'tcx> ConstPropagator<'mir, 'tcx> { scalar, )) = *value { - *operand = self.operand_from_scalar( - scalar, - value.layout.ty, - self.source_info.unwrap().span, - ); + *operand = self.operand_from_scalar(scalar, value.layout.ty); } } } @@ -504,7 +473,7 @@ impl<'mir, 'tcx> ConstPropagator<'mir, 'tcx> { } } - fn const_prop(&mut self, rvalue: &Rvalue<'tcx>, place: Place<'tcx>) -> Option<()> { + fn check_rvalue(&mut self, rvalue: &Rvalue<'tcx>) -> Option<()> { // Perform any special handling for specific Rvalue types. // Generally, checks here fall into one of two categories: // 1. Additional checking to provide useful lints to the user @@ -532,6 +501,15 @@ impl<'mir, 'tcx> ConstPropagator<'mir, 'tcx> { return None; } + // Do not try creating references, nor any types with potentially-complex + // invariants. This avoids an issue where checking validity would do a + // bunch of work generating a nice message about the invariant violation, + // only to not show it to anyone (since this isn't the lint). + Rvalue::Cast(CastKind::Transmute, op, dst_ty) if !dst_ty.is_primitive() => { + trace!("skipping Transmute of {:?} to {:?}", op, dst_ty); + + return None; + } // There's no other checking to do at this time. Rvalue::Aggregate(..) 
@@ -561,7 +539,7 @@ impl<'mir, 'tcx> ConstPropagator<'mir, 'tcx> { return None; } - self.eval_rvalue_with_identities(rvalue, place) + Some(()) } // Attempt to use algebraic identities to eliminate constant expressions @@ -621,20 +599,24 @@ impl<'mir, 'tcx> ConstPropagator<'mir, 'tcx> { } /// Creates a new `Operand::Constant` from a `Scalar` value - fn operand_from_scalar(&self, scalar: Scalar, ty: Ty<'tcx>, span: Span) -> Operand<'tcx> { + fn operand_from_scalar(&self, scalar: Scalar, ty: Ty<'tcx>) -> Operand<'tcx> { Operand::Constant(Box::new(Constant { - span, + span: DUMMY_SP, user_ty: None, literal: ConstantKind::from_scalar(self.tcx, scalar, ty), })) } - fn replace_with_const( - &mut self, - rval: &mut Rvalue<'tcx>, - value: &OpTy<'tcx>, - source_info: SourceInfo, - ) { + fn replace_with_const(&mut self, place: Place<'tcx>, rval: &mut Rvalue<'tcx>) { + // This will return None if the above `const_prop` invocation only "wrote" a + // type whose creation requires no write. E.g. a generator whose initial state + // consists solely of uninitialized memory (so it doesn't capture any locals). + let Some(ref value) = self.get_const(place) else { return }; + if !self.should_const_prop(value) { + return; + } + trace!("replacing {:?}={:?} with {:?}", place, rval, value); + if let Rvalue::Use(Operand::Constant(c)) = rval { match c.literal { ConstantKind::Ty(c) if matches!(c.kind(), ConstKind::Unevaluated(..)) => {} @@ -664,11 +646,7 @@ impl<'mir, 'tcx> ConstPropagator<'mir, 'tcx> { if let Some(Right(imm)) = imm { match *imm { interpret::Immediate::Scalar(scalar) => { - *rval = Rvalue::Use(self.operand_from_scalar( - scalar, - value.layout.ty, - source_info.span, - )); + *rval = Rvalue::Use(self.operand_from_scalar(scalar, value.layout.ty)); } Immediate::ScalarPair(..) => { // Found a value represented as a pair. For now only do const-prop if the type @@ -701,7 +679,7 @@ impl<'mir, 'tcx> ConstPropagator<'mir, 'tcx> { let const_val = ConstValue::ByRef { alloc, offset: Size::ZERO }; let literal = ConstantKind::Val(const_val, ty); *rval = Rvalue::Use(Operand::Constant(Box::new(Constant { - span: source_info.span, + span: DUMMY_SP, user_ty: None, literal, }))); @@ -730,6 +708,19 @@ impl<'mir, 'tcx> ConstPropagator<'mir, 'tcx> { _ => false, } } + + fn ensure_not_propagated(&self, local: Local) { + if cfg!(debug_assertions) { + assert!( + self.get_const(local.into()).is_none() + || self + .layout_of(self.local_decls[local].ty) + .map_or(true, |layout| layout.is_zst()), + "failed to remove values for `{local:?}`, value={:?}", + self.get_const(local.into()), + ) + } + } } /// The mode that `ConstProp` is allowed to run in for a given `Local`. @@ -739,8 +730,6 @@ pub enum ConstPropMode { FullConstProp, /// The `Local` can only be propagated into and from its own block. OnlyInsideOwnBlock, - /// The `Local` can be propagated into but reads cannot be propagated. - OnlyPropagateInto, /// The `Local` cannot be part of propagation at all. Any statement /// referencing it either for reading or writing will not get propagated. NoPropagation, @@ -750,8 +739,6 @@ pub struct CanConstProp { can_const_prop: IndexVec<Local, ConstPropMode>, // False at the beginning. Once set, no more assignments are allowed to that local. 
found_assignment: BitSet<Local>, - // Cache of locals' information - local_kinds: IndexVec<Local, LocalKind>, } impl CanConstProp { @@ -764,10 +751,6 @@ impl CanConstProp { let mut cpv = CanConstProp { can_const_prop: IndexVec::from_elem(ConstPropMode::FullConstProp, &body.local_decls), found_assignment: BitSet::new_empty(body.local_decls.len()), - local_kinds: IndexVec::from_fn_n( - |local| body.local_kind(local), - body.local_decls.len(), - ), }; for (local, val) in cpv.can_const_prop.iter_enumerated_mut() { let ty = body.local_decls[local].ty; @@ -780,24 +763,10 @@ impl CanConstProp { continue; } } - // Cannot use args at all - // Cannot use locals because if x < y { y - x } else { x - y } would - // lint for x != y - // FIXME(oli-obk): lint variables until they are used in a condition - // FIXME(oli-obk): lint if return value is constant - if cpv.local_kinds[local] == LocalKind::Arg { - *val = ConstPropMode::OnlyPropagateInto; - trace!( - "local {:?} can't be const propagated because it's a function argument", - local - ); - } else if cpv.local_kinds[local] == LocalKind::Var { - *val = ConstPropMode::OnlyInsideOwnBlock; - trace!( - "local {:?} will only be propagated inside its block, because it's a user variable", - local - ); - } + } + // Consider that arguments are assigned on entry. + for arg in body.args_iter() { + cpv.found_assignment.insert(arg); } cpv.visit_body(&body); cpv.can_const_prop @@ -827,7 +796,6 @@ impl Visitor<'_> for CanConstProp { // states as applicable. ConstPropMode::OnlyInsideOwnBlock => {} ConstPropMode::NoPropagation => {} - ConstPropMode::OnlyPropagateInto => {} other @ ConstPropMode::FullConstProp => { trace!( "local {:?} can't be propagated because of multiple assignments. Previous state: {:?}", @@ -854,7 +822,6 @@ impl Visitor<'_> for CanConstProp { // mutation. | NonMutatingUse(NonMutatingUseContext::SharedBorrow) | NonMutatingUse(NonMutatingUseContext::ShallowBorrow) - | NonMutatingUse(NonMutatingUseContext::UniqueBorrow) | NonMutatingUse(NonMutatingUseContext::AddressOf) | MutatingUse(MutatingUseContext::Borrow) | MutatingUse(MutatingUseContext::AddressOf) => { @@ -886,48 +853,23 @@ impl<'tcx> MutVisitor<'tcx> for ConstPropagator<'_, 'tcx> { } } - fn visit_constant(&mut self, constant: &mut Constant<'tcx>, location: Location) { - trace!("visit_constant: {:?}", constant); - self.super_constant(constant, location); - self.eval_constant(constant); - } + fn visit_assign( + &mut self, + place: &mut Place<'tcx>, + rvalue: &mut Rvalue<'tcx>, + location: Location, + ) { + self.super_assign(place, rvalue, location); - fn visit_statement(&mut self, statement: &mut Statement<'tcx>, location: Location) { - trace!("visit_statement: {:?}", statement); - let source_info = statement.source_info; - self.source_info = Some(source_info); - match statement.kind { - StatementKind::Assign(box (place, ref mut rval)) => { - let can_const_prop = self.ecx.machine.can_const_prop[place.local]; - if let Some(()) = self.const_prop(rval, place) { - // This will return None if the above `const_prop` invocation only "wrote" a - // type whose creation requires no write. E.g. a generator whose initial state - // consists solely of uninitialized memory (so it doesn't capture any locals). 
- if let Some(ref value) = self.get_const(place) && self.should_const_prop(value) { - trace!("replacing {:?} with {:?}", rval, value); - self.replace_with_const(rval, value, source_info); - if can_const_prop == ConstPropMode::FullConstProp - || can_const_prop == ConstPropMode::OnlyInsideOwnBlock - { - trace!("propagated into {:?}", place); - } - } - match can_const_prop { - ConstPropMode::OnlyInsideOwnBlock => { - trace!( - "found local restricted to its block. \ - Will remove it from const-prop after block is finished. Local: {:?}", - place.local - ); - } - ConstPropMode::OnlyPropagateInto | ConstPropMode::NoPropagation => { - trace!("can't propagate into {:?}", place); - if place.local != RETURN_PLACE { - Self::remove_const(&mut self.ecx, place.local); - } - } - ConstPropMode::FullConstProp => {} - } + let Some(()) = self.check_rvalue(rvalue) else { return }; + + match self.ecx.machine.can_const_prop[place.local] { + // Do nothing if the place is indirect. + _ if place.is_indirect() => {} + ConstPropMode::NoPropagation => self.ensure_not_propagated(place.local), + ConstPropMode::OnlyInsideOwnBlock | ConstPropMode::FullConstProp => { + if let Some(()) = self.eval_rvalue_with_identities(rvalue, *place) { + self.replace_with_const(*place, rvalue); } else { // Const prop failed, so erase the destination, ensuring that whatever happens // from here on, does not know about the previous value. @@ -947,8 +889,22 @@ impl<'tcx> MutVisitor<'tcx> for ConstPropagator<'_, 'tcx> { Self::remove_const(&mut self.ecx, place.local); } } + } + } + + fn visit_statement(&mut self, statement: &mut Statement<'tcx>, location: Location) { + trace!("visit_statement: {:?}", statement); + + // We want to evaluate operands before any change to the assigned-to value, + // so we recurse first. + self.super_statement(statement, location); + + match statement.kind { StatementKind::SetDiscriminant { ref place, .. } => { match self.ecx.machine.can_const_prop[place.local] { + // Do nothing if the place is indirect. 
+ _ if place.is_indirect() => {} + ConstPropMode::NoPropagation => self.ensure_not_propagated(place.local), ConstPropMode::FullConstProp | ConstPropMode::OnlyInsideOwnBlock => { if self.ecx.statement(statement).is_ok() { trace!("propped discriminant into {:?}", place); @@ -956,28 +912,22 @@ impl<'tcx> MutVisitor<'tcx> for ConstPropagator<'_, 'tcx> { Self::remove_const(&mut self.ecx, place.local); } } - ConstPropMode::OnlyPropagateInto | ConstPropMode::NoPropagation => { - Self::remove_const(&mut self.ecx, place.local); - } } } - StatementKind::StorageLive(local) | StatementKind::StorageDead(local) => { + StatementKind::StorageLive(local) => { let frame = self.ecx.frame_mut(); - frame.locals[local].value = if let StatementKind::StorageLive(_) = statement.kind { - LocalValue::Live(interpret::Operand::Immediate(interpret::Immediate::Uninit)) - } else { - LocalValue::Dead - }; + frame.locals[local].value = + LocalValue::Live(interpret::Operand::Immediate(interpret::Immediate::Uninit)); + } + StatementKind::StorageDead(local) => { + let frame = self.ecx.frame_mut(); + frame.locals[local].value = LocalValue::Dead; } _ => {} } - - self.super_statement(statement, location); } fn visit_terminator(&mut self, terminator: &mut Terminator<'tcx>, location: Location) { - let source_info = terminator.source_info; - self.source_info = Some(source_info); self.super_terminator(terminator, location); match &mut terminator.kind { @@ -987,11 +937,7 @@ impl<'tcx> MutVisitor<'tcx> for ConstPropagator<'_, 'tcx> { && self.should_const_prop(value) { trace!("assertion on {:?} should be {:?}", value, expected); - *cond = self.operand_from_scalar( - value_const, - self.tcx.types.bool, - source_info.span, - ); + *cond = self.operand_from_scalar(value_const, self.tcx.types.bool); } } TerminatorKind::SwitchInt { ref mut discr, .. } => { @@ -1003,11 +949,10 @@ impl<'tcx> MutVisitor<'tcx> for ConstPropagator<'_, 'tcx> { // None of these have Operands to const-propagate. TerminatorKind::Goto { .. } | TerminatorKind::Resume - | TerminatorKind::Abort + | TerminatorKind::Terminate | TerminatorKind::Return | TerminatorKind::Unreachable | TerminatorKind::Drop { .. } - | TerminatorKind::DropAndReplace { .. } | TerminatorKind::Yield { .. } | TerminatorKind::GeneratorDrop | TerminatorKind::FalseEdge { .. } @@ -1027,22 +972,30 @@ impl<'tcx> MutVisitor<'tcx> for ConstPropagator<'_, 'tcx> { // We remove all Locals which are restricted in propagation to their containing blocks and // which were modified in the current block. // Take it out of the ecx so we can get a mutable reference to the ecx for `remove_const`. - let mut locals = std::mem::take(&mut self.ecx.machine.written_only_inside_own_block_locals); - for &local in locals.iter() { + let mut written_only_inside_own_block_locals = + std::mem::take(&mut self.ecx.machine.written_only_inside_own_block_locals); + + // This loop can get very hot for some bodies: it check each local in each bb. + // To avoid this quadratic behaviour, we only clear the locals that were modified inside + // the current block. 
+ for local in written_only_inside_own_block_locals.drain() { + debug_assert_eq!( + self.ecx.machine.can_const_prop[local], + ConstPropMode::OnlyInsideOwnBlock + ); Self::remove_const(&mut self.ecx, local); } - locals.clear(); - // Put it back so we reuse the heap of the storage - self.ecx.machine.written_only_inside_own_block_locals = locals; + self.ecx.machine.written_only_inside_own_block_locals = + written_only_inside_own_block_locals; + if cfg!(debug_assertions) { - // Ensure we are correctly erasing locals with the non-debug-assert logic. - for local in self.ecx.machine.only_propagate_inside_block_locals.iter() { - assert!( - self.get_const(local.into()).is_none() - || self - .layout_of(self.local_decls[local].ty) - .map_or(true, |layout| layout.is_zst()) - ) + for (local, &mode) in self.ecx.machine.can_const_prop.iter_enumerated() { + match mode { + ConstPropMode::FullConstProp => {} + ConstPropMode::NoPropagation | ConstPropMode::OnlyInsideOwnBlock => { + self.ensure_not_propagated(local); + } + } } } } diff --git a/compiler/rustc_mir_transform/src/const_prop_lint.rs b/compiler/rustc_mir_transform/src/const_prop_lint.rs index fd9475748..699fe4489 100644 --- a/compiler/rustc_mir_transform/src/const_prop_lint.rs +++ b/compiler/rustc_mir_transform/src/const_prop_lint.rs @@ -1,24 +1,17 @@ //! Propagates constants for early reporting of statically known //! assertion failures -use std::cell::Cell; - -use either::{Left, Right}; +use either::Left; use rustc_const_eval::interpret::Immediate; use rustc_const_eval::interpret::{ - self, InterpCx, InterpResult, LocalState, LocalValue, MemoryKind, OpTy, Scalar, StackPopCleanup, + self, InterpCx, InterpResult, LocalValue, MemoryKind, OpTy, Scalar, StackPopCleanup, }; use rustc_hir::def::DefKind; use rustc_hir::HirId; use rustc_index::bit_set::BitSet; -use rustc_index::vec::IndexVec; use rustc_middle::mir::visit::Visitor; -use rustc_middle::mir::{ - AssertKind, BinOp, Body, Constant, Local, LocalDecl, Location, Operand, Place, Rvalue, - SourceInfo, SourceScope, SourceScopeData, Statement, StatementKind, Terminator, TerminatorKind, - UnOp, RETURN_PLACE, -}; +use rustc_middle::mir::*; use rustc_middle::ty::layout::{LayoutError, LayoutOf, LayoutOfHelpers, TyAndLayout}; use rustc_middle::ty::InternalSubsts; use rustc_middle::ty::{ @@ -98,10 +91,7 @@ impl<'tcx> MirLint<'tcx> for ConstProp { .predicates .iter() .filter_map(|(p, _)| if p.is_global() { Some(*p) } else { None }); - if traits::impossible_predicates( - tcx, - traits::elaborate_predicates(tcx, predicates).map(|o| o.predicate).collect(), - ) { + if traits::impossible_predicates(tcx, traits::elaborate(tcx, predicates).collect()) { trace!("ConstProp skipped for {:?}: found unsatisfiable predicates", def_id); return; } @@ -110,7 +100,7 @@ impl<'tcx> MirLint<'tcx> for ConstProp { let dummy_body = &Body::new( body.source, - (*body.basic_blocks).clone(), + (*body.basic_blocks).to_owned(), body.source_scopes.clone(), body.local_decls.clone(), Default::default(), @@ -137,11 +127,8 @@ struct ConstPropagator<'mir, 'tcx> { ecx: InterpCx<'mir, 'tcx, ConstPropMachine<'mir, 'tcx>>, tcx: TyCtxt<'tcx>, param_env: ParamEnv<'tcx>, - source_scopes: &'mir IndexVec<SourceScope, SourceScopeData<'tcx>>, - local_decls: &'mir IndexVec<Local, LocalDecl<'tcx>>, - // Because we have `MutVisitor` we can't obtain the `SourceInfo` from a `Location`. So we store - // the last known `SourceInfo` here and just keep revisiting it. 
- source_info: Option<SourceInfo>, + worklist: Vec<BasicBlock>, + visited_blocks: BitSet<BasicBlock>, } impl<'tcx> LayoutOfHelpers<'tcx> for ConstPropagator<'_, 'tcx> { @@ -185,17 +172,11 @@ impl<'mir, 'tcx> ConstPropagator<'mir, 'tcx> { let param_env = tcx.param_env_reveal_all_normalized(def_id); let can_const_prop = CanConstProp::check(tcx, param_env, body); - let mut only_propagate_inside_block_locals = BitSet::new_empty(can_const_prop.len()); - for (l, mode) in can_const_prop.iter_enumerated() { - if *mode == ConstPropMode::OnlyInsideOwnBlock { - only_propagate_inside_block_locals.insert(l); - } - } let mut ecx = InterpCx::new( tcx, tcx.def_span(def_id), param_env, - ConstPropMachine::new(only_propagate_inside_block_locals, can_const_prop), + ConstPropMachine::new(can_const_prop), ); let ret_layout = ecx @@ -226,12 +207,19 @@ impl<'mir, 'tcx> ConstPropagator<'mir, 'tcx> { ecx, tcx, param_env, - source_scopes: &dummy_body.source_scopes, - local_decls: &dummy_body.local_decls, - source_info: None, + worklist: vec![START_BLOCK], + visited_blocks: BitSet::new_empty(body.basic_blocks.len()), } } + fn body(&self) -> &'mir Body<'tcx> { + self.ecx.frame().body + } + + fn local_decls(&self) -> &'mir LocalDecls<'tcx> { + &self.body().local_decls + } + fn get_const(&self, place: Place<'tcx>) -> Option<OpTy<'tcx>> { let op = match self.ecx.eval_place_to_op(place, None) { Ok(op) => { @@ -258,22 +246,21 @@ impl<'mir, 'tcx> ConstPropagator<'mir, 'tcx> { /// Remove `local` from the pool of `Locals`. Allows writing to them, /// but not reading from them anymore. fn remove_const(ecx: &mut InterpCx<'mir, 'tcx, ConstPropMachine<'mir, 'tcx>>, local: Local) { - ecx.frame_mut().locals[local] = LocalState { - value: LocalValue::Live(interpret::Operand::Immediate(interpret::Immediate::Uninit)), - layout: Cell::new(None), - }; + ecx.frame_mut().locals[local].value = + LocalValue::Live(interpret::Operand::Immediate(interpret::Immediate::Uninit)); + ecx.machine.written_only_inside_own_block_locals.remove(&local); } fn lint_root(&self, source_info: SourceInfo) -> Option<HirId> { - source_info.scope.lint_root(self.source_scopes) + source_info.scope.lint_root(&self.body().source_scopes) } - fn use_ecx<F, T>(&mut self, source_info: SourceInfo, f: F) -> Option<T> + fn use_ecx<F, T>(&mut self, location: Location, f: F) -> Option<T> where F: FnOnce(&mut Self) -> InterpResult<'tcx, T>, { // Overwrite the PC -- whatever the interpreter does to it does not make any sense anyway. - self.ecx.frame_mut().loc = Right(source_info.span); + self.ecx.frame_mut().loc = Left(location); match f(self) { Ok(val) => Some(val), Err(error) => { @@ -292,7 +279,7 @@ impl<'mir, 'tcx> ConstPropagator<'mir, 'tcx> { } /// Returns the value, if any, of evaluating `c`. - fn eval_constant(&mut self, c: &Constant<'tcx>, source_info: SourceInfo) -> Option<OpTy<'tcx>> { + fn eval_constant(&mut self, c: &Constant<'tcx>, location: Location) -> Option<OpTy<'tcx>> { // FIXME we need to revisit this for #67176 if c.needs_subst() { return None; @@ -306,45 +293,41 @@ impl<'mir, 'tcx> ConstPropagator<'mir, 'tcx> { // manually normalized. let val = self.tcx.try_normalize_erasing_regions(self.param_env, c.literal).ok()?; - self.use_ecx(source_info, |this| this.ecx.eval_mir_constant(&val, Some(c.span), None)) + self.use_ecx(location, |this| this.ecx.eval_mir_constant(&val, Some(c.span), None)) } /// Returns the value, if any, of evaluating `place`. 
- fn eval_place(&mut self, place: Place<'tcx>, source_info: SourceInfo) -> Option<OpTy<'tcx>> { + fn eval_place(&mut self, place: Place<'tcx>, location: Location) -> Option<OpTy<'tcx>> { trace!("eval_place(place={:?})", place); - self.use_ecx(source_info, |this| this.ecx.eval_place_to_op(place, None)) + self.use_ecx(location, |this| this.ecx.eval_place_to_op(place, None)) } /// Returns the value, if any, of evaluating `op`. Calls upon `eval_constant` /// or `eval_place`, depending on the variant of `Operand` used. - fn eval_operand(&mut self, op: &Operand<'tcx>, source_info: SourceInfo) -> Option<OpTy<'tcx>> { + fn eval_operand(&mut self, op: &Operand<'tcx>, location: Location) -> Option<OpTy<'tcx>> { match *op { - Operand::Constant(ref c) => self.eval_constant(c, source_info), - Operand::Move(place) | Operand::Copy(place) => self.eval_place(place, source_info), + Operand::Constant(ref c) => self.eval_constant(c, location), + Operand::Move(place) | Operand::Copy(place) => self.eval_place(place, location), } } fn report_assert_as_lint( &self, lint: &'static lint::Lint, - source_info: SourceInfo, + location: Location, message: &'static str, panic: AssertKind<impl std::fmt::Debug>, ) { - if let Some(lint_root) = self.lint_root(source_info) { + let source_info = self.body().source_info(location); + if let Some(lint_root) = self.lint_root(*source_info) { self.tcx.struct_span_lint_hir(lint, lint_root, source_info.span, message, |lint| { lint.span_label(source_info.span, format!("{:?}", panic)) }); } } - fn check_unary_op( - &mut self, - op: UnOp, - arg: &Operand<'tcx>, - source_info: SourceInfo, - ) -> Option<()> { - if let (val, true) = self.use_ecx(source_info, |this| { + fn check_unary_op(&mut self, op: UnOp, arg: &Operand<'tcx>, location: Location) -> Option<()> { + if let (val, true) = self.use_ecx(location, |this| { let val = this.ecx.read_immediate(&this.ecx.eval_operand(arg, None)?)?; let (_res, overflow, _ty) = this.ecx.overflowing_unary_op(op, &val)?; Ok((val, overflow)) @@ -354,7 +337,7 @@ impl<'mir, 'tcx> ConstPropagator<'mir, 'tcx> { assert_eq!(op, UnOp::Neg, "Neg is the only UnOp that can overflow"); self.report_assert_as_lint( lint::builtin::ARITHMETIC_OVERFLOW, - source_info, + location, "this arithmetic operation will overflow", AssertKind::OverflowNeg(val.to_const_int()), ); @@ -369,28 +352,27 @@ impl<'mir, 'tcx> ConstPropagator<'mir, 'tcx> { op: BinOp, left: &Operand<'tcx>, right: &Operand<'tcx>, - source_info: SourceInfo, + location: Location, ) -> Option<()> { - let r = self.use_ecx(source_info, |this| { + let r = self.use_ecx(location, |this| { this.ecx.read_immediate(&this.ecx.eval_operand(right, None)?) }); - let l = self.use_ecx(source_info, |this| { - this.ecx.read_immediate(&this.ecx.eval_operand(left, None)?) - }); + let l = self + .use_ecx(location, |this| this.ecx.read_immediate(&this.ecx.eval_operand(left, None)?)); // Check for exceeding shifts *even if* we cannot evaluate the LHS. if matches!(op, BinOp::Shr | BinOp::Shl) { let r = r.clone()?; // We need the type of the LHS. We cannot use `place_layout` as that is the type // of the result, which for checked binops is not the same! 
- let left_ty = left.ty(self.local_decls, self.tcx); + let left_ty = left.ty(self.local_decls(), self.tcx); let left_size = self.ecx.layout_of(left_ty).ok()?.size; let right_size = r.layout.size; let r_bits = r.to_scalar().to_bits(right_size).ok(); if r_bits.map_or(false, |b| b >= left_size.bits() as u128) { - debug!("check_binary_op: reporting assert for {:?}", source_info); + debug!("check_binary_op: reporting assert for {:?}", location); self.report_assert_as_lint( lint::builtin::ARITHMETIC_OVERFLOW, - source_info, + location, "this arithmetic operation will overflow", AssertKind::Overflow( op, @@ -412,13 +394,13 @@ impl<'mir, 'tcx> ConstPropagator<'mir, 'tcx> { if let (Some(l), Some(r)) = (l, r) { // The remaining operators are handled through `overflowing_binary_op`. - if self.use_ecx(source_info, |this| { + if self.use_ecx(location, |this| { let (_res, overflow, _ty) = this.ecx.overflowing_binary_op(op, &l, &r)?; Ok(overflow) })? { self.report_assert_as_lint( lint::builtin::ARITHMETIC_OVERFLOW, - source_info, + location, "this arithmetic operation will overflow", AssertKind::Overflow(op, l.to_const_int(), r.to_const_int()), ); @@ -428,12 +410,7 @@ impl<'mir, 'tcx> ConstPropagator<'mir, 'tcx> { Some(()) } - fn const_prop( - &mut self, - rvalue: &Rvalue<'tcx>, - source_info: SourceInfo, - place: Place<'tcx>, - ) -> Option<()> { + fn check_rvalue(&mut self, rvalue: &Rvalue<'tcx>, location: Location) -> Option<()> { // Perform any special handling for specific Rvalue types. // Generally, checks here fall into one of two categories: // 1. Additional checking to provide useful lints to the user @@ -448,11 +425,11 @@ impl<'mir, 'tcx> ConstPropagator<'mir, 'tcx> { // lint. Rvalue::UnaryOp(op, arg) => { trace!("checking UnaryOp(op = {:?}, arg = {:?})", op, arg); - self.check_unary_op(*op, arg, source_info)?; + self.check_unary_op(*op, arg, location)?; } Rvalue::BinaryOp(op, box (left, right)) => { trace!("checking BinaryOp(op = {:?}, left = {:?}, right = {:?})", op, left, right); - self.check_binary_op(*op, left, right, source_info)?; + self.check_binary_op(*op, left, right, location)?; } Rvalue::CheckedBinaryOp(op, box (left, right)) => { trace!( @@ -461,7 +438,7 @@ impl<'mir, 'tcx> ConstPropagator<'mir, 'tcx> { left, right ); - self.check_binary_op(*op, left, right, source_info)?; + self.check_binary_op(*op, left, right, location)?; } // Do not try creating references (#67862) @@ -500,22 +477,105 @@ impl<'mir, 'tcx> ConstPropagator<'mir, 'tcx> { if rvalue.needs_subst() { return None; } - if !rvalue - .ty(&self.ecx.frame().body.local_decls, *self.ecx.tcx) - .is_sized(*self.ecx.tcx, self.param_env) - { + if !rvalue.ty(self.local_decls(), self.tcx).is_sized(self.tcx, self.param_env) { // the interpreter doesn't support unsized locals (only unsized arguments), // but rustc does (in a kinda broken way), so we have to skip them here return None; } - self.use_ecx(source_info, |this| this.ecx.eval_rvalue_into_place(rvalue, place)) + Some(()) + } + + fn check_assertion( + &mut self, + expected: bool, + msg: &AssertKind<Operand<'tcx>>, + cond: &Operand<'tcx>, + location: Location, + ) -> Option<!> { + let ref value = self.eval_operand(&cond, location)?; + trace!("assertion on {:?} should be {:?}", value, expected); + + let expected = Scalar::from_bool(expected); + let value_const = self.use_ecx(location, |this| this.ecx.read_scalar(&value))?; + + if expected != value_const { + // Poison all places this operand references so that further code + // doesn't use the invalid value + if let Some(place) = 
cond.place() { + Self::remove_const(&mut self.ecx, place.local); + } + + enum DbgVal<T> { + Val(T), + Underscore, + } + impl<T: std::fmt::Debug> std::fmt::Debug for DbgVal<T> { + fn fmt(&self, fmt: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + match self { + Self::Val(val) => val.fmt(fmt), + Self::Underscore => fmt.write_str("_"), + } + } + } + let mut eval_to_int = |op| { + // This can be `None` if the lhs wasn't const propagated and we just + // triggered the assert on the value of the rhs. + self.eval_operand(op, location) + .and_then(|op| self.ecx.read_immediate(&op).ok()) + .map_or(DbgVal::Underscore, |op| DbgVal::Val(op.to_const_int())) + }; + let msg = match msg { + AssertKind::DivisionByZero(op) => AssertKind::DivisionByZero(eval_to_int(op)), + AssertKind::RemainderByZero(op) => AssertKind::RemainderByZero(eval_to_int(op)), + AssertKind::Overflow(bin_op @ (BinOp::Div | BinOp::Rem), op1, op2) => { + // Division overflow is *UB* in the MIR, and different than the + // other overflow checks. + AssertKind::Overflow(*bin_op, eval_to_int(op1), eval_to_int(op2)) + } + AssertKind::BoundsCheck { ref len, ref index } => { + let len = eval_to_int(len); + let index = eval_to_int(index); + AssertKind::BoundsCheck { len, index } + } + // Remaining overflow errors are already covered by checks on the binary operators. + AssertKind::Overflow(..) | AssertKind::OverflowNeg(_) => return None, + // Need proper const propagator for these. + _ => return None, + }; + self.report_assert_as_lint( + lint::builtin::UNCONDITIONAL_PANIC, + location, + "this operation will panic at runtime", + msg, + ); + } + + None + } + + fn ensure_not_propagated(&self, local: Local) { + if cfg!(debug_assertions) { + assert!( + self.get_const(local.into()).is_none() + || self + .layout_of(self.local_decls()[local].ty) + .map_or(true, |layout| layout.is_zst()), + "failed to remove values for `{local:?}`, value={:?}", + self.get_const(local.into()), + ) + } } } impl<'tcx> Visitor<'tcx> for ConstPropagator<'_, 'tcx> { fn visit_body(&mut self, body: &Body<'tcx>) { - for (bb, data) in body.basic_blocks.iter_enumerated() { + while let Some(bb) = self.worklist.pop() { + if !self.visited_blocks.insert(bb) { + continue; + } + + let data = &body.basic_blocks[bb]; self.visit_basic_block_data(bb, data); } } @@ -527,202 +587,147 @@ impl<'tcx> Visitor<'tcx> for ConstPropagator<'_, 'tcx> { fn visit_constant(&mut self, constant: &Constant<'tcx>, location: Location) { trace!("visit_constant: {:?}", constant); self.super_constant(constant, location); - self.eval_constant(constant, self.source_info.unwrap()); + self.eval_constant(constant, location); + } + + fn visit_assign(&mut self, place: &Place<'tcx>, rvalue: &Rvalue<'tcx>, location: Location) { + self.super_assign(place, rvalue, location); + + let Some(()) = self.check_rvalue(rvalue, location) else { return }; + + match self.ecx.machine.can_const_prop[place.local] { + // Do nothing if the place is indirect. + _ if place.is_indirect() => {} + ConstPropMode::NoPropagation => self.ensure_not_propagated(place.local), + ConstPropMode::OnlyInsideOwnBlock | ConstPropMode::FullConstProp => { + if self + .use_ecx(location, |this| this.ecx.eval_rvalue_into_place(rvalue, *place)) + .is_none() + { + // Const prop failed, so erase the destination, ensuring that whatever happens + // from here on, does not know about the previous value. 
+ // This is important in case we have + // ```rust + // let mut x = 42; + // x = SOME_MUTABLE_STATIC; + // // x must now be uninit + // ``` + // FIXME: we overzealously erase the entire local, because that's easier to + // implement. + trace!( + "propagation into {:?} failed. + Nuking the entire site from orbit, it's the only way to be sure", + place, + ); + Self::remove_const(&mut self.ecx, place.local); + } + } + } } fn visit_statement(&mut self, statement: &Statement<'tcx>, location: Location) { trace!("visit_statement: {:?}", statement); - let source_info = statement.source_info; - self.source_info = Some(source_info); - if let StatementKind::Assign(box (place, ref rval)) = statement.kind { - let can_const_prop = self.ecx.machine.can_const_prop[place.local]; - if let Some(()) = self.const_prop(rval, source_info, place) { - match can_const_prop { - ConstPropMode::OnlyInsideOwnBlock => { - trace!( - "found local restricted to its block. \ - Will remove it from const-prop after block is finished. Local: {:?}", - place.local - ); - } - ConstPropMode::OnlyPropagateInto | ConstPropMode::NoPropagation => { - trace!("can't propagate into {:?}", place); - if place.local != RETURN_PLACE { + + // We want to evaluate operands before any change to the assigned-to value, + // so we recurse first. + self.super_statement(statement, location); + + match statement.kind { + StatementKind::SetDiscriminant { ref place, .. } => { + match self.ecx.machine.can_const_prop[place.local] { + // Do nothing if the place is indirect. + _ if place.is_indirect() => {} + ConstPropMode::NoPropagation => self.ensure_not_propagated(place.local), + ConstPropMode::FullConstProp | ConstPropMode::OnlyInsideOwnBlock => { + if self.use_ecx(location, |this| this.ecx.statement(statement)).is_some() { + trace!("propped discriminant into {:?}", place); + } else { Self::remove_const(&mut self.ecx, place.local); } } - ConstPropMode::FullConstProp => {} } - } else { - // Const prop failed, so erase the destination, ensuring that whatever happens - // from here on, does not know about the previous value. - // This is important in case we have - // ```rust - // let mut x = 42; - // x = SOME_MUTABLE_STATIC; - // // x must now be uninit - // ``` - // FIXME: we overzealously erase the entire local, because that's easier to - // implement. - trace!( - "propagation into {:?} failed. - Nuking the entire site from orbit, it's the only way to be sure", - place, - ); - Self::remove_const(&mut self.ecx, place.local); } - } else { - match statement.kind { - StatementKind::SetDiscriminant { ref place, .. 
} => { - match self.ecx.machine.can_const_prop[place.local] { - ConstPropMode::FullConstProp | ConstPropMode::OnlyInsideOwnBlock => { - if self - .use_ecx(source_info, |this| this.ecx.statement(statement)) - .is_some() - { - trace!("propped discriminant into {:?}", place); - } else { - Self::remove_const(&mut self.ecx, place.local); - } - } - ConstPropMode::OnlyPropagateInto | ConstPropMode::NoPropagation => { - Self::remove_const(&mut self.ecx, place.local); - } - } - } - StatementKind::StorageLive(local) | StatementKind::StorageDead(local) => { - let frame = self.ecx.frame_mut(); - frame.locals[local].value = - if let StatementKind::StorageLive(_) = statement.kind { - LocalValue::Live(interpret::Operand::Immediate( - interpret::Immediate::Uninit, - )) - } else { - LocalValue::Dead - }; - } - _ => {} + StatementKind::StorageLive(local) => { + let frame = self.ecx.frame_mut(); + frame.locals[local].value = + LocalValue::Live(interpret::Operand::Immediate(interpret::Immediate::Uninit)); + } + StatementKind::StorageDead(local) => { + let frame = self.ecx.frame_mut(); + frame.locals[local].value = LocalValue::Dead; } + _ => {} } - - self.super_statement(statement, location); } fn visit_terminator(&mut self, terminator: &Terminator<'tcx>, location: Location) { - let source_info = terminator.source_info; - self.source_info = Some(source_info); self.super_terminator(terminator, location); match &terminator.kind { TerminatorKind::Assert { expected, ref msg, ref cond, .. } => { - if let Some(ref value) = self.eval_operand(&cond, source_info) { - trace!("assertion on {:?} should be {:?}", value, expected); - let expected = Scalar::from_bool(*expected); - let Ok(value_const) = self.ecx.read_scalar(&value) else { - // FIXME should be used use_ecx rather than a local match... but we have - // quite a few of these read_scalar/read_immediate that need fixing. - return - }; - if expected != value_const { - enum DbgVal<T> { - Val(T), - Underscore, - } - impl<T: std::fmt::Debug> std::fmt::Debug for DbgVal<T> { - fn fmt(&self, fmt: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { - match self { - Self::Val(val) => val.fmt(fmt), - Self::Underscore => fmt.write_str("_"), - } - } - } - let mut eval_to_int = |op| { - // This can be `None` if the lhs wasn't const propagated and we just - // triggered the assert on the value of the rhs. - self.eval_operand(op, source_info) - .and_then(|op| self.ecx.read_immediate(&op).ok()) - .map_or(DbgVal::Underscore, |op| DbgVal::Val(op.to_const_int())) - }; - let msg = match msg { - AssertKind::DivisionByZero(op) => { - Some(AssertKind::DivisionByZero(eval_to_int(op))) - } - AssertKind::RemainderByZero(op) => { - Some(AssertKind::RemainderByZero(eval_to_int(op))) - } - AssertKind::Overflow(bin_op @ (BinOp::Div | BinOp::Rem), op1, op2) => { - // Division overflow is *UB* in the MIR, and different than the - // other overflow checks. - Some(AssertKind::Overflow( - *bin_op, - eval_to_int(op1), - eval_to_int(op2), - )) - } - AssertKind::BoundsCheck { ref len, ref index } => { - let len = eval_to_int(len); - let index = eval_to_int(index); - Some(AssertKind::BoundsCheck { len, index }) - } - // Remaining overflow errors are already covered by checks on the binary operators. - AssertKind::Overflow(..) | AssertKind::OverflowNeg(_) => None, - // Need proper const propagator for these. 
- _ => None, - }; - // Poison all places this operand references so that further code - // doesn't use the invalid value - match cond { - Operand::Move(ref place) | Operand::Copy(ref place) => { - Self::remove_const(&mut self.ecx, place.local); - } - Operand::Constant(_) => {} - } - if let Some(msg) = msg { - self.report_assert_as_lint( - lint::builtin::UNCONDITIONAL_PANIC, - source_info, - "this operation will panic at runtime", - msg, - ); - } - } + self.check_assertion(*expected, msg, cond, location); + } + TerminatorKind::SwitchInt { ref discr, ref targets } => { + if let Some(ref value) = self.eval_operand(&discr, location) + && let Some(value_const) = self.use_ecx(location, |this| this.ecx.read_scalar(&value)) + && let Ok(constant) = value_const.try_to_int() + && let Ok(constant) = constant.to_bits(constant.size()) + { + // We managed to evaluate the discriminant, so we know we only need to visit + // one target. + let target = targets.target_for_value(constant); + self.worklist.push(target); + return; } + // We failed to evaluate the discriminant, fallback to visiting all successors. } // None of these have Operands to const-propagate. TerminatorKind::Goto { .. } | TerminatorKind::Resume - | TerminatorKind::Abort + | TerminatorKind::Terminate | TerminatorKind::Return | TerminatorKind::Unreachable | TerminatorKind::Drop { .. } - | TerminatorKind::DropAndReplace { .. } | TerminatorKind::Yield { .. } | TerminatorKind::GeneratorDrop | TerminatorKind::FalseEdge { .. } | TerminatorKind::FalseUnwind { .. } - | TerminatorKind::SwitchInt { .. } | TerminatorKind::Call { .. } | TerminatorKind::InlineAsm { .. } => {} } + self.worklist.extend(terminator.successors()); + } + + fn visit_basic_block_data(&mut self, block: BasicBlock, data: &BasicBlockData<'tcx>) { + self.super_basic_block_data(block, data); + // We remove all Locals which are restricted in propagation to their containing blocks and // which were modified in the current block. // Take it out of the ecx so we can get a mutable reference to the ecx for `remove_const`. - let mut locals = std::mem::take(&mut self.ecx.machine.written_only_inside_own_block_locals); - for &local in locals.iter() { + let mut written_only_inside_own_block_locals = + std::mem::take(&mut self.ecx.machine.written_only_inside_own_block_locals); + + // This loop can get very hot for some bodies: it check each local in each bb. + // To avoid this quadratic behaviour, we only clear the locals that were modified inside + // the current block. + for local in written_only_inside_own_block_locals.drain() { + debug_assert_eq!( + self.ecx.machine.can_const_prop[local], + ConstPropMode::OnlyInsideOwnBlock + ); Self::remove_const(&mut self.ecx, local); } - locals.clear(); - // Put it back so we reuse the heap of the storage - self.ecx.machine.written_only_inside_own_block_locals = locals; + self.ecx.machine.written_only_inside_own_block_locals = + written_only_inside_own_block_locals; + if cfg!(debug_assertions) { - // Ensure we are correctly erasing locals with the non-debug-assert logic. 
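// A self-contained sketch (plain std types, illustrative names) of the take/drain/put-back
// pattern in the new code above: the per-block set of written locals is moved out with
// `std::mem::take` so its owner can be mutated while iterating, only the locals that were
// actually written in the current block are cleared (rather than rescanning every local in
// every block), and the emptied set is stored back so its heap allocation is reused.
use std::collections::{HashMap, HashSet};

struct BlockState {
    written_only_inside_own_block: HashSet<u32>,
    known_values: HashMap<u32, i64>,
}

impl BlockState {
    fn clear_block_locals(&mut self) {
        // Move the set out so `self` can be mutated while we iterate it.
        let mut written = std::mem::take(&mut self.written_only_inside_own_block);
        for local in written.drain() {
            // Forget whatever was learned about this local inside the block.
            self.known_values.remove(&local);
        }
        // Store the (now empty) set back so its allocation is reused by the next block.
        self.written_only_inside_own_block = written;
    }
}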
- for local in self.ecx.machine.only_propagate_inside_block_locals.iter() { - assert!( - self.get_const(local.into()).is_none() - || self - .layout_of(self.local_decls[local].ty) - .map_or(true, |layout| layout.is_zst()) - ) + for (local, &mode) in self.ecx.machine.can_const_prop.iter_enumerated() { + match mode { + ConstPropMode::FullConstProp => {} + ConstPropMode::NoPropagation | ConstPropMode::OnlyInsideOwnBlock => { + self.ensure_not_propagated(local); + } + } } } } diff --git a/compiler/rustc_mir_transform/src/copy_prop.rs b/compiler/rustc_mir_transform/src/copy_prop.rs index f27beb64a..b571215f2 100644 --- a/compiler/rustc_mir_transform/src/copy_prop.rs +++ b/compiler/rustc_mir_transform/src/copy_prop.rs @@ -1,5 +1,5 @@ use rustc_index::bit_set::BitSet; -use rustc_index::vec::IndexVec; +use rustc_index::vec::IndexSlice; use rustc_middle::mir::visit::*; use rustc_middle::mir::*; use rustc_middle::ty::TyCtxt; @@ -102,7 +102,7 @@ struct Replacer<'a, 'tcx> { fully_moved: BitSet<Local>, storage_to_remove: BitSet<Local>, borrowed_locals: BitSet<Local>, - copy_classes: &'a IndexVec<Local, Local>, + copy_classes: &'a IndexSlice<Local, Local>, } impl<'tcx> MutVisitor<'tcx> for Replacer<'_, 'tcx> { @@ -131,7 +131,6 @@ impl<'tcx> MutVisitor<'tcx> for Replacer<'_, 'tcx> { PlaceContext::NonMutatingUse( NonMutatingUseContext::SharedBorrow | NonMutatingUseContext::ShallowBorrow - | NonMutatingUseContext::UniqueBorrow | NonMutatingUseContext::AddressOf, ) => true, // For debuginfo, merging locals is ok. diff --git a/compiler/rustc_mir_transform/src/coverage/debug.rs b/compiler/rustc_mir_transform/src/coverage/debug.rs index 22ea8710e..725883b83 100644 --- a/compiler/rustc_mir_transform/src/coverage/debug.rs +++ b/compiler/rustc_mir_transform/src/coverage/debug.rs @@ -818,11 +818,10 @@ pub(super) fn term_type(kind: &TerminatorKind<'_>) -> &'static str { TerminatorKind::Goto { .. } => "Goto", TerminatorKind::SwitchInt { .. } => "SwitchInt", TerminatorKind::Resume => "Resume", - TerminatorKind::Abort => "Abort", + TerminatorKind::Terminate => "Terminate", TerminatorKind::Return => "Return", TerminatorKind::Unreachable => "Unreachable", TerminatorKind::Drop { .. } => "Drop", - TerminatorKind::DropAndReplace { .. } => "DropAndReplace", TerminatorKind::Call { .. } => "Call", TerminatorKind::Assert { .. } => "Assert", TerminatorKind::Yield { .. } => "Yield", diff --git a/compiler/rustc_mir_transform/src/coverage/graph.rs b/compiler/rustc_mir_transform/src/coverage/graph.rs index a2671eef2..7391a77b0 100644 --- a/compiler/rustc_mir_transform/src/coverage/graph.rs +++ b/compiler/rustc_mir_transform/src/coverage/graph.rs @@ -5,7 +5,7 @@ use rustc_data_structures::fx::FxHashMap; use rustc_data_structures::graph::dominators::{self, Dominators}; use rustc_data_structures::graph::{self, GraphSuccessors, WithNumNodes, WithStartNode}; use rustc_index::bit_set::BitSet; -use rustc_index::vec::IndexVec; +use rustc_index::vec::{IndexSlice, IndexVec}; use rustc_middle::mir::coverage::*; use rustc_middle::mir::{self, BasicBlock, BasicBlockData, Terminator, TerminatorKind}; @@ -37,8 +37,7 @@ impl CoverageGraph { // `SwitchInt` to have multiple targets to the same destination `BasicBlock`, so // de-duplication is required. This is done without reordering the successors. 
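// A self-contained sketch of the order-preserving de-duplication described in the comment
// above: duplicate successor blocks (e.g. several `SwitchInt` arms jumping to the same
// target) are dropped while the first-seen order is kept. Plain `usize` indices and a
// `Vec<bool>` "seen" table stand in for the compiler's block indices; names are illustrative.
fn dedup_successors_preserving_order(successors: &[usize], num_blocks: usize) -> Vec<usize> {
    let mut seen = vec![false; num_blocks];
    let mut deduped = Vec::with_capacity(successors.len());
    for &succ in successors {
        if !seen[succ] {
            seen[succ] = true;
            deduped.push(succ);
        }
    }
    deduped
}

// For example, dedup_successors_preserving_order(&[3, 1, 3, 2, 1], 4) yields [3, 1, 2].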
- let bcbs_len = bcbs.len(); - let mut seen = IndexVec::from_elem_n(false, bcbs_len); + let mut seen = IndexVec::from_elem(false, &bcbs); let successors = IndexVec::from_fn_n( |bcb| { for b in seen.iter_mut() { @@ -60,7 +59,7 @@ impl CoverageGraph { bcbs.len(), ); - let mut predecessors = IndexVec::from_elem_n(Vec::new(), bcbs.len()); + let mut predecessors = IndexVec::from_elem(Vec::new(), &bcbs); for (bcb, bcb_successors) in successors.iter_enumerated() { for &successor in bcb_successors { predecessors[successor].push(bcb); @@ -123,7 +122,7 @@ impl CoverageGraph { match term.kind { TerminatorKind::Return { .. } - | TerminatorKind::Abort + | TerminatorKind::Terminate | TerminatorKind::Yield { .. } | TerminatorKind::SwitchInt { .. } => { // The `bb` has more than one _outgoing_ edge, or exits the function. Save the @@ -137,7 +136,7 @@ impl CoverageGraph { debug!(" because term.kind = {:?}", term.kind); // Note that this condition is based on `TerminatorKind`, even though it // theoretically boils down to `successors().len() != 1`; that is, either zero - // (e.g., `Return`, `Abort`) or multiple successors (e.g., `SwitchInt`), but + // (e.g., `Return`, `Terminate`) or multiple successors (e.g., `SwitchInt`), but // since the BCB CFG ignores things like unwind branches (which exist in the // `Terminator`s `successors()` list) checking the number of successors won't // work. @@ -156,7 +155,6 @@ impl CoverageGraph { | TerminatorKind::Resume | TerminatorKind::Unreachable | TerminatorKind::Drop { .. } - | TerminatorKind::DropAndReplace { .. } | TerminatorKind::Call { .. } | TerminatorKind::GeneratorDrop | TerminatorKind::Assert { .. } @@ -177,10 +175,10 @@ impl CoverageGraph { fn add_basic_coverage_block( bcbs: &mut IndexVec<BasicCoverageBlock, BasicCoverageBlockData>, - bb_to_bcb: &mut IndexVec<BasicBlock, Option<BasicCoverageBlock>>, + bb_to_bcb: &mut IndexSlice<BasicBlock, Option<BasicCoverageBlock>>, basic_blocks: Vec<BasicBlock>, ) { - let bcb = BasicCoverageBlock::from_usize(bcbs.len()); + let bcb = bcbs.next_index(); for &bb in basic_blocks.iter() { bb_to_bcb[bb] = Some(bcb); } @@ -538,29 +536,29 @@ impl TraverseCoverageGraphWithLoops { "TraverseCoverageGraphWithLoops::next - context_stack: {:?}", self.context_stack.iter().rev().collect::<Vec<_>>() ); - while let Some(next_bcb) = { - // Strip contexts with empty worklists from the top of the stack - while self.context_stack.last().map_or(false, |context| context.worklist.is_empty()) { + + while let Some(context) = self.context_stack.last_mut() { + if let Some(next_bcb) = context.worklist.pop() { + if !self.visited.insert(next_bcb) { + debug!("Already visited: {:?}", next_bcb); + continue; + } + debug!("Visiting {:?}", next_bcb); + if self.backedges[next_bcb].len() > 0 { + debug!("{:?} is a loop header! Start a new TraversalContext...", next_bcb); + self.context_stack.push(TraversalContext { + loop_backedges: Some((self.backedges[next_bcb].clone(), next_bcb)), + worklist: Vec::new(), + }); + } + self.extend_worklist(basic_coverage_blocks, next_bcb); + return Some(next_bcb); + } else { + // Strip contexts with empty worklists from the top of the stack self.context_stack.pop(); } - // Pop the next bcb off of the current context_stack. If none, all BCBs were visited. 
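// A self-contained sketch of the traversal shape introduced above: pop from the innermost
// context's worklist, strip contexts whose worklists are exhausted, and skip nodes that were
// already visited. The loop-header case (pushing a fresh context) is omitted; `usize` node
// ids and a `HashSet` stand in for the compiler's types, so the names here are illustrative.
use std::collections::HashSet;

struct Traversal {
    // Each inner `Vec` is the worklist of one traversal context; the last entry is innermost.
    context_stack: Vec<Vec<usize>>,
    visited: HashSet<usize>,
}

impl Traversal {
    fn next_node(&mut self) -> Option<usize> {
        while let Some(context) = self.context_stack.last_mut() {
            if let Some(node) = context.pop() {
                if !self.visited.insert(node) {
                    // Already visited via another path; try the next worklist entry.
                    continue;
                }
                return Some(node);
            } else {
                // This context's worklist is empty, so strip it from the top of the stack.
                self.context_stack.pop();
            }
        }
        // All contexts are exhausted: every reachable node has been yielded.
        None
    }
}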
- self.context_stack.last_mut().map_or(None, |context| context.worklist.pop()) - } { - if !self.visited.insert(next_bcb) { - debug!("Already visited: {:?}", next_bcb); - continue; - } - debug!("Visiting {:?}", next_bcb); - if self.backedges[next_bcb].len() > 0 { - debug!("{:?} is a loop header! Start a new TraversalContext...", next_bcb); - self.context_stack.push(TraversalContext { - loop_backedges: Some((self.backedges[next_bcb].clone(), next_bcb)), - worklist: Vec::new(), - }); - } - self.extend_worklist(basic_coverage_blocks, next_bcb); - return Some(next_bcb); } + None } diff --git a/compiler/rustc_mir_transform/src/coverage/mod.rs b/compiler/rustc_mir_transform/src/coverage/mod.rs index 9a6171598..5ecb2d6a6 100644 --- a/compiler/rustc_mir_transform/src/coverage/mod.rs +++ b/compiler/rustc_mir_transform/src/coverage/mod.rs @@ -577,5 +577,5 @@ fn get_body_span<'tcx>( fn hash_mir_source<'tcx>(tcx: TyCtxt<'tcx>, hir_body: &'tcx rustc_hir::Body<'tcx>) -> u64 { // FIXME(cjgillot) Stop hashing HIR manually here. let owner = hir_body.id().hir_id.owner; - tcx.hir_owner_nodes(owner).unwrap().hash_including_bodies.to_smaller_hash() + tcx.hir_owner_nodes(owner).unwrap().opt_hash_including_bodies.unwrap().to_smaller_hash() } diff --git a/compiler/rustc_mir_transform/src/coverage/spans.rs b/compiler/rustc_mir_transform/src/coverage/spans.rs index 8ee316773..287ae2170 100644 --- a/compiler/rustc_mir_transform/src/coverage/spans.rs +++ b/compiler/rustc_mir_transform/src/coverage/spans.rs @@ -832,6 +832,7 @@ pub(super) fn filtered_statement_span(statement: &Statement<'_>) -> Option<Span> | StatementKind::SetDiscriminant { .. } | StatementKind::Deinit(..) | StatementKind::Retag(_, _) + | StatementKind::PlaceMention(..) | StatementKind::AscribeUserType(_, _) => { Some(statement.source_info.span) } @@ -850,7 +851,6 @@ pub(super) fn filtered_terminator_span(terminator: &Terminator<'_>) -> Option<Sp TerminatorKind::Unreachable // Unreachable blocks are not connected to the MIR CFG | TerminatorKind::Assert { .. } | TerminatorKind::Drop { .. } - | TerminatorKind::DropAndReplace { .. } | TerminatorKind::SwitchInt { .. } // For `FalseEdge`, only the `real` branch is taken, so it is similar to a `Goto`. | TerminatorKind::FalseEdge { .. } @@ -869,7 +869,7 @@ pub(super) fn filtered_terminator_span(terminator: &Terminator<'_>) -> Option<Sp // Retain spans from all other terminators TerminatorKind::Resume - | TerminatorKind::Abort + | TerminatorKind::Terminate | TerminatorKind::Return | TerminatorKind::Yield { .. } | TerminatorKind::GeneratorDrop diff --git a/compiler/rustc_mir_transform/src/coverage/tests.rs b/compiler/rustc_mir_transform/src/coverage/tests.rs index fa7f22303..0f6c06e37 100644 --- a/compiler/rustc_mir_transform/src/coverage/tests.rs +++ b/compiler/rustc_mir_transform/src/coverage/tests.rs @@ -65,7 +65,7 @@ impl<'tcx> MockBlocks<'tcx> { } fn push(&mut self, kind: TerminatorKind<'tcx>) -> BasicBlock { - let next_lo = if let Some(last) = self.blocks.last() { + let next_lo = if let Some(last) = self.blocks.last_index() { self.blocks[last].terminator().source_info.span.hi() } else { BytePos(1) @@ -86,7 +86,6 @@ impl<'tcx> MockBlocks<'tcx> { TerminatorKind::Assert { ref mut target, .. } | TerminatorKind::Call { target: Some(ref mut target), .. } | TerminatorKind::Drop { ref mut target, .. } - | TerminatorKind::DropAndReplace { ref mut target, .. } | TerminatorKind::FalseEdge { real_target: ref mut target, .. } | TerminatorKind::FalseUnwind { real_target: ref mut target, .. 
} | TerminatorKind::Goto { ref mut target } @@ -141,7 +140,7 @@ impl<'tcx> MockBlocks<'tcx> { args: vec![], destination: self.dummy_place.clone(), target: Some(TEMP_BLOCK), - cleanup: None, + unwind: UnwindAction::Continue, from_hir_call: false, fn_span: DUMMY_SP, }, @@ -184,7 +183,6 @@ fn debug_basic_blocks(mir_body: &Body<'_>) -> String { TerminatorKind::Assert { target, .. } | TerminatorKind::Call { target: Some(target), .. } | TerminatorKind::Drop { target, .. } - | TerminatorKind::DropAndReplace { target, .. } | TerminatorKind::FalseEdge { real_target: target, .. } | TerminatorKind::FalseUnwind { real_target: target, .. } | TerminatorKind::Goto { target } diff --git a/compiler/rustc_mir_transform/src/dataflow_const_prop.rs b/compiler/rustc_mir_transform/src/dataflow_const_prop.rs index 49ded10ba..d4db7e2de 100644 --- a/compiler/rustc_mir_transform/src/dataflow_const_prop.rs +++ b/compiler/rustc_mir_transform/src/dataflow_const_prop.rs @@ -8,12 +8,12 @@ use rustc_data_structures::fx::FxHashMap; use rustc_hir::def::DefKind; use rustc_middle::mir::visit::{MutVisitor, Visitor}; use rustc_middle::mir::*; +use rustc_middle::ty::layout::TyAndLayout; use rustc_middle::ty::{self, Ty, TyCtxt}; use rustc_mir_dataflow::value_analysis::{Map, State, TrackElem, ValueAnalysis, ValueOrPlace}; use rustc_mir_dataflow::{lattice::FlatSet, Analysis, ResultsVisitor, SwitchIntEdgeEffects}; use rustc_span::DUMMY_SP; -use rustc_target::abi::Align; -use rustc_target::abi::VariantIdx; +use rustc_target::abi::{Align, FieldIdx, VariantIdx}; use crate::MirPass; @@ -147,7 +147,7 @@ impl<'tcx> ValueAnalysis<'tcx> for ConstAnalysis<'_, 'tcx> { for (field_index, operand) in operands.iter().enumerate() { if let Some(field) = self.map().apply( variant_target_idx, - TrackElem::Field(Field::from_usize(field_index)), + TrackElem::Field(FieldIdx::from_usize(field_index)), ) { let result = self.handle_operand(operand, state); state.insert_idx(field, result, self.map()); @@ -548,7 +548,7 @@ impl<'mir, 'tcx> rustc_const_eval::interpret::Machine<'mir, 'tcx> for DummyMachi unimplemented!() } - fn enforce_validity(_ecx: &InterpCx<'mir, 'tcx, Self>) -> bool { + fn enforce_validity(_ecx: &InterpCx<'mir, 'tcx, Self>, _layout: TyAndLayout<'tcx>) -> bool { unimplemented!() } fn alignment_check_failed( @@ -567,7 +567,7 @@ impl<'mir, 'tcx> rustc_const_eval::interpret::Machine<'mir, 'tcx> for DummyMachi _args: &[rustc_const_eval::interpret::OpTy<'tcx, Self::Provenance>], _destination: &rustc_const_eval::interpret::PlaceTy<'tcx, Self::Provenance>, _target: Option<BasicBlock>, - _unwind: rustc_const_eval::interpret::StackPopUnwind, + _unwind: UnwindAction, ) -> interpret::InterpResult<'tcx, Option<(&'mir Body<'tcx>, ty::Instance<'tcx>)>> { unimplemented!() } @@ -578,7 +578,7 @@ impl<'mir, 'tcx> rustc_const_eval::interpret::Machine<'mir, 'tcx> for DummyMachi _args: &[rustc_const_eval::interpret::OpTy<'tcx, Self::Provenance>], _destination: &rustc_const_eval::interpret::PlaceTy<'tcx, Self::Provenance>, _target: Option<BasicBlock>, - _unwind: rustc_const_eval::interpret::StackPopUnwind, + _unwind: UnwindAction, ) -> interpret::InterpResult<'tcx> { unimplemented!() } @@ -586,7 +586,7 @@ impl<'mir, 'tcx> rustc_const_eval::interpret::Machine<'mir, 'tcx> for DummyMachi fn assert_panic( _ecx: &mut InterpCx<'mir, 'tcx, Self>, _msg: &rustc_middle::mir::AssertMessage<'tcx>, - _unwind: Option<BasicBlock>, + _unwind: UnwindAction, ) -> interpret::InterpResult<'tcx> { unimplemented!() } diff --git 
a/compiler/rustc_mir_transform/src/dead_store_elimination.rs b/compiler/rustc_mir_transform/src/dead_store_elimination.rs index 9dbfb089d..18c407b42 100644 --- a/compiler/rustc_mir_transform/src/dead_store_elimination.rs +++ b/compiler/rustc_mir_transform/src/dead_store_elimination.rs @@ -56,7 +56,9 @@ pub fn eliminate<'tcx>(tcx: TyCtxt<'tcx>, body: &mut Body<'tcx>, borrowed: &BitS | StatementKind::ConstEvalCounter | StatementKind::Nop => (), - StatementKind::FakeRead(_) | StatementKind::AscribeUserType(_, _) => { + StatementKind::FakeRead(_) + | StatementKind::PlaceMention(_) + | StatementKind::AscribeUserType(_, _) => { bug!("{:?} not found in this MIR phase!", &statement.kind) } } diff --git a/compiler/rustc_mir_transform/src/deduce_param_attrs.rs b/compiler/rustc_mir_transform/src/deduce_param_attrs.rs index 89ca04a15..e5c3fa564 100644 --- a/compiler/rustc_mir_transform/src/deduce_param_attrs.rs +++ b/compiler/rustc_mir_transform/src/deduce_param_attrs.rs @@ -5,7 +5,7 @@ //! purposes on a best-effort basis. We compute them here and store them into the crate metadata so //! dependent crates can use them. -use rustc_hir::def_id::DefId; +use rustc_hir::def_id::LocalDefId; use rustc_index::bit_set::BitSet; use rustc_middle::mir::visit::{NonMutatingUseContext, PlaceContext, Visitor}; use rustc_middle::mir::{Body, Local, Location, Operand, Terminator, TerminatorKind, RETURN_PLACE}; @@ -149,7 +149,10 @@ fn type_will_always_be_passed_directly(ty: Ty<'_>) -> bool { /// body of the function instead of just the signature. These can be useful for optimization /// purposes on a best-effort basis. We compute them here and store them into the crate metadata so /// dependent crates can use them. -pub fn deduced_param_attrs<'tcx>(tcx: TyCtxt<'tcx>, def_id: DefId) -> &'tcx [DeducedParamAttrs] { +pub fn deduced_param_attrs<'tcx>( + tcx: TyCtxt<'tcx>, + def_id: LocalDefId, +) -> &'tcx [DeducedParamAttrs] { // This computation is unfortunately rather expensive, so don't do it unless we're optimizing. // Also skip it in incremental mode. if tcx.sess.opts.optimize == OptLevel::No || tcx.sess.opts.incremental.is_some() { @@ -182,10 +185,6 @@ pub fn deduced_param_attrs<'tcx>(tcx: TyCtxt<'tcx>, def_id: DefId) -> &'tcx [Ded return &[]; } - // Deduced attributes for other crates should be read from the metadata instead of via this - // function. - debug_assert!(def_id.is_local()); - // Grab the optimized MIR. Analyze it to determine which arguments have been mutated. let body: &Body<'tcx> = tcx.optimized_mir(def_id); let mut deduce_read_only = DeduceReadOnly::new(body.arg_count); diff --git a/compiler/rustc_mir_transform/src/deref_separator.rs b/compiler/rustc_mir_transform/src/deref_separator.rs index 7508df92d..b8a5b92be 100644 --- a/compiler/rustc_mir_transform/src/deref_separator.rs +++ b/compiler/rustc_mir_transform/src/deref_separator.rs @@ -40,7 +40,7 @@ impl<'tcx> MutVisitor<'tcx> for DerefChecker<'tcx> { let temp = self.patcher.new_internal_with_info( ty, self.local_decls[p_ref.local].source_info.span, - Some(Box::new(LocalInfo::DerefTemp)), + LocalInfo::DerefTemp, ); // We are adding current p_ref's projections to our diff --git a/compiler/rustc_mir_transform/src/dest_prop.rs b/compiler/rustc_mir_transform/src/dest_prop.rs index 2e481b972..391649177 100644 --- a/compiler/rustc_mir_transform/src/dest_prop.rs +++ b/compiler/rustc_mir_transform/src/dest_prop.rs @@ -83,7 +83,7 @@ //! that ever have their address taken. Of course that requires actually having alias analysis //! 
(and a model to build it on), so this might be a bit of a ways off. //! -//! * Various perf improvents. There are a bunch of comments in here marked `PERF` with ideas for +//! * Various perf improvements. There are a bunch of comments in here marked `PERF` with ideas for //! how to do things more efficiently. However, the complexity of the pass as a whole should be //! kept in mind. //! @@ -583,7 +583,9 @@ impl WriteInfo { | StatementKind::Coverage(_) | StatementKind::StorageLive(_) | StatementKind::StorageDead(_) => (), - StatementKind::FakeRead(_) | StatementKind::AscribeUserType(_, _) => { + StatementKind::FakeRead(_) + | StatementKind::AscribeUserType(_, _) + | StatementKind::PlaceMention(_) => { bug!("{:?} not found in this MIR phase", statement) } } @@ -643,15 +645,14 @@ impl WriteInfo { } } TerminatorKind::Goto { .. } - | TerminatorKind::Resume { .. } - | TerminatorKind::Abort { .. } + | TerminatorKind::Resume + | TerminatorKind::Terminate | TerminatorKind::Return | TerminatorKind::Unreachable { .. } => (), TerminatorKind::Drop { .. } => { // `Drop`s create a `&mut` and so are not considered } - TerminatorKind::DropAndReplace { .. } - | TerminatorKind::Yield { .. } + TerminatorKind::Yield { .. } | TerminatorKind::GeneratorDrop | TerminatorKind::FalseEdge { .. } | TerminatorKind::FalseUnwind { .. } => { @@ -787,7 +788,7 @@ impl<'tcx> Visitor<'tcx> for FindAssignments<'_, '_, 'tcx> { fn is_local_required(local: Local, body: &Body<'_>) -> bool { match body.local_kind(local) { LocalKind::Arg | LocalKind::ReturnPointer => true, - LocalKind::Var | LocalKind::Temp => false, + LocalKind::Temp => false, } } diff --git a/compiler/rustc_mir_transform/src/elaborate_box_derefs.rs b/compiler/rustc_mir_transform/src/elaborate_box_derefs.rs index 954bb5aff..856234994 100644 --- a/compiler/rustc_mir_transform/src/elaborate_box_derefs.rs +++ b/compiler/rustc_mir_transform/src/elaborate_box_derefs.rs @@ -9,6 +9,7 @@ use rustc_middle::mir::patch::MirPatch; use rustc_middle::mir::visit::MutVisitor; use rustc_middle::mir::*; use rustc_middle::ty::{Ty, TyCtxt}; +use rustc_target::abi::FieldIdx; /// Constructs the types used when accessing a Box's pointer pub fn build_ptr_tys<'tcx>( @@ -32,9 +33,9 @@ pub fn build_projection<'tcx>( ptr_ty: Ty<'tcx>, ) -> [PlaceElem<'tcx>; 3] { [ - PlaceElem::Field(Field::new(0), unique_ty), - PlaceElem::Field(Field::new(0), nonnull_ty), - PlaceElem::Field(Field::new(0), ptr_ty), + PlaceElem::Field(FieldIdx::new(0), unique_ty), + PlaceElem::Field(FieldIdx::new(0), nonnull_ty), + PlaceElem::Field(FieldIdx::new(0), ptr_ty), ] } @@ -91,13 +92,14 @@ pub struct ElaborateBoxDerefs; impl<'tcx> MirPass<'tcx> for ElaborateBoxDerefs { fn run_pass(&self, tcx: TyCtxt<'tcx>, body: &mut Body<'tcx>) { if let Some(def_id) = tcx.lang_items().owned_box() { - let unique_did = tcx.adt_def(def_id).non_enum_variant().fields[0].did; + let unique_did = + tcx.adt_def(def_id).non_enum_variant().fields[FieldIdx::from_u32(0)].did; let Some(nonnull_def) = tcx.type_of(unique_did).subst_identity().ty_adt_def() else { span_bug!(tcx.def_span(unique_did), "expected Box to contain Unique") }; - let nonnull_did = nonnull_def.non_enum_variant().fields[0].did; + let nonnull_did = nonnull_def.non_enum_variant().fields[FieldIdx::from_u32(0)].did; let patch = MirPatch::new(body); diff --git a/compiler/rustc_mir_transform/src/elaborate_drops.rs b/compiler/rustc_mir_transform/src/elaborate_drops.rs index bdfd8dc6e..a702113bd 100644 --- a/compiler/rustc_mir_transform/src/elaborate_drops.rs +++ 
b/compiler/rustc_mir_transform/src/elaborate_drops.rs @@ -14,19 +14,18 @@ use rustc_mir_dataflow::un_derefer::UnDerefer; use rustc_mir_dataflow::MoveDataParamEnv; use rustc_mir_dataflow::{on_all_children_bits, on_all_drop_children_bits}; use rustc_mir_dataflow::{Analysis, ResultsCursor}; -use rustc_span::Span; -use rustc_target::abi::VariantIdx; +use rustc_span::{DesugaringKind, Span}; +use rustc_target::abi::{FieldIdx, VariantIdx}; use std::fmt; -/// During MIR building, Drop and DropAndReplace terminators are inserted in every place where a drop may occur. +/// During MIR building, Drop terminators are inserted in every place where a drop may occur. /// However, in this phase, the presence of these terminators does not guarantee that a destructor will run, /// as the target of the drop may be uninitialized. /// In general, the compiler cannot determine at compile time whether a destructor will run or not. /// -/// At a high level, this pass refines Drop and DropAndReplace to only run the destructor if the +/// At a high level, this pass refines Drop to only run the destructor if the /// target is initialized. The way this is achievied is by inserting drop flags for every variable /// that may be dropped, and then using those flags to determine whether a destructor should run. -/// This pass also removes DropAndReplace, replacing it with a Drop paired with an assign statement. /// Once this is complete, Drop terminators in the MIR correspond to a call to the "drop glue" or /// "drop shim" for the type of the dropped place. /// @@ -121,8 +120,7 @@ fn remove_dead_unwinds<'tcx>( .into_results_cursor(body); for (bb, bb_data) in body.basic_blocks.iter_enumerated() { let place = match bb_data.terminator().kind { - TerminatorKind::Drop { ref place, unwind: Some(_), .. } - | TerminatorKind::DropAndReplace { ref place, unwind: Some(_), .. } => { + TerminatorKind::Drop { ref place, unwind: UnwindAction::Cleanup(_), .. } => { und.derefer(place.as_ref(), body).unwrap_or(*place) } _ => continue, @@ -162,7 +160,7 @@ fn remove_dead_unwinds<'tcx>( let basic_blocks = body.basic_blocks.as_mut(); for &bb in dead_unwinds.iter() { if let Some(unwind) = basic_blocks[bb].terminator_mut().unwind_mut() { - *unwind = None; + *unwind = UnwindAction::Unreachable; } } } @@ -254,7 +252,7 @@ impl<'a, 'tcx> DropElaborator<'a, 'tcx> for Elaborator<'a, '_, 'tcx> { } } - fn field_subpath(&self, path: Self::Path, field: Field) -> Option<Self::Path> { + fn field_subpath(&self, path: Self::Path, field: FieldIdx) -> Option<Self::Path> { rustc_mir_dataflow::move_path_children_matching(self.ctxt.move_data(), path, |e| match e { ProjectionElem::Field(idx, _) => idx == field, _ => false, @@ -343,8 +341,7 @@ impl<'b, 'tcx> ElaborateDropsCtxt<'b, 'tcx> { } let terminator = data.terminator(); let place = match terminator.kind { - TerminatorKind::Drop { ref place, .. } - | TerminatorKind::DropAndReplace { ref place, .. } => { + TerminatorKind::Drop { ref place, .. 
} => { self.un_derefer.derefer(place.as_ref(), self.body).unwrap_or(*place) } _ => continue, @@ -402,7 +399,6 @@ impl<'b, 'tcx> ElaborateDropsCtxt<'b, 'tcx> { let loc = Location { block: bb, statement_index: data.statements.len() }; let terminator = data.terminator(); - let resume_block = self.patch.resume_block(); match terminator.kind { TerminatorKind::Drop { mut place, target, unwind } => { if let Some(new_place) = self.un_derefer.derefer(place.as_ref(), self.body) { @@ -411,124 +407,53 @@ impl<'b, 'tcx> ElaborateDropsCtxt<'b, 'tcx> { self.init_data.seek_before(loc); match self.move_data().rev_lookup.find(place.as_ref()) { - LookupResult::Exact(path) => elaborate_drop( - &mut Elaborator { ctxt: self }, - terminator.source_info, - place, - path, - target, - if data.is_cleanup { + LookupResult::Exact(path) => { + let unwind = if data.is_cleanup { Unwind::InCleanup } else { - Unwind::To(Option::unwrap_or(unwind, resume_block)) - }, - bb, - ), + match unwind { + UnwindAction::Cleanup(cleanup) => Unwind::To(cleanup), + UnwindAction::Continue => Unwind::To(self.patch.resume_block()), + UnwindAction::Unreachable => { + Unwind::To(self.patch.unreachable_cleanup_block()) + } + UnwindAction::Terminate => { + Unwind::To(self.patch.terminate_block()) + } + } + }; + elaborate_drop( + &mut Elaborator { ctxt: self }, + terminator.source_info, + place, + path, + target, + unwind, + bb, + ) + } LookupResult::Parent(..) => { - self.tcx.sess.delay_span_bug( - terminator.source_info.span, - &format!("drop of untracked value {:?}", bb), - ); + if !matches!( + terminator.source_info.span.desugaring_kind(), + Some(DesugaringKind::Replace), + ) { + self.tcx.sess.delay_span_bug( + terminator.source_info.span, + &format!("drop of untracked value {:?}", bb), + ); + } + // A drop and replace behind a pointer/array/whatever. + // The borrow checker requires that these locations are initialized before the assignment, + // so we just leave an unconditional drop. + assert!(!data.is_cleanup); } } } - TerminatorKind::DropAndReplace { mut place, ref value, target, unwind } => { - assert!(!data.is_cleanup); - - if let Some(new_place) = self.un_derefer.derefer(place.as_ref(), self.body) { - place = new_place; - } - self.elaborate_replace(loc, place, value, target, unwind); - } _ => continue, } } } - /// Elaborate a MIR `replace` terminator. This instruction - /// is not directly handled by codegen, and therefore - /// must be desugared. - /// - /// The desugaring drops the location if needed, and then writes - /// the value (including setting the drop flag) over it in *both* arms. - /// - /// The `replace` terminator can also be called on places that - /// are not tracked by elaboration (for example, - /// `replace x[i] <- tmp0`). The borrow checker requires that - /// these locations are initialized before the assignment, - /// so we just generate an unconditional drop. 
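// A sketch, in ordinary safe Rust rather than MIR, of the "drop and replace" behaviour the
// removed comment above describes: drop the old value only if it is initialized (the drop
// flag), then write the new value and mark the slot initialized again on every path.
// `Slot` and its methods are illustrative stand-ins, not compiler types.
struct Slot<T> {
    value: Option<T>, // `Some` plays the role of "drop flag is set"
}

impl<T> Slot<T> {
    fn replace_with(&mut self, new_value: T) {
        // The destructor of the old value runs only when the slot was initialized;
        // `take` both reads the value and clears the flag.
        if let Some(old) = self.value.take() {
            drop(old);
        }
        // The write (and re-setting of the flag) happens unconditionally.
        self.value = Some(new_value);
    }
}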
- fn elaborate_replace( - &mut self, - loc: Location, - place: Place<'tcx>, - value: &Operand<'tcx>, - target: BasicBlock, - unwind: Option<BasicBlock>, - ) { - let bb = loc.block; - let data = &self.body[bb]; - let terminator = data.terminator(); - assert!(!data.is_cleanup, "DropAndReplace in unwind path not supported"); - - let assign = Statement { - kind: StatementKind::Assign(Box::new((place, Rvalue::Use(value.clone())))), - source_info: terminator.source_info, - }; - - let unwind = unwind.unwrap_or_else(|| self.patch.resume_block()); - let unwind = self.patch.new_block(BasicBlockData { - statements: vec![assign.clone()], - terminator: Some(Terminator { - kind: TerminatorKind::Goto { target: unwind }, - ..*terminator - }), - is_cleanup: true, - }); - - let target = self.patch.new_block(BasicBlockData { - statements: vec![assign], - terminator: Some(Terminator { kind: TerminatorKind::Goto { target }, ..*terminator }), - is_cleanup: false, - }); - - match self.move_data().rev_lookup.find(place.as_ref()) { - LookupResult::Exact(path) => { - debug!("elaborate_drop_and_replace({:?}) - tracked {:?}", terminator, path); - self.init_data.seek_before(loc); - elaborate_drop( - &mut Elaborator { ctxt: self }, - terminator.source_info, - place, - path, - target, - Unwind::To(unwind), - bb, - ); - on_all_children_bits(self.tcx, self.body, self.move_data(), path, |child| { - self.set_drop_flag( - Location { block: target, statement_index: 0 }, - child, - DropFlagState::Present, - ); - self.set_drop_flag( - Location { block: unwind, statement_index: 0 }, - child, - DropFlagState::Present, - ); - }); - } - LookupResult::Parent(parent) => { - // drop and replace behind a pointer/array/whatever. The location - // must be initialized. - debug!("elaborate_drop_and_replace({:?}) - untracked {:?}", terminator, parent); - self.patch.patch_terminator( - bb, - TerminatorKind::Drop { place, target, unwind: Some(unwind) }, - ); - } - } - } - fn constant_bool(&self, span: Span, val: bool) -> Rvalue<'tcx> { Rvalue::Use(Operand::Constant(Box::new(Constant { span, @@ -560,7 +485,10 @@ impl<'b, 'tcx> ElaborateDropsCtxt<'b, 'tcx> { continue; } if let TerminatorKind::Call { - destination, target: Some(tgt), cleanup: Some(_), .. + destination, + target: Some(tgt), + unwind: UnwindAction::Cleanup(_), + .. } = data.terminator().kind { assert!(!self.patch.is_patched(bb)); @@ -600,22 +528,12 @@ impl<'b, 'tcx> ElaborateDropsCtxt<'b, 'tcx> { debug!("drop_flags_for_locs({:?})", data); for i in 0..(data.statements.len() + 1) { debug!("drop_flag_for_locs: stmt {}", i); - let mut allow_initializations = true; if i == data.statements.len() { match data.terminator().kind { TerminatorKind::Drop { .. } => { // drop elaboration should handle that by itself continue; } - TerminatorKind::DropAndReplace { .. } => { - // this contains the move of the source and - // the initialization of the destination. We - // only want the former - the latter is handled - // by the elaboration code and must be done - // *after* the destination is dropped. 
- assert!(self.patch.is_patched(bb)); - allow_initializations = false; - } TerminatorKind::Resume => { // It is possible for `Resume` to be patched // (in particular it can be patched to be replaced with @@ -632,19 +550,19 @@ impl<'b, 'tcx> ElaborateDropsCtxt<'b, 'tcx> { self.body, self.env, loc, - |path, ds| { - if ds == DropFlagState::Absent || allow_initializations { - self.set_drop_flag(loc, path, ds) - } - }, + |path, ds| self.set_drop_flag(loc, path, ds), ) } // There may be a critical edge after this call, // so mark the return as initialized *before* the // call. - if let TerminatorKind::Call { destination, target: Some(_), cleanup: None, .. } = - data.terminator().kind + if let TerminatorKind::Call { + destination, + target: Some(_), + unwind: UnwindAction::Continue | UnwindAction::Unreachable | UnwindAction::Terminate, + .. + } = data.terminator().kind { assert!(!self.patch.is_patched(bb)); diff --git a/compiler/rustc_mir_transform/src/ffi_unwind_calls.rs b/compiler/rustc_mir_transform/src/ffi_unwind_calls.rs index e6546911a..c9b24adba 100644 --- a/compiler/rustc_mir_transform/src/ffi_unwind_calls.rs +++ b/compiler/rustc_mir_transform/src/ffi_unwind_calls.rs @@ -1,5 +1,6 @@ -use rustc_hir::def_id::{CrateNum, LocalDefId, LOCAL_CRATE}; +use rustc_hir::def_id::{LocalDefId, LOCAL_CRATE}; use rustc_middle::mir::*; +use rustc_middle::query::LocalCrate; use rustc_middle::ty::layout; use rustc_middle::ty::query::Providers; use rustc_middle::ty::{self, TyCtxt}; @@ -121,9 +122,7 @@ fn has_ffi_unwind_calls(tcx: TyCtxt<'_>, local_def_id: LocalDefId) -> bool { tainted } -fn required_panic_strategy(tcx: TyCtxt<'_>, cnum: CrateNum) -> Option<PanicStrategy> { - assert_eq!(cnum, LOCAL_CRATE); - +fn required_panic_strategy(tcx: TyCtxt<'_>, _: LocalCrate) -> Option<PanicStrategy> { if tcx.is_panic_runtime(LOCAL_CRATE) { return Some(tcx.sess.panic_strategy()); } diff --git a/compiler/rustc_mir_transform/src/function_item_references.rs b/compiler/rustc_mir_transform/src/function_item_references.rs index 66d32b954..8601c1b2d 100644 --- a/compiler/rustc_mir_transform/src/function_item_references.rs +++ b/compiler/rustc_mir_transform/src/function_item_references.rs @@ -34,7 +34,7 @@ impl<'tcx> Visitor<'tcx> for FunctionItemRefChecker<'_, 'tcx> { args, destination: _, target: _, - cleanup: _, + unwind: _, from_hir_call: _, fn_span: _, } = &terminator.kind diff --git a/compiler/rustc_mir_transform/src/generator.rs b/compiler/rustc_mir_transform/src/generator.rs index 2e97312ee..4c4423721 100644 --- a/compiler/rustc_mir_transform/src/generator.rs +++ b/compiler/rustc_mir_transform/src/generator.rs @@ -70,10 +70,10 @@ use rustc_mir_dataflow::impls::{ }; use rustc_mir_dataflow::storage::always_storage_live_locals; use rustc_mir_dataflow::{self, Analysis}; -use rustc_span::def_id::DefId; +use rustc_span::def_id::{DefId, LocalDefId}; use rustc_span::symbol::sym; use rustc_span::Span; -use rustc_target::abi::VariantIdx; +use rustc_target::abi::{FieldIdx, VariantIdx}; use rustc_target::spec::PanicStrategy; use std::{iter, ops}; @@ -162,9 +162,10 @@ impl<'tcx> MutVisitor<'tcx> for PinArgVisitor<'tcx> { place, Place { local: SELF_ARG, - projection: self - .tcx() - .mk_place_elems(&[ProjectionElem::Field(Field::new(0), self.ref_gen_ty)]), + projection: self.tcx().mk_place_elems(&[ProjectionElem::Field( + FieldIdx::new(0), + self.ref_gen_ty, + )]), }, self.tcx, ); @@ -273,7 +274,7 @@ impl<'tcx> TransformVisitor<'tcx> { statements.push(Statement { kind: StatementKind::Assign(Box::new(( Place::return_place(), - 
Rvalue::Aggregate(Box::new(kind), vec![]), + Rvalue::Aggregate(Box::new(kind), IndexVec::new()), ))), source_info, }); @@ -286,7 +287,7 @@ impl<'tcx> TransformVisitor<'tcx> { statements.push(Statement { kind: StatementKind::Assign(Box::new(( Place::return_place(), - Rvalue::Aggregate(Box::new(kind), vec![val]), + Rvalue::Aggregate(Box::new(kind), [val].into()), ))), source_info, }); @@ -297,7 +298,7 @@ impl<'tcx> TransformVisitor<'tcx> { let self_place = Place::from(SELF_ARG); let base = self.tcx.mk_place_downcast_unnamed(self_place, variant_index); let mut projection = base.projection.to_vec(); - projection.push(ProjectionElem::Field(Field::new(idx), ty)); + projection.push(ProjectionElem::Field(FieldIdx::new(idx), ty)); Place { local: base.local, projection: self.tcx.mk_place_elems(&projection) } } @@ -924,13 +925,19 @@ fn compute_layout<'tcx>( debug!(?decl); let ignore_for_traits = if tcx.sess.opts.unstable_opts.drop_tracking_mir { + // Do not `assert_crate_local` here, as post-borrowck cleanup may have already cleared + // the information. This is alright, since `ignore_for_traits` is only relevant when + // this code runs on pre-cleanup MIR, and `ignore_for_traits = false` is the safer + // default. match decl.local_info { // Do not include raw pointers created from accessing `static` items, as those could // well be re-created by another access to the same static. - Some(box LocalInfo::StaticRef { is_thread_local, .. }) => !is_thread_local, + ClearCrossCrate::Set(box LocalInfo::StaticRef { is_thread_local, .. }) => { + !is_thread_local + } // Fake borrows are only read by fake reads, so do not have any reality in // post-analysis MIR. - Some(box LocalInfo::FakeBorrow) => true, + ClearCrossCrate::Set(box LocalInfo::FakeBorrow) => true, _ => false, } } else { @@ -961,7 +968,7 @@ fn compute_layout<'tcx>( // Build the generator variant field list. // Create a map from local indices to generator struct indices. - let mut variant_fields: IndexVec<VariantIdx, IndexVec<Field, GeneratorSavedLocal>> = + let mut variant_fields: IndexVec<VariantIdx, IndexVec<FieldIdx, GeneratorSavedLocal>> = iter::repeat(IndexVec::new()).take(RESERVED_VARIANTS).collect(); let mut remap = FxHashMap::default(); for (suspension_point_idx, live_locals) in live_locals_at_suspension_points.iter().enumerate() { @@ -1053,7 +1060,12 @@ fn elaborate_generator_drops<'tcx>(tcx: TyCtxt<'tcx>, body: &mut Body<'tcx>) { let unwind = if block_data.is_cleanup { Unwind::InCleanup } else { - Unwind::To(unwind.unwrap_or_else(|| elaborator.patch.resume_block())) + Unwind::To(match *unwind { + UnwindAction::Cleanup(tgt) => tgt, + UnwindAction::Continue => elaborator.patch.resume_block(), + UnwindAction::Unreachable => elaborator.patch.unreachable_cleanup_block(), + UnwindAction::Terminate => elaborator.patch.terminate_block(), + }) }; elaborate_drop( &mut elaborator, @@ -1140,7 +1152,7 @@ fn insert_panic_block<'tcx>( expected: true, msg: message, target: assert_block, - cleanup: None, + unwind: UnwindAction::Continue, }; let source_info = SourceInfo::outermost(body.span); @@ -1182,7 +1194,7 @@ fn can_unwind<'tcx>(tcx: TyCtxt<'tcx>, body: &Body<'tcx>) -> bool { // These never unwind. TerminatorKind::Goto { .. } | TerminatorKind::SwitchInt { .. } - | TerminatorKind::Abort + | TerminatorKind::Terminate | TerminatorKind::Return | TerminatorKind::Unreachable | TerminatorKind::GeneratorDrop @@ -1199,7 +1211,6 @@ fn can_unwind<'tcx>(tcx: TyCtxt<'tcx>, body: &Body<'tcx>) -> bool { // These may unwind. TerminatorKind::Drop { .. 
} - | TerminatorKind::DropAndReplace { .. } | TerminatorKind::Call { .. } | TerminatorKind::InlineAsm { .. } | TerminatorKind::Assert { .. } => return true, @@ -1242,8 +1253,8 @@ fn create_generator_resume_function<'tcx>( } else if !block.is_cleanup { // Any terminators that *can* unwind but don't have an unwind target set are also // pointed at our poisoning block (unless they're part of the cleanup path). - if let Some(unwind @ None) = block.terminator_mut().unwind_mut() { - *unwind = Some(poison_block); + if let Some(unwind @ UnwindAction::Continue) = block.terminator_mut().unwind_mut() { + *unwind = UnwindAction::Cleanup(poison_block); } } } @@ -1288,8 +1299,11 @@ fn create_generator_resume_function<'tcx>( fn insert_clean_drop(body: &mut Body<'_>) -> BasicBlock { let return_block = insert_term_block(body, TerminatorKind::Return); - let term = - TerminatorKind::Drop { place: Place::from(SELF_ARG), target: return_block, unwind: None }; + let term = TerminatorKind::Drop { + place: Place::from(SELF_ARG), + target: return_block, + unwind: UnwindAction::Continue, + }; let source_info = SourceInfo::outermost(body.span); // Create a block to destroy an unresumed generators. This can only destroy upvars. @@ -1381,10 +1395,9 @@ fn create_cases<'tcx>( #[instrument(level = "debug", skip(tcx), ret)] pub(crate) fn mir_generator_witnesses<'tcx>( tcx: TyCtxt<'tcx>, - def_id: DefId, + def_id: LocalDefId, ) -> GeneratorLayout<'tcx> { assert!(tcx.sess.opts.unstable_opts.drop_tracking_mir); - let def_id = def_id.expect_local(); let (body, _) = tcx.mir_promoted(ty::WithOptConstParam::unknown(def_id)); let body = body.borrow(); @@ -1543,6 +1556,13 @@ impl<'tcx> MirPass<'tcx> for StateTransform { body.arg_count = 2; // self, resume arg body.spread_arg = None; + // The original arguments to the function are no longer arguments, mark them as such. + // Otherwise they'll conflict with our new arguments, which although they don't have + // argument_index set, will get emitted as unnamed arguments. + for var in &mut body.var_debug_info { + var.argument_index = None; + } + body.generator.as_mut().unwrap().yield_ty = None; body.generator.as_mut().unwrap().generator_layout = Some(layout); @@ -1648,6 +1668,7 @@ impl<'tcx> Visitor<'tcx> for EnsureGeneratorFieldAssignmentsNeverAlias<'_> { | StatementKind::StorageDead(_) | StatementKind::Retag(..) | StatementKind::AscribeUserType(..) + | StatementKind::PlaceMention(..) | StatementKind::Coverage(..) | StatementKind::Intrinsic(..) | StatementKind::ConstEvalCounter @@ -1664,7 +1685,7 @@ impl<'tcx> Visitor<'tcx> for EnsureGeneratorFieldAssignmentsNeverAlias<'_> { args, destination, target: Some(_), - cleanup: _, + unwind: _, from_hir_call: _, fn_span: _, } => { @@ -1687,11 +1708,10 @@ impl<'tcx> Visitor<'tcx> for EnsureGeneratorFieldAssignmentsNeverAlias<'_> { | TerminatorKind::Goto { .. } | TerminatorKind::SwitchInt { .. } | TerminatorKind::Resume - | TerminatorKind::Abort + | TerminatorKind::Terminate | TerminatorKind::Return | TerminatorKind::Unreachable | TerminatorKind::Drop { .. } - | TerminatorKind::DropAndReplace { .. } | TerminatorKind::Assert { .. } | TerminatorKind::GeneratorDrop | TerminatorKind::FalseEdge { .. 
} @@ -1872,12 +1892,14 @@ fn check_must_not_suspend_def( data: SuspendCheckData<'_>, ) -> bool { if let Some(attr) = tcx.get_attr(def_id, sym::must_not_suspend) { - let msg = format!( - "{}`{}`{} held across a suspend point, but should not be", - data.descr_pre, - tcx.def_path_str(def_id), - data.descr_post, - ); + let msg = rustc_errors::DelayDm(|| { + format!( + "{}`{}`{} held across a suspend point, but should not be", + data.descr_pre, + tcx.def_path_str(def_id), + data.descr_post, + ) + }); tcx.struct_span_lint_hir( rustc_session::lint::builtin::MUST_NOT_SUSPEND, hir_id, diff --git a/compiler/rustc_mir_transform/src/inline.rs b/compiler/rustc_mir_transform/src/inline.rs index 6e6d6566f..f0cb317f4 100644 --- a/compiler/rustc_mir_transform/src/inline.rs +++ b/compiler/rustc_mir_transform/src/inline.rs @@ -10,7 +10,7 @@ use rustc_middle::mir::*; use rustc_middle::ty::{self, Instance, InstanceDef, ParamEnv, Ty, TyCtxt}; use rustc_session::config::OptLevel; use rustc_span::{hygiene::ExpnKind, ExpnData, LocalExpnId, Span}; -use rustc_target::abi::VariantIdx; +use rustc_target::abi::{FieldIdx, FIRST_VARIANT}; use rustc_target::spec::abi::Abi; use crate::simplify::{remove_dead_blocks, CfgSimplifier}; @@ -270,7 +270,9 @@ impl<'tcx> Inliner<'tcx> { | InstanceDef::FnPtrShim(..) | InstanceDef::ClosureOnceShim { .. } | InstanceDef::DropGlue(..) - | InstanceDef::CloneShim(..) => return Ok(()), + | InstanceDef::CloneShim(..) + | InstanceDef::ThreadLocalShim(..) + | InstanceDef::FnPtrAddrShim(..) => return Ok(()), } if self.tcx.is_constructor(callee_def_id) { @@ -424,13 +426,6 @@ impl<'tcx> Inliner<'tcx> { debug!(" final inline threshold = {}", threshold); // FIXME: Give a bonus to functions with only a single caller - let diverges = matches!( - callee_body.basic_blocks[START_BLOCK].terminator().kind, - TerminatorKind::Unreachable | TerminatorKind::Call { target: None, .. } - ); - if diverges && !matches!(callee_attrs.inline, InlineAttr::Always) { - return Err("callee diverges unconditionally"); - } let mut checker = CostChecker { tcx: self.tcx, @@ -453,14 +448,12 @@ impl<'tcx> Inliner<'tcx> { checker.visit_basic_block_data(bb, blk); let term = blk.terminator(); - if let TerminatorKind::Drop { ref place, target, unwind } - | TerminatorKind::DropAndReplace { ref place, target, unwind, .. } = term.kind - { + if let TerminatorKind::Drop { ref place, target, unwind } = term.kind { work_list.push(target); // If the place doesn't actually need dropping, treat it like a regular goto. let ty = callsite.callee.subst_mir(self.tcx, &place.ty(callee_body, tcx).ty); - if ty.needs_drop(tcx, self.param_env) && let Some(unwind) = unwind { + if ty.needs_drop(tcx, self.param_env) && let UnwindAction::Cleanup(unwind) = unwind { work_list.push(unwind); } } else if callee_attrs.instruction_set != self.codegen_fn_attrs.instruction_set @@ -507,7 +500,7 @@ impl<'tcx> Inliner<'tcx> { ) { let terminator = caller_body[callsite.block].terminator.take().unwrap(); match terminator.kind { - TerminatorKind::Call { args, destination, cleanup, .. } => { + TerminatorKind::Call { args, destination, unwind, .. 
} => { // If the call is something like `a[*i] = f(i)`, where // `i : &mut usize`, then just duplicating the `a[*i]` // Place could result in two different locations if `f` @@ -578,7 +571,7 @@ impl<'tcx> Inliner<'tcx> { destination: destination_local, callsite_scope: caller_body.source_scopes[callsite.source_info.scope].clone(), callsite, - cleanup_block: cleanup, + cleanup_block: unwind, in_cleanup_block: false, tcx: self.tcx, expn_data, @@ -708,7 +701,7 @@ impl<'tcx> Inliner<'tcx> { // The `tmp0`, `tmp1`, and `tmp2` in our example above. let tuple_tmp_args = tuple_tys.iter().enumerate().map(|(i, ty)| { // This is e.g., `tuple_tmp.0` in our example above. - let tuple_field = Operand::Move(tcx.mk_place_field(tuple, Field::new(i), ty)); + let tuple_field = Operand::Move(tcx.mk_place_field(tuple, FieldIdx::new(i), ty)); // Spill to a local to make e.g., `tmp0`. self.create_temp_if_necessary(tuple_field, callsite, caller_body) @@ -815,20 +808,19 @@ impl<'tcx> Visitor<'tcx> for CostChecker<'_, 'tcx> { fn visit_terminator(&mut self, terminator: &Terminator<'tcx>, location: Location) { let tcx = self.tcx; match terminator.kind { - TerminatorKind::Drop { ref place, unwind, .. } - | TerminatorKind::DropAndReplace { ref place, unwind, .. } => { + TerminatorKind::Drop { ref place, unwind, .. } => { // If the place doesn't actually need dropping, treat it like a regular goto. let ty = self.instance.subst_mir(tcx, &place.ty(self.callee_body, tcx).ty); if ty.needs_drop(tcx, self.param_env) { self.cost += CALL_PENALTY; - if unwind.is_some() { + if let UnwindAction::Cleanup(_) = unwind { self.cost += LANDINGPAD_PENALTY; } } else { self.cost += INSTR_COST; } } - TerminatorKind::Call { func: Operand::Constant(ref f), cleanup, .. } => { + TerminatorKind::Call { func: Operand::Constant(ref f), unwind, .. } => { let fn_ty = self.instance.subst_mir(tcx, &f.literal.ty()); self.cost += if let ty::FnDef(def_id, _) = *fn_ty.kind() && tcx.is_intrinsic(def_id) { // Don't give intrinsics the extra penalty for calls @@ -836,20 +828,20 @@ impl<'tcx> Visitor<'tcx> for CostChecker<'_, 'tcx> { } else { CALL_PENALTY }; - if cleanup.is_some() { + if let UnwindAction::Cleanup(_) = unwind { self.cost += LANDINGPAD_PENALTY; } } - TerminatorKind::Assert { cleanup, .. } => { + TerminatorKind::Assert { unwind, .. } => { self.cost += CALL_PENALTY; - if cleanup.is_some() { + if let UnwindAction::Cleanup(_) = unwind { self.cost += LANDINGPAD_PENALTY; } } TerminatorKind::Resume => self.cost += RESUME_PENALTY, - TerminatorKind::InlineAsm { cleanup, .. } => { + TerminatorKind::InlineAsm { unwind, .. 
} => { self.cost += INSTR_COST; - if cleanup.is_some() { + if let UnwindAction::Cleanup(_) = unwind { self.cost += LANDINGPAD_PENALTY; } } @@ -914,8 +906,8 @@ impl<'tcx> Visitor<'tcx> for CostChecker<'_, 'tcx> { check_equal(self, *f_ty); } ty::Adt(adt_def, substs) => { - let var = parent_ty.variant_index.unwrap_or(VariantIdx::from_u32(0)); - let Some(field) = adt_def.variant(var).fields.get(f.as_usize()) else { + let var = parent_ty.variant_index.unwrap_or(FIRST_VARIANT); + let Some(field) = adt_def.variant(var).fields.get(f) else { self.validation = Err("malformed MIR"); return; }; @@ -987,7 +979,7 @@ struct Integrator<'a, 'tcx> { destination: Local, callsite_scope: SourceScopeData<'tcx>, callsite: &'a CallSite<'tcx>, - cleanup_block: Option<BasicBlock>, + cleanup_block: UnwindAction, in_cleanup_block: bool, tcx: TyCtxt<'tcx>, expn_data: LocalExpnId, @@ -1022,18 +1014,21 @@ impl Integrator<'_, '_> { new } - fn map_unwind(&self, unwind: Option<BasicBlock>) -> Option<BasicBlock> { + fn map_unwind(&self, unwind: UnwindAction) -> UnwindAction { if self.in_cleanup_block { - if unwind.is_some() { - bug!("cleanup on cleanup block"); + match unwind { + UnwindAction::Cleanup(_) | UnwindAction::Continue => { + bug!("cleanup on cleanup block"); + } + UnwindAction::Unreachable | UnwindAction::Terminate => return unwind, } - return unwind; } match unwind { - Some(target) => Some(self.map_block(target)), + UnwindAction::Unreachable | UnwindAction::Terminate => unwind, + UnwindAction::Cleanup(target) => UnwindAction::Cleanup(self.map_block(target)), // Add an unwind edge to the original call's cleanup block - None => self.cleanup_block, + UnwindAction::Continue => self.cleanup_block, } } } @@ -1120,20 +1115,19 @@ impl<'tcx> MutVisitor<'tcx> for Integrator<'_, 'tcx> { *tgt = self.map_block(*tgt); } } - TerminatorKind::Drop { ref mut target, ref mut unwind, .. } - | TerminatorKind::DropAndReplace { ref mut target, ref mut unwind, .. } => { + TerminatorKind::Drop { ref mut target, ref mut unwind, .. } => { *target = self.map_block(*target); *unwind = self.map_unwind(*unwind); } - TerminatorKind::Call { ref mut target, ref mut cleanup, .. } => { + TerminatorKind::Call { ref mut target, ref mut unwind, .. } => { if let Some(ref mut tgt) = *target { *tgt = self.map_block(*tgt); } - *cleanup = self.map_unwind(*cleanup); + *unwind = self.map_unwind(*unwind); } - TerminatorKind::Assert { ref mut target, ref mut cleanup, .. } => { + TerminatorKind::Assert { ref mut target, ref mut unwind, .. 
} => { *target = self.map_block(*target); - *cleanup = self.map_unwind(*cleanup); + *unwind = self.map_unwind(*unwind); } TerminatorKind::Return => { terminator.kind = if let Some(tgt) = self.callsite.target { @@ -1143,11 +1137,14 @@ impl<'tcx> MutVisitor<'tcx> for Integrator<'_, 'tcx> { } } TerminatorKind::Resume => { - if let Some(tgt) = self.cleanup_block { - terminator.kind = TerminatorKind::Goto { target: tgt } - } + terminator.kind = match self.cleanup_block { + UnwindAction::Cleanup(tgt) => TerminatorKind::Goto { target: tgt }, + UnwindAction::Continue => TerminatorKind::Resume, + UnwindAction::Unreachable => TerminatorKind::Unreachable, + UnwindAction::Terminate => TerminatorKind::Terminate, + }; } - TerminatorKind::Abort => {} + TerminatorKind::Terminate => {} TerminatorKind::Unreachable => {} TerminatorKind::FalseEdge { ref mut real_target, ref mut imaginary_target } => { *real_target = self.map_block(*real_target); @@ -1158,11 +1155,11 @@ impl<'tcx> MutVisitor<'tcx> for Integrator<'_, 'tcx> { { bug!("False unwinds should have been removed before inlining") } - TerminatorKind::InlineAsm { ref mut destination, ref mut cleanup, .. } => { + TerminatorKind::InlineAsm { ref mut destination, ref mut unwind, .. } => { if let Some(ref mut tgt) = *destination { *tgt = self.map_block(*tgt); } - *cleanup = self.map_unwind(*cleanup); + *unwind = self.map_unwind(*unwind); } } } diff --git a/compiler/rustc_mir_transform/src/inline/cycle.rs b/compiler/rustc_mir_transform/src/inline/cycle.rs index 792457c80..8aa3c23d0 100644 --- a/compiler/rustc_mir_transform/src/inline/cycle.rs +++ b/compiler/rustc_mir_transform/src/inline/cycle.rs @@ -83,7 +83,11 @@ pub(crate) fn mir_callgraph_reachable<'tcx>( | InstanceDef::ReifyShim(_) | InstanceDef::FnPtrShim(..) | InstanceDef::ClosureOnceShim { .. } + | InstanceDef::ThreadLocalShim { .. } | InstanceDef::CloneShim(..) => {} + + // This shim does not call any other functions, thus there can be no recursion. + InstanceDef::FnPtrAddrShim(..) => continue, InstanceDef::DropGlue(..) => { // FIXME: A not fully substituted drop shim can cause ICEs if one attempts to // have its MIR built. 
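(Editorial aside on the hunks above, inserted here between file diffs: the recurring change in the inliner is that unwind edges are no longer an `Option<BasicBlock>` field named `cleanup` but an explicit `unwind: UnwindAction`. The following is a minimal, self-contained sketch of that encoding and of the old-to-new mapping, using toy stand-ins rather than the real `rustc_middle::mir` types; all names below are illustrative assumptions, not the upstream definitions.)

// Toy stand-ins for the real rustc_middle::mir types; for illustration only.
type BasicBlock = usize;

#[derive(Clone, Copy, Debug, PartialEq)]
enum UnwindAction {
    Continue,            // keep unwinding into the caller (the old `None`)
    Unreachable,         // unwinding cannot happen here
    Terminate,           // abort the process if unwinding reaches this point
    Cleanup(BasicBlock), // branch to a cleanup block (the old `Some(bb)`)
}

// Roughly how the old `Option<BasicBlock>` cleanup edge maps onto the new enum.
fn from_old_cleanup(cleanup: Option<BasicBlock>) -> UnwindAction {
    match cleanup {
        Some(bb) => UnwindAction::Cleanup(bb),
        None => UnwindAction::Continue,
    }
}

fn main() {
    assert_eq!(from_old_cleanup(Some(3)), UnwindAction::Cleanup(3));
    assert_eq!(from_old_cleanup(None), UnwindAction::Continue);
}

Under this richer encoding, the inliner's `map_unwind` can distinguish "continue unwinding into the caller's cleanup block" from "terminate" and "unreachable" instead of overloading `None`.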
Likely oli-obk just screwed up the `ParamEnv`s, so this diff --git a/compiler/rustc_mir_transform/src/instcombine.rs b/compiler/rustc_mir_transform/src/instcombine.rs index 4182da195..3d06a0a49 100644 --- a/compiler/rustc_mir_transform/src/instcombine.rs +++ b/compiler/rustc_mir_transform/src/instcombine.rs @@ -3,12 +3,14 @@ use crate::MirPass; use rustc_hir::Mutability; use rustc_middle::mir::{ - BinOp, Body, Constant, ConstantKind, LocalDecls, Operand, Place, ProjectionElem, Rvalue, - SourceInfo, Statement, StatementKind, Terminator, TerminatorKind, UnOp, + BinOp, Body, CastKind, Constant, ConstantKind, LocalDecls, Operand, Place, ProjectionElem, + Rvalue, SourceInfo, Statement, StatementKind, SwitchTargets, Terminator, TerminatorKind, UnOp, }; use rustc_middle::ty::layout::ValidityRequirement; +use rustc_middle::ty::util::IntTypeExt; use rustc_middle::ty::{self, ParamEnv, SubstsRef, Ty, TyCtxt}; use rustc_span::symbol::Symbol; +use rustc_target::abi::FieldIdx; pub struct InstCombine; @@ -44,6 +46,7 @@ impl<'tcx> MirPass<'tcx> for InstCombine { &mut block.terminator.as_mut().unwrap(), &mut block.statements, ); + ctx.combine_duplicate_switch_targets(&mut block.terminator.as_mut().unwrap()); } } } @@ -144,9 +147,53 @@ impl<'tcx> InstCombineContext<'tcx, '_> { } fn combine_cast(&self, _source_info: &SourceInfo, rvalue: &mut Rvalue<'tcx>) { - if let Rvalue::Cast(_kind, operand, ty) = rvalue { - if operand.ty(self.local_decls, self.tcx) == *ty { + if let Rvalue::Cast(kind, operand, cast_ty) = rvalue { + let operand_ty = operand.ty(self.local_decls, self.tcx); + if operand_ty == *cast_ty { *rvalue = Rvalue::Use(operand.clone()); + } else if *kind == CastKind::Transmute { + // Transmuting an integer to another integer is just a signedness cast + if let (ty::Int(int), ty::Uint(uint)) | (ty::Uint(uint), ty::Int(int)) = (operand_ty.kind(), cast_ty.kind()) + && int.bit_width() == uint.bit_width() + { + // The width check isn't strictly necessary, as different widths + // are UB and thus we'd be allowed to turn it into a cast anyway. + // But let's keep the UB around for codegen to exploit later. + // (If `CastKind::Transmute` ever becomes *not* UB for mismatched sizes, + // then the width check is necessary for big-endian correctness.) + *kind = CastKind::IntToInt; + return; + } + + // Transmuting a fieldless enum to its repr is a discriminant read + if let ty::Adt(adt_def, ..) 
= operand_ty.kind() + && adt_def.is_enum() + && adt_def.is_payloadfree() + && let Some(place) = operand.place() + && let Some(repr_int) = adt_def.repr().int + && repr_int.to_ty(self.tcx) == *cast_ty + { + *rvalue = Rvalue::Discriminant(place); + return; + } + + // Transmuting a transparent struct/union to a field's type is a projection + if let ty::Adt(adt_def, substs) = operand_ty.kind() + && adt_def.repr().transparent() + && (adt_def.is_struct() || adt_def.is_union()) + && let Some(place) = operand.place() + { + let variant = adt_def.non_enum_variant(); + for (i, field) in variant.fields.iter().enumerate() { + let field_ty = field.ty(self.tcx, substs); + if field_ty == *cast_ty { + let place = place.project_deeper(&[ProjectionElem::Field(FieldIdx::from_usize(i), *cast_ty)], self.tcx); + let operand = if operand.is_move() { Operand::Move(place) } else { Operand::Copy(place) }; + *rvalue = Rvalue::Use(operand); + return; + } + } + } } } } @@ -217,6 +264,19 @@ impl<'tcx> InstCombineContext<'tcx, '_> { terminator.kind = TerminatorKind::Goto { target: destination_block }; } + fn combine_duplicate_switch_targets(&self, terminator: &mut Terminator<'tcx>) { + let TerminatorKind::SwitchInt { targets, .. } = &mut terminator.kind + else { return }; + + let otherwise = targets.otherwise(); + if targets.iter().any(|t| t.1 == otherwise) { + *targets = SwitchTargets::new( + targets.iter().filter(|t| t.1 != otherwise), + targets.otherwise(), + ); + } + } + fn combine_intrinsic_assert( &self, terminator: &mut Terminator<'tcx>, diff --git a/compiler/rustc_mir_transform/src/large_enums.rs b/compiler/rustc_mir_transform/src/large_enums.rs index 89e0a007d..9447a2ff0 100644 --- a/compiler/rustc_mir_transform/src/large_enums.rs +++ b/compiler/rustc_mir_transform/src/large_enums.rs @@ -158,10 +158,12 @@ impl EnumSizeOpt { tmp_ty, ), }; - let rval = Rvalue::Use(Operand::Constant(box (constant_vals))); + let rval = Rvalue::Use(Operand::Constant(Box::new(constant_vals))); - let const_assign = - Statement { source_info, kind: StatementKind::Assign(box (place, rval)) }; + let const_assign = Statement { + source_info, + kind: StatementKind::Assign(Box::new((place, rval))), + }; let discr_place = Place::from( local_decls @@ -170,7 +172,10 @@ impl EnumSizeOpt { let store_discr = Statement { source_info, - kind: StatementKind::Assign(box (discr_place, Rvalue::Discriminant(*rhs))), + kind: StatementKind::Assign(Box::new(( + discr_place, + Rvalue::Discriminant(*rhs), + ))), }; let discr_cast_place = @@ -178,14 +183,14 @@ impl EnumSizeOpt { let cast_discr = Statement { source_info, - kind: StatementKind::Assign(box ( + kind: StatementKind::Assign(Box::new(( discr_cast_place, Rvalue::Cast( CastKind::IntToInt, Operand::Copy(discr_place), tcx.types.usize, ), - )), + ))), }; let size_place = @@ -193,14 +198,14 @@ impl EnumSizeOpt { let store_size = Statement { source_info, - kind: StatementKind::Assign(box ( + kind: StatementKind::Assign(Box::new(( size_place, Rvalue::Use(Operand::Copy(Place { local: size_array_local, projection: tcx .mk_place_elems(&[PlaceElem::Index(discr_cast_place.local)]), })), - )), + ))), }; let dst = @@ -208,10 +213,10 @@ impl EnumSizeOpt { let dst_ptr = Statement { source_info, - kind: StatementKind::Assign(box ( + kind: StatementKind::Assign(Box::new(( dst, Rvalue::AddressOf(Mutability::Mut, *lhs), - )), + ))), }; let dst_cast_ty = tcx.mk_mut_ptr(tcx.types.u8); @@ -220,10 +225,10 @@ impl EnumSizeOpt { let dst_cast = Statement { source_info, - kind: StatementKind::Assign(box ( + kind: 
StatementKind::Assign(Box::new(( dst_cast_place, Rvalue::Cast(CastKind::PtrToPtr, Operand::Copy(dst), dst_cast_ty), - )), + ))), }; let src = @@ -231,10 +236,10 @@ impl EnumSizeOpt { let src_ptr = Statement { source_info, - kind: StatementKind::Assign(box ( + kind: StatementKind::Assign(Box::new(( src, Rvalue::AddressOf(Mutability::Not, *rhs), - )), + ))), }; let src_cast_ty = tcx.mk_imm_ptr(tcx.types.u8); @@ -243,24 +248,24 @@ impl EnumSizeOpt { let src_cast = Statement { source_info, - kind: StatementKind::Assign(box ( + kind: StatementKind::Assign(Box::new(( src_cast_place, Rvalue::Cast(CastKind::PtrToPtr, Operand::Copy(src), src_cast_ty), - )), + ))), }; let deinit_old = - Statement { source_info, kind: StatementKind::Deinit(box dst) }; + Statement { source_info, kind: StatementKind::Deinit(Box::new(dst)) }; let copy_bytes = Statement { source_info, - kind: StatementKind::Intrinsic( - box NonDivergingIntrinsic::CopyNonOverlapping(CopyNonOverlapping { + kind: StatementKind::Intrinsic(Box::new( + NonDivergingIntrinsic::CopyNonOverlapping(CopyNonOverlapping { src: Operand::Copy(src_cast_place), dst: Operand::Copy(dst_cast_place), count: Operand::Copy(size_place), }), - ), + )), }; let store_dead = Statement { diff --git a/compiler/rustc_mir_transform/src/lib.rs b/compiler/rustc_mir_transform/src/lib.rs index cdd28ae0c..fc12d423c 100644 --- a/compiler/rustc_mir_transform/src/lib.rs +++ b/compiler/rustc_mir_transform/src/lib.rs @@ -1,12 +1,10 @@ #![allow(rustc::potential_query_instability)] #![feature(box_patterns)] #![feature(drain_filter)] -#![feature(box_syntax)] #![feature(let_chains)] #![feature(map_try_insert)] #![feature(min_specialization)] #![feature(never_type)] -#![feature(once_cell)] #![feature(option_get_or_insert_default)] #![feature(trusted_step)] #![feature(try_blocks)] @@ -30,9 +28,9 @@ use rustc_hir::intravisit::{self, Visitor}; use rustc_index::vec::IndexVec; use rustc_middle::mir::visit::Visitor as _; use rustc_middle::mir::{ - traversal, AnalysisPhase, Body, ConstQualifs, Constant, LocalDecl, MirPass, MirPhase, Operand, - Place, ProjectionElem, Promoted, RuntimePhase, Rvalue, SourceInfo, Statement, StatementKind, - TerminatorKind, + traversal, AnalysisPhase, Body, ClearCrossCrate, ConstQualifs, Constant, LocalDecl, MirPass, + MirPhase, Operand, Place, ProjectionElem, Promoted, RuntimePhase, Rvalue, SourceInfo, + Statement, StatementKind, TerminatorKind, }; use rustc_middle::ty::query::Providers; use rustc_middle::ty::{self, TyCtxt, TypeVisitableExt}; @@ -92,6 +90,7 @@ mod separate_const_switch; mod shim; mod ssa; // This pass is public to allow external drivers to perform MIR cleanup +mod check_alignment; pub mod simplify; mod simplify_branches; mod simplify_comparison_integral; @@ -113,7 +112,6 @@ pub fn provide(providers: &mut Providers) { mir_keys, mir_const, mir_const_qualif: |tcx, def_id| { - let def_id = def_id.expect_local(); if let Some(def) = ty::WithOptConstParam::try_lookup(def_id, tcx) { tcx.mir_const_qualif_const_arg(def) } else { @@ -134,7 +132,6 @@ pub fn provide(providers: &mut Providers) { mir_callgraph_reachable: inline::cycle::mir_callgraph_reachable, mir_inliner_callees: inline::cycle::mir_inliner_callees, promoted_mir: |tcx, def_id| { - let def_id = def_id.expect_local(); if let Some(def) = ty::WithOptConstParam::try_lookup(def_id, tcx) { tcx.promoted_mir_of_const_arg(def) } else { @@ -162,7 +159,7 @@ fn remap_mir_for_const_eval_select<'tcx>( ref mut args, destination, target, - cleanup, + unwind, fn_span, .. 
} if let ty::FnDef(def_id, _) = *literal.ty().kind() @@ -199,7 +196,7 @@ fn remap_mir_for_const_eval_select<'tcx>( }; method(place) }).collect(); - terminator.kind = TerminatorKind::Call { func, args: arguments, destination, target, cleanup, from_hir_call: false, fn_span }; + terminator.kind = TerminatorKind::Call { func, args: arguments, destination, target, unwind, from_hir_call: false, fn_span }; } _ => {} } @@ -207,8 +204,7 @@ fn remap_mir_for_const_eval_select<'tcx>( body } -fn is_mir_available(tcx: TyCtxt<'_>, def_id: DefId) -> bool { - let def_id = def_id.expect_local(); +fn is_mir_available(tcx: TyCtxt<'_>, def_id: LocalDefId) -> bool { tcx.mir_keys(()).contains(&def_id) } @@ -278,14 +274,14 @@ fn mir_const(tcx: TyCtxt<'_>, def: ty::WithOptConstParam<LocalDefId>) -> &Steal< // Unsafety check uses the raw mir, so make sure it is run. if !tcx.sess.opts.unstable_opts.thir_unsafeck { if let Some(param_did) = def.const_param_did { - tcx.ensure().unsafety_check_result_for_const_arg((def.did, param_did)); + tcx.ensure_with_value().unsafety_check_result_for_const_arg((def.did, param_did)); } else { - tcx.ensure().unsafety_check_result(def.did); + tcx.ensure_with_value().unsafety_check_result(def.did); } } // has_ffi_unwind_calls query uses the raw mir, so make sure it is run. - tcx.ensure().has_ffi_unwind_calls(def.did); + tcx.ensure_with_value().has_ffi_unwind_calls(def.did); let mut body = tcx.mir_built(def).steal(); @@ -351,12 +347,11 @@ fn mir_promoted( } /// Compute the MIR that is used during CTFE (and thus has no optimizations run on it) -fn mir_for_ctfe(tcx: TyCtxt<'_>, def_id: DefId) -> &Body<'_> { - let did = def_id.expect_local(); - if let Some(def) = ty::WithOptConstParam::try_lookup(did, tcx) { +fn mir_for_ctfe(tcx: TyCtxt<'_>, def_id: LocalDefId) -> &Body<'_> { + if let Some(def) = ty::WithOptConstParam::try_lookup(def_id, tcx) { tcx.mir_for_ctfe_of_const_arg(def) } else { - tcx.arena.alloc(inner_mir_for_ctfe(tcx, ty::WithOptConstParam::unknown(did))) + tcx.arena.alloc(inner_mir_for_ctfe(tcx, ty::WithOptConstParam::unknown(def_id))) } } @@ -416,8 +411,6 @@ fn inner_mir_for_ctfe(tcx: TyCtxt<'_>, def: ty::WithOptConstParam<LocalDefId>) - pm::run_passes(tcx, &mut body, &[&ctfe_limit::CtfeLimit], None); - debug_assert!(!body.has_free_regions(), "Free regions in MIR for CTFE"); - body } @@ -435,7 +428,7 @@ fn mir_drops_elaborated_and_const_checked( if tcx.sess.opts.unstable_opts.drop_tracking_mir && let DefKind::Generator = tcx.def_kind(def.did) { - tcx.ensure().mir_generator_witnesses(def.did); + tcx.ensure_with_value().mir_generator_witnesses(def.did); } let mir_borrowck = tcx.mir_borrowck_opt_const_arg(def); @@ -446,7 +439,7 @@ fn mir_drops_elaborated_and_const_checked( // Do not compute the mir call graph without said call graph actually being used. if inline::Inline.is_enabled(&tcx.sess) { - let _ = tcx.mir_inliner_callees(ty::InstanceDef::Item(def)); + tcx.ensure_with_value().mir_inliner_callees(ty::InstanceDef::Item(def)); } } @@ -535,6 +528,12 @@ fn run_runtime_cleanup_passes<'tcx>(tcx: TyCtxt<'tcx>, body: &mut Body<'tcx>) { &[&lower_intrinsics::LowerIntrinsics, &simplify::SimplifyCfg::new("elaborate-drops")]; pm::run_passes(tcx, body, passes, Some(MirPhase::Runtime(RuntimePhase::PostCleanup))); + + // Clear this by anticipation. Optimizations and runtime MIR have no reason to look + // into this information, which is meant for borrowck diagnostics. 
+ for decl in &mut body.local_decls { + decl.local_info = ClearCrossCrate::Clear; + } } fn run_optimization_passes<'tcx>(tcx: TyCtxt<'tcx>, body: &mut Body<'tcx>) { @@ -547,6 +546,7 @@ fn run_optimization_passes<'tcx>(tcx: TyCtxt<'tcx>, body: &mut Body<'tcx>) { tcx, body, &[ + &check_alignment::CheckAlignment, &reveal_all::RevealAll, // has to be done before inlining, since inlined code is in RevealAll mode. &lower_slice_len::LowerSliceLenCalls, // has to be done before inlining, otherwise actual call will be almost always inlined. Also simple, so can just do first &unreachable_prop::UnreachablePropagation, @@ -566,8 +566,6 @@ fn run_optimization_passes<'tcx>(tcx: TyCtxt<'tcx>, body: &mut Body<'tcx>) { &separate_const_switch::SeparateConstSwitch, &simplify::SimplifyLocals::new("before-const-prop"), ©_prop::CopyProp, - // - // FIXME(#70073): This pass is responsible for both optimization as well as some lints. &const_prop::ConstProp, &dataflow_const_prop::DataflowConstProp, // @@ -596,8 +594,7 @@ fn run_optimization_passes<'tcx>(tcx: TyCtxt<'tcx>, body: &mut Body<'tcx>) { } /// Optimize the MIR and prepare it for codegen. -fn optimized_mir(tcx: TyCtxt<'_>, did: DefId) -> &Body<'_> { - let did = did.expect_local(); +fn optimized_mir(tcx: TyCtxt<'_>, did: LocalDefId) -> &Body<'_> { assert_eq!(ty::WithOptConstParam::try_lookup(did, tcx), None); tcx.arena.alloc(inner_optimized_mir(tcx, did)) } @@ -615,7 +612,7 @@ fn inner_optimized_mir(tcx: TyCtxt<'_>, did: LocalDefId) -> Body<'_> { // Run the `mir_for_ctfe` query, which depends on `mir_drops_elaborated_and_const_checked` // which we are going to steal below. Thus we need to run `mir_for_ctfe` first, so it // computes and caches its result. - Some(hir::ConstContext::ConstFn) => tcx.ensure().mir_for_ctfe(did), + Some(hir::ConstContext::ConstFn) => tcx.ensure_with_value().mir_for_ctfe(did), None => {} Some(other) => panic!("do not use `optimized_mir` for constants: {:?}", other), } @@ -626,8 +623,6 @@ fn inner_optimized_mir(tcx: TyCtxt<'_>, did: LocalDefId) -> Body<'_> { debug!("body: {:#?}", body); run_optimization_passes(tcx, &mut body); - debug_assert!(!body.has_free_regions(), "Free regions in optimized MIR"); - body } @@ -651,7 +646,5 @@ fn promoted_mir( run_analysis_to_runtime_passes(tcx, body); } - debug_assert!(!promoted.has_free_regions(), "Free regions in promoted MIR"); - tcx.arena.alloc(promoted) } diff --git a/compiler/rustc_mir_transform/src/lower_intrinsics.rs b/compiler/rustc_mir_transform/src/lower_intrinsics.rs index f596cc180..c136642df 100644 --- a/compiler/rustc_mir_transform/src/lower_intrinsics.rs +++ b/compiler/rustc_mir_transform/src/lower_intrinsics.rs @@ -6,6 +6,7 @@ use rustc_middle::ty::subst::SubstsRef; use rustc_middle::ty::{self, Ty, TyCtxt}; use rustc_span::symbol::{sym, Symbol}; use rustc_span::Span; +use rustc_target::abi::{FieldIdx, VariantIdx}; pub struct LowerIntrinsics; @@ -149,6 +150,35 @@ impl<'tcx> MirPass<'tcx> for LowerIntrinsics { terminator.kind = TerminatorKind::Goto { target }; } } + sym::read_via_copy => { + let [arg] = args.as_slice() else { + span_bug!(terminator.source_info.span, "Wrong number of arguments"); + }; + let derefed_place = + if let Some(place) = arg.place() && let Some(local) = place.as_local() { + tcx.mk_place_deref(local.into()) + } else { + span_bug!(terminator.source_info.span, "Only passing a local is supported"); + }; + terminator.kind = match *target { + None => { + // No target means this read something uninhabited, + // so it must be unreachable, and we don't need to + // 
preserve the assignment either. + TerminatorKind::Unreachable + } + Some(target) => { + block.statements.push(Statement { + source_info: terminator.source_info, + kind: StatementKind::Assign(Box::new(( + *destination, + Rvalue::Use(Operand::Copy(derefed_place)), + ))), + }); + TerminatorKind::Goto { target } + } + } + } sym::discriminant_value => { if let (Some(target), Some(arg)) = (*target, args[0].place()) { let arg = tcx.mk_place_deref(arg); @@ -162,6 +192,61 @@ impl<'tcx> MirPass<'tcx> for LowerIntrinsics { terminator.kind = TerminatorKind::Goto { target }; } } + sym::option_payload_ptr => { + if let (Some(target), Some(arg)) = (*target, args[0].place()) { + let ty::RawPtr(ty::TypeAndMut { ty: dest_ty, .. }) = + destination.ty(local_decls, tcx).ty.kind() + else { bug!(); }; + + block.statements.push(Statement { + source_info: terminator.source_info, + kind: StatementKind::Assign(Box::new(( + *destination, + Rvalue::AddressOf( + Mutability::Not, + arg.project_deeper( + &[ + PlaceElem::Deref, + PlaceElem::Downcast( + Some(sym::Some), + VariantIdx::from_u32(1), + ), + PlaceElem::Field(FieldIdx::from_u32(0), *dest_ty), + ], + tcx, + ), + ), + ))), + }); + terminator.kind = TerminatorKind::Goto { target }; + } + } + sym::transmute => { + let dst_ty = destination.ty(local_decls, tcx).ty; + let Ok([arg]) = <[_; 1]>::try_from(std::mem::take(args)) else { + span_bug!( + terminator.source_info.span, + "Wrong number of arguments for transmute intrinsic", + ); + }; + + // Always emit the cast, even if we transmute to an uninhabited type, + // because that lets CTFE and codegen generate better error messages + // when such a transmute actually ends up reachable. + block.statements.push(Statement { + source_info: terminator.source_info, + kind: StatementKind::Assign(Box::new(( + *destination, + Rvalue::Cast(CastKind::Transmute, arg, dst_ty), + ))), + }); + + if let Some(target) = *target { + terminator.kind = TerminatorKind::Goto { target }; + } else { + terminator.kind = TerminatorKind::Unreachable; + } + } _ if intrinsic_name.as_str().starts_with("simd_shuffle") => { validate_simd_shuffle(tcx, args, terminator.source_info.span); } diff --git a/compiler/rustc_mir_transform/src/lower_slice_len.rs b/compiler/rustc_mir_transform/src/lower_slice_len.rs index c6e7468aa..7dc5878e0 100644 --- a/compiler/rustc_mir_transform/src/lower_slice_len.rs +++ b/compiler/rustc_mir_transform/src/lower_slice_len.rs @@ -3,7 +3,7 @@ use crate::MirPass; use rustc_hir::def_id::DefId; -use rustc_index::vec::IndexVec; +use rustc_index::vec::IndexSlice; use rustc_middle::mir::*; use rustc_middle::ty::{self, TyCtxt}; @@ -42,7 +42,7 @@ struct SliceLenPatchInformation<'tcx> { fn lower_slice_len_call<'tcx>( tcx: TyCtxt<'tcx>, block: &mut BasicBlockData<'tcx>, - local_decls: &IndexVec<Local, LocalDecl<'tcx>>, + local_decls: &IndexSlice<Local, LocalDecl<'tcx>>, slice_len_fn_item_def_id: DefId, ) { let mut patch_found: Option<SliceLenPatchInformation<'_>> = None; @@ -54,7 +54,6 @@ fn lower_slice_len_call<'tcx>( args, destination, target: Some(bb), - cleanup: None, from_hir_call: true, .. 
} => { diff --git a/compiler/rustc_mir_transform/src/nrvo.rs b/compiler/rustc_mir_transform/src/nrvo.rs index 4291e81c7..b6e73eaad 100644 --- a/compiler/rustc_mir_transform/src/nrvo.rs +++ b/compiler/rustc_mir_transform/src/nrvo.rs @@ -102,7 +102,7 @@ fn local_eligible_for_nrvo(body: &mut mir::Body<'_>) -> Option<Local> { mir::LocalKind::Arg => return None, mir::LocalKind::ReturnPointer => bug!("Return place was assigned to itself?"), - mir::LocalKind::Var | mir::LocalKind::Temp => {} + mir::LocalKind::Temp => {} } // If multiple different locals are copied to the return place. We can't pick a diff --git a/compiler/rustc_mir_transform/src/remove_noop_landing_pads.rs b/compiler/rustc_mir_transform/src/remove_noop_landing_pads.rs index e3a03aa08..4941c9edc 100644 --- a/compiler/rustc_mir_transform/src/remove_noop_landing_pads.rs +++ b/compiler/rustc_mir_transform/src/remove_noop_landing_pads.rs @@ -33,6 +33,7 @@ impl RemoveNoopLandingPads { StatementKind::FakeRead(..) | StatementKind::StorageLive(_) | StatementKind::StorageDead(_) + | StatementKind::PlaceMention(..) | StatementKind::AscribeUserType(..) | StatementKind::Coverage(..) | StatementKind::ConstEvalCounter @@ -71,11 +72,10 @@ impl RemoveNoopLandingPads { TerminatorKind::GeneratorDrop | TerminatorKind::Yield { .. } | TerminatorKind::Return - | TerminatorKind::Abort + | TerminatorKind::Terminate | TerminatorKind::Unreachable | TerminatorKind::Call { .. } | TerminatorKind::Assert { .. } - | TerminatorKind::DropAndReplace { .. } | TerminatorKind::Drop { .. } | TerminatorKind::InlineAsm { .. } => false, } @@ -103,11 +103,11 @@ impl RemoveNoopLandingPads { for bb in postorder { debug!(" processing {:?}", bb); if let Some(unwind) = body[bb].terminator_mut().unwind_mut() { - if let Some(unwind_bb) = *unwind { + if let UnwindAction::Cleanup(unwind_bb) = *unwind { if nop_landing_pads.contains(unwind_bb) { debug!(" removing noop landing pad"); landing_pads_removed += 1; - *unwind = None; + *unwind = UnwindAction::Continue; } } } diff --git a/compiler/rustc_mir_transform/src/remove_uninit_drops.rs b/compiler/rustc_mir_transform/src/remove_uninit_drops.rs index 78b6f714a..1f9e521d3 100644 --- a/compiler/rustc_mir_transform/src/remove_uninit_drops.rs +++ b/compiler/rustc_mir_transform/src/remove_uninit_drops.rs @@ -1,14 +1,15 @@ use rustc_index::bit_set::ChunkedBitSet; -use rustc_middle::mir::{Body, Field, Rvalue, Statement, StatementKind, TerminatorKind}; +use rustc_middle::mir::{Body, TerminatorKind}; use rustc_middle::ty::subst::SubstsRef; use rustc_middle::ty::{self, ParamEnv, Ty, TyCtxt, VariantDef}; use rustc_mir_dataflow::impls::MaybeInitializedPlaces; use rustc_mir_dataflow::move_paths::{LookupResult, MoveData, MovePathIndex}; use rustc_mir_dataflow::{self, move_path_children_matching, Analysis, MoveDataParamEnv}; +use rustc_target::abi::FieldIdx; use crate::MirPass; -/// Removes `Drop` and `DropAndReplace` terminators whose target is known to be uninitialized at +/// Removes `Drop` terminators whose target is known to be uninitialized at /// that point. /// /// This is redundant with drop elaboration, but we need to do it prior to const-checking, and @@ -37,8 +38,7 @@ impl<'tcx> MirPass<'tcx> for RemoveUninitDrops { let mut to_remove = vec![]; for (bb, block) in body.basic_blocks.iter_enumerated() { let terminator = block.terminator(); - let (TerminatorKind::Drop { place, .. } | TerminatorKind::DropAndReplace { place, .. }) - = &terminator.kind + let TerminatorKind::Drop { place, .. 
} = &terminator.kind else { continue }; maybe_inits.seek_before_primary_effect(body.terminator_loc(bb)); @@ -64,24 +64,12 @@ impl<'tcx> MirPass<'tcx> for RemoveUninitDrops { for bb in to_remove { let block = &mut body.basic_blocks_mut()[bb]; - let (TerminatorKind::Drop { target, .. } | TerminatorKind::DropAndReplace { target, .. }) + let TerminatorKind::Drop { target, .. } = &block.terminator().kind else { unreachable!() }; // Replace block terminator with `Goto`. - let target = *target; - let old_terminator_kind = std::mem::replace( - &mut block.terminator_mut().kind, - TerminatorKind::Goto { target }, - ); - - // If this is a `DropAndReplace`, we need to emulate the assignment to the return place. - if let TerminatorKind::DropAndReplace { place, value, .. } = old_terminator_kind { - block.statements.push(Statement { - source_info: block.terminator().source_info, - kind: StatementKind::Assign(Box::new((place, Rvalue::Use(value)))), - }); - } + block.terminator_mut().kind = TerminatorKind::Goto { target: *target }; } } } @@ -143,7 +131,7 @@ fn is_needs_drop_and_init<'tcx>( .fields .iter() .enumerate() - .map(|(f, field)| (Field::from_usize(f), field.ty(tcx, substs), mpi)) + .map(|(f, field)| (FieldIdx::from_usize(f), field.ty(tcx, substs), mpi)) .any(field_needs_drop_and_init) }) } @@ -151,7 +139,7 @@ fn is_needs_drop_and_init<'tcx>( ty::Tuple(fields) => fields .iter() .enumerate() - .map(|(f, f_ty)| (Field::from_usize(f), f_ty, mpi)) + .map(|(f, f_ty)| (FieldIdx::from_usize(f), f_ty, mpi)) .any(field_needs_drop_and_init), _ => true, diff --git a/compiler/rustc_mir_transform/src/remove_zsts.rs b/compiler/rustc_mir_transform/src/remove_zsts.rs index 1becfddb2..1f37f03cf 100644 --- a/compiler/rustc_mir_transform/src/remove_zsts.rs +++ b/compiler/rustc_mir_transform/src/remove_zsts.rs @@ -1,7 +1,9 @@ -//! Removes assignments to ZST places. +//! Removes operations on ZST places, and convert ZST operands to constants. 
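(Editorial aside: the rewritten `RemoveZsts` pass in the hunk that follows no longer only nops out assignments to ZST places; it walks the body with a `MutVisitor` and also turns every operand of zero-sized type into a zero-sized constant. A rough, self-contained sketch of the operand half of that idea on a toy IR; the types here are illustrative stand-ins, not the rustc ones.)

// Illustrative toy IR, not the rustc one: an operand is either a read of a
// local or an inline constant.
#[derive(Clone, Debug, PartialEq)]
enum Operand {
    Copy(usize),         // read of local _n
    Const(&'static str), // stand-in for a zero-sized `ConstValue`
}

// Stand-in for the layout query: which locals are known to be zero-sized.
fn is_zst(local: usize, zst_locals: &[usize]) -> bool {
    zst_locals.contains(&local)
}

// Core idea of the operand rewrite: a read of a ZST local carries no data,
// so it can be replaced by a zero-sized constant.
fn simplify_operand(op: &mut Operand, zst_locals: &[usize]) {
    if let Operand::Copy(local) = *op {
        if is_zst(local, zst_locals) {
            *op = Operand::Const("ZeroSized");
        }
    }
}

fn main() {
    let mut op = Operand::Copy(2);
    simplify_operand(&mut op, &[2]);
    assert_eq!(op, Operand::Const("ZeroSized"));
}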
use crate::MirPass; -use rustc_middle::mir::{Body, StatementKind}; +use rustc_middle::mir::interpret::ConstValue; +use rustc_middle::mir::visit::*; +use rustc_middle::mir::*; use rustc_middle::ty::{self, Ty, TyCtxt}; pub struct RemoveZsts; @@ -16,38 +18,24 @@ impl<'tcx> MirPass<'tcx> for RemoveZsts { if tcx.type_of(body.source.def_id()).subst_identity().is_generator() { return; } - let param_env = tcx.param_env(body.source.def_id()); - let basic_blocks = body.basic_blocks.as_mut_preserves_cfg(); + let param_env = tcx.param_env_reveal_all_normalized(body.source.def_id()); let local_decls = &body.local_decls; - for block in basic_blocks { - for statement in block.statements.iter_mut() { - if let StatementKind::Assign(box (place, _)) | StatementKind::Deinit(box place) = - statement.kind - { - let place_ty = place.ty(local_decls, tcx).ty; - if !maybe_zst(place_ty) { - continue; - } - let Ok(layout) = tcx.layout_of(param_env.and(place_ty)) else { - continue; - }; - if !layout.is_zst() { - continue; - } - if tcx.consider_optimizing(|| { - format!( - "RemoveZsts - Place: {:?} SourceInfo: {:?}", - place, statement.source_info - ) - }) { - statement.make_nop(); - } - } - } + let mut replacer = Replacer { tcx, param_env, local_decls }; + for var_debug_info in &mut body.var_debug_info { + replacer.visit_var_debug_info(var_debug_info); + } + for (bb, data) in body.basic_blocks.as_mut_preserves_cfg().iter_enumerated_mut() { + replacer.visit_basic_block_data(bb, data); } } } +struct Replacer<'a, 'tcx> { + tcx: TyCtxt<'tcx>, + param_env: ty::ParamEnv<'tcx>, + local_decls: &'a LocalDecls<'tcx>, +} + /// A cheap, approximate check to avoid unnecessary `layout_of` calls. fn maybe_zst(ty: Ty<'_>) -> bool { match ty.kind() { @@ -63,3 +51,93 @@ fn maybe_zst(ty: Ty<'_>) -> bool { _ => false, } } + +impl<'tcx> Replacer<'_, 'tcx> { + fn known_to_be_zst(&self, ty: Ty<'tcx>) -> bool { + if !maybe_zst(ty) { + return false; + } + let Ok(layout) = self.tcx.layout_of(self.param_env.and(ty)) else { + return false; + }; + layout.is_zst() + } + + fn make_zst(&self, ty: Ty<'tcx>) -> Constant<'tcx> { + debug_assert!(self.known_to_be_zst(ty)); + Constant { + span: rustc_span::DUMMY_SP, + user_ty: None, + literal: ConstantKind::Val(ConstValue::ZeroSized, ty), + } + } +} + +impl<'tcx> MutVisitor<'tcx> for Replacer<'_, 'tcx> { + fn tcx(&self) -> TyCtxt<'tcx> { + self.tcx + } + + fn visit_var_debug_info(&mut self, var_debug_info: &mut VarDebugInfo<'tcx>) { + match var_debug_info.value { + VarDebugInfoContents::Const(_) => {} + VarDebugInfoContents::Place(place) => { + let place_ty = place.ty(self.local_decls, self.tcx).ty; + if self.known_to_be_zst(place_ty) { + var_debug_info.value = VarDebugInfoContents::Const(self.make_zst(place_ty)) + } + } + VarDebugInfoContents::Composite { ty, fragments: _ } => { + if self.known_to_be_zst(ty) { + var_debug_info.value = VarDebugInfoContents::Const(self.make_zst(ty)) + } + } + } + } + + fn visit_operand(&mut self, operand: &mut Operand<'tcx>, loc: Location) { + if let Operand::Constant(_) = operand { + return; + } + let op_ty = operand.ty(self.local_decls, self.tcx); + if self.known_to_be_zst(op_ty) + && self.tcx.consider_optimizing(|| { + format!("RemoveZsts - Operand: {:?} Location: {:?}", operand, loc) + }) + { + *operand = Operand::Constant(Box::new(self.make_zst(op_ty))) + } + } + + fn visit_statement(&mut self, statement: &mut Statement<'tcx>, loc: Location) { + let place_for_ty = match statement.kind { + StatementKind::Assign(box (place, ref rvalue)) => { + 
rvalue.is_safe_to_remove().then_some(place) + } + StatementKind::Deinit(box place) + | StatementKind::SetDiscriminant { box place, variant_index: _ } + | StatementKind::AscribeUserType(box (place, _), _) + | StatementKind::Retag(_, box place) + | StatementKind::PlaceMention(box place) + | StatementKind::FakeRead(box (_, place)) => Some(place), + StatementKind::StorageLive(local) | StatementKind::StorageDead(local) => { + Some(local.into()) + } + StatementKind::Coverage(_) + | StatementKind::Intrinsic(_) + | StatementKind::Nop + | StatementKind::ConstEvalCounter => None, + }; + if let Some(place_for_ty) = place_for_ty + && let ty = place_for_ty.ty(self.local_decls, self.tcx).ty + && self.known_to_be_zst(ty) + && self.tcx.consider_optimizing(|| { + format!("RemoveZsts - Place: {:?} SourceInfo: {:?}", place_for_ty, statement.source_info) + }) + { + statement.make_nop(); + } else { + self.super_statement(statement, loc); + } + } +} diff --git a/compiler/rustc_mir_transform/src/separate_const_switch.rs b/compiler/rustc_mir_transform/src/separate_const_switch.rs index a24d2d34d..ef367faf6 100644 --- a/compiler/rustc_mir_transform/src/separate_const_switch.rs +++ b/compiler/rustc_mir_transform/src/separate_const_switch.rs @@ -108,12 +108,11 @@ pub fn separate_const_switch(body: &mut Body<'_>) -> usize { // The following terminators are not allowed TerminatorKind::Resume | TerminatorKind::Drop { .. } - | TerminatorKind::DropAndReplace { .. } | TerminatorKind::Call { .. } | TerminatorKind::Assert { .. } | TerminatorKind::FalseUnwind { .. } | TerminatorKind::Yield { .. } - | TerminatorKind::Abort + | TerminatorKind::Terminate | TerminatorKind::Return | TerminatorKind::Unreachable | TerminatorKind::InlineAsm { .. } @@ -165,12 +164,11 @@ pub fn separate_const_switch(body: &mut Body<'_>) -> usize { } TerminatorKind::Resume - | TerminatorKind::Abort + | TerminatorKind::Terminate | TerminatorKind::Return | TerminatorKind::Unreachable | TerminatorKind::GeneratorDrop | TerminatorKind::Assert { .. } - | TerminatorKind::DropAndReplace { .. } | TerminatorKind::FalseUnwind { .. } | TerminatorKind::Drop { .. } | TerminatorKind::Call { .. } @@ -247,6 +245,7 @@ fn is_likely_const<'tcx>(mut tracked_place: Place<'tcx>, block: &BasicBlockData< | StatementKind::StorageLive(_) | StatementKind::Retag(_, _) | StatementKind::AscribeUserType(_, _) + | StatementKind::PlaceMention(..) | StatementKind::Coverage(_) | StatementKind::StorageDead(_) | StatementKind::Intrinsic(_) @@ -317,6 +316,7 @@ fn find_determining_place<'tcx>( | StatementKind::StorageDead(_) | StatementKind::Retag(_, _) | StatementKind::AscribeUserType(_, _) + | StatementKind::PlaceMention(..) | StatementKind::Coverage(_) | StatementKind::Intrinsic(_) | StatementKind::ConstEvalCounter diff --git a/compiler/rustc_mir_transform/src/shim.rs b/compiler/rustc_mir_transform/src/shim.rs index ebe63d6cb..2787fe2ce 100644 --- a/compiler/rustc_mir_transform/src/shim.rs +++ b/compiler/rustc_mir_transform/src/shim.rs @@ -5,7 +5,7 @@ use rustc_middle::mir::*; use rustc_middle::ty::query::Providers; use rustc_middle::ty::InternalSubsts; use rustc_middle::ty::{self, EarlyBinder, GeneratorSubsts, Ty, TyCtxt}; -use rustc_target::abi::VariantIdx; +use rustc_target::abi::{FieldIdx, VariantIdx, FIRST_VARIANT}; use rustc_index::vec::{Idx, IndexVec}; @@ -76,7 +76,9 @@ fn make_shim<'tcx>(tcx: TyCtxt<'tcx>, instance: ty::InstanceDef<'tcx>) -> Body<' build_drop_shim(tcx, def_id, ty) } + ty::InstanceDef::ThreadLocalShim(..) 
=> build_thread_local_shim(tcx, instance), ty::InstanceDef::CloneShim(def_id, ty) => build_clone_shim(tcx, def_id, ty), + ty::InstanceDef::FnPtrAddrShim(def_id, ty) => build_fn_ptr_addr_shim(tcx, def_id, ty), ty::InstanceDef::Virtual(..) => { bug!("InstanceDef::Virtual ({:?}) is for direct calls only", instance) } @@ -307,7 +309,7 @@ impl<'a, 'tcx> DropElaborator<'a, 'tcx> for DropShimElaborator<'a, 'tcx> { fn clear_drop_flag(&mut self, _location: Location, _path: Self::Path, _mode: DropFlagMode) {} - fn field_subpath(&self, _path: Self::Path, _field: Field) -> Option<Self::Path> { + fn field_subpath(&self, _path: Self::Path, _field: FieldIdx) -> Option<Self::Path> { None } fn deref_subpath(&self, _path: Self::Path) -> Option<Self::Path> { @@ -321,6 +323,34 @@ impl<'a, 'tcx> DropElaborator<'a, 'tcx> for DropShimElaborator<'a, 'tcx> { } } +fn build_thread_local_shim<'tcx>(tcx: TyCtxt<'tcx>, instance: ty::InstanceDef<'tcx>) -> Body<'tcx> { + let def_id = instance.def_id(); + + let span = tcx.def_span(def_id); + let source_info = SourceInfo::outermost(span); + + let mut blocks = IndexVec::with_capacity(1); + blocks.push(BasicBlockData { + statements: vec![Statement { + source_info, + kind: StatementKind::Assign(Box::new(( + Place::return_place(), + Rvalue::ThreadLocalRef(def_id), + ))), + }], + terminator: Some(Terminator { source_info, kind: TerminatorKind::Return }), + is_cleanup: false, + }); + + new_body( + MirSource::from_instance(instance), + blocks, + IndexVec::from_raw(vec![LocalDecl::new(tcx.thread_local_ptr_ty(def_id), span)]), + 0, + span, + ) +} + /// Builds a `Clone::clone` shim for `self_ty`. Here, `def_id` is `Clone::clone`. fn build_clone_shim<'tcx>(tcx: TyCtxt<'tcx>, def_id: DefId, self_ty: Ty<'tcx>) -> Body<'tcx> { debug!("build_clone_shim(def_id={:?})", def_id); @@ -469,7 +499,7 @@ impl<'tcx> CloneShimBuilder<'tcx> { args: vec![Operand::Move(ref_loc)], destination: dest, target: Some(next), - cleanup: Some(cleanup), + unwind: UnwindAction::Cleanup(cleanup), from_hir_call: true, fn_span: self.span, }, @@ -500,7 +530,7 @@ impl<'tcx> CloneShimBuilder<'tcx> { // created by block 2*i. We store this block in `unwind` so that the next clone block // will unwind to it if cloning fails. 
- let field = Field::new(i); + let field = FieldIdx::new(i); let src_field = self.tcx.mk_place_field(src, field, ity); let dest_field = self.tcx.mk_place_field(dest, field, ity); @@ -510,7 +540,11 @@ impl<'tcx> CloneShimBuilder<'tcx> { self.make_clone_call(dest_field, src_field, ity, next_block, unwind); self.block( vec![], - TerminatorKind::Drop { place: dest_field, target: unwind, unwind: None }, + TerminatorKind::Drop { + place: dest_field, + target: unwind, + unwind: UnwindAction::Terminate, + }, true, ); unwind = next_unwind; @@ -723,7 +757,7 @@ fn build_call_shim<'tcx>( if let Some(untuple_args) = untuple_args { let tuple_arg = Local::new(1 + (sig.inputs().len() - 1)); args.extend(untuple_args.iter().enumerate().map(|(i, ity)| { - Operand::Move(tcx.mk_place_field(Place::from(tuple_arg), Field::new(i), *ity)) + Operand::Move(tcx.mk_place_field(Place::from(tuple_arg), FieldIdx::new(i), *ity)) })); } @@ -746,10 +780,10 @@ fn build_call_shim<'tcx>( args, destination: Place::return_place(), target: Some(BasicBlock::new(1)), - cleanup: if let Some(Adjustment::RefMut) = rcvr_adjustment { - Some(BasicBlock::new(3)) + unwind: if let Some(Adjustment::RefMut) = rcvr_adjustment { + UnwindAction::Cleanup(BasicBlock::new(3)) } else { - None + UnwindAction::Continue }, from_hir_call: true, fn_span: span, @@ -762,7 +796,11 @@ fn build_call_shim<'tcx>( block( &mut blocks, vec![], - TerminatorKind::Drop { place: rcvr_place(), target: BasicBlock::new(2), unwind: None }, + TerminatorKind::Drop { + place: rcvr_place(), + target: BasicBlock::new(2), + unwind: UnwindAction::Continue, + }, false, ); } @@ -773,7 +811,11 @@ fn build_call_shim<'tcx>( block( &mut blocks, vec![], - TerminatorKind::Drop { place: rcvr_place(), target: BasicBlock::new(4), unwind: None }, + TerminatorKind::Drop { + place: rcvr_place(), + target: BasicBlock::new(4), + unwind: UnwindAction::Terminate, + }, true, ); @@ -816,11 +858,8 @@ pub fn build_adt_ctor(tcx: TyCtxt<'_>, ctor_id: DefId) -> Body<'_> { let source_info = SourceInfo::outermost(span); - let variant_index = if adt_def.is_enum() { - adt_def.variant_index_with_ctor_id(ctor_id) - } else { - VariantIdx::new(0) - }; + let variant_index = + if adt_def.is_enum() { adt_def.variant_index_with_ctor_id(ctor_id) } else { FIRST_VARIANT }; // Generate the following MIR: // @@ -864,3 +903,39 @@ pub fn build_adt_ctor(tcx: TyCtxt<'_>, ctor_id: DefId) -> Body<'_> { body } + +/// ```ignore (pseudo-impl) +/// impl FnPtr for fn(u32) { +/// fn addr(self) -> usize { +/// self as usize +/// } +/// } +/// ``` +fn build_fn_ptr_addr_shim<'tcx>(tcx: TyCtxt<'tcx>, def_id: DefId, self_ty: Ty<'tcx>) -> Body<'tcx> { + assert!(matches!(self_ty.kind(), ty::FnPtr(..)), "expected fn ptr, found {self_ty}"); + let span = tcx.def_span(def_id); + let Some(sig) = tcx.fn_sig(def_id).subst(tcx, &[self_ty.into()]).no_bound_vars() else { + span_bug!(span, "FnPtr::addr with bound vars for `{self_ty}`"); + }; + let locals = local_decls_for_sig(&sig, span); + + let source_info = SourceInfo::outermost(span); + // FIXME: use `expose_addr` once we figure out whether function pointers have meaningful provenance. 
+ let rvalue = Rvalue::Cast( + CastKind::FnPtrToPtr, + Operand::Move(Place::from(Local::new(1))), + tcx.mk_imm_ptr(tcx.types.unit), + ); + let stmt = Statement { + source_info, + kind: StatementKind::Assign(Box::new((Place::return_place(), rvalue))), + }; + let statements = vec![stmt]; + let start_block = BasicBlockData { + statements, + terminator: Some(Terminator { source_info, kind: TerminatorKind::Return }), + is_cleanup: false, + }; + let source = MirSource::from_instance(ty::InstanceDef::FnPtrAddrShim(def_id, self_ty)); + new_body(source, IndexVec::from_elem_n(start_block, 1), locals, sig.inputs().len(), span) +} diff --git a/compiler/rustc_mir_transform/src/simplify.rs b/compiler/rustc_mir_transform/src/simplify.rs index 9ef55c558..c79e1cf08 100644 --- a/compiler/rustc_mir_transform/src/simplify.rs +++ b/compiler/rustc_mir_transform/src/simplify.rs @@ -28,8 +28,8 @@ //! return. use crate::MirPass; -use rustc_data_structures::fx::FxHashSet; -use rustc_index::vec::{Idx, IndexVec}; +use rustc_data_structures::fx::{FxHashSet, FxIndexSet}; +use rustc_index::vec::{Idx, IndexSlice, IndexVec}; use rustc_middle::mir::coverage::*; use rustc_middle::mir::visit::{MutVisitor, MutatingUseContext, PlaceContext, Visitor}; use rustc_middle::mir::*; @@ -48,6 +48,7 @@ impl SimplifyCfg { pub fn simplify_cfg<'tcx>(tcx: TyCtxt<'tcx>, body: &mut Body<'tcx>) { CfgSimplifier::new(body).simplify(); + remove_duplicate_unreachable_blocks(tcx, body); remove_dead_blocks(tcx, body); // FIXME: Should probably be moved into some kind of pass manager @@ -66,7 +67,7 @@ impl<'tcx> MirPass<'tcx> for SimplifyCfg { } pub struct CfgSimplifier<'a, 'tcx> { - basic_blocks: &'a mut IndexVec<BasicBlock, BasicBlockData<'tcx>>, + basic_blocks: &'a mut IndexSlice<BasicBlock, BasicBlockData<'tcx>>, pred_count: IndexVec<BasicBlock, u32>, } @@ -259,6 +260,49 @@ impl<'a, 'tcx> CfgSimplifier<'a, 'tcx> { } } +pub fn remove_duplicate_unreachable_blocks<'tcx>(tcx: TyCtxt<'tcx>, body: &mut Body<'tcx>) { + struct OptApplier<'tcx> { + tcx: TyCtxt<'tcx>, + duplicates: FxIndexSet<BasicBlock>, + } + + impl<'tcx> MutVisitor<'tcx> for OptApplier<'tcx> { + fn tcx(&self) -> TyCtxt<'tcx> { + self.tcx + } + + fn visit_terminator(&mut self, terminator: &mut Terminator<'tcx>, location: Location) { + for target in terminator.successors_mut() { + // We don't have to check whether `target` is a cleanup block, because have + // entirely excluded cleanup blocks in building the set of duplicates. + if self.duplicates.contains(target) { + *target = self.duplicates[0]; + } + } + + self.super_terminator(terminator, location); + } + } + + let unreachable_blocks = body + .basic_blocks + .iter_enumerated() + .filter(|(_, bb)| { + // CfgSimplifier::simplify leaves behind some unreachable basic blocks without a + // terminator. Those blocks will be deleted by remove_dead_blocks, but we run just + // before then so we need to handle missing terminators. + // We also need to prevent confusing cleanup and non-cleanup blocks. In practice we + // don't emit empty unreachable cleanup blocks, so this simple check suffices. 
+ bb.terminator.is_some() && bb.is_empty_unreachable() && !bb.is_cleanup + }) + .map(|(block, _)| block) + .collect::<FxIndexSet<_>>(); + + if unreachable_blocks.len() > 1 { + OptApplier { tcx, duplicates: unreachable_blocks }.visit_body(body); + } +} + pub fn remove_dead_blocks<'tcx>(tcx: TyCtxt<'tcx>, body: &mut Body<'tcx>) { let reachable = traversal::reachable_as_bitset(body); let num_blocks = body.basic_blocks.len(); @@ -325,8 +369,8 @@ pub fn remove_dead_blocks<'tcx>(tcx: TyCtxt<'tcx>, body: &mut Body<'tcx>) { /// instances in a single body, so the strategy described above is applied to /// coverage counters from each instance individually. fn save_unreachable_coverage( - basic_blocks: &mut IndexVec<BasicBlock, BasicBlockData<'_>>, - source_scopes: &IndexVec<SourceScope, SourceScopeData<'_>>, + basic_blocks: &mut IndexSlice<BasicBlock, BasicBlockData<'_>>, + source_scopes: &IndexSlice<SourceScope, SourceScopeData<'_>>, first_dead_block: usize, ) { // Identify instances that still have some live coverage counters left. @@ -445,7 +489,7 @@ fn make_local_map<V>( local_decls: &mut IndexVec<Local, V>, used_locals: &UsedLocals, ) -> IndexVec<Local, Option<Local>> { - let mut map: IndexVec<Local, Option<Local>> = IndexVec::from_elem(None, &*local_decls); + let mut map: IndexVec<Local, Option<Local>> = IndexVec::from_elem(None, local_decls); let mut used = Local::new(0); for alive_index in local_decls.indices() { @@ -525,6 +569,7 @@ impl<'tcx> Visitor<'tcx> for UsedLocals { | StatementKind::Retag(..) | StatementKind::Coverage(..) | StatementKind::FakeRead(..) + | StatementKind::PlaceMention(..) | StatementKind::AscribeUserType(..) => { self.super_statement(statement, location); } diff --git a/compiler/rustc_mir_transform/src/sroa.rs b/compiler/rustc_mir_transform/src/sroa.rs index 13168e9a2..c798bd053 100644 --- a/compiler/rustc_mir_transform/src/sroa.rs +++ b/compiler/rustc_mir_transform/src/sroa.rs @@ -4,8 +4,9 @@ use rustc_index::vec::IndexVec; use rustc_middle::mir::patch::MirPatch; use rustc_middle::mir::visit::*; use rustc_middle::mir::*; -use rustc_middle::ty::{Ty, TyCtxt}; +use rustc_middle::ty::{self, Ty, TyCtxt}; use rustc_mir_dataflow::value_analysis::{excluded_locals, iter_fields}; +use rustc_target::abi::FieldIdx; pub struct ScalarReplacementOfAggregates; @@ -18,11 +19,12 @@ impl<'tcx> MirPass<'tcx> for ScalarReplacementOfAggregates { fn run_pass(&self, tcx: TyCtxt<'tcx>, body: &mut Body<'tcx>) { debug!(def_id = ?body.source.def_id()); let mut excluded = excluded_locals(body); + let param_env = tcx.param_env_reveal_all_normalized(body.source.def_id()); loop { debug!(?excluded); let escaping = escaping_locals(&excluded, body); debug!(?escaping); - let replacements = compute_flattening(tcx, body, escaping); + let replacements = compute_flattening(tcx, param_env, body, escaping); debug!(?replacements); let all_dead_locals = replace_flattened_locals(tcx, body, replacements); if !all_dead_locals.is_empty() { @@ -114,7 +116,7 @@ fn escaping_locals(excluded: &BitSet<Local>, body: &Body<'_>) -> BitSet<Local> { struct ReplacementMap<'tcx> { /// Pre-computed list of all "new" locals for each "old" local. This is used to expand storage /// and deinit statement and debuginfo. 
- fragments: IndexVec<Local, Option<IndexVec<Field, Option<(Ty<'tcx>, Local)>>>>, + fragments: IndexVec<Local, Option<IndexVec<FieldIdx, Option<(Ty<'tcx>, Local)>>>>, } impl<'tcx> ReplacementMap<'tcx> { @@ -128,7 +130,7 @@ impl<'tcx> ReplacementMap<'tcx> { fn place_fragments( &self, place: Place<'tcx>, - ) -> Option<impl Iterator<Item = (Field, Ty<'tcx>, Local)> + '_> { + ) -> Option<impl Iterator<Item = (FieldIdx, Ty<'tcx>, Local)> + '_> { let local = place.as_local()?; let fields = self.fragments[local].as_ref()?; Some(fields.iter_enumerated().filter_map(|(field, &opt_ty_local)| { @@ -144,6 +146,7 @@ impl<'tcx> ReplacementMap<'tcx> { /// The replacement will be done later in `ReplacementVisitor`. fn compute_flattening<'tcx>( tcx: TyCtxt<'tcx>, + param_env: ty::ParamEnv<'tcx>, body: &mut Body<'tcx>, escaping: BitSet<Local>, ) -> ReplacementMap<'tcx> { @@ -155,7 +158,7 @@ fn compute_flattening<'tcx>( } let decl = body.local_decls[local].clone(); let ty = decl.ty; - iter_fields(ty, tcx, |variant, field, field_ty| { + iter_fields(ty, tcx, param_env, |variant, field, field_ty| { if variant.is_some() { // Downcasts are currently not supported. return; diff --git a/compiler/rustc_mir_transform/src/ssa.rs b/compiler/rustc_mir_transform/src/ssa.rs index c1e7f62de..be026402d 100644 --- a/compiler/rustc_mir_transform/src/ssa.rs +++ b/compiler/rustc_mir_transform/src/ssa.rs @@ -1,7 +1,7 @@ use either::Either; use rustc_data_structures::graph::dominators::Dominators; use rustc_index::bit_set::BitSet; -use rustc_index::vec::IndexVec; +use rustc_index::vec::{IndexSlice, IndexVec}; use rustc_middle::middle::resolve_bound_vars::Set1; use rustc_middle::mir::visit::*; use rustc_middle::mir::*; @@ -53,7 +53,7 @@ impl SsaLocals { body: &Body<'tcx>, borrowed_locals: &BitSet<Local>, ) -> SsaLocals { - let assignment_order = Vec::new(); + let assignment_order = Vec::with_capacity(body.local_decls.len()); let assignments = IndexVec::from_elem(Set1::Empty, &body.local_decls); let dominators = @@ -135,7 +135,7 @@ impl SsaLocals { /// _d => _a // transitively through _c /// /// Exception: we do not see through the return place, as it cannot be substituted. - pub fn copy_classes(&self) -> &IndexVec<Local, Local> { + pub fn copy_classes(&self) -> &IndexSlice<Local, Local> { &self.copy_classes } @@ -179,12 +179,34 @@ struct SsaVisitor { assignment_order: Vec<Local>, } +impl SsaVisitor { + fn check_assignment_dominates(&mut self, local: Local, loc: Location) { + let set = &mut self.assignments[local]; + let assign_dominates = match *set { + Set1::Empty | Set1::Many => false, + Set1::One(LocationExtended::Arg) => true, + Set1::One(LocationExtended::Plain(assign)) => { + assign.successor_within_block().dominates(loc, &self.dominators) + } + }; + // We are visiting a use that is not dominated by an assignment. + // Either there is a cycle involved, or we are reading for uninitialized local. + // Bail out. + if !assign_dominates { + *set = Set1::Many; + } + } +} + impl<'tcx> Visitor<'tcx> for SsaVisitor { fn visit_local(&mut self, local: Local, ctxt: PlaceContext, loc: Location) { match ctxt { PlaceContext::MutatingUse(MutatingUseContext::Store) => { self.assignments[local].insert(LocationExtended::Plain(loc)); - self.assignment_order.push(local); + if let Set1::One(_) = self.assignments[local] { + // Only record if SSA-like, to avoid growing the vector needlessly. + self.assignment_order.push(local); + } } // Anything can happen with raw pointers, so remove them. 
PlaceContext::NonMutatingUse(NonMutatingUseContext::AddressOf) @@ -192,24 +214,26 @@ impl<'tcx> Visitor<'tcx> for SsaVisitor { // Immutable borrows are taken into account in `SsaLocals::new` by // removing non-freeze locals. PlaceContext::NonMutatingUse(_) => { - let set = &mut self.assignments[local]; - let assign_dominates = match *set { - Set1::Empty | Set1::Many => false, - Set1::One(LocationExtended::Arg) => true, - Set1::One(LocationExtended::Plain(assign)) => { - assign.successor_within_block().dominates(loc, &self.dominators) - } - }; - // We are visiting a use that is not dominated by an assignment. - // Either there is a cycle involved, or we are reading for uninitialized local. - // Bail out. - if !assign_dominates { - *set = Set1::Many; - } + self.check_assignment_dominates(local, loc); } PlaceContext::NonUse(_) => {} } } + + fn visit_place(&mut self, place: &Place<'tcx>, ctxt: PlaceContext, loc: Location) { + if place.projection.first() == Some(&PlaceElem::Deref) { + // Do not do anything for storage statements and debuginfo. + if ctxt.is_use() { + // A use through a `deref` only reads from the local, and cannot write to it. + let new_ctxt = PlaceContext::NonMutatingUse(NonMutatingUseContext::Projection); + + self.visit_projection(place.as_ref(), new_ctxt, loc); + self.check_assignment_dominates(place.local, loc); + } + return; + } + self.super_place(place, ctxt, loc); + } } #[instrument(level = "trace", skip(ssa, body))] diff --git a/compiler/rustc_mir_transform/src/unreachable_prop.rs b/compiler/rustc_mir_transform/src/unreachable_prop.rs index d4b1cfe43..bd1724bf8 100644 --- a/compiler/rustc_mir_transform/src/unreachable_prop.rs +++ b/compiler/rustc_mir_transform/src/unreachable_prop.rs @@ -99,7 +99,7 @@ where // // This generates a `switchInt() -> [0: 0, 1: 1, otherwise: unreachable]`, which allows us or LLVM to // turn it into just `x` later. Without the unreachable, such a transformation would be illegal. - // If the otherwise branch is unreachable, we can delete all other unreacahble targets, as they will + // If the otherwise branch is unreachable, we can delete all other unreachable targets, as they will // still point to the unreachable and therefore not lose reachability information. let reachable_iter = targets.iter().filter(|(_, bb)| !is_unreachable(*bb)); |
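(Editorial aside closing this section: the new `combine_duplicate_switch_targets` in instcombine above and the unreachable-propagation comment just before this point rest on the same observation: an explicit `SwitchInt` target that behaves exactly like the `otherwise` branch carries no information and can be folded into it. A small, self-contained sketch of that filtering step, on a toy target list rather than the real `SwitchTargets` API.)

// A toy switch terminator: (value, target block) pairs plus an `otherwise`
// block. The real passes work on `SwitchTargets`; this is only an illustration.
struct ToySwitch {
    targets: Vec<(u128, usize)>,
    otherwise: usize,
}

// Drop every explicit target that just jumps to `otherwise`; the switch
// behaves identically but is smaller and easier to simplify further.
fn fold_duplicate_targets(switch: &mut ToySwitch) {
    let otherwise = switch.otherwise;
    if switch.targets.iter().any(|&(_, bb)| bb == otherwise) {
        switch.targets.retain(|&(_, bb)| bb != otherwise);
    }
}

fn main() {
    let mut sw = ToySwitch { targets: vec![(0, 1), (1, 7), (2, 7)], otherwise: 7 };
    fold_duplicate_targets(&mut sw);
    assert_eq!(sw.targets, vec![(0, 1)]);
}

In the unreachable-propagation case the `otherwise` block is the unreachable one, so dropping the other unreachable targets preserves behaviour while still exposing the unreachability information to later passes and to LLVM.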