Diffstat (limited to 'compiler/rustc_middle/src/mir')
23 files changed, 377 insertions, 448 deletions
diff --git a/compiler/rustc_middle/src/mir/basic_blocks.rs b/compiler/rustc_middle/src/mir/basic_blocks.rs
index 752cbdeae..b93871769 100644
--- a/compiler/rustc_middle/src/mir/basic_blocks.rs
+++ b/compiler/rustc_middle/src/mir/basic_blocks.rs
@@ -1,41 +1,46 @@
-use crate::mir::graph_cyclic_cache::GraphIsCyclicCache;
-use crate::mir::predecessors::{PredecessorCache, Predecessors};
-use crate::mir::switch_sources::{SwitchSourceCache, SwitchSources};
-use crate::mir::traversal::PostorderCache;
-use crate::mir::{BasicBlock, BasicBlockData, Successors, START_BLOCK};
+use crate::mir::traversal::Postorder;
+use crate::mir::{BasicBlock, BasicBlockData, Successors, Terminator, TerminatorKind, START_BLOCK};
+use rustc_data_structures::fx::FxHashMap;
 use rustc_data_structures::graph;
 use rustc_data_structures::graph::dominators::{dominators, Dominators};
+use rustc_data_structures::stable_hasher::{HashStable, StableHasher};
+use rustc_data_structures::sync::OnceCell;
 use rustc_index::vec::IndexVec;
+use rustc_serialize::{Decodable, Decoder, Encodable, Encoder};
+use smallvec::SmallVec;
 
 #[derive(Clone, TyEncodable, TyDecodable, Debug, HashStable, TypeFoldable, TypeVisitable)]
 pub struct BasicBlocks<'tcx> {
     basic_blocks: IndexVec<BasicBlock, BasicBlockData<'tcx>>,
-    predecessor_cache: PredecessorCache,
-    switch_source_cache: SwitchSourceCache,
-    is_cyclic: GraphIsCyclicCache,
-    postorder_cache: PostorderCache,
+    cache: Cache,
+}
+
+// Typically 95%+ of basic blocks have 4 or fewer predecessors.
+pub type Predecessors = IndexVec<BasicBlock, SmallVec<[BasicBlock; 4]>>;
+
+pub type SwitchSources = FxHashMap<(BasicBlock, BasicBlock), SmallVec<[Option<u128>; 1]>>;
+
+#[derive(Clone, Default, Debug)]
+struct Cache {
+    predecessors: OnceCell<Predecessors>,
+    switch_sources: OnceCell<SwitchSources>,
+    is_cyclic: OnceCell<bool>,
+    postorder: OnceCell<Vec<BasicBlock>>,
 }
 
 impl<'tcx> BasicBlocks<'tcx> {
     #[inline]
     pub fn new(basic_blocks: IndexVec<BasicBlock, BasicBlockData<'tcx>>) -> Self {
-        BasicBlocks {
-            basic_blocks,
-            predecessor_cache: PredecessorCache::new(),
-            switch_source_cache: SwitchSourceCache::new(),
-            is_cyclic: GraphIsCyclicCache::new(),
-            postorder_cache: PostorderCache::new(),
-        }
+        BasicBlocks { basic_blocks, cache: Cache::default() }
     }
 
     /// Returns true if control-flow graph contains a cycle reachable from the `START_BLOCK`.
     #[inline]
     pub fn is_cfg_cyclic(&self) -> bool {
-        self.is_cyclic.is_cyclic(self)
+        *self.cache.is_cyclic.get_or_init(|| graph::is_cyclic(self))
     }
 
-    #[inline]
     pub fn dominators(&self) -> Dominators<BasicBlock> {
         dominators(&self)
     }
@@ -43,20 +48,46 @@ impl<'tcx> BasicBlocks<'tcx> {
     /// Returns predecessors for each basic block.
     #[inline]
     pub fn predecessors(&self) -> &Predecessors {
-        self.predecessor_cache.compute(&self.basic_blocks)
+        self.cache.predecessors.get_or_init(|| {
+            let mut preds = IndexVec::from_elem(SmallVec::new(), &self.basic_blocks);
+            for (bb, data) in self.basic_blocks.iter_enumerated() {
+                if let Some(term) = &data.terminator {
+                    for succ in term.successors() {
+                        preds[succ].push(bb);
+                    }
+                }
+            }
+            preds
+        })
     }
 
     /// Returns basic blocks in a postorder.
     #[inline]
     pub fn postorder(&self) -> &[BasicBlock] {
-        self.postorder_cache.compute(&self.basic_blocks)
+        self.cache.postorder.get_or_init(|| {
+            Postorder::new(&self.basic_blocks, START_BLOCK).map(|(bb, _)| bb).collect()
+        })
     }
 
     /// `switch_sources()[&(target, switch)]` returns a list of switch
     /// values that lead to a `target` block from a `switch` block.
     #[inline]
     pub fn switch_sources(&self) -> &SwitchSources {
-        self.switch_source_cache.compute(&self.basic_blocks)
+        self.cache.switch_sources.get_or_init(|| {
+            let mut switch_sources: SwitchSources = FxHashMap::default();
+            for (bb, data) in self.basic_blocks.iter_enumerated() {
+                if let Some(Terminator {
+                    kind: TerminatorKind::SwitchInt { targets, .. }, ..
+                }) = &data.terminator
+                {
+                    for (value, target) in targets.iter() {
+                        switch_sources.entry((target, bb)).or_default().push(Some(value));
+                    }
+                    switch_sources.entry((targets.otherwise(), bb)).or_default().push(None);
+                }
+            }
+            switch_sources
+        })
     }
 
     /// Returns mutable reference to basic blocks. Invalidates CFG cache.
@@ -88,10 +119,7 @@ impl<'tcx> BasicBlocks<'tcx> {
     /// All other methods that allow you to mutate the basic blocks also call this method
     /// themselves, thereby avoiding any risk of accidental cache invalidation.
     pub fn invalidate_cfg_cache(&mut self) {
-        self.predecessor_cache.invalidate();
-        self.switch_source_cache.invalidate();
-        self.is_cyclic.invalidate();
-        self.postorder_cache.invalidate();
+        self.cache = Cache::default();
     }
 }
 
@@ -145,3 +173,24 @@ impl<'tcx> graph::WithPredecessors for BasicBlocks<'tcx> {
         self.predecessors()[node].iter().copied()
     }
 }
+
+TrivialTypeTraversalAndLiftImpls! {
+    Cache,
+}
+
+impl<S: Encoder> Encodable<S> for Cache {
+    #[inline]
+    fn encode(&self, _s: &mut S) {}
+}
+
+impl<D: Decoder> Decodable<D> for Cache {
+    #[inline]
+    fn decode(_: &mut D) -> Self {
+        Default::default()
+    }
+}
+
+impl<CTX> HashStable<CTX> for Cache {
+    #[inline]
+    fn hash_stable(&self, _: &mut CTX, _: &mut StableHasher) {}
+}
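The whole commit hinges on the pattern visible in this first file: four hand-rolled cache types collapse into one `Cache` struct of `OnceCell` fields, initialized lazily via `get_or_init` and invalidated by reassigning a fresh default `Cache`. A minimal self-contained sketch of the same pattern (standalone types, with `std::cell::OnceCell` — stable since Rust 1.70 — standing in for `rustc_data_structures::sync::OnceCell`; this is not the rustc code itself):

```rust
use std::cell::OnceCell;

/// A tiny graph with a lazily computed, memoized predecessor map,
/// mirroring the `BasicBlocks`/`Cache` pattern above.
struct Graph {
    successors: Vec<Vec<usize>>,              // successors[node] = outgoing edges
    predecessors: OnceCell<Vec<Vec<usize>>>,  // computed on first use
}

impl Graph {
    fn new(successors: Vec<Vec<usize>>) -> Self {
        Graph { successors, predecessors: OnceCell::new() }
    }

    /// Computed at most once; later calls return the cached map.
    fn predecessors(&self) -> &Vec<Vec<usize>> {
        self.predecessors.get_or_init(|| {
            let mut preds = vec![Vec::new(); self.successors.len()];
            for (node, succs) in self.successors.iter().enumerate() {
                for &succ in succs {
                    preds[succ].push(node);
                }
            }
            preds
        })
    }

    /// Any mutation must drop the cache, like `invalidate_cfg_cache` above.
    fn successors_mut(&mut self) -> &mut Vec<Vec<usize>> {
        self.predecessors = OnceCell::new();
        &mut self.successors
    }
}

fn main() {
    let mut g = Graph::new(vec![vec![1, 2], vec![2], vec![]]);
    assert_eq!(g.predecessors()[2], vec![0, 1]);
    g.successors_mut()[2].push(0); // invalidates the cache
    assert_eq!(g.predecessors()[0], vec![2]); // recomputed lazily
}
```

Because invalidation takes `&mut self`, no synchronization is needed: exclusive access to the graph implies exclusive access to the cache, which is exactly the argument the deleted comments in the old cache files below make.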
diff --git a/compiler/rustc_middle/src/mir/coverage.rs b/compiler/rustc_middle/src/mir/coverage.rs
index e7bb3ab0b..db24dae11 100644
--- a/compiler/rustc_middle/src/mir/coverage.rs
+++ b/compiler/rustc_middle/src/mir/coverage.rs
@@ -135,7 +135,10 @@ impl Debug for CoverageKind {
                 "Expression({:?}) = {} {} {}",
                 id.index(),
                 lhs.index(),
-                if *op == Op::Add { "+" } else { "-" },
+                match op {
+                    Op::Add => "+",
+                    Op::Subtract => "-",
+                },
                 rhs.index(),
             ),
             Unreachable => write!(fmt, "Unreachable"),
diff --git a/compiler/rustc_middle/src/mir/graph_cyclic_cache.rs b/compiler/rustc_middle/src/mir/graph_cyclic_cache.rs
deleted file mode 100644
index f97bf2883..000000000
--- a/compiler/rustc_middle/src/mir/graph_cyclic_cache.rs
+++ /dev/null
@@ -1,63 +0,0 @@
-use rustc_data_structures::graph::{
-    self, DirectedGraph, WithNumNodes, WithStartNode, WithSuccessors,
-};
-use rustc_data_structures::stable_hasher::{HashStable, StableHasher};
-use rustc_data_structures::sync::OnceCell;
-use rustc_serialize::{Decodable, Decoder, Encodable, Encoder};
-
-/// Helper type to cache the result of `graph::is_cyclic`.
-#[derive(Clone, Debug)]
-pub(super) struct GraphIsCyclicCache {
-    cache: OnceCell<bool>,
-}
-
-impl GraphIsCyclicCache {
-    #[inline]
-    pub(super) fn new() -> Self {
-        GraphIsCyclicCache { cache: OnceCell::new() }
-    }
-
-    pub(super) fn is_cyclic<G>(&self, graph: &G) -> bool
-    where
-        G: ?Sized + DirectedGraph + WithStartNode + WithSuccessors + WithNumNodes,
-    {
-        *self.cache.get_or_init(|| graph::is_cyclic(graph))
-    }
-
-    /// Invalidates the cache.
-    #[inline]
-    pub(super) fn invalidate(&mut self) {
-        // Invalidating the cache requires mutating the MIR, which in turn requires a unique
-        // reference (`&mut`) to the `mir::Body`. Because of this, we can assume that all
-        // callers of `invalidate` have a unique reference to the MIR and thus to the
-        // cache. This means we never need to do synchronization when `invalidate` is called,
-        // we can simply reinitialize the `OnceCell`.
-        self.cache = OnceCell::new();
-    }
-}
-
-impl<S: Encoder> Encodable<S> for GraphIsCyclicCache {
-    #[inline]
-    fn encode(&self, s: &mut S) {
-        Encodable::encode(&(), s);
-    }
-}
-
-impl<D: Decoder> Decodable<D> for GraphIsCyclicCache {
-    #[inline]
-    fn decode(d: &mut D) -> Self {
-        let () = Decodable::decode(d);
-        Self::new()
-    }
-}
-
-impl<CTX> HashStable<CTX> for GraphIsCyclicCache {
-    #[inline]
-    fn hash_stable(&self, _: &mut CTX, _: &mut StableHasher) {
-        // do nothing
-    }
-}
-
-TrivialTypeTraversalAndLiftImpls! {
-    GraphIsCyclicCache,
-}
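With this file gone, `is_cfg_cyclic` calls `graph::is_cyclic` directly. For readers unfamiliar with it, a cycle check over a successor list amounts to a DFS that looks for an edge back into the current DFS stack — a rough standalone sketch (rustc's real implementation is iterative and works over its graph traits):

```rust
/// Returns true if a cycle is reachable from `start`.
fn is_cyclic(successors: &[Vec<usize>], start: usize) -> bool {
    #[derive(Clone, Copy)]
    enum State { Unvisited, InProgress, Done }

    fn dfs(node: usize, successors: &[Vec<usize>], state: &mut [State]) -> bool {
        state[node] = State::InProgress;
        for &succ in &successors[node] {
            match state[succ] {
                State::InProgress => return true, // back edge: cycle found
                State::Unvisited if dfs(succ, successors, state) => return true,
                _ => {} // Done, or Unvisited subtree without a cycle
            }
        }
        state[node] = State::Done;
        false
    }

    let mut state = vec![State::Unvisited; successors.len()];
    dfs(start, successors, &mut state)
}

fn main() {
    assert!(!is_cyclic(&[vec![1], vec![2], vec![]], 0)); // 0 -> 1 -> 2
    assert!(is_cyclic(&[vec![1], vec![2], vec![0]], 0)); // ... -> 2 -> 0
}
```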
diff --git a/compiler/rustc_middle/src/mir/graphviz.rs b/compiler/rustc_middle/src/mir/graphviz.rs
index 5de56dad0..cf6d46e1e 100644
--- a/compiler/rustc_middle/src/mir/graphviz.rs
+++ b/compiler/rustc_middle/src/mir/graphviz.rs
@@ -110,7 +110,7 @@ fn write_graph_label<'tcx, W: std::fmt::Write>(
         let decl = &body.local_decls[local];
 
         write!(w, "let ")?;
-        if decl.mutability == Mutability::Mut {
+        if decl.mutability.is_mut() {
             write!(w, "mut ")?;
         }
diff --git a/compiler/rustc_middle/src/mir/interpret/allocation.rs b/compiler/rustc_middle/src/mir/interpret/allocation.rs
index 221105ac4..48375ed30 100644
--- a/compiler/rustc_middle/src/mir/interpret/allocation.rs
+++ b/compiler/rustc_middle/src/mir/interpret/allocation.rs
@@ -8,7 +8,8 @@ mod tests;
 use std::borrow::Cow;
 use std::fmt;
 use std::hash;
-use std::ops::Range;
+use std::hash::Hash;
+use std::ops::{Deref, DerefMut, Range};
 use std::ptr;
 
 use either::{Left, Right};
@@ -29,6 +30,39 @@ use provenance_map::*;
 
 pub use init_mask::{InitChunk, InitChunkIter};
 
+/// Functionality required for the bytes of an `Allocation`.
+pub trait AllocBytes:
+    Clone + fmt::Debug + Eq + PartialEq + Hash + Deref<Target = [u8]> + DerefMut<Target = [u8]>
+{
+    /// Adjust the bytes to the specified alignment -- by default, this is a no-op.
+    fn adjust_to_align(self, _align: Align) -> Self;
+
+    /// Create an `AllocBytes` from a slice of `u8`.
+    fn from_bytes<'a>(slice: impl Into<Cow<'a, [u8]>>, _align: Align) -> Self;
+
+    /// Create a zeroed `AllocBytes` of the specified size and alignment;
+    /// call the callback error handler if there is an error in allocating the memory.
+    fn zeroed(size: Size, _align: Align) -> Option<Self>;
+}
+
+// Default `bytes` for `Allocation` is a `Box<[u8]>`.
+impl AllocBytes for Box<[u8]> {
+    fn adjust_to_align(self, _align: Align) -> Self {
+        self
+    }
+
+    fn from_bytes<'a>(slice: impl Into<Cow<'a, [u8]>>, _align: Align) -> Self {
+        Box::<[u8]>::from(slice.into())
+    }
+
+    fn zeroed(size: Size, _align: Align) -> Option<Self> {
+        let bytes = Box::<[u8]>::try_new_zeroed_slice(size.bytes_usize()).ok()?;
+        // SAFETY: the box was zero-allocated, which is a valid initial value for Box<[u8]>
+        let bytes = unsafe { bytes.assume_init() };
+        Some(bytes)
+    }
+}
+
 /// This type represents an Allocation in the Miri/CTFE core engine.
 ///
 /// Its public API is rather low-level, working directly with allocation offsets and a custom error
@@ -38,10 +72,10 @@ pub use init_mask::{InitChunk, InitChunkIter};
 // hashed. (see the `Hash` impl below for more details), so the impl is not derived.
 #[derive(Clone, Eq, PartialEq, TyEncodable, TyDecodable)]
 #[derive(HashStable)]
-pub struct Allocation<Prov: Provenance = AllocId, Extra = ()> {
+pub struct Allocation<Prov: Provenance = AllocId, Extra = (), Bytes = Box<[u8]>> {
     /// The actual bytes of the allocation.
     /// Note that the bytes of a pointer represent the offset of the pointer.
-    bytes: Box<[u8]>,
+    bytes: Bytes,
     /// Maps from byte addresses to extra provenance data for each pointer.
     /// Only the first byte of a pointer is inserted into the map; i.e.,
     /// every entry in this map applies to `pointer_size` consecutive bytes starting
@@ -220,14 +254,27 @@ impl AllocRange {
 }
 
 // The constructors are all without extra; the extra gets added by a machine hook later.
-impl<Prov: Provenance> Allocation<Prov> {
+impl<Prov: Provenance, Bytes: AllocBytes> Allocation<Prov, (), Bytes> {
+    /// Creates an allocation from an existing `Bytes` value - this is needed for miri FFI support
+    pub fn from_raw_bytes(bytes: Bytes, align: Align, mutability: Mutability) -> Self {
+        let size = Size::from_bytes(bytes.len());
+        Self {
+            bytes,
+            provenance: ProvenanceMap::new(),
+            init_mask: InitMask::new(size, true),
+            align,
+            mutability,
+            extra: (),
+        }
+    }
+
     /// Creates an allocation initialized by the given bytes
     pub fn from_bytes<'a>(
         slice: impl Into<Cow<'a, [u8]>>,
         align: Align,
         mutability: Mutability,
     ) -> Self {
-        let bytes = Box::<[u8]>::from(slice.into());
+        let bytes = Bytes::from_bytes(slice, align);
         let size = Size::from_bytes(bytes.len());
         Self {
             bytes,
@@ -248,7 +295,7 @@ impl<Prov: Provenance> Allocation<Prov> {
     ///
     /// If `panic_on_fail` is true, this will never return `Err`.
     pub fn uninit<'tcx>(size: Size, align: Align, panic_on_fail: bool) -> InterpResult<'tcx, Self> {
-        let bytes = Box::<[u8]>::try_new_zeroed_slice(size.bytes_usize()).map_err(|_| {
+        let bytes = Bytes::zeroed(size, align).ok_or_else(|| {
             // This results in an error that can happen non-deterministically, since the memory
             // available to the compiler can change between runs. Normally queries are always
             // deterministic. However, we can be non-deterministic here because all uses of const
@@ -262,8 +309,7 @@ impl<Prov: Provenance> Allocation<Prov> {
             });
             InterpError::ResourceExhaustion(ResourceExhaustionInfo::MemoryExhausted)
         })?;
-        // SAFETY: the box was zero-allocated, which is a valid initial value for Box<[u8]>
-        let bytes = unsafe { bytes.assume_init() };
+
         Ok(Allocation {
             bytes,
             provenance: ProvenanceMap::new(),
@@ -275,7 +321,7 @@ impl<Prov: Provenance> Allocation<Prov> {
     }
 }
 
-impl Allocation {
+impl<Bytes: AllocBytes> Allocation<AllocId, (), Bytes> {
     /// Adjust allocation from the ones in tcx to a custom Machine instance
     /// with a different Provenance and Extra type.
     pub fn adjust_from_tcx<Prov: Provenance, Extra, Err>(
@@ -283,9 +329,11 @@ impl Allocation {
         cx: &impl HasDataLayout,
         extra: Extra,
         mut adjust_ptr: impl FnMut(Pointer<AllocId>) -> Result<Pointer<Prov>, Err>,
-    ) -> Result<Allocation<Prov, Extra>, Err> {
-        // Compute new pointer provenance, which also adjusts the bytes.
-        let mut bytes = self.bytes;
+    ) -> Result<Allocation<Prov, Extra, Bytes>, Err> {
+        // Compute new pointer provenance, which also adjusts the bytes, and realign the pointer if
+        // necessary.
+        let mut bytes = self.bytes.adjust_to_align(self.align);
+
         let mut new_provenance = Vec::with_capacity(self.provenance.ptrs().len());
         let ptr_size = cx.data_layout().pointer_size.bytes_usize();
         let endian = cx.data_layout().endian;
@@ -311,7 +359,7 @@ impl Allocation {
 }
 
 /// Raw accessors. Provide access to otherwise private bytes.
-impl<Prov: Provenance, Extra> Allocation<Prov, Extra> {
+impl<Prov: Provenance, Extra, Bytes: AllocBytes> Allocation<Prov, Extra, Bytes> {
     pub fn len(&self) -> usize {
         self.bytes.len()
     }
@@ -340,7 +388,11 @@ impl<Prov: Provenance, Extra> Allocation<Prov, Extra> {
 }
 
 /// Byte accessors.
-impl<Prov: Provenance, Extra> Allocation<Prov, Extra> {
+impl<Prov: Provenance, Extra, Bytes: AllocBytes> Allocation<Prov, Extra, Bytes> {
+    pub fn base_addr(&self) -> *const u8 {
+        self.bytes.as_ptr()
+    }
+
     /// This is the entirely abstraction-violating way to just grab the raw bytes without
     /// caring about provenance or initialization.
     ///
@@ -412,7 +464,7 @@ impl<Prov: Provenance, Extra> Allocation<Prov, Extra> {
 }
 
 /// Reading and writing.
-impl<Prov: Provenance, Extra> Allocation<Prov, Extra> {
+impl<Prov: Provenance, Extra, Bytes: AllocBytes> Allocation<Prov, Extra, Bytes> {
     /// Sets the init bit for the given range.
     fn mark_init(&mut self, range: AllocRange, is_init: bool) {
         if range.size.bytes() == 0 {
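The new `AllocBytes` trait decouples `Allocation` from `Box<[u8]>` so that an interpreter Machine (e.g. Miri, for FFI) can supply its own byte storage. A reduced sketch of the same design — a container generic over any `Deref<Target = [u8]>` storage, with `Size`/`Align` collapsed to plain integers and all names hypothetical, not rustc's:

```rust
use std::borrow::Cow;
use std::ops::{Deref, DerefMut};

/// Reduced analogue of `AllocBytes` (the real trait also requires
/// Clone/Debug/Eq/Hash and uses the `Size`/`Align` newtypes).
trait Bytes: Deref<Target = [u8]> + DerefMut<Target = [u8]> + Sized {
    fn from_bytes(slice: Cow<'_, [u8]>, align: u64) -> Self;
    fn zeroed(size: usize, align: u64) -> Option<Self>;
}

impl Bytes for Box<[u8]> {
    fn from_bytes(slice: Cow<'_, [u8]>, _align: u64) -> Self {
        slice.into_owned().into_boxed_slice()
    }
    fn zeroed(size: usize, _align: u64) -> Option<Self> {
        // A real implementation may fail on exhaustion and return None,
        // as the fallible `try_new_zeroed_slice` path above does.
        Some(vec![0u8; size].into_boxed_slice())
    }
}

/// Like `Allocation<Prov, Extra, Bytes>`: generic over its byte storage,
/// defaulting to `Box<[u8]>` so existing users are unaffected.
struct Alloc<B: Bytes = Box<[u8]>> {
    bytes: B,
}

impl<B: Bytes> Alloc<B> {
    fn from_bytes(data: &[u8]) -> Self {
        Alloc { bytes: B::from_bytes(Cow::Borrowed(data), 1) }
    }
    /// Reads go through `Deref<Target = [u8]>`, so the storage type
    /// never matters to callers.
    fn len(&self) -> usize {
        self.bytes.len()
    }
}

fn main() {
    let a: Alloc = Alloc::from_bytes(b"hello");
    assert_eq!(a.len(), 5);
}
```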
diff --git a/compiler/rustc_middle/src/mir/interpret/error.rs b/compiler/rustc_middle/src/mir/interpret/error.rs
index bd9cd53e1..c5137cf06 100644
--- a/compiler/rustc_middle/src/mir/interpret/error.rs
+++ b/compiler/rustc_middle/src/mir/interpret/error.rs
@@ -323,7 +323,7 @@ impl fmt::Display for UndefinedBehaviorInfo {
                 write!(
                     f,
                     "{msg}{pointer} is a dangling pointer (it has no provenance)",
-                    pointer = Pointer::<Option<AllocId>>::from_addr(*i),
+                    pointer = Pointer::<Option<AllocId>>::from_addr_invalid(*i),
                 )
             }
             AlignmentCheckFailed { required, has } => write!(
@@ -430,8 +430,10 @@ pub enum ResourceExhaustionInfo {
     ///
     /// The exact limit is set by the `const_eval_limit` attribute.
     StepLimitReached,
-    /// There is not enough memory to perform an allocation.
+    /// There is not enough memory (on the host) to perform an allocation.
     MemoryExhausted,
+    /// The address space (of the target) is full.
+    AddressSpaceFull,
 }
 
 impl fmt::Display for ResourceExhaustionInfo {
@@ -447,6 +449,9 @@ impl fmt::Display for ResourceExhaustionInfo {
             MemoryExhausted => {
                 write!(f, "tried to allocate more memory than available to compiler")
             }
+            AddressSpaceFull => {
+                write!(f, "there are no more free addresses in the address space")
+            }
         }
     }
 }
diff --git a/compiler/rustc_middle/src/mir/interpret/mod.rs b/compiler/rustc_middle/src/mir/interpret/mod.rs
index 5f425a287..1766d7a66 100644
--- a/compiler/rustc_middle/src/mir/interpret/mod.rs
+++ b/compiler/rustc_middle/src/mir/interpret/mod.rs
@@ -110,7 +110,7 @@ use rustc_hir::def_id::DefId;
 use rustc_macros::HashStable;
 use rustc_middle::ty::print::with_no_trimmed_paths;
 use rustc_serialize::{Decodable, Encodable};
-use rustc_target::abi::Endian;
+use rustc_target::abi::{AddressSpace, Endian, HasDataLayout};
 
 use crate::mir;
 use crate::ty::codec::{TyDecoder, TyEncoder};
@@ -127,8 +127,8 @@ pub use self::error::{
 pub use self::value::{get_slice_bytes, ConstAlloc, ConstValue, Scalar};
 
 pub use self::allocation::{
-    alloc_range, AllocError, AllocRange, AllocResult, Allocation, ConstAllocation, InitChunk,
-    InitChunkIter,
+    alloc_range, AllocBytes, AllocError, AllocRange, AllocResult, Allocation, ConstAllocation,
+    InitChunk, InitChunkIter,
 };
 
 pub use self::pointer::{Pointer, PointerArithmetic, Provenance};
@@ -438,6 +438,17 @@ impl<'tcx> GlobalAlloc<'tcx> {
             _ => bug!("expected vtable, got {:?}", self),
         }
     }
+
+    /// The address space that this `GlobalAlloc` should be placed in.
+    #[inline]
+    pub fn address_space(&self, cx: &impl HasDataLayout) -> AddressSpace {
+        match self {
+            GlobalAlloc::Function(..) => cx.data_layout().instruction_address_space,
+            GlobalAlloc::Static(..) | GlobalAlloc::Memory(..) | GlobalAlloc::VTable(..) => {
+                AddressSpace::DATA
+            }
+        }
+    }
 }
 
 pub(crate) struct AllocMap<'tcx> {
diff --git a/compiler/rustc_middle/src/mir/interpret/pointer.rs b/compiler/rustc_middle/src/mir/interpret/pointer.rs
index b08309910..60927eed8 100644
--- a/compiler/rustc_middle/src/mir/interpret/pointer.rs
+++ b/compiler/rustc_middle/src/mir/interpret/pointer.rs
@@ -19,29 +19,29 @@ pub trait PointerArithmetic: HasDataLayout {
 
     #[inline(always)]
     fn max_size_of_val(&self) -> Size {
-        Size::from_bytes(self.machine_isize_max())
+        Size::from_bytes(self.target_isize_max())
     }
 
     #[inline]
-    fn machine_usize_max(&self) -> u64 {
+    fn target_usize_max(&self) -> u64 {
         self.pointer_size().unsigned_int_max().try_into().unwrap()
     }
 
     #[inline]
-    fn machine_isize_min(&self) -> i64 {
+    fn target_isize_min(&self) -> i64 {
         self.pointer_size().signed_int_min().try_into().unwrap()
     }
 
     #[inline]
-    fn machine_isize_max(&self) -> i64 {
+    fn target_isize_max(&self) -> i64 {
         self.pointer_size().signed_int_max().try_into().unwrap()
     }
 
     #[inline]
-    fn machine_usize_to_isize(&self, val: u64) -> i64 {
+    fn target_usize_to_isize(&self, val: u64) -> i64 {
         let val = val as i64;
         // Now wrap-around into the machine_isize range.
-        if val > self.machine_isize_max() {
+        if val > self.target_isize_max() {
             // This can only happen if the ptr size is < 64, so we know max_usize_plus_1 fits into
             // i64.
             debug_assert!(self.pointer_size().bits() < 64);
@@ -76,11 +76,11 @@ pub trait PointerArithmetic: HasDataLayout {
         let n = i.unsigned_abs();
         if i >= 0 {
             let (val, over) = self.overflowing_offset(val, n);
-            (val, over || i > self.machine_isize_max())
+            (val, over || i > self.target_isize_max())
         } else {
             let res = val.overflowing_sub(n);
             let (val, over) = self.truncate_to_ptr(res);
-            (val, over || i < self.machine_isize_min())
+            (val, over || i < self.target_isize_min())
         }
     }
 
@@ -251,14 +251,16 @@ impl<Prov> Pointer<Option<Prov>> {
 }
 
 impl<Prov> Pointer<Option<Prov>> {
+    /// Creates a pointer to the given address, with invalid provenance (i.e., cannot be used for
+    /// any memory access).
     #[inline(always)]
-    pub fn from_addr(addr: u64) -> Self {
+    pub fn from_addr_invalid(addr: u64) -> Self {
        Pointer { provenance: None, offset: Size::from_bytes(addr) }
     }
 
     #[inline(always)]
     pub fn null() -> Self {
-        Pointer::from_addr(0)
+        Pointer::from_addr_invalid(0)
     }
 }
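The `machine_*` → `target_*` renames emphasize that these helpers are sized by the *target's* pointer width, not the host's. The wrap-around logic in `target_usize_to_isize` can be reproduced standalone (a sketch with the pointer width passed explicitly; rustc reads it from the data layout):

```rust
/// Fold a target-pointer-sized unsigned value into the target's signed
/// range. On a 64-bit target the plain `as` cast already wraps; narrower
/// targets need an explicit subtraction of 2^pointer_size_bits.
fn target_usize_to_isize(val: u64, pointer_size_bits: u32) -> i64 {
    let val = val as i64;
    let target_isize_max = if pointer_size_bits == 64 {
        i64::MAX
    } else {
        (1i64 << (pointer_size_bits - 1)) - 1
    };
    if val > target_isize_max {
        // Only reachable when the target pointer is narrower than 64 bits,
        // so 2^pointer_size_bits fits in an i64 (same reasoning as the
        // debug_assert! in the rustc method above).
        val - (1i64 << pointer_size_bits)
    } else {
        val
    }
}

fn main() {
    // On a 16-bit target, the bit pattern 0xFFFF reads back as -1...
    assert_eq!(target_usize_to_isize(0xFFFF, 16), -1);
    // ...while 0x7FFF is still the largest positive isize.
    assert_eq!(target_usize_to_isize(0x7FFF, 16), 0x7FFF);
}
```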
diff --git a/compiler/rustc_middle/src/mir/interpret/queries.rs b/compiler/rustc_middle/src/mir/interpret/queries.rs
index b6c6e9d55..856d821a5 100644
--- a/compiler/rustc_middle/src/mir/interpret/queries.rs
+++ b/compiler/rustc_middle/src/mir/interpret/queries.rs
@@ -2,7 +2,7 @@ use super::{ErrorHandled, EvalToConstValueResult, EvalToValTreeResult, GlobalId}
 
 use crate::mir;
 use crate::ty::subst::InternalSubsts;
-use crate::ty::visit::TypeVisitable;
+use crate::ty::visit::TypeVisitableExt;
 use crate::ty::{self, query::TyCtxtAt, query::TyCtxtEnsure, TyCtxt};
 use rustc_hir::def::DefKind;
 use rustc_hir::def_id::DefId;
diff --git a/compiler/rustc_middle/src/mir/interpret/value.rs b/compiler/rustc_middle/src/mir/interpret/value.rs
index 88fb14eb3..36dbbe4bf 100644
--- a/compiler/rustc_middle/src/mir/interpret/value.rs
+++ b/compiler/rustc_middle/src/mir/interpret/value.rs
@@ -75,8 +75,8 @@ impl<'tcx> ConstValue<'tcx> {
         self.try_to_scalar_int()?.try_into().ok()
     }
 
-    pub fn try_to_machine_usize(&self, tcx: TyCtxt<'tcx>) -> Option<u64> {
-        self.try_to_scalar_int()?.try_to_machine_usize(tcx).ok()
+    pub fn try_to_target_usize(&self, tcx: TyCtxt<'tcx>) -> Option<u64> {
+        self.try_to_scalar_int()?.try_to_target_usize(tcx).ok()
     }
 
     pub fn try_to_bits_for_ty(
@@ -97,8 +97,8 @@ impl<'tcx> ConstValue<'tcx> {
         ConstValue::Scalar(Scalar::from_u64(i))
     }
 
-    pub fn from_machine_usize(i: u64, cx: &impl HasDataLayout) -> Self {
-        ConstValue::Scalar(Scalar::from_machine_usize(i, cx))
+    pub fn from_target_usize(i: u64, cx: &impl HasDataLayout) -> Self {
+        ConstValue::Scalar(Scalar::from_target_usize(i, cx))
     }
 }
 
@@ -241,7 +241,7 @@ impl<Prov> Scalar<Prov> {
     }
 
     #[inline]
-    pub fn from_machine_usize(i: u64, cx: &impl HasDataLayout) -> Self {
+    pub fn from_target_usize(i: u64, cx: &impl HasDataLayout) -> Self {
         Self::from_uint(i, cx.data_layout().pointer_size)
     }
 
@@ -268,7 +268,7 @@ impl<Prov> Scalar<Prov> {
     }
 
     #[inline]
-    pub fn from_machine_isize(i: i64, cx: &impl HasDataLayout) -> Self {
+    pub fn from_target_isize(i: i64, cx: &impl HasDataLayout) -> Self {
         Self::from_int(i, cx.data_layout().pointer_size)
     }
 
@@ -322,7 +322,7 @@ impl<'tcx, Prov: Provenance> Scalar<Prov> {
             Right(ptr) => Ok(ptr.into()),
             Left(bits) => {
                 let addr = u64::try_from(bits).unwrap();
-                Ok(Pointer::from_addr(addr))
+                Ok(Pointer::from_addr_invalid(addr))
             }
         }
     }
@@ -429,7 +429,7 @@ impl<'tcx, Prov: Provenance> Scalar<Prov> {
 
     /// Converts the scalar to produce a machine-pointer-sized unsigned integer.
     /// Fails if the scalar is a pointer.
-    pub fn to_machine_usize(self, cx: &impl HasDataLayout) -> InterpResult<'tcx, u64> {
+    pub fn to_target_usize(self, cx: &impl HasDataLayout) -> InterpResult<'tcx, u64> {
         let b = self.to_uint(cx.data_layout().pointer_size)?;
         Ok(u64::try_from(b).unwrap())
     }
@@ -469,7 +469,7 @@ impl<'tcx, Prov: Provenance> Scalar<Prov> {
 
     /// Converts the scalar to produce a machine-pointer-sized signed integer.
     /// Fails if the scalar is a pointer.
-    pub fn to_machine_isize(self, cx: &impl HasDataLayout) -> InterpResult<'tcx, i64> {
+    pub fn to_target_isize(self, cx: &impl HasDataLayout) -> InterpResult<'tcx, i64> {
         let b = self.to_int(cx.data_layout().pointer_size)?;
         Ok(i64::try_from(b).unwrap())
     }
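Same renaming theme in `Scalar`/`ConstValue`: a "target usize" is an integer of the target's pointer width. A loose standalone sketch of the width dependence behind `to_target_usize` (the real method also enforces that the scalar has exactly pointer size; `fits_target_usize` is a hypothetical helper, not rustc API):

```rust
/// Whether a constant's bits fit in "usize" depends on the *target's*
/// pointer width, not the host's.
fn fits_target_usize(bits: u128, pointer_size_bytes: u64) -> Option<u64> {
    let width = pointer_size_bytes * 8;
    let max = if width >= 64 { u64::MAX as u128 } else { (1u128 << width) - 1 };
    (bits <= max).then(|| bits as u64)
}

fn main() {
    assert_eq!(fits_target_usize(70_000, 2), None);          // 16-bit target: too big
    assert_eq!(fits_target_usize(70_000, 4), Some(70_000));  // 32-bit target: fine
}
```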
diff --git a/compiler/rustc_middle/src/mir/mod.rs b/compiler/rustc_middle/src/mir/mod.rs
index e52b243ec..99cdb769d 100644
--- a/compiler/rustc_middle/src/mir/mod.rs
+++ b/compiler/rustc_middle/src/mir/mod.rs
@@ -9,7 +9,7 @@ use crate::mir::visit::MirVisitable;
 use crate::ty::codec::{TyDecoder, TyEncoder};
 use crate::ty::fold::{FallibleTypeFolder, TypeFoldable};
 use crate::ty::print::{FmtPrinter, Printer};
-use crate::ty::visit::{TypeVisitable, TypeVisitor};
+use crate::ty::visit::{TypeVisitable, TypeVisitableExt, TypeVisitor};
 use crate::ty::{self, DefIdTree, List, Ty, TyCtxt};
 use crate::ty::{AdtDef, InstanceDef, ScalarInt, UserTypeAnnotationIndex};
 use crate::ty::{GenericArg, InternalSubsts, SubstsRef};
@@ -27,7 +27,6 @@ use polonius_engine::Atom;
 pub use rustc_ast::Mutability;
 use rustc_data_structures::fx::FxHashSet;
 use rustc_data_structures::graph::dominators::Dominators;
-use rustc_index::bit_set::BitMatrix;
 use rustc_index::vec::{Idx, IndexVec};
 use rustc_serialize::{Decodable, Encodable};
 use rustc_span::symbol::Symbol;
@@ -47,25 +46,21 @@ mod basic_blocks;
 pub mod coverage;
 mod generic_graph;
 pub mod generic_graphviz;
-mod graph_cyclic_cache;
 pub mod graphviz;
 pub mod interpret;
 pub mod mono;
 pub mod patch;
-mod predecessors;
 pub mod pretty;
 mod query;
 pub mod spanview;
 mod syntax;
 pub use syntax::*;
-mod switch_sources;
 pub mod tcx;
 pub mod terminator;
 pub use terminator::*;
 pub mod traversal;
 mod type_foldable;
-mod type_visitable;
 pub mod visit;
 
 pub use self::generic_graph::graphviz_safe_def_name;
@@ -419,11 +414,7 @@ impl<'tcx> Body<'tcx> {
         (self.arg_count + 1..self.local_decls.len()).filter_map(move |index| {
             let local = Local::new(index);
             let decl = &self.local_decls[local];
-            if decl.is_user_variable() && decl.mutability == Mutability::Mut {
-                Some(local)
-            } else {
-                None
-            }
+            (decl.is_user_variable() && decl.mutability.is_mut()).then_some(local)
         })
     }
 
@@ -712,7 +703,11 @@ pub enum BindingForm<'tcx> {
     RefForGuard,
 }
 
-TrivialTypeTraversalAndLiftImpls! { BindingForm<'tcx>, }
+TrivialTypeTraversalAndLiftImpls! {
+    for<'tcx> {
+        BindingForm<'tcx>,
+    }
+}
 
 mod binding_form_impl {
     use rustc_data_structures::stable_hasher::{HashStable, StableHasher};
@@ -905,6 +900,8 @@ pub enum LocalInfo<'tcx> {
     AggregateTemp,
     /// A temporary created during the pass `Derefer` to avoid its retagging
     DerefTemp,
+    /// A temporary created for borrow checking.
+    FakeBorrow,
 }
 
 impl<'tcx> LocalDecl<'tcx> {
@@ -1464,6 +1461,7 @@ impl Debug for Statement<'_> {
             }
             Coverage(box ref coverage) => write!(fmt, "Coverage::{:?}", coverage.kind),
             Intrinsic(box ref intrinsic) => write!(fmt, "{intrinsic}"),
+            ConstEvalCounter => write!(fmt, "ConstEvalCounter"),
             Nop => write!(fmt, "nop"),
         }
     }
@@ -1622,7 +1620,7 @@ impl<'tcx> Place<'tcx> {
             &v
         };
 
-        Place { local: self.local, projection: tcx.intern_place_elems(new_projections) }
+        Place { local: self.local, projection: tcx.mk_place_elems(new_projections) }
     }
 }
 
@@ -1644,6 +1642,14 @@ impl<'tcx> PlaceRef<'tcx> {
         }
     }
 
+    /// Returns `true` if this `Place` contains a `Deref` projection.
+    ///
+    /// If `Place::is_indirect` returns false, the caller knows that the `Place` refers to the
+    /// same region of memory as its base.
+    pub fn is_indirect(&self) -> bool {
+        self.projection.iter().any(|elem| elem.is_indirect())
+    }
+
     /// If MirPhase >= Derefered and if projection contains Deref,
     /// It's guaranteed to be in the first place
     pub fn has_deref(&self) -> bool {
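The newly added `PlaceRef::is_indirect` above is a one-line scan of the projection list. A standalone miniature showing why that test is what callers want (hypothetical, simplified `ProjectionElem`, not the rustc type):

```rust
/// A place is "indirect" iff any projection step dereferences a pointer,
/// in which case it may name memory outside its base local.
#[derive(Clone, Copy)]
enum ProjectionElem {
    Deref,
    Field(usize),
    Index(usize),
}

impl ProjectionElem {
    fn is_indirect(&self) -> bool {
        matches!(self, ProjectionElem::Deref)
    }
}

fn is_indirect(projection: &[ProjectionElem]) -> bool {
    projection.iter().any(|elem| elem.is_indirect())
}

fn main() {
    use ProjectionElem::*;
    assert!(!is_indirect(&[Field(0), Index(3)])); // (_1.0)[i]: still inside _1
    assert!(is_indirect(&[Field(0), Deref]));     // *(_1.0): some other memory
}
```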
@@ -2102,10 +2108,7 @@ impl<'tcx> Debug for Rvalue<'tcx> {
             AggregateKind::Closure(def_id, substs) => ty::tls::with(|tcx| {
                 let name = if tcx.sess.opts.unstable_opts.span_free_formats {
                     let substs = tcx.lift(substs).unwrap();
-                    format!(
-                        "[closure@{}]",
-                        tcx.def_path_str_with_substs(def_id.to_def_id(), substs),
-                    )
+                    format!("[closure@{}]", tcx.def_path_str_with_substs(def_id, substs),)
                 } else {
                     let span = tcx.def_span(def_id);
                     format!(
@@ -2116,11 +2119,17 @@ impl<'tcx> Debug for Rvalue<'tcx> {
                 let mut struct_fmt = fmt.debug_struct(&name);
 
                 // FIXME(project-rfc-2229#48): This should be a list of capture names/places
-                if let Some(upvars) = tcx.upvars_mentioned(def_id) {
+                if let Some(def_id) = def_id.as_local()
+                    && let Some(upvars) = tcx.upvars_mentioned(def_id)
+                {
                     for (&var_id, place) in iter::zip(upvars.keys(), places) {
                         let var_name = tcx.hir().name(var_id);
                         struct_fmt.field(var_name.as_str(), place);
                     }
+                } else {
+                    for (index, place) in places.iter().enumerate() {
+                        struct_fmt.field(&format!("{index}"), place);
+                    }
                 }
 
                 struct_fmt.finish()
@@ -2131,11 +2140,17 @@ impl<'tcx> Debug for Rvalue<'tcx> {
                 let mut struct_fmt = fmt.debug_struct(&name);
 
                 // FIXME(project-rfc-2229#48): This should be a list of capture names/places
-                if let Some(upvars) = tcx.upvars_mentioned(def_id) {
+                if let Some(def_id) = def_id.as_local()
+                    && let Some(upvars) = tcx.upvars_mentioned(def_id)
+                {
                     for (&var_id, place) in iter::zip(upvars.keys(), places) {
                         let var_name = tcx.hir().name(var_id);
                         struct_fmt.field(var_name.as_str(), place);
                     }
+                } else {
+                    for (index, place) in places.iter().enumerate() {
+                        struct_fmt.field(&format!("{index}"), place);
+                    }
                 }
 
                 struct_fmt.finish()
@@ -2335,13 +2350,17 @@ impl<'tcx> ConstantKind<'tcx> {
     }
 
     #[inline]
-    pub fn try_eval_usize(&self, tcx: TyCtxt<'tcx>, param_env: ty::ParamEnv<'tcx>) -> Option<u64> {
+    pub fn try_eval_target_usize(
+        &self,
+        tcx: TyCtxt<'tcx>,
+        param_env: ty::ParamEnv<'tcx>,
+    ) -> Option<u64> {
         match self {
-            Self::Ty(ct) => ct.try_eval_usize(tcx, param_env),
-            Self::Val(val, _) => val.try_to_machine_usize(tcx),
+            Self::Ty(ct) => ct.try_eval_target_usize(tcx, param_env),
+            Self::Val(val, _) => val.try_to_target_usize(tcx),
             Self::Unevaluated(uneval, _) => {
                 match tcx.const_eval_resolve(param_env, *uneval, None) {
-                    Ok(val) => val.try_to_machine_usize(tcx),
+                    Ok(val) => val.try_to_target_usize(tcx),
                     Err(_) => None,
                 }
             }
@@ -2478,7 +2497,7 @@ impl<'tcx> ConstantKind<'tcx> {
         };
         debug!("expr.kind: {:?}", expr.kind);
 
-        let ty = tcx.type_of(def.def_id_for_type_of());
+        let ty = tcx.type_of(def.def_id_for_type_of()).subst_identity();
         debug!(?ty);
 
         // FIXME(const_generics): We currently have to special case parameters because `min_const_generics`
@@ -2506,20 +2525,19 @@ impl<'tcx> ConstantKind<'tcx> {
         }
 
         let hir_id = tcx.hir().local_def_id_to_hir_id(def.did);
-        let parent_substs = if let Some(parent_hir_id) = tcx.hir().opt_parent_id(hir_id) {
-            if let Some(parent_did) = tcx.hir().opt_local_def_id(parent_hir_id) {
-                InternalSubsts::identity_for_item(tcx, parent_did.to_def_id())
-            } else {
-                tcx.mk_substs(Vec::<GenericArg<'tcx>>::new().into_iter())
-            }
+        let parent_substs = if let Some(parent_hir_id) = tcx.hir().opt_parent_id(hir_id)
+            && let Some(parent_did) = parent_hir_id.as_owner()
+        {
+            InternalSubsts::identity_for_item(tcx, parent_did.to_def_id())
         } else {
-            tcx.mk_substs(Vec::<GenericArg<'tcx>>::new().into_iter())
+            List::empty()
         };
         debug!(?parent_substs);
 
         let did = def.did.to_def_id();
         let child_substs = InternalSubsts::identity_for_item(tcx, did);
-        let substs = tcx.mk_substs(parent_substs.into_iter().chain(child_substs.into_iter()));
+        let substs =
+            tcx.mk_substs_from_iter(parent_substs.into_iter().chain(child_substs.into_iter()));
         debug!(?substs);
 
         let hir_id = tcx.hir().local_def_id_to_hir_id(def.did);
@@ -2737,8 +2755,11 @@ impl UserTypeProjection {
     }
 }
 
-impl<'tcx> TypeFoldable<'tcx> for UserTypeProjection {
-    fn try_fold_with<F: FallibleTypeFolder<'tcx>>(self, folder: &mut F) -> Result<Self, F::Error> {
+impl<'tcx> TypeFoldable<TyCtxt<'tcx>> for UserTypeProjection {
+    fn try_fold_with<F: FallibleTypeFolder<TyCtxt<'tcx>>>(
+        self,
+        folder: &mut F,
+    ) -> Result<Self, F::Error> {
         Ok(UserTypeProjection {
             base: self.base.try_fold_with(folder)?,
             projs: self.projs.try_fold_with(folder)?,
@@ -2746,8 +2767,11 @@ impl<'tcx> TypeFoldable<'tcx> for UserTypeProjection {
     }
 }
 
-impl<'tcx> TypeVisitable<'tcx> for UserTypeProjection {
-    fn visit_with<Vs: TypeVisitor<'tcx>>(&self, visitor: &mut Vs) -> ControlFlow<Vs::BreakTy> {
+impl<'tcx> TypeVisitable<TyCtxt<'tcx>> for UserTypeProjection {
+    fn visit_with<Vs: TypeVisitor<TyCtxt<'tcx>>>(
+        &self,
+        visitor: &mut Vs,
+    ) -> ControlFlow<Vs::BreakTy> {
         self.base.visit_with(visitor)
         // Note: there's nothing in `self.proj` to visit.
     }
@@ -2884,7 +2908,7 @@ fn pretty_print_const_value<'tcx>(
     // the `destructure_const` query with an empty `ty::ParamEnv` without
     // introducing ICEs (e.g. via `layout_of`) from missing bounds.
     // E.g. `transmute([0usize; 2]): (u8, *mut T)` needs to know `T: Sized`
-    // to be able to destructure the tuple into `(0u8, *mut T)
+    // to be able to destructure the tuple into `(0u8, *mut T)`
     //
     // FIXME(eddyb) for `--emit=mir`/`-Z dump-mir`, we should provide the
     // correct `ty::ParamEnv` to allow printing *all* constant values.
@@ -3049,7 +3073,7 @@ impl Location {
         if self.block == other.block {
             self.statement_index <= other.statement_index
         } else {
-            dominators.is_dominated_by(other.block, self.block)
+            dominators.dominates(self.block, other.block)
         }
     }
 }
diff --git a/compiler/rustc_middle/src/mir/mono.rs b/compiler/rustc_middle/src/mir/mono.rs
index 1e8d5f7ea..7a05ee2ff 100644
--- a/compiler/rustc_middle/src/mir/mono.rs
+++ b/compiler/rustc_middle/src/mir/mono.rs
@@ -318,16 +318,19 @@ impl<'tcx> CodegenUnit<'tcx> {
         base_n::encode(hash, base_n::CASE_INSENSITIVE)
     }
 
-    pub fn estimate_size(&mut self, tcx: TyCtxt<'tcx>) {
+    pub fn create_size_estimate(&mut self, tcx: TyCtxt<'tcx>) {
         // Estimate the size of a codegen unit as (approximately) the number of MIR
         // statements it corresponds to.
         self.size_estimate = Some(self.items.keys().map(|mi| mi.size_estimate(tcx)).sum());
     }
 
     #[inline]
+    /// Should only be called if [`create_size_estimate`] has previously been called.
+    ///
+    /// [`create_size_estimate`]: Self::create_size_estimate
     pub fn size_estimate(&self) -> usize {
-        // Should only be called if `estimate_size` has previously been called.
-        self.size_estimate.expect("estimate_size must be called before getting a size_estimate")
+        self.size_estimate
+            .expect("create_size_estimate must be called before getting a size_estimate")
     }
 
     pub fn modify_size_estimate(&mut self, delta: usize) {
diff --git a/compiler/rustc_middle/src/mir/predecessors.rs b/compiler/rustc_middle/src/mir/predecessors.rs
deleted file mode 100644
index 5f1fadaf3..000000000
--- a/compiler/rustc_middle/src/mir/predecessors.rs
+++ /dev/null
@@ -1,78 +0,0 @@
-//! Lazily compute the reverse control-flow graph for the MIR.
-
-use rustc_data_structures::stable_hasher::{HashStable, StableHasher};
-use rustc_data_structures::sync::OnceCell;
-use rustc_index::vec::IndexVec;
-use rustc_serialize::{Decodable, Decoder, Encodable, Encoder};
-use smallvec::SmallVec;
-
-use crate::mir::{BasicBlock, BasicBlockData};
-
-// Typically 95%+ of basic blocks have 4 or fewer predecessors.
-pub type Predecessors = IndexVec<BasicBlock, SmallVec<[BasicBlock; 4]>>;
-
-#[derive(Clone, Debug)]
-pub(super) struct PredecessorCache {
-    cache: OnceCell<Predecessors>,
-}
-
-impl PredecessorCache {
-    #[inline]
-    pub(super) fn new() -> Self {
-        PredecessorCache { cache: OnceCell::new() }
-    }
-
-    /// Invalidates the predecessor cache.
-    #[inline]
-    pub(super) fn invalidate(&mut self) {
-        // Invalidating the predecessor cache requires mutating the MIR, which in turn requires a
-        // unique reference (`&mut`) to the `mir::Body`. Because of this, we can assume that all
-        // callers of `invalidate` have a unique reference to the MIR and thus to the predecessor
-        // cache. This means we never need to do synchronization when `invalidate` is called, we can
-        // simply reinitialize the `OnceCell`.
-        self.cache = OnceCell::new();
-    }
-
-    /// Returns the predecessor graph for this MIR.
-    #[inline]
-    pub(super) fn compute(
-        &self,
-        basic_blocks: &IndexVec<BasicBlock, BasicBlockData<'_>>,
-    ) -> &Predecessors {
-        self.cache.get_or_init(|| {
-            let mut preds = IndexVec::from_elem(SmallVec::new(), basic_blocks);
-            for (bb, data) in basic_blocks.iter_enumerated() {
-                if let Some(term) = &data.terminator {
-                    for succ in term.successors() {
-                        preds[succ].push(bb);
-                    }
-                }
-            }
-
-            preds
-        })
-    }
-}
-
-impl<S: Encoder> Encodable<S> for PredecessorCache {
-    #[inline]
-    fn encode(&self, _s: &mut S) {}
-}
-
-impl<D: Decoder> Decodable<D> for PredecessorCache {
-    #[inline]
-    fn decode(_: &mut D) -> Self {
-        Self::new()
-    }
-}
-
-impl<CTX> HashStable<CTX> for PredecessorCache {
-    #[inline]
-    fn hash_stable(&self, _: &mut CTX, _: &mut StableHasher) {
-        // do nothing
-    }
-}
-
-TrivialTypeTraversalAndLiftImpls! {
-    PredecessorCache,
-}
diff --git a/compiler/rustc_middle/src/mir/pretty.rs b/compiler/rustc_middle/src/mir/pretty.rs
index 40289af25..d8829e3e7 100644
--- a/compiler/rustc_middle/src/mir/pretty.rs
+++ b/compiler/rustc_middle/src/mir/pretty.rs
@@ -12,8 +12,8 @@ use rustc_data_structures::fx::FxHashMap;
 use rustc_hir::def_id::DefId;
 use rustc_index::vec::Idx;
 use rustc_middle::mir::interpret::{
-    alloc_range, read_target_uint, AllocId, Allocation, ConstAllocation, ConstValue, GlobalAlloc,
-    Pointer, Provenance,
+    alloc_range, read_target_uint, AllocBytes, AllocId, Allocation, ConstAllocation, ConstValue,
+    GlobalAlloc, Pointer, Provenance,
 };
 use rustc_middle::mir::visit::Visitor;
 use rustc_middle::mir::*;
@@ -580,7 +580,7 @@ fn write_scope_tree(
             continue;
         }
 
-        let mut_str = if local_decl.mutability == Mutability::Mut { "mut " } else { "" };
+        let mut_str = local_decl.mutability.prefix_str();
 
         let mut indented_decl =
             format!("{0:1$}let {2}{3:?}: {4:?}", INDENT, indent, mut_str, local, local_decl.ty);
@@ -787,21 +787,21 @@ pub fn write_allocations<'tcx>(
 /// After the hex dump, an ascii dump follows, replacing all unprintable characters (control
 /// characters or characters whose value is larger than 127) with a `.`
 /// This also prints provenance adequately.
-pub fn display_allocation<'a, 'tcx, Prov: Provenance, Extra>(
+pub fn display_allocation<'a, 'tcx, Prov: Provenance, Extra, Bytes: AllocBytes>(
     tcx: TyCtxt<'tcx>,
-    alloc: &'a Allocation<Prov, Extra>,
-) -> RenderAllocation<'a, 'tcx, Prov, Extra> {
+    alloc: &'a Allocation<Prov, Extra, Bytes>,
+) -> RenderAllocation<'a, 'tcx, Prov, Extra, Bytes> {
     RenderAllocation { tcx, alloc }
 }
 
 #[doc(hidden)]
-pub struct RenderAllocation<'a, 'tcx, Prov: Provenance, Extra> {
+pub struct RenderAllocation<'a, 'tcx, Prov: Provenance, Extra, Bytes: AllocBytes> {
     tcx: TyCtxt<'tcx>,
-    alloc: &'a Allocation<Prov, Extra>,
+    alloc: &'a Allocation<Prov, Extra, Bytes>,
 }
 
-impl<'a, 'tcx, Prov: Provenance, Extra> std::fmt::Display
-    for RenderAllocation<'a, 'tcx, Prov, Extra>
+impl<'a, 'tcx, Prov: Provenance, Extra, Bytes: AllocBytes> std::fmt::Display
+    for RenderAllocation<'a, 'tcx, Prov, Extra, Bytes>
 {
     fn fmt(&self, w: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
         let RenderAllocation { tcx, alloc } = *self;
@@ -845,9 +845,9 @@ fn write_allocation_newline(
 /// The `prefix` argument allows callers to add an arbitrary prefix before each line (even if there
 /// is only one line). Note that your prefix should contain a trailing space as the lines are
 /// printed directly after it.
-fn write_allocation_bytes<'tcx, Prov: Provenance, Extra>(
+fn write_allocation_bytes<'tcx, Prov: Provenance, Extra, Bytes: AllocBytes>(
     tcx: TyCtxt<'tcx>,
-    alloc: &Allocation<Prov, Extra>,
+    alloc: &Allocation<Prov, Extra, Bytes>,
     w: &mut dyn std::fmt::Write,
     prefix: &str,
 ) -> std::fmt::Result {
diff --git a/compiler/rustc_middle/src/mir/query.rs b/compiler/rustc_middle/src/mir/query.rs
index a8a453222..b964c1852 100644
--- a/compiler/rustc_middle/src/mir/query.rs
+++ b/compiler/rustc_middle/src/mir/query.rs
@@ -8,7 +8,7 @@ use rustc_errors::ErrorGuaranteed;
 use rustc_hir as hir;
 use rustc_hir::def_id::{DefId, LocalDefId};
 use rustc_index::bit_set::BitMatrix;
-use rustc_index::vec::IndexVec;
+use rustc_index::vec::{Idx, IndexVec};
 use rustc_span::Span;
 use rustc_target::abi::VariantIdx;
 use smallvec::SmallVec;
@@ -135,11 +135,20 @@ rustc_index::newtype_index! {
     pub struct GeneratorSavedLocal {}
 }
 
+#[derive(Clone, Debug, TyEncodable, TyDecodable, HashStable, TypeFoldable, TypeVisitable)]
+pub struct GeneratorSavedTy<'tcx> {
+    pub ty: Ty<'tcx>,
+    /// Source info corresponding to the local in the original MIR body.
+    pub source_info: SourceInfo,
+    /// Whether the local should be ignored for trait bound computations.
+    pub ignore_for_traits: bool,
+}
+
 /// The layout of generator state.
 #[derive(Clone, TyEncodable, TyDecodable, HashStable, TypeFoldable, TypeVisitable)]
 pub struct GeneratorLayout<'tcx> {
     /// The type of every local stored inside the generator.
-    pub field_tys: IndexVec<GeneratorSavedLocal, Ty<'tcx>>,
+    pub field_tys: IndexVec<GeneratorSavedLocal, GeneratorSavedTy<'tcx>>,
 
     /// Which of the above fields are in each variant. Note that one field may
     /// be stored in multiple variants.
@@ -152,6 +161,8 @@ pub struct GeneratorLayout<'tcx> {
     /// Which saved locals are storage-live at the same time. Locals that do not
     /// have conflicts with each other are allowed to overlap in the computed
     /// layout.
+    #[type_foldable(identity)]
+    #[type_visitable(ignore)]
     pub storage_conflicts: BitMatrix<GeneratorSavedLocal, GeneratorSavedLocal>,
 }
 
@@ -278,13 +289,6 @@ pub struct ConstQualifs {
 /// instance of the closure is created, the corresponding free regions
 /// can be extracted from its type and constrained to have the given
 /// outlives relationship.
-///
-/// In some cases, we have to record outlives requirements between types and
-/// regions as well. In that case, if those types include any regions, those
-/// regions are recorded using their external names (`ReStatic`,
-/// `ReEarlyBound`, `ReFree`). We use these because in a query response we
-/// cannot use `ReVar` (which is what we use internally within the rest of the
-/// NLL code).
 #[derive(Clone, Debug, TyEncodable, TyDecodable, HashStable)]
 pub struct ClosureRegionRequirements<'tcx> {
     /// The number of external regions defined on the closure. In our
@@ -381,16 +385,59 @@ pub enum ClosureOutlivesSubject<'tcx> {
     /// Subject is a type, typically a type parameter, but could also
     /// be a projection. Indicates a requirement like `T: 'a` being
     /// passed to the caller, where the type here is `T`.
-    ///
-    /// The type here is guaranteed not to contain any free regions at
-    /// present.
-    Ty(Ty<'tcx>),
+    Ty(ClosureOutlivesSubjectTy<'tcx>),
 
     /// Subject is a free region from the closure. Indicates a requirement
     /// like `'a: 'b` being passed to the caller; the region here is `'a`.
     Region(ty::RegionVid),
 }
 
+/// Represents a `ty::Ty` for use in [`ClosureOutlivesSubject`].
+///
+/// This abstraction is necessary because the type may include `ReVar` regions,
+/// which is what we use internally within NLL code, and they can't be used in
+/// a query response.
+///
+/// DO NOT implement `TypeVisitable` or `TypeFoldable` traits, because this
+/// type is not recognized as a binder for late-bound region.
+#[derive(Copy, Clone, Debug, TyEncodable, TyDecodable, HashStable)]
+pub struct ClosureOutlivesSubjectTy<'tcx> {
+    inner: Ty<'tcx>,
+}
+
+impl<'tcx> ClosureOutlivesSubjectTy<'tcx> {
+    /// All regions of `ty` must be of kind `ReVar` and must represent
+    /// universal regions *external* to the closure.
+    pub fn bind(tcx: TyCtxt<'tcx>, ty: Ty<'tcx>) -> Self {
+        let inner = tcx.fold_regions(ty, |r, depth| match r.kind() {
+            ty::ReVar(vid) => {
+                let br = ty::BoundRegion {
+                    var: ty::BoundVar::new(vid.index()),
+                    kind: ty::BrAnon(vid.as_u32(), None),
+                };
+                tcx.mk_re_late_bound(depth, br)
+            }
+            _ => bug!("unexpected region in ClosureOutlivesSubjectTy: {r:?}"),
+        });
+
+        Self { inner }
+    }
+
+    pub fn instantiate(
+        self,
+        tcx: TyCtxt<'tcx>,
+        mut map: impl FnMut(ty::RegionVid) -> ty::Region<'tcx>,
+    ) -> Ty<'tcx> {
+        tcx.fold_regions(self.inner, |r, depth| match r.kind() {
+            ty::ReLateBound(debruijn, br) => {
+                debug_assert_eq!(debruijn, depth);
+                map(ty::RegionVid::new(br.var.index()))
+            }
+            _ => bug!("unexpected region {r:?}"),
+        })
+    }
+}
+
 /// The constituent parts of a mir constant of kind ADT or array.
 #[derive(Copy, Clone, Debug, HashStable)]
 pub struct DestructuredConstant<'tcx> {
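`ClosureOutlivesSubjectTy` packages the trick its doc comment describes: region inference variables (`ReVar`) may not cross a query boundary, so `bind` re-encodes each variable as a late-bound region carrying the variable's index, and `instantiate` maps the indices back to regions in the caller. A toy model of that round trip with plain enums instead of `ty::Region` (illustrative only):

```rust
/// `Var` plays the role of `ReVar` (borrowck-internal), `Bound` the role
/// of the `ReLateBound`/`BrAnon` placeholder used for transport.
#[derive(Clone, Copy, Debug, PartialEq)]
enum Region {
    Var(u32),
    Bound(u32),
}

/// Re-encode every variable as a stable placeholder (like `bind`).
fn bind(regions: &mut [Region]) {
    for r in regions {
        match *r {
            Region::Var(vid) => *r = Region::Bound(vid),
            Region::Bound(_) => panic!("already bound"),
        }
    }
}

/// Map the placeholders back through a caller-supplied function
/// (like `instantiate`'s `map: impl FnMut(ty::RegionVid) -> ty::Region`).
fn instantiate(regions: &mut [Region], mut map: impl FnMut(u32) -> Region) {
    for r in regions {
        match *r {
            Region::Bound(idx) => *r = map(idx),
            Region::Var(_) => panic!("unexpected region variable"),
        }
    }
}

fn main() {
    let mut regions = [Region::Var(3), Region::Var(7)];
    bind(&mut regions);                      // safe to ship in a query result
    instantiate(&mut regions, Region::Var);  // caller maps indices back
    assert_eq!(regions, [Region::Var(3), Region::Var(7)]);
}
```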
diff --git a/compiler/rustc_middle/src/mir/spanview.rs b/compiler/rustc_middle/src/mir/spanview.rs
index 887ee5715..28a3b51b7 100644
--- a/compiler/rustc_middle/src/mir/spanview.rs
+++ b/compiler/rustc_middle/src/mir/spanview.rs
@@ -11,7 +11,7 @@ use std::io::{self, Write};
 pub const TOOLTIP_INDENT: &str = "    ";
 
 const CARET: char = '\u{2038}'; // Unicode `CARET`
-const ANNOTATION_LEFT_BRACKET: char = '\u{298a}'; // Unicode `Z NOTATION RIGHT BINDING BRACKET
+const ANNOTATION_LEFT_BRACKET: char = '\u{298a}'; // Unicode `Z NOTATION RIGHT BINDING BRACKET`
 const ANNOTATION_RIGHT_BRACKET: char = '\u{2989}'; // Unicode `Z NOTATION LEFT BINDING BRACKET`
 const NEW_LINE_SPAN: &str = "</span>\n<span class=\"line\">";
 const HEADER: &str = r#"<!DOCTYPE html>
@@ -250,6 +250,7 @@ pub fn statement_kind_name(statement: &Statement<'_>) -> &'static str {
         AscribeUserType(..) => "AscribeUserType",
         Coverage(..) => "Coverage",
         Intrinsic(..) => "Intrinsic",
+        ConstEvalCounter => "ConstEvalCounter",
         Nop => "Nop",
     }
 }
@@ -670,7 +671,7 @@ fn fn_span(tcx: TyCtxt<'_>, def_id: DefId) -> Span {
 
 fn hir_body(tcx: TyCtxt<'_>, def_id: DefId) -> Option<&rustc_hir::Body<'_>> {
     let hir_node = tcx.hir().get_if_local(def_id).expect("expected DefId is local");
-    hir::map::associated_body(hir_node).map(|fn_body_id| tcx.hir().body(fn_body_id))
+    hir::map::associated_body(hir_node).map(|(_, fn_body_id)| tcx.hir().body(fn_body_id))
 }
 
 fn escape_html(s: &str) -> String {
diff --git a/compiler/rustc_middle/src/mir/switch_sources.rs b/compiler/rustc_middle/src/mir/switch_sources.rs
deleted file mode 100644
index b91c0c257..000000000
--- a/compiler/rustc_middle/src/mir/switch_sources.rs
+++ /dev/null
@@ -1,78 +0,0 @@
-//! Lazily compute the inverse of each `SwitchInt`'s switch targets. Modeled after
-//! `Predecessors`/`PredecessorCache`.
-
-use rustc_data_structures::fx::FxHashMap;
-use rustc_data_structures::stable_hasher::{HashStable, StableHasher};
-use rustc_data_structures::sync::OnceCell;
-use rustc_index::vec::IndexVec;
-use rustc_serialize::{Decodable, Decoder, Encodable, Encoder};
-use smallvec::SmallVec;
-
-use crate::mir::{BasicBlock, BasicBlockData, Terminator, TerminatorKind};
-
-pub type SwitchSources = FxHashMap<(BasicBlock, BasicBlock), SmallVec<[Option<u128>; 1]>>;
-
-#[derive(Clone, Debug)]
-pub(super) struct SwitchSourceCache {
-    cache: OnceCell<SwitchSources>,
-}
-
-impl SwitchSourceCache {
-    #[inline]
-    pub(super) fn new() -> Self {
-        SwitchSourceCache { cache: OnceCell::new() }
-    }
-
-    /// Invalidates the switch source cache.
-    #[inline]
-    pub(super) fn invalidate(&mut self) {
-        self.cache = OnceCell::new();
-    }
-
-    /// Returns the switch sources for this MIR.
-    #[inline]
-    pub(super) fn compute(
-        &self,
-        basic_blocks: &IndexVec<BasicBlock, BasicBlockData<'_>>,
-    ) -> &SwitchSources {
-        self.cache.get_or_init(|| {
-            let mut switch_sources: SwitchSources = FxHashMap::default();
-            for (bb, data) in basic_blocks.iter_enumerated() {
-                if let Some(Terminator {
-                    kind: TerminatorKind::SwitchInt { targets, .. }, ..
-                }) = &data.terminator
-                {
-                    for (value, target) in targets.iter() {
-                        switch_sources.entry((target, bb)).or_default().push(Some(value));
-                    }
-                    switch_sources.entry((targets.otherwise(), bb)).or_default().push(None);
-                }
-            }
-
-            switch_sources
-        })
-    }
-}
-
-impl<S: Encoder> Encodable<S> for SwitchSourceCache {
-    #[inline]
-    fn encode(&self, _s: &mut S) {}
-}
-
-impl<D: Decoder> Decodable<D> for SwitchSourceCache {
-    #[inline]
-    fn decode(_: &mut D) -> Self {
-        Self::new()
-    }
-}
-
-impl<CTX> HashStable<CTX> for SwitchSourceCache {
-    #[inline]
-    fn hash_stable(&self, _: &mut CTX, _: &mut StableHasher) {
-        // do nothing
-    }
-}
-
-TrivialTypeTraversalAndLiftImpls! {
-    SwitchSourceCache,
-}
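The deleted `SwitchSourceCache` logic survives inline in `BasicBlocks::switch_sources` (first file above). A standalone sketch of the inverted map it builds — keyed by `(target, switch)`, with `None` marking the `otherwise` edge (simplified types, `std::collections::HashMap` in place of `FxHashMap`):

```rust
use std::collections::HashMap;

/// For each (target, switch) edge: which switch values take it.
type SwitchSources = HashMap<(usize, usize), Vec<Option<u128>>>;

/// Simplified stand-in for a `SwitchInt` terminator.
struct SwitchInt {
    block: usize,
    targets: Vec<(u128, usize)>, // (matched value, target block)
    otherwise: usize,
}

fn switch_sources(switches: &[SwitchInt]) -> SwitchSources {
    let mut sources: SwitchSources = HashMap::new();
    for s in switches {
        for &(value, target) in &s.targets {
            sources.entry((target, s.block)).or_default().push(Some(value));
        }
        // The fallthrough edge is recorded as `None`.
        sources.entry((s.otherwise, s.block)).or_default().push(None);
    }
    sources
}

fn main() {
    let s = SwitchInt { block: 0, targets: vec![(0, 1), (1, 2)], otherwise: 3 };
    let sources = switch_sources(&[s]);
    assert_eq!(sources[&(2, 0)], vec![Some(1)]); // block 2 reached when value == 1
    assert_eq!(sources[&(3, 0)], vec![None]);    // block 3 is the otherwise edge
}
```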
diff --git a/compiler/rustc_middle/src/mir/syntax.rs b/compiler/rustc_middle/src/mir/syntax.rs
index 52c2b10cb..ae09562a8 100644
--- a/compiler/rustc_middle/src/mir/syntax.rs
+++ b/compiler/rustc_middle/src/mir/syntax.rs
@@ -355,6 +355,12 @@ pub enum StatementKind<'tcx> {
     /// This avoids adding a new block and a terminator for simple intrinsics.
     Intrinsic(Box<NonDivergingIntrinsic<'tcx>>),
 
+    /// Instructs the const eval interpreter to increment a counter; this counter is used to track
+    /// how many steps the interpreter has taken. It is used to prevent the user from writing const
+    /// code that runs for too long or infinitely. Other than in the const eval interpreter, this
+    /// is a no-op.
+    ConstEvalCounter,
+
     /// No-op. Useful for deleting instructions without affecting statement indices.
     Nop,
 }
@@ -665,6 +671,12 @@ pub enum TerminatorKind<'tcx> {
     /// as parameters, and `None` for the destination. Keep in mind that the `cleanup` path is not
     /// necessarily executed even in the case of a panic, for example in `-C panic=abort`. If the
     /// assertion does not fail, execution continues at the specified basic block.
+    ///
+    /// When overflow checking is disabled and this is run-time MIR (as opposed to compile-time MIR
+    /// that is used for CTFE), the following variants of this terminator behave as `goto target`:
+    /// - `OverflowNeg(..)`,
+    /// - `Overflow(op, ..)` if op is a "checkable" operation (add, sub, mul, shl, shr, but NOT
+    ///   div or rem).
     Assert {
         cond: Operand<'tcx>,
         expected: bool,
@@ -1097,10 +1109,6 @@ pub enum Rvalue<'tcx> {
     /// Same as `BinaryOp`, but yields `(T, bool)` with a `bool` indicating an error condition.
     ///
-    /// When overflow checking is disabled and we are generating run-time code, the error condition
-    /// is false. Otherwise, and always during CTFE, the error condition is determined as described
-    /// below.
-    ///
     /// For addition, subtraction, and multiplication on integers the error condition is set when
     /// the infinite precision result would be unequal to the actual result.
     ///
@@ -1197,10 +1205,8 @@ pub enum AggregateKind<'tcx> {
     /// active field index would identify the field `c`
     Adt(DefId, VariantIdx, SubstsRef<'tcx>, Option<UserTypeAnnotationIndex>, Option<usize>),
 
-    // Note: We can use LocalDefId since closures and generators a deaggregated
-    // before codegen.
-    Closure(LocalDefId, SubstsRef<'tcx>),
-    Generator(LocalDefId, SubstsRef<'tcx>, hir::Movability),
+    Closure(DefId, SubstsRef<'tcx>),
+    Generator(DefId, SubstsRef<'tcx>, hir::Movability),
 }
 
 #[derive(Copy, Clone, Debug, PartialEq, Eq, TyEncodable, TyDecodable, Hash, HashStable)]
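The reworded `CheckedBinaryOp` documentation pins the error bit to "infinite-precision result ≠ actual result", which for integer add/sub/mul is precisely what Rust's standard `overflowing_*` operations report:

```rust
fn main() {
    // 200 + 100 = 300 does not fit in u8: the actual (wrapped) result 44
    // differs from the infinite-precision one, so the error bit is set.
    let (val, overflowed) = 200u8.overflowing_add(100);
    assert_eq!((val, overflowed), (44, true));

    // 100 + 100 = 200 fits exactly: no error.
    let (val, overflowed) = 100u8.overflowing_add(100);
    assert_eq!((val, overflowed), (200, false));
}
```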
diff --git a/compiler/rustc_middle/src/mir/tcx.rs b/compiler/rustc_middle/src/mir/tcx.rs
index 599f0b9d3..0aa2c500f 100644
--- a/compiler/rustc_middle/src/mir/tcx.rs
+++ b/compiler/rustc_middle/src/mir/tcx.rs
@@ -97,7 +97,7 @@ impl<'tcx> PlaceTy<'tcx> {
                 ty::Slice(..) => self.ty,
                 ty::Array(inner, _) if !from_end => tcx.mk_array(*inner, (to - from) as u64),
                 ty::Array(inner, size) if from_end => {
-                    let size = size.eval_usize(tcx, param_env);
+                    let size = size.eval_target_usize(tcx, param_env);
                     let len = size - (from as u64) - (to as u64);
                     tcx.mk_array(*inner, len)
                 }
@@ -162,10 +162,10 @@ impl<'tcx> Rvalue<'tcx> {
         match *self {
             Rvalue::Use(ref operand) => operand.ty(local_decls, tcx),
             Rvalue::Repeat(ref operand, count) => {
-                tcx.mk_ty(ty::Array(operand.ty(local_decls, tcx), count))
+                tcx.mk_array_with_const_len(operand.ty(local_decls, tcx), count)
             }
             Rvalue::ThreadLocalRef(did) => {
-                let static_ty = tcx.type_of(did);
+                let static_ty = tcx.type_of(did).subst_identity();
                 if tcx.is_mutable_static(did) {
                     tcx.mk_mut_ptr(static_ty)
                 } else if tcx.is_foreign_item(did) {
@@ -194,20 +194,20 @@ impl<'tcx> Rvalue<'tcx> {
                 let lhs_ty = lhs.ty(local_decls, tcx);
                 let rhs_ty = rhs.ty(local_decls, tcx);
                 let ty = op.ty(tcx, lhs_ty, rhs_ty);
-                tcx.intern_tup(&[ty, tcx.types.bool])
+                tcx.mk_tup(&[ty, tcx.types.bool])
             }
             Rvalue::UnaryOp(UnOp::Not | UnOp::Neg, ref operand) => operand.ty(local_decls, tcx),
             Rvalue::Discriminant(ref place) => place.ty(local_decls, tcx).ty.discriminant_ty(tcx),
             Rvalue::NullaryOp(NullOp::SizeOf | NullOp::AlignOf, _) => tcx.types.usize,
             Rvalue::Aggregate(ref ak, ref ops) => match **ak {
                 AggregateKind::Array(ty) => tcx.mk_array(ty, ops.len() as u64),
-                AggregateKind::Tuple => tcx.mk_tup(ops.iter().map(|op| op.ty(local_decls, tcx))),
-                AggregateKind::Adt(did, _, substs, _, _) => {
-                    tcx.bound_type_of(did).subst(tcx, substs)
+                AggregateKind::Tuple => {
+                    tcx.mk_tup_from_iter(ops.iter().map(|op| op.ty(local_decls, tcx)))
                 }
-                AggregateKind::Closure(did, substs) => tcx.mk_closure(did.to_def_id(), substs),
+                AggregateKind::Adt(did, _, substs, _, _) => tcx.type_of(did).subst(tcx, substs),
+                AggregateKind::Closure(did, substs) => tcx.mk_closure(did, substs),
                 AggregateKind::Generator(did, substs, movability) => {
-                    tcx.mk_generator(did.to_def_id(), substs, movability)
+                    tcx.mk_generator(did, substs, movability)
                 }
             },
             Rvalue::ShallowInitBox(_, ty) => tcx.mk_box(ty),
diff --git a/compiler/rustc_middle/src/mir/traversal.rs b/compiler/rustc_middle/src/mir/traversal.rs
index 0b461d1ce..f37222cb2 100644
--- a/compiler/rustc_middle/src/mir/traversal.rs
+++ b/compiler/rustc_middle/src/mir/traversal.rs
@@ -1,7 +1,4 @@
-use rustc_data_structures::stable_hasher::{HashStable, StableHasher};
-use rustc_data_structures::sync::OnceCell;
 use rustc_index::bit_set::BitSet;
-use rustc_serialize::{Decodable, Decoder, Encodable, Encoder};
 
 use super::*;
 
@@ -339,50 +336,3 @@ pub fn reverse_postorder<'a, 'tcx>(body: &'a Body<'tcx>) -> ReversePostorderIter
     let len = blocks.len();
     ReversePostorderIter { body, blocks, idx: len }
 }
-
-#[derive(Clone, Debug)]
-pub(super) struct PostorderCache {
-    cache: OnceCell<Vec<BasicBlock>>,
-}
-
-impl PostorderCache {
-    #[inline]
-    pub(super) fn new() -> Self {
-        PostorderCache { cache: OnceCell::new() }
-    }
-
-    /// Invalidates the postorder cache.
-    #[inline]
-    pub(super) fn invalidate(&mut self) {
-        self.cache = OnceCell::new();
-    }
-
-    /// Returns the `&[BasicBlocks]` represents the postorder graph for this MIR.
-    #[inline]
-    pub(super) fn compute(&self, body: &IndexVec<BasicBlock, BasicBlockData<'_>>) -> &[BasicBlock] {
-        self.cache.get_or_init(|| Postorder::new(body, START_BLOCK).map(|(bb, _)| bb).collect())
-    }
-}
-
-impl<S: Encoder> Encodable<S> for PostorderCache {
-    #[inline]
-    fn encode(&self, _s: &mut S) {}
-}
-
-impl<D: Decoder> Decodable<D> for PostorderCache {
-    #[inline]
-    fn decode(_: &mut D) -> Self {
-        Self::new()
-    }
-}
-
-impl<CTX> HashStable<CTX> for PostorderCache {
-    #[inline]
-    fn hash_stable(&self, _: &mut CTX, _: &mut StableHasher) {
-        // do nothing
-    }
-}
-
-TrivialTypeTraversalAndLiftImpls! {
-    PostorderCache,
-}
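`traversal.rs` loses its cache but keeps the `Postorder` iterator that `BasicBlocks::postorder` now drives directly. As a reminder of the invariant postorder provides — every successor is emitted before its parent, so reversing the order gives reverse postorder (RPO) — a recursive standalone sketch (rustc's iterator is non-recursive):

```rust
fn postorder(successors: &[Vec<usize>], start: usize) -> Vec<usize> {
    fn visit(node: usize, successors: &[Vec<usize>], seen: &mut [bool], out: &mut Vec<usize>) {
        seen[node] = true;
        for &succ in &successors[node] {
            if !seen[succ] {
                visit(succ, successors, seen, out);
            }
        }
        out.push(node); // emitted only after all reachable successors
    }
    let mut seen = vec![false; successors.len()];
    let mut out = Vec::new();
    visit(start, successors, &mut seen, &mut out);
    out
}

fn main() {
    // Diamond CFG: 0 -> {1, 2}, 1 -> 3, 2 -> 3.
    let g = vec![vec![1, 2], vec![3], vec![3], vec![]];
    assert_eq!(postorder(&g, 0), vec![3, 1, 2, 0]); // reversed: RPO [0, 2, 1, 3]
}
```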
diff --git a/compiler/rustc_middle/src/mir/type_foldable.rs b/compiler/rustc_middle/src/mir/type_foldable.rs
index 0705b4cff..988158321 100644
--- a/compiler/rustc_middle/src/mir/type_foldable.rs
+++ b/compiler/rustc_middle/src/mir/type_foldable.rs
@@ -30,26 +30,29 @@ TrivialTypeTraversalImpls! {
     }
 }
 
-impl<'tcx> TypeFoldable<'tcx> for &'tcx [InlineAsmTemplatePiece] {
-    fn try_fold_with<F: FallibleTypeFolder<'tcx>>(self, _folder: &mut F) -> Result<Self, F::Error> {
+impl<'tcx> TypeFoldable<TyCtxt<'tcx>> for &'tcx [InlineAsmTemplatePiece] {
+    fn try_fold_with<F: FallibleTypeFolder<TyCtxt<'tcx>>>(
+        self,
+        _folder: &mut F,
+    ) -> Result<Self, F::Error> {
         Ok(self)
     }
 }
 
-impl<'tcx> TypeFoldable<'tcx> for &'tcx [Span] {
-    fn try_fold_with<F: FallibleTypeFolder<'tcx>>(self, _folder: &mut F) -> Result<Self, F::Error> {
+impl<'tcx> TypeFoldable<TyCtxt<'tcx>> for &'tcx [Span] {
+    fn try_fold_with<F: FallibleTypeFolder<TyCtxt<'tcx>>>(
+        self,
+        _folder: &mut F,
+    ) -> Result<Self, F::Error> {
         Ok(self)
     }
 }
 
-impl<'tcx> TypeFoldable<'tcx> for &'tcx ty::List<PlaceElem<'tcx>> {
-    fn try_fold_with<F: FallibleTypeFolder<'tcx>>(self, folder: &mut F) -> Result<Self, F::Error> {
-        ty::util::fold_list(self, folder, |tcx, v| tcx.intern_place_elems(v))
-    }
-}
-
-impl<'tcx, R: Idx, C: Idx> TypeFoldable<'tcx> for BitMatrix<R, C> {
-    fn try_fold_with<F: FallibleTypeFolder<'tcx>>(self, _: &mut F) -> Result<Self, F::Error> {
-        Ok(self)
+impl<'tcx> TypeFoldable<TyCtxt<'tcx>> for &'tcx ty::List<PlaceElem<'tcx>> {
+    fn try_fold_with<F: FallibleTypeFolder<TyCtxt<'tcx>>>(
+        self,
+        folder: &mut F,
+    ) -> Result<Self, F::Error> {
+        ty::util::fold_list(self, folder, |tcx, v| tcx.mk_place_elems(v))
     }
 }
diff --git a/compiler/rustc_middle/src/mir/type_visitable.rs b/compiler/rustc_middle/src/mir/type_visitable.rs
deleted file mode 100644
index d44c6809b..000000000
--- a/compiler/rustc_middle/src/mir/type_visitable.rs
+++ /dev/null
@@ -1,9 +0,0 @@
-//! `TypeVisitable` implementations for MIR types
-
-use super::*;
-
-impl<'tcx, R: Idx, C: Idx> TypeVisitable<'tcx> for BitMatrix<R, C> {
-    fn visit_with<V: TypeVisitor<'tcx>>(&self, _: &mut V) -> ControlFlow<V::BreakTy> {
-        ControlFlow::Continue(())
-    }
-}
diff --git a/compiler/rustc_middle/src/mir/visit.rs b/compiler/rustc_middle/src/mir/visit.rs
index 1a264d2d5..5c056b299 100644
--- a/compiler/rustc_middle/src/mir/visit.rs
+++ b/compiler/rustc_middle/src/mir/visit.rs
@@ -323,7 +323,7 @@ macro_rules! make_mir_visitor {
                     self.visit_source_scope($(& $mutability)? *parent_scope);
                 }
                 if let Some((callee, callsite_span)) = inlined {
-                    let location = START_BLOCK.start_location();
+                    let location = Location::START;
 
                     self.visit_span($(& $mutability)? *callsite_span);
@@ -427,6 +427,7 @@ macro_rules! make_mir_visitor {
                         }
                     }
                 }
+                StatementKind::ConstEvalCounter => {}
                 StatementKind::Nop => {}
             }
         }
@@ -836,7 +837,7 @@ macro_rules! make_mir_visitor {
             } = var_debug_info;
 
             self.visit_source_info(source_info);
-            let location = START_BLOCK.start_location();
+            let location = Location::START;
             match value {
                 VarDebugInfoContents::Const(c) => self.visit_constant(c, location),
                 VarDebugInfoContents::Place(place) =>
@@ -1025,7 +1026,7 @@ macro_rules! super_body {
         $self.visit_span($(& $mutability)? $body.span);
 
         for const_ in &$($mutability)? $body.required_consts {
-            let location = START_BLOCK.start_location();
+            let location = Location::START;
             $self.visit_constant(const_, location);
         }
     }
@@ -1044,7 +1045,7 @@ macro_rules! visit_place_fns {
         self.visit_local(&mut place.local, context, location);
 
         if let Some(new_projection) = self.process_projection(&place.projection, location) {
-            place.projection = self.tcx().intern_place_elems(&new_projection);
+            place.projection = self.tcx().mk_place_elems(&new_projection);
         }
     }
@@ -1213,7 +1214,7 @@ impl<'tcx> MirVisitable<'tcx> for Option<Terminator<'tcx>> {
 
 /// Extra information passed to `visit_ty` and friends to give context
 /// about where the type etc appears.
-#[derive(Debug)]
+#[derive(Copy, Clone, Debug, Hash, Eq, PartialEq)]
 pub enum TyContext {
     LocalDecl {
         /// The index of the local variable we are visiting.