From 3e3e70d529d8c7d7c4d7bc4fefc9f109393b9245 Mon Sep 17 00:00:00 2001 From: Daniel Baumann Date: Wed, 17 Apr 2024 14:19:43 +0200 Subject: Merging upstream version 1.69.0+dfsg1. Signed-off-by: Daniel Baumann --- compiler/rustc_middle/src/mir/basic_blocks.rs | 99 ++++++++++++++------ compiler/rustc_middle/src/mir/coverage.rs | 5 +- .../rustc_middle/src/mir/graph_cyclic_cache.rs | 63 ------------- compiler/rustc_middle/src/mir/graphviz.rs | 2 +- .../rustc_middle/src/mir/interpret/allocation.rs | 82 +++++++++++++---- compiler/rustc_middle/src/mir/interpret/error.rs | 9 +- compiler/rustc_middle/src/mir/interpret/mod.rs | 17 +++- compiler/rustc_middle/src/mir/interpret/pointer.rs | 22 ++--- compiler/rustc_middle/src/mir/interpret/queries.rs | 2 +- compiler/rustc_middle/src/mir/interpret/value.rs | 18 ++-- compiler/rustc_middle/src/mir/mod.rs | 100 +++++++++++++-------- compiler/rustc_middle/src/mir/mono.rs | 9 +- compiler/rustc_middle/src/mir/predecessors.rs | 78 ---------------- compiler/rustc_middle/src/mir/pretty.rs | 24 ++--- compiler/rustc_middle/src/mir/query.rs | 73 ++++++++++++--- compiler/rustc_middle/src/mir/spanview.rs | 5 +- compiler/rustc_middle/src/mir/switch_sources.rs | 78 ---------------- compiler/rustc_middle/src/mir/syntax.rs | 22 +++-- compiler/rustc_middle/src/mir/tcx.rs | 18 ++-- compiler/rustc_middle/src/mir/traversal.rs | 50 ----------- compiler/rustc_middle/src/mir/type_foldable.rs | 29 +++--- compiler/rustc_middle/src/mir/type_visitable.rs | 9 -- compiler/rustc_middle/src/mir/visit.rs | 11 +-- 23 files changed, 377 insertions(+), 448 deletions(-) delete mode 100644 compiler/rustc_middle/src/mir/graph_cyclic_cache.rs delete mode 100644 compiler/rustc_middle/src/mir/predecessors.rs delete mode 100644 compiler/rustc_middle/src/mir/switch_sources.rs delete mode 100644 compiler/rustc_middle/src/mir/type_visitable.rs (limited to 'compiler/rustc_middle/src/mir') diff --git a/compiler/rustc_middle/src/mir/basic_blocks.rs b/compiler/rustc_middle/src/mir/basic_blocks.rs index 752cbdeae..b93871769 100644 --- a/compiler/rustc_middle/src/mir/basic_blocks.rs +++ b/compiler/rustc_middle/src/mir/basic_blocks.rs @@ -1,41 +1,46 @@ -use crate::mir::graph_cyclic_cache::GraphIsCyclicCache; -use crate::mir::predecessors::{PredecessorCache, Predecessors}; -use crate::mir::switch_sources::{SwitchSourceCache, SwitchSources}; -use crate::mir::traversal::PostorderCache; -use crate::mir::{BasicBlock, BasicBlockData, Successors, START_BLOCK}; +use crate::mir::traversal::Postorder; +use crate::mir::{BasicBlock, BasicBlockData, Successors, Terminator, TerminatorKind, START_BLOCK}; +use rustc_data_structures::fx::FxHashMap; use rustc_data_structures::graph; use rustc_data_structures::graph::dominators::{dominators, Dominators}; +use rustc_data_structures::stable_hasher::{HashStable, StableHasher}; +use rustc_data_structures::sync::OnceCell; use rustc_index::vec::IndexVec; +use rustc_serialize::{Decodable, Decoder, Encodable, Encoder}; +use smallvec::SmallVec; #[derive(Clone, TyEncodable, TyDecodable, Debug, HashStable, TypeFoldable, TypeVisitable)] pub struct BasicBlocks<'tcx> { basic_blocks: IndexVec>, - predecessor_cache: PredecessorCache, - switch_source_cache: SwitchSourceCache, - is_cyclic: GraphIsCyclicCache, - postorder_cache: PostorderCache, + cache: Cache, +} + +// Typically 95%+ of basic blocks have 4 or fewer predecessors. 
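// [Illustrative aside, not part of the upstream change] The inline capacity of
// four in the `SmallVec<[BasicBlock; 4]>` below means the common case described
// by the comment above stays on the stack; only blocks with five or more
// predecessors pay for a heap allocation. A minimal standalone sketch with the
// `smallvec` crate:
//
//     use smallvec::SmallVec;
//
//     let mut preds: SmallVec<[u32; 4]> = SmallVec::new();
//     preds.extend([1, 2, 3, 4]);
//     assert!(!preds.spilled()); // four elements still fit inline
//     preds.push(5);
//     assert!(preds.spilled()); // the fifth forces a heap allocation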
+pub type Predecessors = IndexVec<BasicBlock, SmallVec<[BasicBlock; 4]>>;
+
+pub type SwitchSources = FxHashMap<(BasicBlock, BasicBlock), SmallVec<[Option<u128>; 1]>>;
+
+#[derive(Clone, Default, Debug)]
+struct Cache {
+    predecessors: OnceCell<Predecessors>,
+    switch_sources: OnceCell<SwitchSources>,
+    is_cyclic: OnceCell<bool>,
+    postorder: OnceCell<Vec<BasicBlock>>,
 }
 
 impl<'tcx> BasicBlocks<'tcx> {
     #[inline]
     pub fn new(basic_blocks: IndexVec<BasicBlock, BasicBlockData<'tcx>>) -> Self {
-        BasicBlocks {
-            basic_blocks,
-            predecessor_cache: PredecessorCache::new(),
-            switch_source_cache: SwitchSourceCache::new(),
-            is_cyclic: GraphIsCyclicCache::new(),
-            postorder_cache: PostorderCache::new(),
-        }
+        BasicBlocks { basic_blocks, cache: Cache::default() }
     }
 
     /// Returns true if control-flow graph contains a cycle reachable from the `START_BLOCK`.
     #[inline]
     pub fn is_cfg_cyclic(&self) -> bool {
-        self.is_cyclic.is_cyclic(self)
+        *self.cache.is_cyclic.get_or_init(|| graph::is_cyclic(self))
     }
 
-    #[inline]
     pub fn dominators(&self) -> Dominators<BasicBlock> {
         dominators(&self)
     }
@@ -43,20 +48,46 @@ impl<'tcx> BasicBlocks<'tcx> {
     /// Returns predecessors for each basic block.
     #[inline]
     pub fn predecessors(&self) -> &Predecessors {
-        self.predecessor_cache.compute(&self.basic_blocks)
+        self.cache.predecessors.get_or_init(|| {
+            let mut preds = IndexVec::from_elem(SmallVec::new(), &self.basic_blocks);
+            for (bb, data) in self.basic_blocks.iter_enumerated() {
+                if let Some(term) = &data.terminator {
+                    for succ in term.successors() {
+                        preds[succ].push(bb);
+                    }
+                }
+            }
+            preds
+        })
     }
 
     /// Returns basic blocks in a postorder.
     #[inline]
     pub fn postorder(&self) -> &[BasicBlock] {
-        self.postorder_cache.compute(&self.basic_blocks)
+        self.cache.postorder.get_or_init(|| {
+            Postorder::new(&self.basic_blocks, START_BLOCK).map(|(bb, _)| bb).collect()
+        })
     }
 
     /// `switch_sources()[&(target, switch)]` returns a list of switch
     /// values that lead to a `target` block from a `switch` block.
     #[inline]
     pub fn switch_sources(&self) -> &SwitchSources {
-        self.switch_source_cache.compute(&self.basic_blocks)
+        self.cache.switch_sources.get_or_init(|| {
+            let mut switch_sources: SwitchSources = FxHashMap::default();
+            for (bb, data) in self.basic_blocks.iter_enumerated() {
+                if let Some(Terminator {
+                    kind: TerminatorKind::SwitchInt { targets, .. }, ..
+                }) = &data.terminator
+                {
+                    for (value, target) in targets.iter() {
+                        switch_sources.entry((target, bb)).or_default().push(Some(value));
+                    }
+                    switch_sources.entry((targets.otherwise(), bb)).or_default().push(None);
+                }
+            }
+            switch_sources
+        })
     }
 
     /// Returns mutable reference to basic blocks. Invalidates CFG cache.
@@ -88,10 +119,7 @@ impl<'tcx> BasicBlocks<'tcx> {
     /// All other methods that allow you to mutate the basic blocks also call this method
     /// themselves, thereby avoiding any risk of accidental cache invalidation.
     pub fn invalidate_cfg_cache(&mut self) {
-        self.predecessor_cache.invalidate();
-        self.switch_source_cache.invalidate();
-        self.is_cyclic.invalidate();
-        self.postorder_cache.invalidate();
+        self.cache = Cache::default();
     }
 }
 
@@ -145,3 +173,24 @@ impl<'tcx> graph::WithPredecessors for BasicBlocks<'tcx> {
         self.predecessors()[node].iter().copied()
     }
 }
+
+TrivialTypeTraversalAndLiftImpls! {
+    Cache,
+}
+
+impl<S: Encoder> Encodable<S> for Cache {
+    #[inline]
+    fn encode(&self, _s: &mut S) {}
+}
+
+impl<D: Decoder> Decodable<D> for Cache {
+    #[inline]
+    fn decode(_: &mut D) -> Self {
+        Default::default()
+    }
+}
+
+impl<CTX> HashStable<CTX> for Cache {
+    #[inline]
+    fn hash_stable(&self, _: &mut CTX, _: &mut StableHasher) {}
+}
diff --git a/compiler/rustc_middle/src/mir/coverage.rs b/compiler/rustc_middle/src/mir/coverage.rs
index e7bb3ab0b..db24dae11 100644
--- a/compiler/rustc_middle/src/mir/coverage.rs
+++ b/compiler/rustc_middle/src/mir/coverage.rs
@@ -135,7 +135,10 @@ impl Debug for CoverageKind {
                 "Expression({:?}) = {} {} {}",
                 id.index(),
                 lhs.index(),
-                if *op == Op::Add { "+" } else { "-" },
+                match op {
+                    Op::Add => "+",
+                    Op::Subtract => "-",
+                },
                 rhs.index(),
             ),
             Unreachable => write!(fmt, "Unreachable"),
diff --git a/compiler/rustc_middle/src/mir/graph_cyclic_cache.rs b/compiler/rustc_middle/src/mir/graph_cyclic_cache.rs
deleted file mode 100644
index f97bf2883..000000000
--- a/compiler/rustc_middle/src/mir/graph_cyclic_cache.rs
+++ /dev/null
@@ -1,63 +0,0 @@
-use rustc_data_structures::graph::{
-    self, DirectedGraph, WithNumNodes, WithStartNode, WithSuccessors,
-};
-use rustc_data_structures::stable_hasher::{HashStable, StableHasher};
-use rustc_data_structures::sync::OnceCell;
-use rustc_serialize::{Decodable, Decoder, Encodable, Encoder};
-
-/// Helper type to cache the result of `graph::is_cyclic`.
-#[derive(Clone, Debug)]
-pub(super) struct GraphIsCyclicCache {
-    cache: OnceCell<bool>,
-}
-
-impl GraphIsCyclicCache {
-    #[inline]
-    pub(super) fn new() -> Self {
-        GraphIsCyclicCache { cache: OnceCell::new() }
-    }
-
-    pub(super) fn is_cyclic<G>(&self, graph: &G) -> bool
-    where
-        G: ?Sized + DirectedGraph + WithStartNode + WithSuccessors + WithNumNodes,
-    {
-        *self.cache.get_or_init(|| graph::is_cyclic(graph))
-    }
-
-    /// Invalidates the cache.
-    #[inline]
-    pub(super) fn invalidate(&mut self) {
-        // Invalidating the cache requires mutating the MIR, which in turn requires a unique
-        // reference (`&mut`) to the `mir::Body`. Because of this, we can assume that all
-        // callers of `invalidate` have a unique reference to the MIR and thus to the
-        // cache. This means we never need to do synchronization when `invalidate` is called,
-        // we can simply reinitialize the `OnceCell`.
-        self.cache = OnceCell::new();
-    }
-}
-
-impl<S: Encoder> Encodable<S> for GraphIsCyclicCache {
-    #[inline]
-    fn encode(&self, s: &mut S) {
-        Encodable::encode(&(), s);
-    }
-}
-
-impl<D: Decoder> Decodable<D> for GraphIsCyclicCache {
-    #[inline]
-    fn decode(d: &mut D) -> Self {
-        let () = Decodable::decode(d);
-        Self::new()
-    }
-}
-
-impl<CTX> HashStable<CTX> for GraphIsCyclicCache {
-    #[inline]
-    fn hash_stable(&self, _: &mut CTX, _: &mut StableHasher) {
-        // do nothing
-    }
-}
-
-TrivialTypeTraversalAndLiftImpls!
{ - GraphIsCyclicCache, -} diff --git a/compiler/rustc_middle/src/mir/graphviz.rs b/compiler/rustc_middle/src/mir/graphviz.rs index 5de56dad0..cf6d46e1e 100644 --- a/compiler/rustc_middle/src/mir/graphviz.rs +++ b/compiler/rustc_middle/src/mir/graphviz.rs @@ -110,7 +110,7 @@ fn write_graph_label<'tcx, W: std::fmt::Write>( let decl = &body.local_decls[local]; write!(w, "let ")?; - if decl.mutability == Mutability::Mut { + if decl.mutability.is_mut() { write!(w, "mut ")?; } diff --git a/compiler/rustc_middle/src/mir/interpret/allocation.rs b/compiler/rustc_middle/src/mir/interpret/allocation.rs index 221105ac4..48375ed30 100644 --- a/compiler/rustc_middle/src/mir/interpret/allocation.rs +++ b/compiler/rustc_middle/src/mir/interpret/allocation.rs @@ -8,7 +8,8 @@ mod tests; use std::borrow::Cow; use std::fmt; use std::hash; -use std::ops::Range; +use std::hash::Hash; +use std::ops::{Deref, DerefMut, Range}; use std::ptr; use either::{Left, Right}; @@ -29,6 +30,39 @@ use provenance_map::*; pub use init_mask::{InitChunk, InitChunkIter}; +/// Functionality required for the bytes of an `Allocation`. +pub trait AllocBytes: + Clone + fmt::Debug + Eq + PartialEq + Hash + Deref + DerefMut +{ + /// Adjust the bytes to the specified alignment -- by default, this is a no-op. + fn adjust_to_align(self, _align: Align) -> Self; + + /// Create an `AllocBytes` from a slice of `u8`. + fn from_bytes<'a>(slice: impl Into>, _align: Align) -> Self; + + /// Create a zeroed `AllocBytes` of the specified size and alignment; + /// call the callback error handler if there is an error in allocating the memory. + fn zeroed(size: Size, _align: Align) -> Option; +} + +// Default `bytes` for `Allocation` is a `Box<[u8]>`. +impl AllocBytes for Box<[u8]> { + fn adjust_to_align(self, _align: Align) -> Self { + self + } + + fn from_bytes<'a>(slice: impl Into>, _align: Align) -> Self { + Box::<[u8]>::from(slice.into()) + } + + fn zeroed(size: Size, _align: Align) -> Option { + let bytes = Box::<[u8]>::try_new_zeroed_slice(size.bytes_usize()).ok()?; + // SAFETY: the box was zero-allocated, which is a valid initial value for Box<[u8]> + let bytes = unsafe { bytes.assume_init() }; + Some(bytes) + } +} + /// This type represents an Allocation in the Miri/CTFE core engine. /// /// Its public API is rather low-level, working directly with allocation offsets and a custom error @@ -38,10 +72,10 @@ pub use init_mask::{InitChunk, InitChunkIter}; // hashed. (see the `Hash` impl below for more details), so the impl is not derived. #[derive(Clone, Eq, PartialEq, TyEncodable, TyDecodable)] #[derive(HashStable)] -pub struct Allocation { +pub struct Allocation> { /// The actual bytes of the allocation. /// Note that the bytes of a pointer represent the offset of the pointer. - bytes: Box<[u8]>, + bytes: Bytes, /// Maps from byte addresses to extra provenance data for each pointer. /// Only the first byte of a pointer is inserted into the map; i.e., /// every entry in this map applies to `pointer_size` consecutive bytes starting @@ -220,14 +254,27 @@ impl AllocRange { } // The constructors are all without extra; the extra gets added by a machine hook later. 
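// [Illustrative aside, not part of the upstream change] The `AllocBytes` trait
// introduced above is the hook a host such as Miri can use to supply its own
// byte storage for allocations. A hedged sketch of a conforming impl over
// `Vec<u8>` (`VecBytes` is a hypothetical name; alignment handling is elided,
// mirroring the no-op defaults of the `Box<[u8]>` impl):
//
//     #[derive(Clone, Debug, PartialEq, Eq, Hash)]
//     struct VecBytes(Vec<u8>);
//
//     impl std::ops::Deref for VecBytes {
//         type Target = [u8];
//         fn deref(&self) -> &[u8] { &self.0 }
//     }
//     impl std::ops::DerefMut for VecBytes {
//         fn deref_mut(&mut self) -> &mut [u8] { &mut self.0 }
//     }
//     impl AllocBytes for VecBytes {
//         fn adjust_to_align(self, _align: Align) -> Self { self }
//         fn from_bytes<'a>(slice: impl Into<Cow<'a, [u8]>>, _align: Align) -> Self {
//             VecBytes(slice.into().into_owned())
//         }
//         fn zeroed(size: Size, _align: Align) -> Option<Self> {
//             Some(VecBytes(vec![0u8; size.bytes_usize()]))
//         }
//     }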
-impl Allocation { +impl Allocation { + /// Creates an allocation from an existing `Bytes` value - this is needed for miri FFI support + pub fn from_raw_bytes(bytes: Bytes, align: Align, mutability: Mutability) -> Self { + let size = Size::from_bytes(bytes.len()); + Self { + bytes, + provenance: ProvenanceMap::new(), + init_mask: InitMask::new(size, true), + align, + mutability, + extra: (), + } + } + /// Creates an allocation initialized by the given bytes pub fn from_bytes<'a>( slice: impl Into>, align: Align, mutability: Mutability, ) -> Self { - let bytes = Box::<[u8]>::from(slice.into()); + let bytes = Bytes::from_bytes(slice, align); let size = Size::from_bytes(bytes.len()); Self { bytes, @@ -248,7 +295,7 @@ impl Allocation { /// /// If `panic_on_fail` is true, this will never return `Err`. pub fn uninit<'tcx>(size: Size, align: Align, panic_on_fail: bool) -> InterpResult<'tcx, Self> { - let bytes = Box::<[u8]>::try_new_zeroed_slice(size.bytes_usize()).map_err(|_| { + let bytes = Bytes::zeroed(size, align).ok_or_else(|| { // This results in an error that can happen non-deterministically, since the memory // available to the compiler can change between runs. Normally queries are always // deterministic. However, we can be non-deterministic here because all uses of const @@ -262,8 +309,7 @@ impl Allocation { }); InterpError::ResourceExhaustion(ResourceExhaustionInfo::MemoryExhausted) })?; - // SAFETY: the box was zero-allocated, which is a valid initial value for Box<[u8]> - let bytes = unsafe { bytes.assume_init() }; + Ok(Allocation { bytes, provenance: ProvenanceMap::new(), @@ -275,7 +321,7 @@ impl Allocation { } } -impl Allocation { +impl Allocation { /// Adjust allocation from the ones in tcx to a custom Machine instance /// with a different Provenance and Extra type. pub fn adjust_from_tcx( @@ -283,9 +329,11 @@ impl Allocation { cx: &impl HasDataLayout, extra: Extra, mut adjust_ptr: impl FnMut(Pointer) -> Result, Err>, - ) -> Result, Err> { - // Compute new pointer provenance, which also adjusts the bytes. - let mut bytes = self.bytes; + ) -> Result, Err> { + // Compute new pointer provenance, which also adjusts the bytes, and realign the pointer if + // necessary. + let mut bytes = self.bytes.adjust_to_align(self.align); + let mut new_provenance = Vec::with_capacity(self.provenance.ptrs().len()); let ptr_size = cx.data_layout().pointer_size.bytes_usize(); let endian = cx.data_layout().endian; @@ -311,7 +359,7 @@ impl Allocation { } /// Raw accessors. Provide access to otherwise private bytes. -impl Allocation { +impl Allocation { pub fn len(&self) -> usize { self.bytes.len() } @@ -340,7 +388,11 @@ impl Allocation { } /// Byte accessors. -impl Allocation { +impl Allocation { + pub fn base_addr(&self) -> *const u8 { + self.bytes.as_ptr() + } + /// This is the entirely abstraction-violating way to just grab the raw bytes without /// caring about provenance or initialization. /// @@ -412,7 +464,7 @@ impl Allocation { } /// Reading and writing. -impl Allocation { +impl Allocation { /// Sets the init bit for the given range. 
fn mark_init(&mut self, range: AllocRange, is_init: bool) { if range.size.bytes() == 0 { diff --git a/compiler/rustc_middle/src/mir/interpret/error.rs b/compiler/rustc_middle/src/mir/interpret/error.rs index bd9cd53e1..c5137cf06 100644 --- a/compiler/rustc_middle/src/mir/interpret/error.rs +++ b/compiler/rustc_middle/src/mir/interpret/error.rs @@ -323,7 +323,7 @@ impl fmt::Display for UndefinedBehaviorInfo { write!( f, "{msg}{pointer} is a dangling pointer (it has no provenance)", - pointer = Pointer::>::from_addr(*i), + pointer = Pointer::>::from_addr_invalid(*i), ) } AlignmentCheckFailed { required, has } => write!( @@ -430,8 +430,10 @@ pub enum ResourceExhaustionInfo { /// /// The exact limit is set by the `const_eval_limit` attribute. StepLimitReached, - /// There is not enough memory to perform an allocation. + /// There is not enough memory (on the host) to perform an allocation. MemoryExhausted, + /// The address space (of the target) is full. + AddressSpaceFull, } impl fmt::Display for ResourceExhaustionInfo { @@ -447,6 +449,9 @@ impl fmt::Display for ResourceExhaustionInfo { MemoryExhausted => { write!(f, "tried to allocate more memory than available to compiler") } + AddressSpaceFull => { + write!(f, "there are no more free addresses in the address space") + } } } } diff --git a/compiler/rustc_middle/src/mir/interpret/mod.rs b/compiler/rustc_middle/src/mir/interpret/mod.rs index 5f425a287..1766d7a66 100644 --- a/compiler/rustc_middle/src/mir/interpret/mod.rs +++ b/compiler/rustc_middle/src/mir/interpret/mod.rs @@ -110,7 +110,7 @@ use rustc_hir::def_id::DefId; use rustc_macros::HashStable; use rustc_middle::ty::print::with_no_trimmed_paths; use rustc_serialize::{Decodable, Encodable}; -use rustc_target::abi::Endian; +use rustc_target::abi::{AddressSpace, Endian, HasDataLayout}; use crate::mir; use crate::ty::codec::{TyDecoder, TyEncoder}; @@ -127,8 +127,8 @@ pub use self::error::{ pub use self::value::{get_slice_bytes, ConstAlloc, ConstValue, Scalar}; pub use self::allocation::{ - alloc_range, AllocError, AllocRange, AllocResult, Allocation, ConstAllocation, InitChunk, - InitChunkIter, + alloc_range, AllocBytes, AllocError, AllocRange, AllocResult, Allocation, ConstAllocation, + InitChunk, InitChunkIter, }; pub use self::pointer::{Pointer, PointerArithmetic, Provenance}; @@ -438,6 +438,17 @@ impl<'tcx> GlobalAlloc<'tcx> { _ => bug!("expected vtable, got {:?}", self), } } + + /// The address space that this `GlobalAlloc` should be placed in. + #[inline] + pub fn address_space(&self, cx: &impl HasDataLayout) -> AddressSpace { + match self { + GlobalAlloc::Function(..) => cx.data_layout().instruction_address_space, + GlobalAlloc::Static(..) | GlobalAlloc::Memory(..) | GlobalAlloc::VTable(..) 
=> { + AddressSpace::DATA + } + } + } } pub(crate) struct AllocMap<'tcx> { diff --git a/compiler/rustc_middle/src/mir/interpret/pointer.rs b/compiler/rustc_middle/src/mir/interpret/pointer.rs index b08309910..60927eed8 100644 --- a/compiler/rustc_middle/src/mir/interpret/pointer.rs +++ b/compiler/rustc_middle/src/mir/interpret/pointer.rs @@ -19,29 +19,29 @@ pub trait PointerArithmetic: HasDataLayout { #[inline(always)] fn max_size_of_val(&self) -> Size { - Size::from_bytes(self.machine_isize_max()) + Size::from_bytes(self.target_isize_max()) } #[inline] - fn machine_usize_max(&self) -> u64 { + fn target_usize_max(&self) -> u64 { self.pointer_size().unsigned_int_max().try_into().unwrap() } #[inline] - fn machine_isize_min(&self) -> i64 { + fn target_isize_min(&self) -> i64 { self.pointer_size().signed_int_min().try_into().unwrap() } #[inline] - fn machine_isize_max(&self) -> i64 { + fn target_isize_max(&self) -> i64 { self.pointer_size().signed_int_max().try_into().unwrap() } #[inline] - fn machine_usize_to_isize(&self, val: u64) -> i64 { + fn target_usize_to_isize(&self, val: u64) -> i64 { let val = val as i64; // Now wrap-around into the machine_isize range. - if val > self.machine_isize_max() { + if val > self.target_isize_max() { // This can only happen if the ptr size is < 64, so we know max_usize_plus_1 fits into // i64. debug_assert!(self.pointer_size().bits() < 64); @@ -76,11 +76,11 @@ pub trait PointerArithmetic: HasDataLayout { let n = i.unsigned_abs(); if i >= 0 { let (val, over) = self.overflowing_offset(val, n); - (val, over || i > self.machine_isize_max()) + (val, over || i > self.target_isize_max()) } else { let res = val.overflowing_sub(n); let (val, over) = self.truncate_to_ptr(res); - (val, over || i < self.machine_isize_min()) + (val, over || i < self.target_isize_min()) } } @@ -251,14 +251,16 @@ impl Pointer> { } impl Pointer> { + /// Creates a pointer to the given address, with invalid provenance (i.e., cannot be used for + /// any memory access). 
#[inline(always)] - pub fn from_addr(addr: u64) -> Self { + pub fn from_addr_invalid(addr: u64) -> Self { Pointer { provenance: None, offset: Size::from_bytes(addr) } } #[inline(always)] pub fn null() -> Self { - Pointer::from_addr(0) + Pointer::from_addr_invalid(0) } } diff --git a/compiler/rustc_middle/src/mir/interpret/queries.rs b/compiler/rustc_middle/src/mir/interpret/queries.rs index b6c6e9d55..856d821a5 100644 --- a/compiler/rustc_middle/src/mir/interpret/queries.rs +++ b/compiler/rustc_middle/src/mir/interpret/queries.rs @@ -2,7 +2,7 @@ use super::{ErrorHandled, EvalToConstValueResult, EvalToValTreeResult, GlobalId} use crate::mir; use crate::ty::subst::InternalSubsts; -use crate::ty::visit::TypeVisitable; +use crate::ty::visit::TypeVisitableExt; use crate::ty::{self, query::TyCtxtAt, query::TyCtxtEnsure, TyCtxt}; use rustc_hir::def::DefKind; use rustc_hir::def_id::DefId; diff --git a/compiler/rustc_middle/src/mir/interpret/value.rs b/compiler/rustc_middle/src/mir/interpret/value.rs index 88fb14eb3..36dbbe4bf 100644 --- a/compiler/rustc_middle/src/mir/interpret/value.rs +++ b/compiler/rustc_middle/src/mir/interpret/value.rs @@ -75,8 +75,8 @@ impl<'tcx> ConstValue<'tcx> { self.try_to_scalar_int()?.try_into().ok() } - pub fn try_to_machine_usize(&self, tcx: TyCtxt<'tcx>) -> Option { - self.try_to_scalar_int()?.try_to_machine_usize(tcx).ok() + pub fn try_to_target_usize(&self, tcx: TyCtxt<'tcx>) -> Option { + self.try_to_scalar_int()?.try_to_target_usize(tcx).ok() } pub fn try_to_bits_for_ty( @@ -97,8 +97,8 @@ impl<'tcx> ConstValue<'tcx> { ConstValue::Scalar(Scalar::from_u64(i)) } - pub fn from_machine_usize(i: u64, cx: &impl HasDataLayout) -> Self { - ConstValue::Scalar(Scalar::from_machine_usize(i, cx)) + pub fn from_target_usize(i: u64, cx: &impl HasDataLayout) -> Self { + ConstValue::Scalar(Scalar::from_target_usize(i, cx)) } } @@ -241,7 +241,7 @@ impl Scalar { } #[inline] - pub fn from_machine_usize(i: u64, cx: &impl HasDataLayout) -> Self { + pub fn from_target_usize(i: u64, cx: &impl HasDataLayout) -> Self { Self::from_uint(i, cx.data_layout().pointer_size) } @@ -268,7 +268,7 @@ impl Scalar { } #[inline] - pub fn from_machine_isize(i: i64, cx: &impl HasDataLayout) -> Self { + pub fn from_target_isize(i: i64, cx: &impl HasDataLayout) -> Self { Self::from_int(i, cx.data_layout().pointer_size) } @@ -322,7 +322,7 @@ impl<'tcx, Prov: Provenance> Scalar { Right(ptr) => Ok(ptr.into()), Left(bits) => { let addr = u64::try_from(bits).unwrap(); - Ok(Pointer::from_addr(addr)) + Ok(Pointer::from_addr_invalid(addr)) } } } @@ -429,7 +429,7 @@ impl<'tcx, Prov: Provenance> Scalar { /// Converts the scalar to produce a machine-pointer-sized unsigned integer. /// Fails if the scalar is a pointer. - pub fn to_machine_usize(self, cx: &impl HasDataLayout) -> InterpResult<'tcx, u64> { + pub fn to_target_usize(self, cx: &impl HasDataLayout) -> InterpResult<'tcx, u64> { let b = self.to_uint(cx.data_layout().pointer_size)?; Ok(u64::try_from(b).unwrap()) } @@ -469,7 +469,7 @@ impl<'tcx, Prov: Provenance> Scalar { /// Converts the scalar to produce a machine-pointer-sized signed integer. /// Fails if the scalar is a pointer. 
- pub fn to_machine_isize(self, cx: &impl HasDataLayout) -> InterpResult<'tcx, i64> { + pub fn to_target_isize(self, cx: &impl HasDataLayout) -> InterpResult<'tcx, i64> { let b = self.to_int(cx.data_layout().pointer_size)?; Ok(i64::try_from(b).unwrap()) } diff --git a/compiler/rustc_middle/src/mir/mod.rs b/compiler/rustc_middle/src/mir/mod.rs index e52b243ec..99cdb769d 100644 --- a/compiler/rustc_middle/src/mir/mod.rs +++ b/compiler/rustc_middle/src/mir/mod.rs @@ -9,7 +9,7 @@ use crate::mir::visit::MirVisitable; use crate::ty::codec::{TyDecoder, TyEncoder}; use crate::ty::fold::{FallibleTypeFolder, TypeFoldable}; use crate::ty::print::{FmtPrinter, Printer}; -use crate::ty::visit::{TypeVisitable, TypeVisitor}; +use crate::ty::visit::{TypeVisitable, TypeVisitableExt, TypeVisitor}; use crate::ty::{self, DefIdTree, List, Ty, TyCtxt}; use crate::ty::{AdtDef, InstanceDef, ScalarInt, UserTypeAnnotationIndex}; use crate::ty::{GenericArg, InternalSubsts, SubstsRef}; @@ -27,7 +27,6 @@ use polonius_engine::Atom; pub use rustc_ast::Mutability; use rustc_data_structures::fx::FxHashSet; use rustc_data_structures::graph::dominators::Dominators; -use rustc_index::bit_set::BitMatrix; use rustc_index::vec::{Idx, IndexVec}; use rustc_serialize::{Decodable, Encodable}; use rustc_span::symbol::Symbol; @@ -47,25 +46,21 @@ mod basic_blocks; pub mod coverage; mod generic_graph; pub mod generic_graphviz; -mod graph_cyclic_cache; pub mod graphviz; pub mod interpret; pub mod mono; pub mod patch; -mod predecessors; pub mod pretty; mod query; pub mod spanview; mod syntax; pub use syntax::*; -mod switch_sources; pub mod tcx; pub mod terminator; pub use terminator::*; pub mod traversal; mod type_foldable; -mod type_visitable; pub mod visit; pub use self::generic_graph::graphviz_safe_def_name; @@ -419,11 +414,7 @@ impl<'tcx> Body<'tcx> { (self.arg_count + 1..self.local_decls.len()).filter_map(move |index| { let local = Local::new(index); let decl = &self.local_decls[local]; - if decl.is_user_variable() && decl.mutability == Mutability::Mut { - Some(local) - } else { - None - } + (decl.is_user_variable() && decl.mutability.is_mut()).then_some(local) }) } @@ -712,7 +703,11 @@ pub enum BindingForm<'tcx> { RefForGuard, } -TrivialTypeTraversalAndLiftImpls! { BindingForm<'tcx>, } +TrivialTypeTraversalAndLiftImpls! { + for<'tcx> { + BindingForm<'tcx>, + } +} mod binding_form_impl { use rustc_data_structures::stable_hasher::{HashStable, StableHasher}; @@ -905,6 +900,8 @@ pub enum LocalInfo<'tcx> { AggregateTemp, /// A temporary created during the pass `Derefer` to avoid it's retagging DerefTemp, + /// A temporary created for borrow checking. + FakeBorrow, } impl<'tcx> LocalDecl<'tcx> { @@ -1464,6 +1461,7 @@ impl Debug for Statement<'_> { } Coverage(box ref coverage) => write!(fmt, "Coverage::{:?}", coverage.kind), Intrinsic(box ref intrinsic) => write!(fmt, "{intrinsic}"), + ConstEvalCounter => write!(fmt, "ConstEvalCounter"), Nop => write!(fmt, "nop"), } } @@ -1622,7 +1620,7 @@ impl<'tcx> Place<'tcx> { &v }; - Place { local: self.local, projection: tcx.intern_place_elems(new_projections) } + Place { local: self.local, projection: tcx.mk_place_elems(new_projections) } } } @@ -1644,6 +1642,14 @@ impl<'tcx> PlaceRef<'tcx> { } } + /// Returns `true` if this `Place` contains a `Deref` projection. + /// + /// If `Place::is_indirect` returns false, the caller knows that the `Place` refers to the + /// same region of memory as its base. 
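// [Illustrative aside, not part of the upstream change] For the `is_indirect`
// query documented just above and defined just below: a place like `(*_1).f`
// contains a `Deref` projection, so it may name memory disjoint from `_1`
// itself and the method returns true; `_1.f` stays within the storage of `_1`
// and returns false.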
+ pub fn is_indirect(&self) -> bool { + self.projection.iter().any(|elem| elem.is_indirect()) + } + /// If MirPhase >= Derefered and if projection contains Deref, /// It's guaranteed to be in the first place pub fn has_deref(&self) -> bool { @@ -2102,10 +2108,7 @@ impl<'tcx> Debug for Rvalue<'tcx> { AggregateKind::Closure(def_id, substs) => ty::tls::with(|tcx| { let name = if tcx.sess.opts.unstable_opts.span_free_formats { let substs = tcx.lift(substs).unwrap(); - format!( - "[closure@{}]", - tcx.def_path_str_with_substs(def_id.to_def_id(), substs), - ) + format!("[closure@{}]", tcx.def_path_str_with_substs(def_id, substs),) } else { let span = tcx.def_span(def_id); format!( @@ -2116,11 +2119,17 @@ impl<'tcx> Debug for Rvalue<'tcx> { let mut struct_fmt = fmt.debug_struct(&name); // FIXME(project-rfc-2229#48): This should be a list of capture names/places - if let Some(upvars) = tcx.upvars_mentioned(def_id) { + if let Some(def_id) = def_id.as_local() + && let Some(upvars) = tcx.upvars_mentioned(def_id) + { for (&var_id, place) in iter::zip(upvars.keys(), places) { let var_name = tcx.hir().name(var_id); struct_fmt.field(var_name.as_str(), place); } + } else { + for (index, place) in places.iter().enumerate() { + struct_fmt.field(&format!("{index}"), place); + } } struct_fmt.finish() @@ -2131,11 +2140,17 @@ impl<'tcx> Debug for Rvalue<'tcx> { let mut struct_fmt = fmt.debug_struct(&name); // FIXME(project-rfc-2229#48): This should be a list of capture names/places - if let Some(upvars) = tcx.upvars_mentioned(def_id) { + if let Some(def_id) = def_id.as_local() + && let Some(upvars) = tcx.upvars_mentioned(def_id) + { for (&var_id, place) in iter::zip(upvars.keys(), places) { let var_name = tcx.hir().name(var_id); struct_fmt.field(var_name.as_str(), place); } + } else { + for (index, place) in places.iter().enumerate() { + struct_fmt.field(&format!("{index}"), place); + } } struct_fmt.finish() @@ -2335,13 +2350,17 @@ impl<'tcx> ConstantKind<'tcx> { } #[inline] - pub fn try_eval_usize(&self, tcx: TyCtxt<'tcx>, param_env: ty::ParamEnv<'tcx>) -> Option { + pub fn try_eval_target_usize( + &self, + tcx: TyCtxt<'tcx>, + param_env: ty::ParamEnv<'tcx>, + ) -> Option { match self { - Self::Ty(ct) => ct.try_eval_usize(tcx, param_env), - Self::Val(val, _) => val.try_to_machine_usize(tcx), + Self::Ty(ct) => ct.try_eval_target_usize(tcx, param_env), + Self::Val(val, _) => val.try_to_target_usize(tcx), Self::Unevaluated(uneval, _) => { match tcx.const_eval_resolve(param_env, *uneval, None) { - Ok(val) => val.try_to_machine_usize(tcx), + Ok(val) => val.try_to_target_usize(tcx), Err(_) => None, } } @@ -2478,7 +2497,7 @@ impl<'tcx> ConstantKind<'tcx> { }; debug!("expr.kind: {:?}", expr.kind); - let ty = tcx.type_of(def.def_id_for_type_of()); + let ty = tcx.type_of(def.def_id_for_type_of()).subst_identity(); debug!(?ty); // FIXME(const_generics): We currently have to special case parameters because `min_const_generics` @@ -2506,20 +2525,19 @@ impl<'tcx> ConstantKind<'tcx> { } let hir_id = tcx.hir().local_def_id_to_hir_id(def.did); - let parent_substs = if let Some(parent_hir_id) = tcx.hir().opt_parent_id(hir_id) { - if let Some(parent_did) = tcx.hir().opt_local_def_id(parent_hir_id) { - InternalSubsts::identity_for_item(tcx, parent_did.to_def_id()) - } else { - tcx.mk_substs(Vec::>::new().into_iter()) - } + let parent_substs = if let Some(parent_hir_id) = tcx.hir().opt_parent_id(hir_id) + && let Some(parent_did) = parent_hir_id.as_owner() + { + InternalSubsts::identity_for_item(tcx, parent_did.to_def_id()) } else 
{ - tcx.mk_substs(Vec::>::new().into_iter()) + List::empty() }; debug!(?parent_substs); let did = def.did.to_def_id(); let child_substs = InternalSubsts::identity_for_item(tcx, did); - let substs = tcx.mk_substs(parent_substs.into_iter().chain(child_substs.into_iter())); + let substs = + tcx.mk_substs_from_iter(parent_substs.into_iter().chain(child_substs.into_iter())); debug!(?substs); let hir_id = tcx.hir().local_def_id_to_hir_id(def.did); @@ -2737,8 +2755,11 @@ impl UserTypeProjection { } } -impl<'tcx> TypeFoldable<'tcx> for UserTypeProjection { - fn try_fold_with>(self, folder: &mut F) -> Result { +impl<'tcx> TypeFoldable> for UserTypeProjection { + fn try_fold_with>>( + self, + folder: &mut F, + ) -> Result { Ok(UserTypeProjection { base: self.base.try_fold_with(folder)?, projs: self.projs.try_fold_with(folder)?, @@ -2746,8 +2767,11 @@ impl<'tcx> TypeFoldable<'tcx> for UserTypeProjection { } } -impl<'tcx> TypeVisitable<'tcx> for UserTypeProjection { - fn visit_with>(&self, visitor: &mut Vs) -> ControlFlow { +impl<'tcx> TypeVisitable> for UserTypeProjection { + fn visit_with>>( + &self, + visitor: &mut Vs, + ) -> ControlFlow { self.base.visit_with(visitor) // Note: there's nothing in `self.proj` to visit. } @@ -2884,7 +2908,7 @@ fn pretty_print_const_value<'tcx>( // the `destructure_const` query with an empty `ty::ParamEnv` without // introducing ICEs (e.g. via `layout_of`) from missing bounds. // E.g. `transmute([0usize; 2]): (u8, *mut T)` needs to know `T: Sized` - // to be able to destructure the tuple into `(0u8, *mut T) + // to be able to destructure the tuple into `(0u8, *mut T)` // // FIXME(eddyb) for `--emit=mir`/`-Z dump-mir`, we should provide the // correct `ty::ParamEnv` to allow printing *all* constant values. @@ -3049,7 +3073,7 @@ impl Location { if self.block == other.block { self.statement_index <= other.statement_index } else { - dominators.is_dominated_by(other.block, self.block) + dominators.dominates(self.block, other.block) } } } diff --git a/compiler/rustc_middle/src/mir/mono.rs b/compiler/rustc_middle/src/mir/mono.rs index 1e8d5f7ea..7a05ee2ff 100644 --- a/compiler/rustc_middle/src/mir/mono.rs +++ b/compiler/rustc_middle/src/mir/mono.rs @@ -318,16 +318,19 @@ impl<'tcx> CodegenUnit<'tcx> { base_n::encode(hash, base_n::CASE_INSENSITIVE) } - pub fn estimate_size(&mut self, tcx: TyCtxt<'tcx>) { + pub fn create_size_estimate(&mut self, tcx: TyCtxt<'tcx>) { // Estimate the size of a codegen unit as (approximately) the number of MIR // statements it corresponds to. self.size_estimate = Some(self.items.keys().map(|mi| mi.size_estimate(tcx)).sum()); } #[inline] + /// Should only be called if [`create_size_estimate`] has previously been called. + /// + /// [`create_size_estimate`]: Self::create_size_estimate pub fn size_estimate(&self) -> usize { - // Should only be called if `estimate_size` has previously been called. - self.size_estimate.expect("estimate_size must be called before getting a size_estimate") + self.size_estimate + .expect("create_size_estimate must be called before getting a size_estimate") } pub fn modify_size_estimate(&mut self, delta: usize) { diff --git a/compiler/rustc_middle/src/mir/predecessors.rs b/compiler/rustc_middle/src/mir/predecessors.rs deleted file mode 100644 index 5f1fadaf3..000000000 --- a/compiler/rustc_middle/src/mir/predecessors.rs +++ /dev/null @@ -1,78 +0,0 @@ -//! Lazily compute the reverse control-flow graph for the MIR. 
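// [Illustrative aside, not part of the upstream change] The deletion below is
// the counterpart of the new `Cache` in basic_blocks.rs: the lazily computed
// predecessor map now lives in `BasicBlocks::predecessors`. A standalone sketch
// of the same lazily-initialized, invalidate-by-replacement pattern, using
// std's `OnceLock` in place of rustc's `OnceCell` (all names illustrative):
//
//     use std::sync::OnceLock;
//
//     #[derive(Default)]
//     struct LazyPreds {
//         preds: OnceLock<Vec<Vec<usize>>>,
//     }
//
//     impl LazyPreds {
//         // Computed at most once; later calls reuse the cached value.
//         fn preds(&self, succs: &[Vec<usize>]) -> &[Vec<usize>] {
//             self.preds.get_or_init(|| {
//                 let mut preds = vec![Vec::new(); succs.len()];
//                 for (bb, ss) in succs.iter().enumerate() {
//                     for &s in ss {
//                         preds[s].push(bb);
//                     }
//                 }
//                 preds
//             })
//         }
//         // `&mut self` proves unique access, so replacing the cell is safe.
//         fn invalidate(&mut self) {
//             self.preds = OnceLock::new();
//         }
//     }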
- -use rustc_data_structures::stable_hasher::{HashStable, StableHasher}; -use rustc_data_structures::sync::OnceCell; -use rustc_index::vec::IndexVec; -use rustc_serialize::{Decodable, Decoder, Encodable, Encoder}; -use smallvec::SmallVec; - -use crate::mir::{BasicBlock, BasicBlockData}; - -// Typically 95%+ of basic blocks have 4 or fewer predecessors. -pub type Predecessors = IndexVec>; - -#[derive(Clone, Debug)] -pub(super) struct PredecessorCache { - cache: OnceCell, -} - -impl PredecessorCache { - #[inline] - pub(super) fn new() -> Self { - PredecessorCache { cache: OnceCell::new() } - } - - /// Invalidates the predecessor cache. - #[inline] - pub(super) fn invalidate(&mut self) { - // Invalidating the predecessor cache requires mutating the MIR, which in turn requires a - // unique reference (`&mut`) to the `mir::Body`. Because of this, we can assume that all - // callers of `invalidate` have a unique reference to the MIR and thus to the predecessor - // cache. This means we never need to do synchronization when `invalidate` is called, we can - // simply reinitialize the `OnceCell`. - self.cache = OnceCell::new(); - } - - /// Returns the predecessor graph for this MIR. - #[inline] - pub(super) fn compute( - &self, - basic_blocks: &IndexVec>, - ) -> &Predecessors { - self.cache.get_or_init(|| { - let mut preds = IndexVec::from_elem(SmallVec::new(), basic_blocks); - for (bb, data) in basic_blocks.iter_enumerated() { - if let Some(term) = &data.terminator { - for succ in term.successors() { - preds[succ].push(bb); - } - } - } - - preds - }) - } -} - -impl Encodable for PredecessorCache { - #[inline] - fn encode(&self, _s: &mut S) {} -} - -impl Decodable for PredecessorCache { - #[inline] - fn decode(_: &mut D) -> Self { - Self::new() - } -} - -impl HashStable for PredecessorCache { - #[inline] - fn hash_stable(&self, _: &mut CTX, _: &mut StableHasher) { - // do nothing - } -} - -TrivialTypeTraversalAndLiftImpls! { - PredecessorCache, -} diff --git a/compiler/rustc_middle/src/mir/pretty.rs b/compiler/rustc_middle/src/mir/pretty.rs index 40289af25..d8829e3e7 100644 --- a/compiler/rustc_middle/src/mir/pretty.rs +++ b/compiler/rustc_middle/src/mir/pretty.rs @@ -12,8 +12,8 @@ use rustc_data_structures::fx::FxHashMap; use rustc_hir::def_id::DefId; use rustc_index::vec::Idx; use rustc_middle::mir::interpret::{ - alloc_range, read_target_uint, AllocId, Allocation, ConstAllocation, ConstValue, GlobalAlloc, - Pointer, Provenance, + alloc_range, read_target_uint, AllocBytes, AllocId, Allocation, ConstAllocation, ConstValue, + GlobalAlloc, Pointer, Provenance, }; use rustc_middle::mir::visit::Visitor; use rustc_middle::mir::*; @@ -580,7 +580,7 @@ fn write_scope_tree( continue; } - let mut_str = if local_decl.mutability == Mutability::Mut { "mut " } else { "" }; + let mut_str = local_decl.mutability.prefix_str(); let mut indented_decl = format!("{0:1$}let {2}{3:?}: {4:?}", INDENT, indent, mut_str, local, local_decl.ty); @@ -787,21 +787,21 @@ pub fn write_allocations<'tcx>( /// After the hex dump, an ascii dump follows, replacing all unprintable characters (control /// characters or characters whose value is larger than 127) with a `.` /// This also prints provenance adequately. 
-pub fn display_allocation<'a, 'tcx, Prov: Provenance, Extra>( +pub fn display_allocation<'a, 'tcx, Prov: Provenance, Extra, Bytes: AllocBytes>( tcx: TyCtxt<'tcx>, - alloc: &'a Allocation, -) -> RenderAllocation<'a, 'tcx, Prov, Extra> { + alloc: &'a Allocation, +) -> RenderAllocation<'a, 'tcx, Prov, Extra, Bytes> { RenderAllocation { tcx, alloc } } #[doc(hidden)] -pub struct RenderAllocation<'a, 'tcx, Prov: Provenance, Extra> { +pub struct RenderAllocation<'a, 'tcx, Prov: Provenance, Extra, Bytes: AllocBytes> { tcx: TyCtxt<'tcx>, - alloc: &'a Allocation, + alloc: &'a Allocation, } -impl<'a, 'tcx, Prov: Provenance, Extra> std::fmt::Display - for RenderAllocation<'a, 'tcx, Prov, Extra> +impl<'a, 'tcx, Prov: Provenance, Extra, Bytes: AllocBytes> std::fmt::Display + for RenderAllocation<'a, 'tcx, Prov, Extra, Bytes> { fn fmt(&self, w: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { let RenderAllocation { tcx, alloc } = *self; @@ -845,9 +845,9 @@ fn write_allocation_newline( /// The `prefix` argument allows callers to add an arbitrary prefix before each line (even if there /// is only one line). Note that your prefix should contain a trailing space as the lines are /// printed directly after it. -fn write_allocation_bytes<'tcx, Prov: Provenance, Extra>( +fn write_allocation_bytes<'tcx, Prov: Provenance, Extra, Bytes: AllocBytes>( tcx: TyCtxt<'tcx>, - alloc: &Allocation, + alloc: &Allocation, w: &mut dyn std::fmt::Write, prefix: &str, ) -> std::fmt::Result { diff --git a/compiler/rustc_middle/src/mir/query.rs b/compiler/rustc_middle/src/mir/query.rs index a8a453222..b964c1852 100644 --- a/compiler/rustc_middle/src/mir/query.rs +++ b/compiler/rustc_middle/src/mir/query.rs @@ -8,7 +8,7 @@ use rustc_errors::ErrorGuaranteed; use rustc_hir as hir; use rustc_hir::def_id::{DefId, LocalDefId}; use rustc_index::bit_set::BitMatrix; -use rustc_index::vec::IndexVec; +use rustc_index::vec::{Idx, IndexVec}; use rustc_span::Span; use rustc_target::abi::VariantIdx; use smallvec::SmallVec; @@ -135,11 +135,20 @@ rustc_index::newtype_index! { pub struct GeneratorSavedLocal {} } +#[derive(Clone, Debug, TyEncodable, TyDecodable, HashStable, TypeFoldable, TypeVisitable)] +pub struct GeneratorSavedTy<'tcx> { + pub ty: Ty<'tcx>, + /// Source info corresponding to the local in the original MIR body. + pub source_info: SourceInfo, + /// Whether the local should be ignored for trait bound computations. + pub ignore_for_traits: bool, +} + /// The layout of generator state. #[derive(Clone, TyEncodable, TyDecodable, HashStable, TypeFoldable, TypeVisitable)] pub struct GeneratorLayout<'tcx> { /// The type of every local stored inside the generator. - pub field_tys: IndexVec>, + pub field_tys: IndexVec>, /// Which of the above fields are in each variant. Note that one field may /// be stored in multiple variants. @@ -152,6 +161,8 @@ pub struct GeneratorLayout<'tcx> { /// Which saved locals are storage-live at the same time. Locals that do not /// have conflicts with each other are allowed to overlap in the computed /// layout. + #[type_foldable(identity)] + #[type_visitable(ignore)] pub storage_conflicts: BitMatrix, } @@ -278,13 +289,6 @@ pub struct ConstQualifs { /// instance of the closure is created, the corresponding free regions /// can be extracted from its type and constrained to have the given /// outlives relationship. -/// -/// In some cases, we have to record outlives requirements between types and -/// regions as well. 
In that case, if those types include any regions, those -/// regions are recorded using their external names (`ReStatic`, -/// `ReEarlyBound`, `ReFree`). We use these because in a query response we -/// cannot use `ReVar` (which is what we use internally within the rest of the -/// NLL code). #[derive(Clone, Debug, TyEncodable, TyDecodable, HashStable)] pub struct ClosureRegionRequirements<'tcx> { /// The number of external regions defined on the closure. In our @@ -381,16 +385,59 @@ pub enum ClosureOutlivesSubject<'tcx> { /// Subject is a type, typically a type parameter, but could also /// be a projection. Indicates a requirement like `T: 'a` being /// passed to the caller, where the type here is `T`. - /// - /// The type here is guaranteed not to contain any free regions at - /// present. - Ty(Ty<'tcx>), + Ty(ClosureOutlivesSubjectTy<'tcx>), /// Subject is a free region from the closure. Indicates a requirement /// like `'a: 'b` being passed to the caller; the region here is `'a`. Region(ty::RegionVid), } +/// Represents a `ty::Ty` for use in [`ClosureOutlivesSubject`]. +/// +/// This abstraction is necessary because the type may include `ReVar` regions, +/// which is what we use internally within NLL code, and they can't be used in +/// a query response. +/// +/// DO NOT implement `TypeVisitable` or `TypeFoldable` traits, because this +/// type is not recognized as a binder for late-bound region. +#[derive(Copy, Clone, Debug, TyEncodable, TyDecodable, HashStable)] +pub struct ClosureOutlivesSubjectTy<'tcx> { + inner: Ty<'tcx>, +} + +impl<'tcx> ClosureOutlivesSubjectTy<'tcx> { + /// All regions of `ty` must be of kind `ReVar` and must represent + /// universal regions *external* to the closure. + pub fn bind(tcx: TyCtxt<'tcx>, ty: Ty<'tcx>) -> Self { + let inner = tcx.fold_regions(ty, |r, depth| match r.kind() { + ty::ReVar(vid) => { + let br = ty::BoundRegion { + var: ty::BoundVar::new(vid.index()), + kind: ty::BrAnon(vid.as_u32(), None), + }; + tcx.mk_re_late_bound(depth, br) + } + _ => bug!("unexpected region in ClosureOutlivesSubjectTy: {r:?}"), + }); + + Self { inner } + } + + pub fn instantiate( + self, + tcx: TyCtxt<'tcx>, + mut map: impl FnMut(ty::RegionVid) -> ty::Region<'tcx>, + ) -> Ty<'tcx> { + tcx.fold_regions(self.inner, |r, depth| match r.kind() { + ty::ReLateBound(debruijn, br) => { + debug_assert_eq!(debruijn, depth); + map(ty::RegionVid::new(br.var.index())) + } + _ => bug!("unexpected region {r:?}"), + }) + } +} + /// The constituent parts of a mir constant of kind ADT or array. #[derive(Copy, Clone, Debug, HashStable)] pub struct DestructuredConstant<'tcx> { diff --git a/compiler/rustc_middle/src/mir/spanview.rs b/compiler/rustc_middle/src/mir/spanview.rs index 887ee5715..28a3b51b7 100644 --- a/compiler/rustc_middle/src/mir/spanview.rs +++ b/compiler/rustc_middle/src/mir/spanview.rs @@ -11,7 +11,7 @@ use std::io::{self, Write}; pub const TOOLTIP_INDENT: &str = " "; const CARET: char = '\u{2038}'; // Unicode `CARET` -const ANNOTATION_LEFT_BRACKET: char = '\u{298a}'; // Unicode `Z NOTATION RIGHT BINDING BRACKET +const ANNOTATION_LEFT_BRACKET: char = '\u{298a}'; // Unicode `Z NOTATION RIGHT BINDING BRACKET` const ANNOTATION_RIGHT_BRACKET: char = '\u{2989}'; // Unicode `Z NOTATION LEFT BINDING BRACKET` const NEW_LINE_SPAN: &str = "\n"; const HEADER: &str = r#" @@ -250,6 +250,7 @@ pub fn statement_kind_name(statement: &Statement<'_>) -> &'static str { AscribeUserType(..) => "AscribeUserType", Coverage(..) => "Coverage", Intrinsic(..) 
=> "Intrinsic", + ConstEvalCounter => "ConstEvalCounter", Nop => "Nop", } } @@ -670,7 +671,7 @@ fn fn_span(tcx: TyCtxt<'_>, def_id: DefId) -> Span { fn hir_body(tcx: TyCtxt<'_>, def_id: DefId) -> Option<&rustc_hir::Body<'_>> { let hir_node = tcx.hir().get_if_local(def_id).expect("expected DefId is local"); - hir::map::associated_body(hir_node).map(|fn_body_id| tcx.hir().body(fn_body_id)) + hir::map::associated_body(hir_node).map(|(_, fn_body_id)| tcx.hir().body(fn_body_id)) } fn escape_html(s: &str) -> String { diff --git a/compiler/rustc_middle/src/mir/switch_sources.rs b/compiler/rustc_middle/src/mir/switch_sources.rs deleted file mode 100644 index b91c0c257..000000000 --- a/compiler/rustc_middle/src/mir/switch_sources.rs +++ /dev/null @@ -1,78 +0,0 @@ -//! Lazily compute the inverse of each `SwitchInt`'s switch targets. Modeled after -//! `Predecessors`/`PredecessorCache`. - -use rustc_data_structures::fx::FxHashMap; -use rustc_data_structures::stable_hasher::{HashStable, StableHasher}; -use rustc_data_structures::sync::OnceCell; -use rustc_index::vec::IndexVec; -use rustc_serialize::{Decodable, Decoder, Encodable, Encoder}; -use smallvec::SmallVec; - -use crate::mir::{BasicBlock, BasicBlockData, Terminator, TerminatorKind}; - -pub type SwitchSources = FxHashMap<(BasicBlock, BasicBlock), SmallVec<[Option; 1]>>; - -#[derive(Clone, Debug)] -pub(super) struct SwitchSourceCache { - cache: OnceCell, -} - -impl SwitchSourceCache { - #[inline] - pub(super) fn new() -> Self { - SwitchSourceCache { cache: OnceCell::new() } - } - - /// Invalidates the switch source cache. - #[inline] - pub(super) fn invalidate(&mut self) { - self.cache = OnceCell::new(); - } - - /// Returns the switch sources for this MIR. - #[inline] - pub(super) fn compute( - &self, - basic_blocks: &IndexVec>, - ) -> &SwitchSources { - self.cache.get_or_init(|| { - let mut switch_sources: SwitchSources = FxHashMap::default(); - for (bb, data) in basic_blocks.iter_enumerated() { - if let Some(Terminator { - kind: TerminatorKind::SwitchInt { targets, .. }, .. - }) = &data.terminator - { - for (value, target) in targets.iter() { - switch_sources.entry((target, bb)).or_default().push(Some(value)); - } - switch_sources.entry((targets.otherwise(), bb)).or_default().push(None); - } - } - - switch_sources - }) - } -} - -impl Encodable for SwitchSourceCache { - #[inline] - fn encode(&self, _s: &mut S) {} -} - -impl Decodable for SwitchSourceCache { - #[inline] - fn decode(_: &mut D) -> Self { - Self::new() - } -} - -impl HashStable for SwitchSourceCache { - #[inline] - fn hash_stable(&self, _: &mut CTX, _: &mut StableHasher) { - // do nothing - } -} - -TrivialTypeTraversalAndLiftImpls! { - SwitchSourceCache, -} diff --git a/compiler/rustc_middle/src/mir/syntax.rs b/compiler/rustc_middle/src/mir/syntax.rs index 52c2b10cb..ae09562a8 100644 --- a/compiler/rustc_middle/src/mir/syntax.rs +++ b/compiler/rustc_middle/src/mir/syntax.rs @@ -355,6 +355,12 @@ pub enum StatementKind<'tcx> { /// This avoids adding a new block and a terminator for simple intrinsics. Intrinsic(Box>), + /// Instructs the const eval interpreter to increment a counter; this counter is used to track + /// how many steps the interpreter has taken. It is used to prevent the user from writing const + /// code that runs for too long or infinitely. Other than in the const eval interpreter, this + /// is a no-op. + ConstEvalCounter, + /// No-op. Useful for deleting instructions without affecting statement indices. 
Nop, } @@ -665,6 +671,12 @@ pub enum TerminatorKind<'tcx> { /// as parameters, and `None` for the destination. Keep in mind that the `cleanup` path is not /// necessarily executed even in the case of a panic, for example in `-C panic=abort`. If the /// assertion does not fail, execution continues at the specified basic block. + /// + /// When overflow checking is disabled and this is run-time MIR (as opposed to compile-time MIR + /// that is used for CTFE), the following variants of this terminator behave as `goto target`: + /// - `OverflowNeg(..)`, + /// - `Overflow(op, ..)` if op is a "checkable" operation (add, sub, mul, shl, shr, but NOT + /// div or rem). Assert { cond: Operand<'tcx>, expected: bool, @@ -1097,10 +1109,6 @@ pub enum Rvalue<'tcx> { /// Same as `BinaryOp`, but yields `(T, bool)` with a `bool` indicating an error condition. /// - /// When overflow checking is disabled and we are generating run-time code, the error condition - /// is false. Otherwise, and always during CTFE, the error condition is determined as described - /// below. - /// /// For addition, subtraction, and multiplication on integers the error condition is set when /// the infinite precision result would be unequal to the actual result. /// @@ -1197,10 +1205,8 @@ pub enum AggregateKind<'tcx> { /// active field index would identity the field `c` Adt(DefId, VariantIdx, SubstsRef<'tcx>, Option, Option), - // Note: We can use LocalDefId since closures and generators a deaggregated - // before codegen. - Closure(LocalDefId, SubstsRef<'tcx>), - Generator(LocalDefId, SubstsRef<'tcx>, hir::Movability), + Closure(DefId, SubstsRef<'tcx>), + Generator(DefId, SubstsRef<'tcx>, hir::Movability), } #[derive(Copy, Clone, Debug, PartialEq, Eq, TyEncodable, TyDecodable, Hash, HashStable)] diff --git a/compiler/rustc_middle/src/mir/tcx.rs b/compiler/rustc_middle/src/mir/tcx.rs index 599f0b9d3..0aa2c500f 100644 --- a/compiler/rustc_middle/src/mir/tcx.rs +++ b/compiler/rustc_middle/src/mir/tcx.rs @@ -97,7 +97,7 @@ impl<'tcx> PlaceTy<'tcx> { ty::Slice(..) 
=> self.ty, ty::Array(inner, _) if !from_end => tcx.mk_array(*inner, (to - from) as u64), ty::Array(inner, size) if from_end => { - let size = size.eval_usize(tcx, param_env); + let size = size.eval_target_usize(tcx, param_env); let len = size - (from as u64) - (to as u64); tcx.mk_array(*inner, len) } @@ -162,10 +162,10 @@ impl<'tcx> Rvalue<'tcx> { match *self { Rvalue::Use(ref operand) => operand.ty(local_decls, tcx), Rvalue::Repeat(ref operand, count) => { - tcx.mk_ty(ty::Array(operand.ty(local_decls, tcx), count)) + tcx.mk_array_with_const_len(operand.ty(local_decls, tcx), count) } Rvalue::ThreadLocalRef(did) => { - let static_ty = tcx.type_of(did); + let static_ty = tcx.type_of(did).subst_identity(); if tcx.is_mutable_static(did) { tcx.mk_mut_ptr(static_ty) } else if tcx.is_foreign_item(did) { @@ -194,20 +194,20 @@ impl<'tcx> Rvalue<'tcx> { let lhs_ty = lhs.ty(local_decls, tcx); let rhs_ty = rhs.ty(local_decls, tcx); let ty = op.ty(tcx, lhs_ty, rhs_ty); - tcx.intern_tup(&[ty, tcx.types.bool]) + tcx.mk_tup(&[ty, tcx.types.bool]) } Rvalue::UnaryOp(UnOp::Not | UnOp::Neg, ref operand) => operand.ty(local_decls, tcx), Rvalue::Discriminant(ref place) => place.ty(local_decls, tcx).ty.discriminant_ty(tcx), Rvalue::NullaryOp(NullOp::SizeOf | NullOp::AlignOf, _) => tcx.types.usize, Rvalue::Aggregate(ref ak, ref ops) => match **ak { AggregateKind::Array(ty) => tcx.mk_array(ty, ops.len() as u64), - AggregateKind::Tuple => tcx.mk_tup(ops.iter().map(|op| op.ty(local_decls, tcx))), - AggregateKind::Adt(did, _, substs, _, _) => { - tcx.bound_type_of(did).subst(tcx, substs) + AggregateKind::Tuple => { + tcx.mk_tup_from_iter(ops.iter().map(|op| op.ty(local_decls, tcx))) } - AggregateKind::Closure(did, substs) => tcx.mk_closure(did.to_def_id(), substs), + AggregateKind::Adt(did, _, substs, _, _) => tcx.type_of(did).subst(tcx, substs), + AggregateKind::Closure(did, substs) => tcx.mk_closure(did, substs), AggregateKind::Generator(did, substs, movability) => { - tcx.mk_generator(did.to_def_id(), substs, movability) + tcx.mk_generator(did, substs, movability) } }, Rvalue::ShallowInitBox(_, ty) => tcx.mk_box(ty), diff --git a/compiler/rustc_middle/src/mir/traversal.rs b/compiler/rustc_middle/src/mir/traversal.rs index 0b461d1ce..f37222cb2 100644 --- a/compiler/rustc_middle/src/mir/traversal.rs +++ b/compiler/rustc_middle/src/mir/traversal.rs @@ -1,7 +1,4 @@ -use rustc_data_structures::stable_hasher::{HashStable, StableHasher}; -use rustc_data_structures::sync::OnceCell; use rustc_index::bit_set::BitSet; -use rustc_serialize::{Decodable, Decoder, Encodable, Encoder}; use super::*; @@ -339,50 +336,3 @@ pub fn reverse_postorder<'a, 'tcx>(body: &'a Body<'tcx>) -> ReversePostorderIter let len = blocks.len(); ReversePostorderIter { body, blocks, idx: len } } - -#[derive(Clone, Debug)] -pub(super) struct PostorderCache { - cache: OnceCell>, -} - -impl PostorderCache { - #[inline] - pub(super) fn new() -> Self { - PostorderCache { cache: OnceCell::new() } - } - - /// Invalidates the postorder cache. - #[inline] - pub(super) fn invalidate(&mut self) { - self.cache = OnceCell::new(); - } - - /// Returns the `&[BasicBlocks]` represents the postorder graph for this MIR. 
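// [Illustrative aside, not part of the upstream change] Like the other deleted
// caches, this functionality moves into `BasicBlocks::postorder`. For
// reference, a minimal standalone iterative postorder over an adjacency list:
//
//     fn postorder(succs: &[Vec<usize>], start: usize) -> Vec<usize> {
//         let mut seen = vec![false; succs.len()];
//         let mut out = Vec::new();
//         // (node, index of next successor to try) emulates the recursion stack.
//         let mut stack = vec![(start, 0)];
//         seen[start] = true;
//         while let Some(top) = stack.last_mut() {
//             let (node, idx) = *top;
//             if let Some(&succ) = succs[node].get(idx) {
//                 top.1 += 1;
//                 if !seen[succ] {
//                     seen[succ] = true;
//                     stack.push((succ, 0));
//                 }
//             } else {
//                 stack.pop();
//                 out.push(node); // emitted only after all successors
//             }
//         }
//         out
//     }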
- #[inline] - pub(super) fn compute(&self, body: &IndexVec>) -> &[BasicBlock] { - self.cache.get_or_init(|| Postorder::new(body, START_BLOCK).map(|(bb, _)| bb).collect()) - } -} - -impl Encodable for PostorderCache { - #[inline] - fn encode(&self, _s: &mut S) {} -} - -impl Decodable for PostorderCache { - #[inline] - fn decode(_: &mut D) -> Self { - Self::new() - } -} - -impl HashStable for PostorderCache { - #[inline] - fn hash_stable(&self, _: &mut CTX, _: &mut StableHasher) { - // do nothing - } -} - -TrivialTypeTraversalAndLiftImpls! { - PostorderCache, -} diff --git a/compiler/rustc_middle/src/mir/type_foldable.rs b/compiler/rustc_middle/src/mir/type_foldable.rs index 0705b4cff..988158321 100644 --- a/compiler/rustc_middle/src/mir/type_foldable.rs +++ b/compiler/rustc_middle/src/mir/type_foldable.rs @@ -30,26 +30,29 @@ TrivialTypeTraversalImpls! { } } -impl<'tcx> TypeFoldable<'tcx> for &'tcx [InlineAsmTemplatePiece] { - fn try_fold_with>(self, _folder: &mut F) -> Result { +impl<'tcx> TypeFoldable> for &'tcx [InlineAsmTemplatePiece] { + fn try_fold_with>>( + self, + _folder: &mut F, + ) -> Result { Ok(self) } } -impl<'tcx> TypeFoldable<'tcx> for &'tcx [Span] { - fn try_fold_with>(self, _folder: &mut F) -> Result { +impl<'tcx> TypeFoldable> for &'tcx [Span] { + fn try_fold_with>>( + self, + _folder: &mut F, + ) -> Result { Ok(self) } } -impl<'tcx> TypeFoldable<'tcx> for &'tcx ty::List> { - fn try_fold_with>(self, folder: &mut F) -> Result { - ty::util::fold_list(self, folder, |tcx, v| tcx.intern_place_elems(v)) - } -} - -impl<'tcx, R: Idx, C: Idx> TypeFoldable<'tcx> for BitMatrix { - fn try_fold_with>(self, _: &mut F) -> Result { - Ok(self) +impl<'tcx> TypeFoldable> for &'tcx ty::List> { + fn try_fold_with>>( + self, + folder: &mut F, + ) -> Result { + ty::util::fold_list(self, folder, |tcx, v| tcx.mk_place_elems(v)) } } diff --git a/compiler/rustc_middle/src/mir/type_visitable.rs b/compiler/rustc_middle/src/mir/type_visitable.rs deleted file mode 100644 index d44c6809b..000000000 --- a/compiler/rustc_middle/src/mir/type_visitable.rs +++ /dev/null @@ -1,9 +0,0 @@ -//! `TypeVisitable` implementations for MIR types - -use super::*; - -impl<'tcx, R: Idx, C: Idx> TypeVisitable<'tcx> for BitMatrix { - fn visit_with>(&self, _: &mut V) -> ControlFlow { - ControlFlow::Continue(()) - } -} diff --git a/compiler/rustc_middle/src/mir/visit.rs b/compiler/rustc_middle/src/mir/visit.rs index 1a264d2d5..5c056b299 100644 --- a/compiler/rustc_middle/src/mir/visit.rs +++ b/compiler/rustc_middle/src/mir/visit.rs @@ -323,7 +323,7 @@ macro_rules! make_mir_visitor { self.visit_source_scope($(& $mutability)? *parent_scope); } if let Some((callee, callsite_span)) = inlined { - let location = START_BLOCK.start_location(); + let location = Location::START; self.visit_span($(& $mutability)? *callsite_span); @@ -427,6 +427,7 @@ macro_rules! make_mir_visitor { } } } + StatementKind::ConstEvalCounter => {} StatementKind::Nop => {} } } @@ -836,7 +837,7 @@ macro_rules! make_mir_visitor { } = var_debug_info; self.visit_source_info(source_info); - let location = START_BLOCK.start_location(); + let location = Location::START; match value { VarDebugInfoContents::Const(c) => self.visit_constant(c, location), VarDebugInfoContents::Place(place) => @@ -1025,7 +1026,7 @@ macro_rules! super_body { $self.visit_span($(& $mutability)? $body.span); for const_ in &$($mutability)? 
$body.required_consts { - let location = START_BLOCK.start_location(); + let location = Location::START; $self.visit_constant(const_, location); } } @@ -1044,7 +1045,7 @@ macro_rules! visit_place_fns { self.visit_local(&mut place.local, context, location); if let Some(new_projection) = self.process_projection(&place.projection, location) { - place.projection = self.tcx().intern_place_elems(&new_projection); + place.projection = self.tcx().mk_place_elems(&new_projection); } } @@ -1213,7 +1214,7 @@ impl<'tcx> MirVisitable<'tcx> for Option> { /// Extra information passed to `visit_ty` and friends to give context /// about where the type etc appears. -#[derive(Debug)] +#[derive(Copy, Clone, Debug, Hash, Eq, PartialEq)] pub enum TyContext { LocalDecl { /// The index of the local variable we are visiting. -- cgit v1.2.3
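// [Editorial appendix, not part of the patch] The `switch_sources` map added in
// basic_blocks.rs inverts `SwitchInt` edges so that
// `switch_sources()[&(target, switch)]` lists the values entering `target` from
// `switch`, with `None` standing for the `otherwise` edge. A plain-std analogue
// of that inversion, with illustrative data:
//
//     use std::collections::HashMap;
//
//     // (switch block, [(value, target), ...], otherwise target)
//     let switches = [(0usize, vec![(7u128, 1usize), (8, 2)], 3usize)];
//
//     let mut sources: HashMap<(usize, usize), Vec<Option<u128>>> = HashMap::new();
//     for (bb, targets, otherwise) in &switches {
//         for &(value, target) in targets {
//             sources.entry((target, *bb)).or_default().push(Some(value));
//         }
//         sources.entry((*otherwise, *bb)).or_default().push(None);
//     }
//     assert_eq!(sources[&(1, 0)], vec![Some(7)]);
//     assert_eq!(sources[&(3, 0)], vec![None]);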