Diffstat:
 compiler/rustc_const_eval/src/interpret/intern.rs | 30 ++++++++++++++----------------
 1 file changed, 14 insertions(+), 16 deletions(-)
diff --git a/compiler/rustc_const_eval/src/interpret/intern.rs b/compiler/rustc_const_eval/src/interpret/intern.rs
index 376b8872c..6809a42dc 100644
--- a/compiler/rustc_const_eval/src/interpret/intern.rs
+++ b/compiler/rustc_const_eval/src/interpret/intern.rs
@@ -15,7 +15,7 @@
//! that contains allocations whose mutability we cannot identify.)
use super::validity::RefTracking;
-use rustc_data_structures::fx::{FxHashMap, FxHashSet};
+use rustc_data_structures::fx::{FxIndexMap, FxIndexSet};
use rustc_errors::ErrorGuaranteed;
use rustc_hir as hir;
use rustc_middle::mir::interpret::InterpResult;
@@ -37,7 +37,7 @@ pub trait CompileTimeMachine<'mir, 'tcx, T> = Machine<
ExtraFnVal = !,
FrameExtra = (),
AllocExtra = (),
- MemoryMap = FxHashMap<AllocId, (MemoryKind<T>, Allocation)>,
+ MemoryMap = FxIndexMap<AllocId, (MemoryKind<T>, Allocation)>,
>;
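Note on the map swap above: unlike FxHashMap, FxIndexMap iterates in insertion order rather than hash order, which keeps walks over the machine's memory map reproducible. A minimal sketch of the difference, using the indexmap crate that backs rustc_data_structures (the ids and labels are made up for illustration):

    use indexmap::IndexMap; // FxIndexMap is an IndexMap with the Fx hasher plugged in

    fn main() {
        let mut memory: IndexMap<u32, &str> = IndexMap::new();
        memory.insert(30, "root");
        memory.insert(10, "vtable");
        memory.insert(20, "promoted");
        // Always prints 30, 10, 20: iteration follows insertion order,
        // not hash order, so traversal order is deterministic.
        for (alloc_id, kind) in &memory {
            println!("{alloc_id}: {kind}");
        }
    }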
struct InternVisitor<'rt, 'mir, 'tcx, M: CompileTimeMachine<'mir, 'tcx, const_eval::MemoryKind>> {
@@ -47,7 +47,7 @@ struct InternVisitor<'rt, 'mir, 'tcx, M: CompileTimeMachine<'mir, 'tcx, const_ev
ref_tracking: &'rt mut RefTracking<(MPlaceTy<'tcx>, InternMode)>,
/// A list of all encountered allocations. After type-based interning, we traverse this list to
/// also intern allocations that are only referenced by a raw pointer or inside a union.
- leftover_allocations: &'rt mut FxHashSet<AllocId>,
+ leftover_allocations: &'rt mut FxIndexSet<AllocId>,
/// The root kind of the value that we're looking at. This field is never mutated for a
/// particular allocation. It is primarily used to make as many allocations as possible
/// read-only so LLVM can place them in const memory.
@@ -79,7 +79,7 @@ struct IsStaticOrFn;
/// to account for (e.g. for vtables).
fn intern_shallow<'rt, 'mir, 'tcx, M: CompileTimeMachine<'mir, 'tcx, const_eval::MemoryKind>>(
ecx: &'rt mut InterpCx<'mir, 'tcx, M>,
- leftover_allocations: &'rt mut FxHashSet<AllocId>,
+ leftover_allocations: &'rt mut FxIndexSet<AllocId>,
alloc_id: AllocId,
mode: InternMode,
ty: Option<Ty<'tcx>>,
@@ -114,7 +114,7 @@ fn intern_shallow<'rt, 'mir, 'tcx, M: CompileTimeMachine<'mir, 'tcx, const_eval:
if let InternMode::Static(mutability) = mode {
// For this, we need to take into account `UnsafeCell`. When `ty` is `None`, we assume
// no interior mutability.
- let frozen = ty.map_or(true, |ty| ty.is_freeze(ecx.tcx, ecx.param_env));
+ let frozen = ty.map_or(true, |ty| ty.is_freeze(*ecx.tcx, ecx.param_env));
// For statics, allocation mutability is the combination of place mutability and
// type mutability.
// The entire allocation needs to be mutable if it contains an `UnsafeCell` anywhere.
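For context, `is_freeze` is true exactly when the type contains no `UnsafeCell`. A compilable illustration of the two cases this hunk distinguishes (hypothetical user code, not part of this patch):

    use std::sync::atomic::AtomicU32;

    // Freeze: no interior mutability, so the interner can mark the
    // allocation read-only and LLVM may place it in const memory.
    pub static FROZEN: u32 = 42;

    // Not freeze: AtomicU32 wraps an UnsafeCell, so the allocation
    // behind this static has to stay mutable.
    pub static NOT_FROZEN: AtomicU32 = AtomicU32::new(42);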
@@ -134,7 +134,7 @@ fn intern_shallow<'rt, 'mir, 'tcx, M: CompileTimeMachine<'mir, 'tcx, const_eval:
alloc.mutability = Mutability::Not;
};
// link the alloc id to the actual allocation
- leftover_allocations.extend(alloc.relocations().iter().map(|&(_, alloc_id)| alloc_id));
+ leftover_allocations.extend(alloc.provenance().iter().map(|&(_, alloc_id)| alloc_id));
let alloc = tcx.intern_const_alloc(alloc);
tcx.set_alloc_id_memory(alloc_id, alloc);
None
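The `extend` call above queues every allocation the interned one points to, so values reachable only through a raw pointer are not lost. A hypothetical const that exercises exactly that path:

    // The typed visitor stops at `*const i32`, so the inner allocation
    // holding `7` is only found through the pointer's provenance, via
    // the `leftover_allocations` queue populated above.
    pub const HIDDEN: *const i32 = &7;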
@@ -191,10 +191,10 @@ impl<'rt, 'mir, 'tcx: 'mir, M: CompileTimeMachine<'mir, 'tcx, const_eval::Memory
return Ok(true);
};
- // If there are no relocations in this allocation, it does not contain references
+ // If there is no provenance in this allocation, it does not contain references
// that point to another allocation, and we can avoid the interning walk.
if let Some(alloc) = self.ecx.get_ptr_alloc(mplace.ptr, size, align)? {
- if !alloc.has_relocations() {
+ if !alloc.has_provenance() {
return Ok(false);
}
} else {
@@ -233,8 +233,8 @@ impl<'rt, 'mir, 'tcx: 'mir, M: CompileTimeMachine<'mir, 'tcx, const_eval::Memory
}
fn visit_value(&mut self, mplace: &MPlaceTy<'tcx>) -> InterpResult<'tcx> {
- // Handle Reference types, as these are the only relocations supported by const eval.
- // Raw pointers (and boxes) are handled by the `leftover_relocations` logic.
+ // Handle Reference types, as these are the only types with provenance supported by const eval.
+ // Raw pointers (and boxes) are handled by the `leftover_allocations` logic.
let tcx = self.ecx.tcx;
let ty = mplace.layout.ty;
if let ty::Ref(_, referenced_ty, ref_mutability) = *ty.kind() {
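As the rewritten comment says, only `ty::Ref` is followed here; raw pointers wait for the leftover pass. A small illustration of the two shapes (hypothetical consts, not from this patch):

    // Followed by the typed visitor: a `ty::Ref`, so the pointee is
    // interned in a mode derived from the reference's mutability.
    pub const BY_REF: &i32 = &1;

    // Skipped by this visitor: provenance but no `ty::Ref`, so the
    // allocation is only swept up by `leftover_allocations` later.
    pub const BY_PTR: *const i32 = &2;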
@@ -332,9 +332,7 @@ pub enum InternKind {
///
/// This *cannot raise an interpreter error*. Doing so is left to validation, which
/// tracks where in the value we are and thus can show much better error messages.
-/// Any errors here would anyway be turned into `const_err` lints, whereas validation failures
-/// are hard errors.
-#[tracing::instrument(level = "debug", skip(ecx))]
+#[instrument(level = "debug", skip(ecx))]
pub fn intern_const_alloc_recursive<
'mir,
'tcx: 'mir,
@@ -357,7 +355,7 @@ pub fn intern_const_alloc_recursive<
// `leftover_allocations` collects *all* allocations we see, because some might not
// be available in a typed way. They get interned at the end.
let mut ref_tracking = RefTracking::empty();
- let leftover_allocations = &mut FxHashSet::default();
+ let leftover_allocations = &mut FxIndexSet::default();
// start with the outermost allocation
intern_shallow(
@@ -410,7 +408,7 @@ pub fn intern_const_alloc_recursive<
// references and a `leftover_allocations` set (where we only have a todo-list here).
// So we hand-roll the interning logic here again.
match intern_kind {
- // Statics may contain mutable allocations even behind relocations.
+ // Statics may point to mutable allocations.
// Even for immutable statics it would be ok to have mutable allocations behind
// raw pointers, e.g. for `static FOO: *const AtomicUsize = &AtomicUsize::new(42)`.
InternKind::Static(_) => {}
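The comment's raw-pointer example is not Sync as written, so a compilable sketch of the same interning situation uses the reference form instead; the point (an immutable static whose pointee allocation must stay mutable) is unchanged:

    use std::sync::atomic::{AtomicUsize, Ordering};

    // The static binding is immutable, but the allocation it points to
    // must remain mutable so the atomic can be updated at runtime.
    pub static FOO: &AtomicUsize = &AtomicUsize::new(42);

    pub fn bump() -> usize {
        FOO.fetch_add(1, Ordering::Relaxed)
    }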
@@ -441,7 +439,7 @@ pub fn intern_const_alloc_recursive<
}
let alloc = tcx.intern_const_alloc(alloc);
tcx.set_alloc_id_memory(alloc_id, alloc);
- for &(_, alloc_id) in alloc.inner().relocations().iter() {
+ for &(_, alloc_id) in alloc.inner().provenance().iter() {
if leftover_allocations.insert(alloc_id) {
todo.push(alloc_id);
}
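The surrounding loop is a classic worklist traversal: intern an allocation, then push each provenance target that has not been seen yet. A standalone sketch of the same shape, with plain u32 ids standing in for AllocId (names are illustrative, not rustc's):

    use indexmap::IndexSet;

    fn intern_leftovers(roots: &[u32], pointees: impl Fn(u32) -> Vec<u32>) -> IndexSet<u32> {
        let mut seen: IndexSet<u32> = roots.iter().copied().collect();
        let mut todo: Vec<u32> = roots.to_vec();
        while let Some(id) = todo.pop() {
            // "Intern" `id` here, then queue everything it points to.
            // `insert` returns false for already-seen ids, so each
            // allocation is processed at most once even with cycles.
            for target in pointees(id) {
                if seen.insert(target) {
                    todo.push(target);
                }
            }
        }
        seen
    }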