Diffstat (limited to 'compiler/rustc_middle/src/mir/interpret')
-rw-r--r--   compiler/rustc_middle/src/mir/interpret/allocation.rs   82
-rw-r--r--   compiler/rustc_middle/src/mir/interpret/error.rs          9
-rw-r--r--   compiler/rustc_middle/src/mir/interpret/mod.rs           17
-rw-r--r--   compiler/rustc_middle/src/mir/interpret/pointer.rs       22
-rw-r--r--   compiler/rustc_middle/src/mir/interpret/queries.rs        2
-rw-r--r--   compiler/rustc_middle/src/mir/interpret/value.rs         18
6 files changed, 110 insertions(+), 40 deletions(-)
diff --git a/compiler/rustc_middle/src/mir/interpret/allocation.rs b/compiler/rustc_middle/src/mir/interpret/allocation.rs
index 221105ac4..48375ed30 100644
--- a/compiler/rustc_middle/src/mir/interpret/allocation.rs
+++ b/compiler/rustc_middle/src/mir/interpret/allocation.rs
@@ -8,7 +8,8 @@ mod tests;
use std::borrow::Cow;
use std::fmt;
use std::hash;
-use std::ops::Range;
+use std::hash::Hash;
+use std::ops::{Deref, DerefMut, Range};
use std::ptr;
use either::{Left, Right};
@@ -29,6 +30,39 @@ use provenance_map::*;
pub use init_mask::{InitChunk, InitChunkIter};
+/// Functionality required for the bytes of an `Allocation`.
+pub trait AllocBytes:
+ Clone + fmt::Debug + Eq + PartialEq + Hash + Deref<Target = [u8]> + DerefMut<Target = [u8]>
+{
+ /// Adjust the bytes to the specified alignment -- by default, this is a no-op.
+ fn adjust_to_align(self, _align: Align) -> Self;
+
+ /// Create an `AllocBytes` from a slice of `u8`.
+ fn from_bytes<'a>(slice: impl Into<Cow<'a, [u8]>>, _align: Align) -> Self;
+
+ /// Create a zeroed `AllocBytes` of the specified size and alignment;
+ /// returns `None` if allocating the memory fails.
+ fn zeroed(size: Size, _align: Align) -> Option<Self>;
+}
+
+// Default `bytes` for `Allocation` is a `Box<[u8]>`.
+impl AllocBytes for Box<[u8]> {
+ fn adjust_to_align(self, _align: Align) -> Self {
+ self
+ }
+
+ fn from_bytes<'a>(slice: impl Into<Cow<'a, [u8]>>, _align: Align) -> Self {
+ Box::<[u8]>::from(slice.into())
+ }
+
+ fn zeroed(size: Size, _align: Align) -> Option<Self> {
+ let bytes = Box::<[u8]>::try_new_zeroed_slice(size.bytes_usize()).ok()?;
+ // SAFETY: the box was zero-allocated, which is a valid initial value for Box<[u8]>
+ let bytes = unsafe { bytes.assume_init() };
+ Some(bytes)
+ }
+}
+
/// This type represents an Allocation in the Miri/CTFE core engine.
///
/// Its public API is rather low-level, working directly with allocation offsets and a custom error
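[Editor's note] The `AllocBytes` trait introduced above is the core of this change: the allocation's backing store becomes any type that derefs to `[u8]`, so Miri can later substitute host-allocated buffers for FFI. A minimal standalone sketch of the same pattern (plain std only; the `DemoBytes` trait and its simplified signatures are hypothetical stand-ins, not the rustc API):

use std::borrow::Cow;
use std::ops::{Deref, DerefMut};

// Simplified stand-in for `AllocBytes` (no `Align`/`Size` types here).
trait DemoBytes: Deref<Target = [u8]> + DerefMut<Target = [u8]> + Sized {
    fn from_bytes<'a>(slice: impl Into<Cow<'a, [u8]>>) -> Self;
    fn zeroed(len: usize) -> Option<Self>;
}

impl DemoBytes for Box<[u8]> {
    fn from_bytes<'a>(slice: impl Into<Cow<'a, [u8]>>) -> Self {
        Box::from(slice.into())
    }
    fn zeroed(len: usize) -> Option<Self> {
        // The real diff uses `try_new_zeroed_slice` so an allocation
        // failure surfaces as `None` instead of aborting the process.
        Some(vec![0u8; len].into_boxed_slice())
    }
}

fn main() {
    let b = <Box<[u8]> as DemoBytes>::from_bytes(&[1u8, 2, 3][..]);
    assert_eq!(&*b, &[1, 2, 3]);
    assert_eq!(&*<Box<[u8]> as DemoBytes>::zeroed(4).unwrap(), &[0u8; 4]);
}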
@@ -38,10 +72,10 @@ pub use init_mask::{InitChunk, InitChunkIter};
// hashed. (see the `Hash` impl below for more details), so the impl is not derived.
#[derive(Clone, Eq, PartialEq, TyEncodable, TyDecodable)]
#[derive(HashStable)]
-pub struct Allocation<Prov: Provenance = AllocId, Extra = ()> {
+pub struct Allocation<Prov: Provenance = AllocId, Extra = (), Bytes = Box<[u8]>> {
/// The actual bytes of the allocation.
/// Note that the bytes of a pointer represent the offset of the pointer.
- bytes: Box<[u8]>,
+ bytes: Bytes,
/// Maps from byte addresses to extra provenance data for each pointer.
/// Only the first byte of a pointer is inserted into the map; i.e.,
/// every entry in this map applies to `pointer_size` consecutive bytes starting
@@ -220,14 +254,27 @@ impl AllocRange {
}
// The constructors are all without extra; the extra gets added by a machine hook later.
-impl<Prov: Provenance> Allocation<Prov> {
+impl<Prov: Provenance, Bytes: AllocBytes> Allocation<Prov, (), Bytes> {
+ /// Creates an allocation from an existing `Bytes` value; this is needed for Miri FFI support.
+ pub fn from_raw_bytes(bytes: Bytes, align: Align, mutability: Mutability) -> Self {
+ let size = Size::from_bytes(bytes.len());
+ Self {
+ bytes,
+ provenance: ProvenanceMap::new(),
+ init_mask: InitMask::new(size, true),
+ align,
+ mutability,
+ extra: (),
+ }
+ }
+
/// Creates an allocation initialized by the given bytes
pub fn from_bytes<'a>(
slice: impl Into<Cow<'a, [u8]>>,
align: Align,
mutability: Mutability,
) -> Self {
- let bytes = Box::<[u8]>::from(slice.into());
+ let bytes = Bytes::from_bytes(slice, align);
let size = Size::from_bytes(bytes.len());
Self {
bytes,
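[Editor's note] The defaulted type parameter is what keeps this change non-invasive: because `Bytes = Box<[u8]>`, every existing mention of `Allocation<Prov>` or `Allocation<Prov, Extra>` keeps compiling unchanged. A standalone illustration of the pattern (the `Alloc` type here is a hypothetical miniature, not the rustc one):

// `Bytes` defaults to `Box<[u8]>`, mirroring the struct in this diff.
struct Alloc<Bytes = Box<[u8]>> {
    bytes: Bytes,
}

impl<Bytes: AsRef<[u8]>> Alloc<Bytes> {
    // Analogue of `from_raw_bytes`: adopt an existing backing store as-is.
    fn from_raw_bytes(bytes: Bytes) -> Self {
        Alloc { bytes }
    }
    fn len(&self) -> usize {
        self.bytes.as_ref().len()
    }
}

fn main() {
    // Default backing store: callers never have to name `Bytes`.
    let a: Alloc = Alloc::from_raw_bytes(vec![0u8; 8].into_boxed_slice());
    // Custom backing store, e.g. a host buffer handed over by FFI glue.
    let b: Alloc<Vec<u8>> = Alloc::from_raw_bytes(vec![1, 2, 3]);
    assert_eq!(a.len() + b.len(), 11);
}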
@@ -248,7 +295,7 @@ impl<Prov: Provenance> Allocation<Prov> {
///
/// If `panic_on_fail` is true, this will never return `Err`.
pub fn uninit<'tcx>(size: Size, align: Align, panic_on_fail: bool) -> InterpResult<'tcx, Self> {
- let bytes = Box::<[u8]>::try_new_zeroed_slice(size.bytes_usize()).map_err(|_| {
+ let bytes = Bytes::zeroed(size, align).ok_or_else(|| {
// This results in an error that can happen non-deterministically, since the memory
// available to the compiler can change between runs. Normally queries are always
// deterministic. However, we can be non-deterministic here because all uses of const
@@ -262,8 +309,7 @@ impl<Prov: Provenance> Allocation<Prov> {
});
InterpError::ResourceExhaustion(ResourceExhaustionInfo::MemoryExhausted)
})?;
- // SAFETY: the box was zero-allocated, which is a valid initial value for Box<[u8]>
- let bytes = unsafe { bytes.assume_init() };
+
Ok(Allocation {
bytes,
provenance: ProvenanceMap::new(),
@@ -275,7 +321,7 @@ impl<Prov: Provenance> Allocation<Prov> {
}
}
-impl Allocation {
+impl<Bytes: AllocBytes> Allocation<AllocId, (), Bytes> {
/// Adjust allocation from the ones in tcx to a custom Machine instance
/// with a different Provenance and Extra type.
pub fn adjust_from_tcx<Prov: Provenance, Extra, Err>(
@@ -283,9 +329,11 @@ impl Allocation {
cx: &impl HasDataLayout,
extra: Extra,
mut adjust_ptr: impl FnMut(Pointer<AllocId>) -> Result<Pointer<Prov>, Err>,
- ) -> Result<Allocation<Prov, Extra>, Err> {
- // Compute new pointer provenance, which also adjusts the bytes.
- let mut bytes = self.bytes;
+ ) -> Result<Allocation<Prov, Extra, Bytes>, Err> {
+ // Compute new pointer provenance, which also adjusts the bytes, and realign the pointer if
+ // necessary.
+ let mut bytes = self.bytes.adjust_to_align(self.align);
+
let mut new_provenance = Vec::with_capacity(self.provenance.ptrs().len());
let ptr_size = cx.data_layout().pointer_size.bytes_usize();
let endian = cx.data_layout().endian;
@@ -311,7 +359,7 @@ impl Allocation {
}
/// Raw accessors. Provide access to otherwise private bytes.
-impl<Prov: Provenance, Extra> Allocation<Prov, Extra> {
+impl<Prov: Provenance, Extra, Bytes: AllocBytes> Allocation<Prov, Extra, Bytes> {
pub fn len(&self) -> usize {
self.bytes.len()
}
@@ -340,7 +388,11 @@ impl<Prov: Provenance, Extra> Allocation<Prov, Extra> {
}
/// Byte accessors.
-impl<Prov: Provenance, Extra> Allocation<Prov, Extra> {
+impl<Prov: Provenance, Extra, Bytes: AllocBytes> Allocation<Prov, Extra, Bytes> {
+ pub fn base_addr(&self) -> *const u8 {
+ self.bytes.as_ptr()
+ }
+
/// This is the entirely abstraction-violating way to just grab the raw bytes without
/// caring about provenance or initialization.
///
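[Editor's note] `base_addr` is the FFI hook: it exposes the host address of the interpreter's backing buffer so native code can read the allocation. A hedged standalone sketch of the idea (no rustc types; a real caller would pass the pointer across a C boundary):

fn base_addr(bytes: &[u8]) -> *const u8 {
    bytes.as_ptr()
}

fn main() {
    let alloc: Box<[u8]> = vec![7u8; 16].into_boxed_slice();
    let base = base_addr(&alloc);
    // The address stays valid for as long as `alloc` is alive; buffer
    // alignment is what `adjust_to_align` above exists to guarantee.
    assert!(!base.is_null());
    assert_eq!(unsafe { *base }, 7);
}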
@@ -412,7 +464,7 @@ impl<Prov: Provenance, Extra> Allocation<Prov, Extra> {
}
/// Reading and writing.
-impl<Prov: Provenance, Extra> Allocation<Prov, Extra> {
+impl<Prov: Provenance, Extra, Bytes: AllocBytes> Allocation<Prov, Extra, Bytes> {
/// Sets the init bit for the given range.
fn mark_init(&mut self, range: AllocRange, is_init: bool) {
if range.size.bytes() == 0 {
diff --git a/compiler/rustc_middle/src/mir/interpret/error.rs b/compiler/rustc_middle/src/mir/interpret/error.rs
index bd9cd53e1..c5137cf06 100644
--- a/compiler/rustc_middle/src/mir/interpret/error.rs
+++ b/compiler/rustc_middle/src/mir/interpret/error.rs
@@ -323,7 +323,7 @@ impl fmt::Display for UndefinedBehaviorInfo {
write!(
f,
"{msg}{pointer} is a dangling pointer (it has no provenance)",
- pointer = Pointer::<Option<AllocId>>::from_addr(*i),
+ pointer = Pointer::<Option<AllocId>>::from_addr_invalid(*i),
)
}
AlignmentCheckFailed { required, has } => write!(
@@ -430,8 +430,10 @@ pub enum ResourceExhaustionInfo {
///
/// The exact limit is set by the `const_eval_limit` attribute.
StepLimitReached,
- /// There is not enough memory to perform an allocation.
+ /// There is not enough memory (on the host) to perform an allocation.
MemoryExhausted,
+ /// The address space (of the target) is full.
+ AddressSpaceFull,
}
impl fmt::Display for ResourceExhaustionInfo {
@@ -447,6 +449,9 @@ impl fmt::Display for ResourceExhaustionInfo {
MemoryExhausted => {
write!(f, "tried to allocate more memory than available to compiler")
}
+ AddressSpaceFull => {
+ write!(f, "there are no more free addresses in the address space")
+ }
}
}
}
diff --git a/compiler/rustc_middle/src/mir/interpret/mod.rs b/compiler/rustc_middle/src/mir/interpret/mod.rs
index 5f425a287..1766d7a66 100644
--- a/compiler/rustc_middle/src/mir/interpret/mod.rs
+++ b/compiler/rustc_middle/src/mir/interpret/mod.rs
@@ -110,7 +110,7 @@ use rustc_hir::def_id::DefId;
use rustc_macros::HashStable;
use rustc_middle::ty::print::with_no_trimmed_paths;
use rustc_serialize::{Decodable, Encodable};
-use rustc_target::abi::Endian;
+use rustc_target::abi::{AddressSpace, Endian, HasDataLayout};
use crate::mir;
use crate::ty::codec::{TyDecoder, TyEncoder};
@@ -127,8 +127,8 @@ pub use self::error::{
pub use self::value::{get_slice_bytes, ConstAlloc, ConstValue, Scalar};
pub use self::allocation::{
- alloc_range, AllocError, AllocRange, AllocResult, Allocation, ConstAllocation, InitChunk,
- InitChunkIter,
+ alloc_range, AllocBytes, AllocError, AllocRange, AllocResult, Allocation, ConstAllocation,
+ InitChunk, InitChunkIter,
};
pub use self::pointer::{Pointer, PointerArithmetic, Provenance};
@@ -438,6 +438,17 @@ impl<'tcx> GlobalAlloc<'tcx> {
_ => bug!("expected vtable, got {:?}", self),
}
}
+
+ /// The address space that this `GlobalAlloc` should be placed in.
+ #[inline]
+ pub fn address_space(&self, cx: &impl HasDataLayout) -> AddressSpace {
+ match self {
+ GlobalAlloc::Function(..) => cx.data_layout().instruction_address_space,
+ GlobalAlloc::Static(..) | GlobalAlloc::Memory(..) | GlobalAlloc::VTable(..) => {
+ AddressSpace::DATA
+ }
+ }
+ }
}
pub(crate) struct AllocMap<'tcx> {
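[Editor's note] The new `address_space` helper encodes one rule: function allocations live in the target's instruction address space, everything else in `AddressSpace::DATA`. A standalone mirror of that dispatch (hypothetical names; on most targets both spaces are 0, but Harvard-style targets such as AVR separate them):

#[derive(Debug, PartialEq)]
struct AddressSpace(u32);

impl AddressSpace {
    const DATA: AddressSpace = AddressSpace(0);
}

enum Kind {
    Function,
    Static,
    Memory,
    VTable,
}

fn address_space(kind: &Kind, instruction_address_space: u32) -> AddressSpace {
    match kind {
        Kind::Function => AddressSpace(instruction_address_space),
        Kind::Static | Kind::Memory | Kind::VTable => AddressSpace::DATA,
    }
}

fn main() {
    assert_eq!(address_space(&Kind::Static, 1), AddressSpace::DATA);
    assert_eq!(address_space(&Kind::Function, 1), AddressSpace(1));
}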
diff --git a/compiler/rustc_middle/src/mir/interpret/pointer.rs b/compiler/rustc_middle/src/mir/interpret/pointer.rs
index b08309910..60927eed8 100644
--- a/compiler/rustc_middle/src/mir/interpret/pointer.rs
+++ b/compiler/rustc_middle/src/mir/interpret/pointer.rs
@@ -19,29 +19,29 @@ pub trait PointerArithmetic: HasDataLayout {
#[inline(always)]
fn max_size_of_val(&self) -> Size {
- Size::from_bytes(self.machine_isize_max())
+ Size::from_bytes(self.target_isize_max())
}
#[inline]
- fn machine_usize_max(&self) -> u64 {
+ fn target_usize_max(&self) -> u64 {
self.pointer_size().unsigned_int_max().try_into().unwrap()
}
#[inline]
- fn machine_isize_min(&self) -> i64 {
+ fn target_isize_min(&self) -> i64 {
self.pointer_size().signed_int_min().try_into().unwrap()
}
#[inline]
- fn machine_isize_max(&self) -> i64 {
+ fn target_isize_max(&self) -> i64 {
self.pointer_size().signed_int_max().try_into().unwrap()
}
#[inline]
- fn machine_usize_to_isize(&self, val: u64) -> i64 {
+ fn target_usize_to_isize(&self, val: u64) -> i64 {
let val = val as i64;
// Now wrap-around into the machine_isize range.
- if val > self.machine_isize_max() {
+ if val > self.target_isize_max() {
// This can only happen if the ptr size is < 64, so we know max_usize_plus_1 fits into
// i64.
debug_assert!(self.pointer_size().bits() < 64);
@@ -76,11 +76,11 @@ pub trait PointerArithmetic: HasDataLayout {
let n = i.unsigned_abs();
if i >= 0 {
let (val, over) = self.overflowing_offset(val, n);
- (val, over || i > self.machine_isize_max())
+ (val, over || i > self.target_isize_max())
} else {
let res = val.overflowing_sub(n);
let (val, over) = self.truncate_to_ptr(res);
- (val, over || i < self.machine_isize_min())
+ (val, over || i < self.target_isize_min())
}
}
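[Editor's note] The `machine_*` to `target_*` rename is purely about naming: these bounds come from the target's pointer width, which need not match the host's. A standalone sketch of what the renamed helpers compute (hypothetical free functions; the real methods read the width from `HasDataLayout::pointer_size()`):

fn target_usize_max(pointer_bits: u32) -> u64 {
    if pointer_bits >= 64 { u64::MAX } else { (1u64 << pointer_bits) - 1 }
}

fn target_isize_max(pointer_bits: u32) -> i64 {
    (target_usize_max(pointer_bits) >> 1) as i64
}

fn main() {
    assert_eq!(target_usize_max(16), 0xFFFF);          // 16-bit target
    assert_eq!(target_isize_max(32), i32::MAX as i64); // 32-bit target
    assert_eq!(target_usize_max(64), u64::MAX);        // 64-bit target
}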
@@ -251,14 +251,16 @@ impl<Prov> Pointer<Option<Prov>> {
}
impl<Prov> Pointer<Option<Prov>> {
+ /// Creates a pointer to the given address, with invalid provenance (i.e., cannot be used for
+ /// any memory access).
#[inline(always)]
- pub fn from_addr(addr: u64) -> Self {
+ pub fn from_addr_invalid(addr: u64) -> Self {
Pointer { provenance: None, offset: Size::from_bytes(addr) }
}
#[inline(always)]
pub fn null() -> Self {
- Pointer::from_addr(0)
+ Pointer::from_addr_invalid(0)
}
}
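[Editor's note] `from_addr_invalid` makes the contract explicit: a pointer built from a bare address has no provenance, so it can be compared or cast but never used for a memory access. A simplified stand-in for the type (not the rustc `Pointer`):

#[derive(Clone, Copy, Debug, PartialEq)]
struct Ptr {
    provenance: Option<()>, // stand-in for `Option<Prov>`
    offset: u64,
}

impl Ptr {
    // Analogue of `Pointer::from_addr_invalid`.
    fn from_addr_invalid(addr: u64) -> Self {
        Ptr { provenance: None, offset: addr }
    }
    fn null() -> Self {
        Self::from_addr_invalid(0)
    }
}

fn main() {
    assert_eq!(Ptr::null(), Ptr::from_addr_invalid(0));
    // No provenance means no memory access is ever legal through this value.
    assert!(Ptr::from_addr_invalid(0x1000).provenance.is_none());
}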
diff --git a/compiler/rustc_middle/src/mir/interpret/queries.rs b/compiler/rustc_middle/src/mir/interpret/queries.rs
index b6c6e9d55..856d821a5 100644
--- a/compiler/rustc_middle/src/mir/interpret/queries.rs
+++ b/compiler/rustc_middle/src/mir/interpret/queries.rs
@@ -2,7 +2,7 @@ use super::{ErrorHandled, EvalToConstValueResult, EvalToValTreeResult, GlobalId}
use crate::mir;
use crate::ty::subst::InternalSubsts;
-use crate::ty::visit::TypeVisitable;
+use crate::ty::visit::TypeVisitableExt;
use crate::ty::{self, query::TyCtxtAt, query::TyCtxtEnsure, TyCtxt};
use rustc_hir::def::DefKind;
use rustc_hir::def_id::DefId;
diff --git a/compiler/rustc_middle/src/mir/interpret/value.rs b/compiler/rustc_middle/src/mir/interpret/value.rs
index 88fb14eb3..36dbbe4bf 100644
--- a/compiler/rustc_middle/src/mir/interpret/value.rs
+++ b/compiler/rustc_middle/src/mir/interpret/value.rs
@@ -75,8 +75,8 @@ impl<'tcx> ConstValue<'tcx> {
self.try_to_scalar_int()?.try_into().ok()
}
- pub fn try_to_machine_usize(&self, tcx: TyCtxt<'tcx>) -> Option<u64> {
- self.try_to_scalar_int()?.try_to_machine_usize(tcx).ok()
+ pub fn try_to_target_usize(&self, tcx: TyCtxt<'tcx>) -> Option<u64> {
+ self.try_to_scalar_int()?.try_to_target_usize(tcx).ok()
}
pub fn try_to_bits_for_ty(
@@ -97,8 +97,8 @@ impl<'tcx> ConstValue<'tcx> {
ConstValue::Scalar(Scalar::from_u64(i))
}
- pub fn from_machine_usize(i: u64, cx: &impl HasDataLayout) -> Self {
- ConstValue::Scalar(Scalar::from_machine_usize(i, cx))
+ pub fn from_target_usize(i: u64, cx: &impl HasDataLayout) -> Self {
+ ConstValue::Scalar(Scalar::from_target_usize(i, cx))
}
}
@@ -241,7 +241,7 @@ impl<Prov> Scalar<Prov> {
}
#[inline]
- pub fn from_machine_usize(i: u64, cx: &impl HasDataLayout) -> Self {
+ pub fn from_target_usize(i: u64, cx: &impl HasDataLayout) -> Self {
Self::from_uint(i, cx.data_layout().pointer_size)
}
@@ -268,7 +268,7 @@ impl<Prov> Scalar<Prov> {
}
#[inline]
- pub fn from_machine_isize(i: i64, cx: &impl HasDataLayout) -> Self {
+ pub fn from_target_isize(i: i64, cx: &impl HasDataLayout) -> Self {
Self::from_int(i, cx.data_layout().pointer_size)
}
@@ -322,7 +322,7 @@ impl<'tcx, Prov: Provenance> Scalar<Prov> {
Right(ptr) => Ok(ptr.into()),
Left(bits) => {
let addr = u64::try_from(bits).unwrap();
- Ok(Pointer::from_addr(addr))
+ Ok(Pointer::from_addr_invalid(addr))
}
}
}
@@ -429,7 +429,7 @@ impl<'tcx, Prov: Provenance> Scalar<Prov> {
/// Converts the scalar to produce a machine-pointer-sized unsigned integer.
/// Fails if the scalar is a pointer.
- pub fn to_machine_usize(self, cx: &impl HasDataLayout) -> InterpResult<'tcx, u64> {
+ pub fn to_target_usize(self, cx: &impl HasDataLayout) -> InterpResult<'tcx, u64> {
let b = self.to_uint(cx.data_layout().pointer_size)?;
Ok(u64::try_from(b).unwrap())
}
@@ -469,7 +469,7 @@ impl<'tcx, Prov: Provenance> Scalar<Prov> {
/// Converts the scalar to produce a machine-pointer-sized signed integer.
/// Fails if the scalar is a pointer.
- pub fn to_machine_isize(self, cx: &impl HasDataLayout) -> InterpResult<'tcx, i64> {
+ pub fn to_target_isize(self, cx: &impl HasDataLayout) -> InterpResult<'tcx, i64> {
let b = self.to_int(cx.data_layout().pointer_size)?;
Ok(i64::try_from(b).unwrap())
}
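[Editor's note] The `Scalar` side of the rename follows the same rule: values are stored as target-pointer-width integers. A standalone round-trip sketch of the truncate/sign-extend behavior behind `from_target_isize` and `to_target_isize` (hypothetical helpers; the real code routes through `Size` and `ScalarInt`):

fn from_target_isize(i: i64, bits: u32) -> u128 {
    // Store the two's-complement representation truncated to `bits`.
    (i as u128) & ((1u128 << bits) - 1)
}

fn to_target_isize(raw: u128, bits: u32) -> i64 {
    // Sign-extend the `bits`-wide value back out to i64.
    let shift = 128 - bits;
    (((raw << shift) as i128) >> shift) as i64
}

fn main() {
    for &bits in &[16u32, 32, 64] {
        for &v in &[-1i64, 0, 42, -12345] {
            assert_eq!(to_target_isize(from_target_isize(v, bits), bits), v);
        }
    }
}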