Diffstat (limited to 'compiler/rustc_codegen_ssa/src/mir/operand.rs')
 compiler/rustc_codegen_ssa/src/mir/operand.rs | 122 ++++++++++++++-------
 1 file changed, 97 insertions(+), 25 deletions(-)
diff --git a/compiler/rustc_codegen_ssa/src/mir/operand.rs b/compiler/rustc_codegen_ssa/src/mir/operand.rs
index 2301c3ef1..31c293d7c 100644
--- a/compiler/rustc_codegen_ssa/src/mir/operand.rs
+++ b/compiler/rustc_codegen_ssa/src/mir/operand.rs
@@ -8,10 +8,10 @@ use crate::traits::*;
use crate::MemFlags;
use rustc_middle::mir;
-use rustc_middle::mir::interpret::{ConstValue, Pointer, Scalar};
+use rustc_middle::mir::interpret::{alloc_range, ConstValue, Pointer, Scalar};
use rustc_middle::ty::layout::{LayoutOf, TyAndLayout};
use rustc_middle::ty::Ty;
-use rustc_target::abi::{Abi, Align, Size};
+use rustc_target::abi::{self, Abi, Align, Size};
use std::fmt;
@@ -45,6 +45,14 @@ pub enum OperandValue<V> {
/// as returned by [`LayoutTypeMethods::scalar_pair_element_backend_type`]
/// with `immediate: true`.
Pair(V, V),
+ /// A value taking no bytes, and which therefore needs no LLVM value at all.
+ ///
+ /// If you ever need a `V` to pass to something, get a fresh poison value
+ /// from [`ConstMethods::const_poison`].
+ ///
+ /// An `OperandValue` *must* be this variant for any type for which
+ /// `is_zst` on its `Layout` returns `true`.
+ ZeroSized,
}
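As a hedged illustration of the invariant in the doc comment above: a caller that genuinely needs a backend `V` for a `ZeroSized` operand mints a poison value on demand, mirroring what the deleted `new_zst` used to do eagerly. The helper below is a sketch, not part of the patch:

    fn backend_value_or_poison<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>>(
        bx: &mut Bx,
        op: OperandRef<'tcx, Bx::Value>,
    ) -> Bx::Value {
        match op.val {
            // No bytes were ever materialized for a ZST, so conjure a fresh
            // poison value of the immediate backend type, per the doc comment.
            OperandValue::ZeroSized => {
                let ibty = bx.immediate_backend_type(op.layout);
                bx.const_poison(ibty)
            }
            OperandValue::Immediate(v) => v,
            // A real caller would handle `Pair` and `Ref` explicitly.
            _ => bug!("expected an immediate or ZST operand"),
        }
    }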
/// An `OperandRef` is an "SSA" reference to a Rust value, along with
@@ -71,15 +79,9 @@ impl<V: CodegenObject> fmt::Debug for OperandRef<'_, V> {
}
impl<'a, 'tcx, V: CodegenObject> OperandRef<'tcx, V> {
- pub fn new_zst<Bx: BuilderMethods<'a, 'tcx, Value = V>>(
- bx: &mut Bx,
- layout: TyAndLayout<'tcx>,
- ) -> OperandRef<'tcx, V> {
+ pub fn zero_sized(layout: TyAndLayout<'tcx>) -> OperandRef<'tcx, V> {
assert!(layout.is_zst());
- OperandRef {
- val: OperandValue::Immediate(bx.const_poison(bx.immediate_backend_type(layout))),
- layout,
- }
+ OperandRef { val: OperandValue::ZeroSized, layout }
}
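A hedged before/after sketch of a call site (local names are illustrative): since the variant carries no backend value, constructing a ZST operand no longer needs a builder at all.

    // Before: a builder was required just to mint a poison immediate.
    //     let op = OperandRef::new_zst(bx, layout);
    // After: the layout alone suffices, and the assert still catches misuse.
    let layout = bx.layout_of(tcx.types.unit); // layout of `()`, a ZST
    let op: OperandRef<'_, Bx::Value> = OperandRef::zero_sized(layout);
    debug_assert!(matches!(op.val, OperandValue::ZeroSized));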
pub fn from_const<Bx: BuilderMethods<'a, 'tcx, Value = V>>(
@@ -97,7 +99,7 @@ impl<'a, 'tcx, V: CodegenObject> OperandRef<'tcx, V> {
let llval = bx.scalar_to_backend(x, scalar, bx.immediate_backend_type(layout));
OperandValue::Immediate(llval)
}
- ConstValue::ZeroSized => return OperandRef::new_zst(bx, layout),
+ ConstValue::ZeroSized => return OperandRef::zero_sized(layout),
ConstValue::Slice { data, start, end } => {
let Abi::ScalarPair(a_scalar, _) = layout.abi else {
bug!("from_const: invalid ScalarPair layout: {:#?}", layout);
@@ -115,13 +117,82 @@ impl<'a, 'tcx, V: CodegenObject> OperandRef<'tcx, V> {
OperandValue::Pair(a_llval, b_llval)
}
ConstValue::ByRef { alloc, offset } => {
- return bx.load_operand(bx.from_const_alloc(layout, alloc, offset));
+ return Self::from_const_alloc(bx, layout, alloc, offset);
}
};
OperandRef { val, layout }
}
+ fn from_const_alloc<Bx: BuilderMethods<'a, 'tcx, Value = V>>(
+ bx: &mut Bx,
+ layout: TyAndLayout<'tcx>,
+ alloc: rustc_middle::mir::interpret::ConstAllocation<'tcx>,
+ offset: Size,
+ ) -> Self {
+ let alloc_align = alloc.inner().align;
+ assert_eq!(alloc_align, layout.align.abi);
+ let ty = bx.type_ptr_to(bx.cx().backend_type(layout));
+
+ let read_scalar = |start, size, s: abi::Scalar, ty| {
+ let val = alloc
+ .0
+ .read_scalar(
+ bx,
+ alloc_range(start, size),
+ /*read_provenance*/ matches!(s.primitive(), abi::Pointer(_)),
+ )
+ .unwrap();
+ bx.scalar_to_backend(val, s, ty)
+ };
+
+ // It may seem like all types with `Scalar` or `ScalarPair` ABI are fair game at this point.
+ // However, `MaybeUninit<u64>` is considered a `Scalar` as far as its layout is concerned --
+ // and yet cannot be represented by an interpreter `Scalar`, since we have to handle the
+ // case where some of the bytes are initialized and others are not. So, we add an extra
+ // check: only scalars the layout marks as `Initialized` take the fast paths below, which
+ // makes it truly correct to treat the value like a `Scalar` (or `ScalarPair`).
+ match layout.abi {
+ Abi::Scalar(s @ abi::Scalar::Initialized { .. }) => {
+ let size = s.size(bx);
+ assert_eq!(size, layout.size, "abi::Scalar size does not match layout size");
+ let val = read_scalar(Size::ZERO, size, s, ty);
+ OperandRef { val: OperandValue::Immediate(val), layout }
+ }
+ Abi::ScalarPair(
+ a @ abi::Scalar::Initialized { .. },
+ b @ abi::Scalar::Initialized { .. },
+ ) => {
+ let (a_size, b_size) = (a.size(bx), b.size(bx));
+ let b_offset = a_size.align_to(b.align(bx).abi);
+ assert!(b_offset.bytes() > 0);
+ let a_val = read_scalar(
+ Size::ZERO,
+ a_size,
+ a,
+ bx.scalar_pair_element_backend_type(layout, 0, true),
+ );
+ let b_val = read_scalar(
+ b_offset,
+ b_size,
+ b,
+ bx.scalar_pair_element_backend_type(layout, 1, true),
+ );
+ OperandRef { val: OperandValue::Pair(a_val, b_val), layout }
+ }
+ _ if layout.is_zst() => OperandRef::zero_sized(layout),
+ _ => {
+ // Neither a scalar nor a scalar pair: materialize the allocation and load from a place.
+ let init = bx.const_data_from_alloc(alloc);
+ let base_addr = bx.static_addr_of(init, alloc_align, None);
+
+ let llval = bx.const_ptr_byte_offset(base_addr, offset);
+ let llval = bx.const_bitcast(llval, ty);
+ bx.load_operand(PlaceRef::new_sized(llval, layout))
+ }
+ }
+ }
+
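To make the `b_offset` computation in `from_const_alloc` concrete, here is a self-contained sketch of the rounding arithmetic, with plain integers standing in for `Size` and `Align` and assumed example layouts in the comments. It also shows why `assert!(b_offset.bytes() > 0)` holds: an `Initialized` first scalar occupies at least one byte, so the second element can never start at offset zero.

    /// Round `size` up to a multiple of `align` (a power of two), mirroring
    /// what `Size::align_to` computes for the pair's second element.
    fn align_to(size: u64, align: u64) -> u64 {
        assert!(align.is_power_of_two());
        (size + align - 1) & !(align - 1)
    }

    fn main() {
        // A pair laid out like (u8, u32): the first scalar takes 1 byte and
        // the second is 4-byte aligned, so it is read at byte offset 4.
        assert_eq!(align_to(1, 4), 4);
        // A pair laid out like (u64, u64) on a 64-bit target: offset 8.
        assert_eq!(align_to(8, 8), 8);
        // Never zero, since an `Initialized` scalar has nonzero size.
        assert!(align_to(1, 1) > 0);
    }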
/// Asserts that this operand refers to a scalar and returns
/// a reference to its value.
pub fn immediate(self) -> V {
@@ -147,6 +218,7 @@ impl<'a, 'tcx, V: CodegenObject> OperandRef<'tcx, V> {
OperandValue::Immediate(llptr) => (llptr, None),
OperandValue::Pair(llptr, llextra) => (llptr, Some(llextra)),
OperandValue::Ref(..) => bug!("Deref of by-Ref operand {:?}", self),
+ OperandValue::ZeroSized => bug!("Deref of ZST operand {:?}", self),
};
let layout = cx.layout_of(projected_ty);
PlaceRef { llval: llptr, llextra, layout, align: layout.align.abi }
@@ -204,9 +276,7 @@ impl<'a, 'tcx, V: CodegenObject> OperandRef<'tcx, V> {
let mut val = match (self.val, self.layout.abi) {
// If the field is ZST, it has no data.
- _ if field.is_zst() => {
- return OperandRef::new_zst(bx, field);
- }
+ _ if field.is_zst() => OperandValue::ZeroSized,
// Newtype of a scalar, scalar pair or vector.
(OperandValue::Immediate(_) | OperandValue::Pair(..), _)
@@ -237,6 +307,7 @@ impl<'a, 'tcx, V: CodegenObject> OperandRef<'tcx, V> {
};
match (&mut val, field.abi) {
+ (OperandValue::ZeroSized, _) => {}
(
OperandValue::Immediate(llval),
Abi::Scalar(_) | Abi::ScalarPair(..) | Abi::Vector { .. },
@@ -290,8 +361,8 @@ impl<'a, 'tcx, V: CodegenObject> OperandRef<'tcx, V> {
impl<'a, 'tcx, V: CodegenObject> OperandValue<V> {
/// Returns an `OperandValue` that's generally UB to use in any way.
///
- /// Depending on the `layout`, returns an `Immediate` or `Pair` containing
- /// poison value(s), or a `Ref` containing a poison pointer.
+ /// Depending on the `layout`, returns `ZeroSized` for ZSTs, an `Immediate` or
+ /// `Pair` containing poison value(s), or a `Ref` containing a poison pointer.
///
/// Supports sized types only.
pub fn poison<Bx: BuilderMethods<'a, 'tcx, Value = V>>(
@@ -299,7 +370,9 @@ impl<'a, 'tcx, V: CodegenObject> OperandValue<V> {
layout: TyAndLayout<'tcx>,
) -> OperandValue<V> {
assert!(layout.is_sized());
- if bx.cx().is_backend_immediate(layout) {
+ if layout.is_zst() {
+ OperandValue::ZeroSized
+ } else if bx.cx().is_backend_immediate(layout) {
let ibty = bx.cx().immediate_backend_type(layout);
OperandValue::Immediate(bx.const_poison(ibty))
} else if bx.cx().is_backend_scalar_pair(layout) {
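A hedged usage sketch of the updated `poison` (the builder `bx` and type `ty` are assumed to be in scope; the example types in the comments are illustrative): the caller receives whichever variant the layout dictates and never special-cases ZSTs itself.

    let layout = bx.layout_of(ty);
    match OperandValue::poison(bx, layout) {
        OperandValue::ZeroSized => { /* `ty` was zero-sized, e.g. `()` */ }
        OperandValue::Immediate(_) => { /* scalar, e.g. `u32`: one poison */ }
        OperandValue::Pair(..) => { /* pair, e.g. `&[u8]`: two poisons */ }
        OperandValue::Ref(..) => { /* memory, e.g. `[u8; 32]`: poison ptr */ }
    }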
@@ -352,12 +425,11 @@ impl<'a, 'tcx, V: CodegenObject> OperandValue<V> {
flags: MemFlags,
) {
debug!("OperandRef::store: operand={:?}, dest={:?}", self, dest);
- // Avoid generating stores of zero-sized values, because the only way to have a zero-sized
- // value is through `undef`, and store itself is useless.
- if dest.layout.is_zst() {
- return;
- }
match self {
+ OperandValue::ZeroSized => {
+ // Avoid generating stores of zero-sized values: a `ZeroSized` operand
+ // has no bytes and no backend value behind it, so the store would be
+ // useless.
+ }
OperandValue::Ref(r, None, source_align) => {
if flags.contains(MemFlags::NONTEMPORAL) {
// HACK(nox): This is inefficient but there is no nontemporal memcpy.
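A hedged sketch of the consequence at a call site (the `dest` place and builder are assumed): the old early `is_zst()` return is gone, and the match itself now guarantees that storing a zero-sized operand emits no instructions.

    let unit = OperandRef::zero_sized(bx.layout_of(tcx.types.unit));
    // Dispatches to the `ZeroSized` arm above: no store, memcpy, or
    // nontemporal hack is ever generated for a value with no bytes.
    unit.val.store(bx, dest);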
@@ -458,7 +530,7 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> {
// checks in `codegen_consume` and `extract_field`.
let elem = o.layout.field(bx.cx(), 0);
if elem.is_zst() {
- o = OperandRef::new_zst(bx, elem);
+ o = OperandRef::zero_sized(elem);
} else {
return None;
}
@@ -492,7 +564,7 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> {
// ZSTs don't require any actual memory access.
if layout.is_zst() {
- return OperandRef::new_zst(bx, layout);
+ return OperandRef::zero_sized(layout);
}
if let Some(o) = self.maybe_codegen_consume_direct(bx, place_ref) {