summaryrefslogtreecommitdiffstats
path: root/compiler/rustc_const_eval/src/const_eval
diff options
context:
space:
mode:
Diffstat (limited to 'compiler/rustc_const_eval/src/const_eval')
-rw-r--r--compiler/rustc_const_eval/src/const_eval/error.rs14
-rw-r--r--compiler/rustc_const_eval/src/const_eval/eval_queries.rs39
-rw-r--r--compiler/rustc_const_eval/src/const_eval/machine.rs188
-rw-r--r--compiler/rustc_const_eval/src/const_eval/mod.rs4
4 files changed, 165 insertions, 80 deletions
diff --git a/compiler/rustc_const_eval/src/const_eval/error.rs b/compiler/rustc_const_eval/src/const_eval/error.rs
index 4977a5d6b..c60d6e4fe 100644
--- a/compiler/rustc_const_eval/src/const_eval/error.rs
+++ b/compiler/rustc_const_eval/src/const_eval/error.rs
@@ -55,7 +55,7 @@ impl Error for ConstEvalErrKind {}
/// When const-evaluation errors, this type is constructed with the resulting information,
/// and then used to emit the error as a lint or hard error.
#[derive(Debug)]
-pub struct ConstEvalErr<'tcx> {
+pub(super) struct ConstEvalErr<'tcx> {
pub span: Span,
pub error: InterpError<'tcx>,
pub stacktrace: Vec<FrameInfo<'tcx>>,
@@ -82,8 +82,8 @@ impl<'tcx> ConstEvalErr<'tcx> {
ConstEvalErr { error: error.into_kind(), stacktrace, span }
}
- pub fn report_as_error(&self, tcx: TyCtxtAt<'tcx>, message: &str) -> ErrorHandled {
- self.struct_error(tcx, message, |_| {})
+ pub(super) fn report(&self, tcx: TyCtxtAt<'tcx>, message: &str) -> ErrorHandled {
+ self.report_decorated(tcx, message, |_| {})
}
/// Create a diagnostic for this const eval error.
@@ -95,7 +95,7 @@ impl<'tcx> ConstEvalErr<'tcx> {
/// If `lint_root.is_some()` report it as a lint, else report it as a hard error.
/// (Except that for some errors, we ignore all that -- see `must_error` below.)
#[instrument(skip(self, tcx, decorate), level = "debug")]
- pub fn struct_error(
+ pub(super) fn report_decorated(
&self,
tcx: TyCtxtAt<'tcx>,
message: &str,
@@ -123,14 +123,14 @@ impl<'tcx> ConstEvalErr<'tcx> {
// Helper closure to print duplicated lines.
let mut flush_last_line = |last_frame, times| {
if let Some((line, span)) = last_frame {
- err.span_label(span, &line);
+ err.span_note(span, &line);
// Don't print [... additional calls ...] if the number of lines is small
if times < 3 {
for _ in 0..times {
- err.span_label(span, &line);
+ err.span_note(span, &line);
}
} else {
- err.span_label(
+ err.span_note(
span,
format!("[... {} additional calls {} ...]", times, &line),
);
diff --git a/compiler/rustc_const_eval/src/const_eval/eval_queries.rs b/compiler/rustc_const_eval/src/const_eval/eval_queries.rs
index 1b1052fdf..c27790d88 100644
--- a/compiler/rustc_const_eval/src/const_eval/eval_queries.rs
+++ b/compiler/rustc_const_eval/src/const_eval/eval_queries.rs
@@ -1,10 +1,7 @@
-use super::{CompileTimeEvalContext, CompileTimeInterpreter, ConstEvalErr};
-use crate::interpret::eval_nullary_intrinsic;
-use crate::interpret::{
- intern_const_alloc_recursive, Allocation, ConstAlloc, ConstValue, CtfeValidationMode, GlobalId,
- Immediate, InternKind, InterpCx, InterpError, InterpResult, MPlaceTy, MemoryKind, OpTy,
- RefTracking, StackPopCleanup,
-};
+use std::borrow::Cow;
+use std::convert::TryInto;
+
+use either::{Left, Right};
use rustc_hir::def::DefKind;
use rustc_middle::mir;
@@ -16,8 +13,14 @@ use rustc_middle::ty::print::with_no_trimmed_paths;
use rustc_middle::ty::{self, TyCtxt};
use rustc_span::source_map::Span;
use rustc_target::abi::{self, Abi};
-use std::borrow::Cow;
-use std::convert::TryInto;
+
+use super::{CompileTimeEvalContext, CompileTimeInterpreter, ConstEvalErr};
+use crate::interpret::eval_nullary_intrinsic;
+use crate::interpret::{
+ intern_const_alloc_recursive, Allocation, ConstAlloc, ConstValue, CtfeValidationMode, GlobalId,
+ Immediate, InternKind, InterpCx, InterpError, InterpResult, MPlaceTy, MemoryKind, OpTy,
+ RefTracking, StackPopCleanup,
+};
const NOTE_ON_UNDEFINED_BEHAVIOR_ERROR: &str = "The rules on what exactly is undefined behavior aren't clear, \
so this check might be overzealous. Please open an issue on the rustc \
@@ -46,7 +49,7 @@ fn eval_body_using_ecx<'mir, 'tcx>(
ecx.tcx.def_kind(cid.instance.def_id())
);
let layout = ecx.layout_of(body.bound_return_ty().subst(tcx, cid.instance.substs))?;
- assert!(!layout.is_unsized());
+ assert!(layout.is_sized());
let ret = ecx.allocate(layout, MemoryKind::Stack)?;
trace!(
@@ -63,7 +66,7 @@ fn eval_body_using_ecx<'mir, 'tcx>(
)?;
// The main interpreter loop.
- ecx.run()?;
+ while ecx.step()? {}
// Intern the result
let intern_kind = if cid.promoted.is_some() {
@@ -135,14 +138,14 @@ pub(super) fn op_to_const<'tcx>(
_ => false,
};
let immediate = if try_as_immediate {
- Err(ecx.read_immediate(op).expect("normalization works on validated constants"))
+ Right(ecx.read_immediate(op).expect("normalization works on validated constants"))
} else {
// It is guaranteed that any non-slice scalar pair is actually ByRef here.
// When we come back from raw const eval, we are always by-ref. The only way our op here is
// by-val is if we are in destructure_mir_constant, i.e., if this is (a field of) something that we
// "tried to make immediate" before. We wouldn't do that for non-slice scalar pairs or
// structs containing such.
- op.try_as_mplace()
+ op.as_mplace_or_imm()
};
debug!(?immediate);
@@ -168,9 +171,9 @@ pub(super) fn op_to_const<'tcx>(
}
};
match immediate {
- Ok(ref mplace) => to_const_value(mplace),
+ Left(ref mplace) => to_const_value(mplace),
// see comment on `let try_as_immediate` above
- Err(imm) => match *imm {
+ Right(imm) => match *imm {
_ if imm.layout.is_zst() => ConstValue::ZeroSized,
Immediate::Scalar(x) => ConstValue::Scalar(x),
Immediate::ScalarPair(a, b) => {
@@ -255,7 +258,7 @@ pub fn eval_to_const_value_raw_provider<'tcx>(
return eval_nullary_intrinsic(tcx, key.param_env, def_id, substs).map_err(|error| {
let span = tcx.def_span(def_id);
let error = ConstEvalErr { error: error.into_kind(), stacktrace: vec![], span };
- error.report_as_error(tcx.at(span), "could not evaluate nullary intrinsic")
+ error.report(tcx.at(span), "could not evaluate nullary intrinsic")
});
}
@@ -333,7 +336,7 @@ pub fn eval_to_allocation_raw_provider<'tcx>(
}
};
- Err(err.report_as_error(ecx.tcx.at(err.span), &msg))
+ Err(err.report(ecx.tcx.at(err.span), &msg))
}
Ok(mplace) => {
// Since evaluation had no errors, validate the resulting constant.
@@ -358,7 +361,7 @@ pub fn eval_to_allocation_raw_provider<'tcx>(
if let Err(error) = validation {
// Validation failed, report an error. This is always a hard error.
let err = ConstEvalErr::new(&ecx, error, None);
- Err(err.struct_error(
+ Err(err.report_decorated(
ecx.tcx,
"it is undefined behavior to use this value",
|diag| {
diff --git a/compiler/rustc_const_eval/src/const_eval/machine.rs b/compiler/rustc_const_eval/src/const_eval/machine.rs
index 35d58d2f6..3dfded2d9 100644
--- a/compiler/rustc_const_eval/src/const_eval/machine.rs
+++ b/compiler/rustc_const_eval/src/const_eval/machine.rs
@@ -1,8 +1,12 @@
use rustc_hir::def::DefKind;
+use rustc_hir::LangItem;
use rustc_middle::mir;
+use rustc_middle::mir::interpret::PointerArithmetic;
+use rustc_middle::ty::layout::FnAbiOf;
use rustc_middle::ty::{self, Ty, TyCtxt};
use std::borrow::Borrow;
use std::hash::Hash;
+use std::ops::ControlFlow;
use rustc_data_structures::fx::FxIndexMap;
use rustc_data_structures::fx::IndexEntry;
@@ -17,58 +21,12 @@ use rustc_target::abi::{Align, Size};
use rustc_target::spec::abi::Abi as CallAbi;
use crate::interpret::{
- self, compile_time_machine, AllocId, ConstAllocation, Frame, ImmTy, InterpCx, InterpResult,
- OpTy, PlaceTy, Pointer, Scalar, StackPopUnwind,
+ self, compile_time_machine, AllocId, ConstAllocation, FnVal, Frame, ImmTy, InterpCx,
+ InterpResult, OpTy, PlaceTy, Pointer, Scalar, StackPopUnwind,
};
use super::error::*;
-impl<'mir, 'tcx> InterpCx<'mir, 'tcx, CompileTimeInterpreter<'mir, 'tcx>> {
- /// "Intercept" a function call to a panic-related function
- /// because we have something special to do for it.
- /// If this returns successfully (`Ok`), the function should just be evaluated normally.
- fn hook_special_const_fn(
- &mut self,
- instance: ty::Instance<'tcx>,
- args: &[OpTy<'tcx>],
- ) -> InterpResult<'tcx, Option<ty::Instance<'tcx>>> {
- // All `#[rustc_do_not_const_check]` functions should be hooked here.
- let def_id = instance.def_id();
-
- if Some(def_id) == self.tcx.lang_items().panic_display()
- || Some(def_id) == self.tcx.lang_items().begin_panic_fn()
- {
- // &str or &&str
- assert!(args.len() == 1);
-
- let mut msg_place = self.deref_operand(&args[0])?;
- while msg_place.layout.ty.is_ref() {
- msg_place = self.deref_operand(&msg_place.into())?;
- }
-
- let msg = Symbol::intern(self.read_str(&msg_place)?);
- let span = self.find_closest_untracked_caller_location();
- let (file, line, col) = self.location_triple_for_span(span);
- return Err(ConstEvalErrKind::Panic { msg, file, line, col }.into());
- } else if Some(def_id) == self.tcx.lang_items().panic_fmt() {
- // For panic_fmt, call const_panic_fmt instead.
- if let Some(const_panic_fmt) = self.tcx.lang_items().const_panic_fmt() {
- return Ok(Some(
- ty::Instance::resolve(
- *self.tcx,
- ty::ParamEnv::reveal_all(),
- const_panic_fmt,
- self.tcx.intern_substs(&[]),
- )
- .unwrap()
- .unwrap(),
- ));
- }
- }
- Ok(None)
- }
-}
-
/// Extra machine state for CTFE, and the Machine instance
pub struct CompileTimeInterpreter<'mir, 'tcx> {
/// For now, the number of terminators that can be evaluated before we throw a resource
@@ -191,6 +149,125 @@ impl interpret::MayLeak for ! {
}
impl<'mir, 'tcx: 'mir> CompileTimeEvalContext<'mir, 'tcx> {
+ /// "Intercept" a function call, because we have something special to do for it.
+ /// All `#[rustc_do_not_const_check]` functions should be hooked here.
+ /// If this returns `Some` function, which may be `instance` or a different function with
+ /// compatible arguments, then evaluation should continue with that function.
+ /// If this returns `None`, the function call has been handled and the function has returned.
+ fn hook_special_const_fn(
+ &mut self,
+ instance: ty::Instance<'tcx>,
+ args: &[OpTy<'tcx>],
+ dest: &PlaceTy<'tcx>,
+ ret: Option<mir::BasicBlock>,
+ ) -> InterpResult<'tcx, Option<ty::Instance<'tcx>>> {
+ let def_id = instance.def_id();
+
+ if Some(def_id) == self.tcx.lang_items().panic_display()
+ || Some(def_id) == self.tcx.lang_items().begin_panic_fn()
+ {
+ // &str or &&str
+ assert!(args.len() == 1);
+
+ let mut msg_place = self.deref_operand(&args[0])?;
+ while msg_place.layout.ty.is_ref() {
+ msg_place = self.deref_operand(&msg_place.into())?;
+ }
+
+ let msg = Symbol::intern(self.read_str(&msg_place)?);
+ let span = self.find_closest_untracked_caller_location();
+ let (file, line, col) = self.location_triple_for_span(span);
+ return Err(ConstEvalErrKind::Panic { msg, file, line, col }.into());
+ } else if Some(def_id) == self.tcx.lang_items().panic_fmt() {
+ // For panic_fmt, call const_panic_fmt instead.
+ let const_def_id = self.tcx.require_lang_item(LangItem::ConstPanicFmt, None);
+ let new_instance = ty::Instance::resolve(
+ *self.tcx,
+ ty::ParamEnv::reveal_all(),
+ const_def_id,
+ instance.substs,
+ )
+ .unwrap()
+ .unwrap();
+
+ return Ok(Some(new_instance));
+ } else if Some(def_id) == self.tcx.lang_items().align_offset_fn() {
+ // For align_offset, we replace the function call if the pointer has no address.
+ match self.align_offset(instance, args, dest, ret)? {
+ ControlFlow::Continue(()) => return Ok(Some(instance)),
+ ControlFlow::Break(()) => return Ok(None),
+ }
+ }
+ Ok(Some(instance))
+ }
+
+ /// `align_offset(ptr, target_align)` needs special handling in const eval, because the pointer
+ /// may not have an address.
+ ///
+ /// If `ptr` does have a known address, then we return `CONTINUE` and the function call should
+ /// proceed as normal.
+ ///
+    /// If `ptr` doesn't have an address, but its underlying allocation's alignment is at most
+    /// `target_align`, then we call the function again with a dummy address relative to the
+    /// allocation.
+ ///
+ /// If `ptr` doesn't have an address and `target_align` is stricter than the underlying
+ /// allocation's alignment, then we return `usize::MAX` immediately.
+ fn align_offset(
+ &mut self,
+ instance: ty::Instance<'tcx>,
+ args: &[OpTy<'tcx>],
+ dest: &PlaceTy<'tcx>,
+ ret: Option<mir::BasicBlock>,
+ ) -> InterpResult<'tcx, ControlFlow<()>> {
+ assert_eq!(args.len(), 2);
+
+ let ptr = self.read_pointer(&args[0])?;
+ let target_align = self.read_scalar(&args[1])?.to_machine_usize(self)?;
+
+ if !target_align.is_power_of_two() {
+ throw_ub_format!("`align_offset` called with non-power-of-two align: {}", target_align);
+ }
+
+ match self.ptr_try_get_alloc_id(ptr) {
+ Ok((alloc_id, offset, _extra)) => {
+ let (_size, alloc_align, _kind) = self.get_alloc_info(alloc_id);
+
+ if target_align <= alloc_align.bytes() {
+ // Extract the address relative to the allocation base that is definitely
+ // sufficiently aligned and call `align_offset` again.
+ let addr = ImmTy::from_uint(offset.bytes(), args[0].layout).into();
+ let align = ImmTy::from_uint(target_align, args[1].layout).into();
+ let fn_abi = self.fn_abi_of_instance(instance, ty::List::empty())?;
+
+ // We replace the entire function call with a "tail call".
+ // Note that this happens before the frame of the original function
+ // is pushed on the stack.
+ self.eval_fn_call(
+ FnVal::Instance(instance),
+ (CallAbi::Rust, fn_abi),
+ &[addr, align],
+ /* with_caller_location = */ false,
+ dest,
+ ret,
+ StackPopUnwind::NotAllowed,
+ )?;
+ Ok(ControlFlow::BREAK)
+ } else {
+ // Not alignable in const, return `usize::MAX`.
+ let usize_max = Scalar::from_machine_usize(self.machine_usize_max(), self);
+ self.write_scalar(usize_max, dest)?;
+ self.return_to_block(ret)?;
+ Ok(ControlFlow::BREAK)
+ }
+ }
+ Err(_addr) => {
+ // The pointer has an address, continue with function call.
+ Ok(ControlFlow::CONTINUE)
+ }
+ }
+ }
+
/// See documentation on the `ptr_guaranteed_cmp` intrinsic.
fn guaranteed_cmp(&mut self, a: Scalar, b: Scalar) -> InterpResult<'tcx, u8> {
Ok(match (a, b) {
@@ -271,8 +348,8 @@ impl<'mir, 'tcx> interpret::Machine<'mir, 'tcx> for CompileTimeInterpreter<'mir,
instance: ty::Instance<'tcx>,
_abi: CallAbi,
args: &[OpTy<'tcx>],
- _dest: &PlaceTy<'tcx>,
- _ret: Option<mir::BasicBlock>,
+ dest: &PlaceTy<'tcx>,
+ ret: Option<mir::BasicBlock>,
_unwind: StackPopUnwind, // unwinding is not supported in consts
) -> InterpResult<'tcx, Option<(&'mir mir::Body<'tcx>, ty::Instance<'tcx>)>> {
debug!("find_mir_or_eval_fn: {:?}", instance);
@@ -291,7 +368,11 @@ impl<'mir, 'tcx> interpret::Machine<'mir, 'tcx> for CompileTimeInterpreter<'mir,
}
}
- if let Some(new_instance) = ecx.hook_special_const_fn(instance, args)? {
+ let Some(new_instance) = ecx.hook_special_const_fn(instance, args, dest, ret)? else {
+ return Ok(None);
+ };
+
+ if new_instance != instance {
// We call another const fn instead.
// However, we return the *original* instance to make backtraces work out
// (and we hope this does not confuse the FnAbi checks too much).
@@ -300,13 +381,14 @@ impl<'mir, 'tcx> interpret::Machine<'mir, 'tcx> for CompileTimeInterpreter<'mir,
new_instance,
_abi,
args,
- _dest,
- _ret,
+ dest,
+ ret,
_unwind,
)?
.map(|(body, _instance)| (body, instance)));
}
}
+
// This is a const fn. Call it.
Ok(Some((ecx.load_mir(instance.def, None)?, instance)))
}
diff --git a/compiler/rustc_const_eval/src/const_eval/mod.rs b/compiler/rustc_const_eval/src/const_eval/mod.rs
index 1c33e7845..01b2b4b5d 100644
--- a/compiler/rustc_const_eval/src/const_eval/mod.rs
+++ b/compiler/rustc_const_eval/src/const_eval/mod.rs
@@ -103,7 +103,7 @@ pub(crate) fn try_destructure_mir_constant<'tcx>(
) -> InterpResult<'tcx, mir::DestructuredConstant<'tcx>> {
trace!("destructure_mir_constant: {:?}", val);
let ecx = mk_eval_cx(tcx, DUMMY_SP, param_env, false);
- let op = ecx.const_to_op(&val, None)?;
+ let op = ecx.eval_mir_constant(&val, None, None)?;
// We go to `usize` as we cannot allocate anything bigger anyway.
let (field_count, variant, down) = match val.ty().kind() {
@@ -139,7 +139,7 @@ pub(crate) fn deref_mir_constant<'tcx>(
val: mir::ConstantKind<'tcx>,
) -> mir::ConstantKind<'tcx> {
let ecx = mk_eval_cx(tcx, DUMMY_SP, param_env, false);
- let op = ecx.const_to_op(&val, None).unwrap();
+ let op = ecx.eval_mir_constant(&val, None, None).unwrap();
let mplace = ecx.deref_operand(&op).unwrap();
if let Some(alloc_id) = mplace.ptr.provenance {
assert_eq!(