author     Daniel Baumann <daniel.baumann@progress-linux.org>  2024-05-18 02:49:42 +0000
committer  Daniel Baumann <daniel.baumann@progress-linux.org>  2024-05-18 02:49:42 +0000
commit     837b550238aa671a591ccf282dddeab29cadb206 (patch)
tree       914b6b8862bace72bd3245ca184d374b08d8a672 /compiler/rustc_const_eval/src/interpret
parent     Adding debian version 1.70.0+dfsg2-1. (diff)
Merging upstream version 1.71.1+dfsg1.
Signed-off-by: Daniel Baumann <daniel.baumann@progress-linux.org>
Diffstat (limited to 'compiler/rustc_const_eval/src/interpret')
-rw-r--r--  compiler/rustc_const_eval/src/interpret/discriminant.rs                 | 17
-rw-r--r--  compiler/rustc_const_eval/src/interpret/eval_context.rs                 | 51
-rw-r--r--  compiler/rustc_const_eval/src/interpret/intern.rs                       |  2
-rw-r--r--  compiler/rustc_const_eval/src/interpret/intrinsics.rs                   | 10
-rw-r--r--  compiler/rustc_const_eval/src/interpret/intrinsics/caller_location.rs   |  6
-rw-r--r--  compiler/rustc_const_eval/src/interpret/machine.rs                      |  2
-rw-r--r--  compiler/rustc_const_eval/src/interpret/memory.rs                       | 27
-rw-r--r--  compiler/rustc_const_eval/src/interpret/operand.rs                      |  8
-rw-r--r--  compiler/rustc_const_eval/src/interpret/operator.rs                     | 26
-rw-r--r--  compiler/rustc_const_eval/src/interpret/place.rs                        |  2
-rw-r--r--  compiler/rustc_const_eval/src/interpret/step.rs                         | 19
-rw-r--r--  compiler/rustc_const_eval/src/interpret/terminator.rs                   |  5
-rw-r--r--  compiler/rustc_const_eval/src/interpret/util.rs                         |  8
-rw-r--r--  compiler/rustc_const_eval/src/interpret/validity.rs                     | 22
14 files changed, 123 insertions(+), 82 deletions(-)
diff --git a/compiler/rustc_const_eval/src/interpret/discriminant.rs b/compiler/rustc_const_eval/src/interpret/discriminant.rs
index 557e72124..015a9beab 100644
--- a/compiler/rustc_const_eval/src/interpret/discriminant.rs
+++ b/compiler/rustc_const_eval/src/interpret/discriminant.rs
@@ -211,18 +211,19 @@ impl<'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
let variant_index_relative = u32::try_from(variant_index_relative)
.expect("we checked that this fits into a u32");
// Then computing the absolute variant idx should not overflow any more.
- let variant_index = variants_start
- .checked_add(variant_index_relative)
- .expect("overflow computing absolute variant idx");
- let variants_len = op
+ let variant_index = VariantIdx::from_u32(
+ variants_start
+ .checked_add(variant_index_relative)
+ .expect("overflow computing absolute variant idx"),
+ );
+ let variants = op
.layout
.ty
.ty_adt_def()
.expect("tagged layout for non adt")
- .variants()
- .len();
- assert!(usize::try_from(variant_index).unwrap() < variants_len);
- VariantIdx::from_u32(variant_index)
+ .variants();
+ assert!(variant_index < variants.next_index());
+ variant_index
} else {
untagged_variant
}
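
The rewritten code above constructs the `VariantIdx` up front and bounds-checks it against `variants.next_index()` (the one-past-the-end index) instead of round-tripping through `usize`. A minimal standalone sketch of the same checked computation, using plain integers rather than rustc's `VariantIdx`/`IndexVec` types (names are illustrative):

    // Standalone sketch, not rustc code: compute the absolute variant index
    // with an explicit overflow check, then assert it is in range.
    fn absolute_variant_index(variants_start: u32, variant_index_relative: u32, num_variants: u32) -> u32 {
        // Overflow here would mean a malformed tag; the diff uses the same
        // `expect` message.
        let variant_index = variants_start
            .checked_add(variant_index_relative)
            .expect("overflow computing absolute variant idx");
        // `variants.next_index()` in the diff is the one-past-the-end index,
        // i.e. the same bound as the variant count here.
        assert!(variant_index < num_variants);
        variant_index
    }

    fn main() {
        assert_eq!(absolute_variant_index(2, 1, 5), 3);
    }
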
diff --git a/compiler/rustc_const_eval/src/interpret/eval_context.rs b/compiler/rustc_const_eval/src/interpret/eval_context.rs
index 3e58a58ae..7e9457800 100644
--- a/compiler/rustc_const_eval/src/interpret/eval_context.rs
+++ b/compiler/rustc_const_eval/src/interpret/eval_context.rs
@@ -5,16 +5,15 @@ use std::mem;
use either::{Either, Left, Right};
use rustc_hir::{self as hir, def_id::DefId, definitions::DefPathData};
-use rustc_index::vec::IndexVec;
+use rustc_index::IndexVec;
use rustc_middle::mir;
-use rustc_middle::mir::interpret::{ErrorHandled, InterpError};
+use rustc_middle::mir::interpret::{ErrorHandled, InterpError, ReportedErrorInfo};
+use rustc_middle::query::TyCtxtAt;
use rustc_middle::ty::layout::{
self, FnAbiError, FnAbiOfHelpers, FnAbiRequest, LayoutError, LayoutOf, LayoutOfHelpers,
TyAndLayout,
};
-use rustc_middle::ty::{
- self, query::TyCtxtAt, subst::SubstsRef, ParamEnv, Ty, TyCtxt, TypeFoldable,
-};
+use rustc_middle::ty::{self, subst::SubstsRef, ParamEnv, Ty, TyCtxt, TypeFoldable};
use rustc_mir_dataflow::storage::always_storage_live_locals;
use rustc_session::Limit;
use rustc_span::Span;
@@ -132,11 +131,10 @@ pub struct Frame<'mir, 'tcx, Prov: Provenance = AllocId, Extra = ()> {
}
/// What we store about a frame in an interpreter backtrace.
-#[derive(Debug)]
+#[derive(Clone, Debug)]
pub struct FrameInfo<'tcx> {
pub instance: ty::Instance<'tcx>,
pub span: Span,
- pub lint_root: Option<hir::HirId>,
}
#[derive(Clone, Copy, Eq, PartialEq, Debug)] // Miri debug-prints these
@@ -462,16 +460,16 @@ impl<'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
instance: ty::InstanceDef<'tcx>,
promoted: Option<mir::Promoted>,
) -> InterpResult<'tcx, &'tcx mir::Body<'tcx>> {
- let def = instance.with_opt_param();
trace!("load mir(instance={:?}, promoted={:?})", instance, promoted);
let body = if let Some(promoted) = promoted {
- &self.tcx.promoted_mir_opt_const_arg(def)[promoted]
+ let def = instance.def_id();
+ &self.tcx.promoted_mir(def)[promoted]
} else {
M::load_mir(self, instance)?
};
// do not continue if typeck errors occurred (can only occur in local crate)
if let Some(err) = body.tainted_by_errors {
- throw_inval!(AlreadyReported(err));
+ throw_inval!(AlreadyReported(ReportedErrorInfo::tainted_by_errors(err)));
}
Ok(body)
}
@@ -496,25 +494,29 @@ impl<'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
) -> Result<T, InterpError<'tcx>> {
frame
.instance
- .try_subst_mir_and_normalize_erasing_regions(*self.tcx, self.param_env, value)
+ .try_subst_mir_and_normalize_erasing_regions(
+ *self.tcx,
+ self.param_env,
+ ty::EarlyBinder(value),
+ )
.map_err(|_| err_inval!(TooGeneric))
}
/// The `substs` are assumed to already be in our interpreter "universe" (param_env).
pub(super) fn resolve(
&self,
- def: ty::WithOptConstParam<DefId>,
+ def: DefId,
substs: SubstsRef<'tcx>,
) -> InterpResult<'tcx, ty::Instance<'tcx>> {
trace!("resolve: {:?}, {:#?}", def, substs);
trace!("param_env: {:#?}", self.param_env);
trace!("substs: {:#?}", substs);
- match ty::Instance::resolve_opt_const_arg(*self.tcx, self.param_env, def, substs) {
+ match ty::Instance::resolve(*self.tcx, self.param_env, def, substs) {
Ok(Some(instance)) => Ok(instance),
Ok(None) => throw_inval!(TooGeneric),
// FIXME(eddyb) this could be a bit more specific than `AlreadyReported`.
- Err(error_reported) => throw_inval!(AlreadyReported(error_reported)),
+ Err(error_reported) => throw_inval!(AlreadyReported(error_reported.into())),
}
}
@@ -902,7 +904,7 @@ impl<'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
query(self.tcx.at(span.unwrap_or_else(|| self.cur_span()))).map_err(|err| {
match err {
ErrorHandled::Reported(err) => {
- if let Some(span) = span {
+ if !err.is_tainted_by_errors() && let Some(span) = span {
// To make it easier to figure out where this error comes from, also add a note at the current location.
self.tcx.sess.span_note_without_error(span, "erroneous constant used");
}
@@ -947,10 +949,21 @@ impl<'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
// This deliberately does *not* honor `requires_caller_location` since it is used for much
// more than just panics.
for frame in stack.iter().rev() {
- let lint_root = frame.lint_root();
- let span = frame.current_span();
-
- frames.push(FrameInfo { span, instance: frame.instance, lint_root });
+ let span = match frame.loc {
+ Left(loc) => {
+ // If the stacktrace passes through MIR-inlined source scopes, add them.
+ let mir::SourceInfo { mut span, scope } = *frame.body.source_info(loc);
+ let mut scope_data = &frame.body.source_scopes[scope];
+ while let Some((instance, call_span)) = scope_data.inlined {
+ frames.push(FrameInfo { span, instance });
+ span = call_span;
+ scope_data = &frame.body.source_scopes[scope_data.parent_scope.unwrap()];
+ }
+ span
+ }
+ Right(span) => span,
+ };
+ frames.push(FrameInfo { span, instance: frame.instance });
}
trace!("generate stacktrace: {:#?}", frames);
frames
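
The new stacktrace loop expands MIR-inlined source scopes into additional `FrameInfo` entries. A simplified, self-contained sketch of that walk, with a hypothetical `ScopeData` type standing in for rustc's `SourceScopeData` and spans reduced to plain integers:

    // Hypothetical, simplified types; not rustc's.
    struct ScopeData {
        // (name of the inlined instance, span of the call site), if this
        // scope was created by MIR inlining.
        inlined: Option<(&'static str, u32)>,
        parent: Option<usize>,
    }

    // Walk outwards from the innermost scope: each inlined scope contributes
    // one extra backtrace frame, and the span for the next frame becomes the
    // call site of the inlined call.
    fn expand_inlined_frames(
        scopes: &[ScopeData],
        mut scope: usize,
        mut span: u32,
    ) -> Vec<(&'static str, u32)> {
        let mut frames = Vec::new();
        while let Some((instance, call_span)) = scopes[scope].inlined {
            frames.push((instance, span));
            span = call_span;
            scope = scopes[scope].parent.expect("inlined scope has a parent");
        }
        frames.push(("<enclosing frame>", span));
        frames
    }

    fn main() {
        let scopes = [
            ScopeData { inlined: None, parent: None },
            ScopeData { inlined: Some(("inlined_callee", 10)), parent: Some(0) },
        ];
        assert_eq!(
            expand_inlined_frames(&scopes, 1, 42),
            vec![("inlined_callee", 42), ("<enclosing frame>", 10)]
        );
    }
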
diff --git a/compiler/rustc_const_eval/src/interpret/intern.rs b/compiler/rustc_const_eval/src/interpret/intern.rs
index b220d21f6..c2b82ba9b 100644
--- a/compiler/rustc_const_eval/src/interpret/intern.rs
+++ b/compiler/rustc_const_eval/src/interpret/intern.rs
@@ -387,7 +387,7 @@ pub fn intern_const_alloc_recursive<
Err(error) => {
ecx.tcx.sess.delay_span_bug(
ecx.tcx.span,
- &format!(
+ format!(
"error during interning should later cause validation failure: {}",
error
),
diff --git a/compiler/rustc_const_eval/src/interpret/intrinsics.rs b/compiler/rustc_const_eval/src/interpret/intrinsics.rs
index 26fb041b4..a77c699c2 100644
--- a/compiler/rustc_const_eval/src/interpret/intrinsics.rs
+++ b/compiler/rustc_const_eval/src/interpret/intrinsics.rs
@@ -75,7 +75,7 @@ pub(crate) fn eval_nullary_intrinsic<'tcx>(
}
sym::type_id => {
ensure_monomorphic_enough(tcx, tp_ty)?;
- ConstValue::from_u64(tcx.type_id_hash(tp_ty))
+ ConstValue::from_u64(tcx.type_id_hash(tp_ty).as_u64())
}
sym::variant_count => match tp_ty.kind() {
// Correctly handles non-monomorphic calls, so there is no need for ensure_monomorphic_enough.
@@ -286,14 +286,6 @@ impl<'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
sym::write_bytes => {
self.write_bytes_intrinsic(&args[0], &args[1], &args[2])?;
}
- sym::offset => {
- let ptr = self.read_pointer(&args[0])?;
- let offset_count = self.read_target_isize(&args[1])?;
- let pointee_ty = substs.type_at(0);
-
- let offset_ptr = self.ptr_offset_inbounds(ptr, pointee_ty, offset_count)?;
- self.write_pointer(offset_ptr, dest)?;
- }
sym::arith_offset => {
let ptr = self.read_pointer(&args[0])?;
let offset_count = self.read_target_isize(&args[1])?;
diff --git a/compiler/rustc_const_eval/src/interpret/intrinsics/caller_location.rs b/compiler/rustc_const_eval/src/interpret/intrinsics/caller_location.rs
index 3701eb93e..df5b58100 100644
--- a/compiler/rustc_const_eval/src/interpret/intrinsics/caller_location.rs
+++ b/compiler/rustc_const_eval/src/interpret/intrinsics/caller_location.rs
@@ -111,11 +111,7 @@ impl<'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
location
}
- pub(crate) fn location_triple_for_span(&self, mut span: Span) -> (Symbol, u32, u32) {
- // Remove `Inlined` marks as they pollute `expansion_cause`.
- while span.is_inlined() {
- span.remove_mark();
- }
+ pub(crate) fn location_triple_for_span(&self, span: Span) -> (Symbol, u32, u32) {
let topmost = span.ctxt().outer_expn().expansion_cause().unwrap_or(span);
let caller = self.tcx.sess.source_map().lookup_char_pos(topmost.lo());
(
diff --git a/compiler/rustc_const_eval/src/interpret/machine.rs b/compiler/rustc_const_eval/src/interpret/machine.rs
index 0291cca73..b448e3a24 100644
--- a/compiler/rustc_const_eval/src/interpret/machine.rs
+++ b/compiler/rustc_const_eval/src/interpret/machine.rs
@@ -104,7 +104,7 @@ pub trait Machine<'mir, 'tcx>: Sized {
type FrameExtra;
/// Extra data stored in every allocation.
- type AllocExtra: Debug + Clone + 'static;
+ type AllocExtra: Debug + Clone + 'tcx;
/// Type for the bytes of the allocation.
type Bytes: AllocBytes + 'static;
diff --git a/compiler/rustc_const_eval/src/interpret/memory.rs b/compiler/rustc_const_eval/src/interpret/memory.rs
index a3764a7d1..d5b6a581a 100644
--- a/compiler/rustc_const_eval/src/interpret/memory.rs
+++ b/compiler/rustc_const_eval/src/interpret/memory.rs
@@ -215,7 +215,7 @@ impl<'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
self.allocate_raw_ptr(alloc, kind)
}
- /// This can fail only of `alloc` contains provenance.
+ /// This can fail only if `alloc` contains provenance.
pub fn allocate_raw_ptr(
&mut self,
alloc: Allocation,
@@ -807,9 +807,13 @@ impl<'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
DumpAllocs { ecx: self, allocs }
}
- /// Print leaked memory. Allocations reachable from `static_roots` or a `Global` allocation
- /// are not considered leaked. Leaks whose kind `may_leak()` returns true are not reported.
- pub fn leak_report(&self, static_roots: &[AllocId]) -> usize {
+ /// Find leaked allocations. Allocations reachable from `static_roots` or a `Global` allocation
+ /// are not considered leaked, as well as leaks whose kind's `may_leak()` returns true.
+ pub fn find_leaked_allocations(
+ &self,
+ static_roots: &[AllocId],
+ ) -> Vec<(AllocId, MemoryKind<M::MemoryKind>, Allocation<M::Provenance, M::AllocExtra, M::Bytes>)>
+ {
// Collect the set of allocations that are *reachable* from `Global` allocations.
let reachable = {
let mut reachable = FxHashSet::default();
@@ -833,14 +837,13 @@ impl<'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
};
// All allocations that are *not* `reachable` and *not* `may_leak` are considered leaking.
- let leaks: Vec<_> = self.memory.alloc_map.filter_map_collect(|&id, &(kind, _)| {
- if kind.may_leak() || reachable.contains(&id) { None } else { Some(id) }
- });
- let n = leaks.len();
- if n > 0 {
- eprintln!("The following memory was leaked: {:?}", self.dump_allocs(leaks));
- }
- n
+ self.memory.alloc_map.filter_map_collect(|id, (kind, alloc)| {
+ if kind.may_leak() || reachable.contains(id) {
+ None
+ } else {
+ Some((*id, *kind, alloc.clone()))
+ }
+ })
}
}
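
`leak_report` becomes `find_leaked_allocations`: rather than printing and returning a count, it returns the leaked allocations, presumably so the caller can decide how to report them. A rough sketch of that shape with simplified, hypothetical types (a plain `HashMap` as the allocation map and a `bool` standing in for `MemoryKind::may_leak()`):

    use std::collections::{HashMap, HashSet};

    type AllocId = u64;

    #[derive(Clone, Debug)]
    struct Allocation {
        bytes: Vec<u8>,
    }

    // Return the leaked allocations instead of printing them: everything that
    // is neither reachable nor allowed to leak.
    fn find_leaked_allocations(
        alloc_map: &HashMap<AllocId, (bool, Allocation)>,
        reachable: &HashSet<AllocId>,
    ) -> Vec<(AllocId, Allocation)> {
        alloc_map
            .iter()
            .filter(|(id, (may_leak, _))| !*may_leak && !reachable.contains(*id))
            .map(|(id, (_, alloc))| (*id, alloc.clone()))
            .collect()
    }

    fn main() {
        let mut alloc_map = HashMap::new();
        alloc_map.insert(1, (false, Allocation { bytes: vec![0; 4] }));
        alloc_map.insert(2, (true, Allocation { bytes: Vec::new() }));
        let reachable: HashSet<AllocId> = HashSet::new();
        // Allocation 1 is reported; allocation 2 may leak and is skipped.
        let leaks = find_leaked_allocations(&alloc_map, &reachable);
        assert_eq!(leaks.len(), 1);
        assert_eq!(leaks[0].1.bytes.len(), 4);
        println!("leaked: {leaks:?}");
    }
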
diff --git a/compiler/rustc_const_eval/src/interpret/operand.rs b/compiler/rustc_const_eval/src/interpret/operand.rs
index 5310ef0bb..e30af1655 100644
--- a/compiler/rustc_const_eval/src/interpret/operand.rs
+++ b/compiler/rustc_const_eval/src/interpret/operand.rs
@@ -245,6 +245,12 @@ impl<'tcx, Prov: Provenance> ImmTy<'tcx, Prov> {
impl<'tcx, Prov: Provenance> OpTy<'tcx, Prov> {
pub fn len(&self, cx: &impl HasDataLayout) -> InterpResult<'tcx, u64> {
if self.layout.is_unsized() {
+ if matches!(self.op, Operand::Immediate(Immediate::Uninit)) {
+ // Uninit unsized places shouldn't occur. In the interpreter we have them
+ // temporarily for unsized arguments before their value is put in; in ConstProp they
+ // remain uninit and this code can actually be reached.
+ throw_inval!(UninitUnsizedLocal);
+ }
// There are no unsized immediates.
self.assert_mem_place().len(cx)
} else {
@@ -589,7 +595,7 @@ impl<'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
// FIXME(generic_const_exprs): `ConstKind::Expr` should be able to be evaluated
ty::ConstKind::Expr(_) => throw_inval!(TooGeneric),
ty::ConstKind::Error(reported) => {
- throw_inval!(AlreadyReported(reported))
+ throw_inval!(AlreadyReported(reported.into()))
}
ty::ConstKind::Unevaluated(uv) => {
let instance = self.resolve(uv.def, uv.substs)?;
diff --git a/compiler/rustc_const_eval/src/interpret/operator.rs b/compiler/rustc_const_eval/src/interpret/operator.rs
index 4decfe863..7186148da 100644
--- a/compiler/rustc_const_eval/src/interpret/operator.rs
+++ b/compiler/rustc_const_eval/src/interpret/operator.rs
@@ -299,6 +299,30 @@ impl<'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
Ok((val, false, ty))
}
+ fn binary_ptr_op(
+ &self,
+ bin_op: mir::BinOp,
+ left: &ImmTy<'tcx, M::Provenance>,
+ right: &ImmTy<'tcx, M::Provenance>,
+ ) -> InterpResult<'tcx, (Scalar<M::Provenance>, bool, Ty<'tcx>)> {
+ use rustc_middle::mir::BinOp::*;
+
+ match bin_op {
+ // Pointer ops that are always supported.
+ Offset => {
+ let ptr = left.to_scalar().to_pointer(self)?;
+ let offset_count = right.to_scalar().to_target_isize(self)?;
+ let pointee_ty = left.layout.ty.builtin_deref(true).unwrap().ty;
+
+ let offset_ptr = self.ptr_offset_inbounds(ptr, pointee_ty, offset_count)?;
+ Ok((Scalar::from_maybe_pointer(offset_ptr, self), false, left.layout.ty))
+ }
+
+ // Fall back to machine hook so Miri can support more pointer ops.
+ _ => M::binary_ptr_op(self, bin_op, left, right),
+ }
+ }
+
/// Returns the result of the specified operation, whether it overflowed, and
/// the result type.
pub fn overflowing_binary_op(
@@ -368,7 +392,7 @@ impl<'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
right.layout.ty
);
- M::binary_ptr_op(self, bin_op, left, right)
+ self.binary_ptr_op(bin_op, left, right)
}
_ => span_bug!(
self.cur_span(),
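
This `Offset` case takes over from the `offset` intrinsic arm removed from intrinsics.rs above: an in-bounds pointer offset, scaled by the pointee size. A plain-Rust illustration of the semantics (not interpreter code):

    fn main() {
        let data = [10u32, 20, 30, 40];
        let base: *const u32 = data.as_ptr();
        // SAFETY: index 2 is within `data`, so the resulting pointer stays in
        // bounds of the same allocation, which is the property that
        // `ptr_offset_inbounds` enforces in the interpreter.
        let third = unsafe { base.offset(2) };
        assert_eq!(unsafe { *third }, 30);
    }

Passing a count that would leave the allocation is undefined behaviour for `offset`, which is why the interpreter routes the operation through `ptr_offset_inbounds`.
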
diff --git a/compiler/rustc_const_eval/src/interpret/place.rs b/compiler/rustc_const_eval/src/interpret/place.rs
index 03b09cf83..2a31a59ad 100644
--- a/compiler/rustc_const_eval/src/interpret/place.rs
+++ b/compiler/rustc_const_eval/src/interpret/place.rs
@@ -5,7 +5,7 @@
use either::{Either, Left, Right};
use rustc_ast::Mutability;
-use rustc_index::vec::IndexSlice;
+use rustc_index::IndexSlice;
use rustc_middle::mir;
use rustc_middle::ty;
use rustc_middle::ty::layout::{LayoutOf, TyAndLayout};
diff --git a/compiler/rustc_const_eval/src/interpret/step.rs b/compiler/rustc_const_eval/src/interpret/step.rs
index 9a366364e..1e60a1e72 100644
--- a/compiler/rustc_const_eval/src/interpret/step.rs
+++ b/compiler/rustc_const_eval/src/interpret/step.rs
@@ -113,8 +113,14 @@ impl<'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
Intrinsic(box intrinsic) => self.emulate_nondiverging_intrinsic(intrinsic)?,
- // Statements we do not track.
- PlaceMention(..) | AscribeUserType(..) => {}
+ // Evaluate the place expression, without reading from it.
+ PlaceMention(box place) => {
+ let _ = self.eval_place(*place)?;
+ }
+
+ // This exists purely to guide borrowck lifetime inference, and does not have
+ // an operational effect.
+ AscribeUserType(..) => {}
// Currently, Miri discards Coverage statements. Coverage statements are only injected
// via an optional compile time MIR pass and have no side effects. Since Coverage
@@ -280,20 +286,23 @@ impl<'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
self.write_immediate(*val, &dest)?;
}
- NullaryOp(null_op, ty) => {
+ NullaryOp(ref null_op, ty) => {
let ty = self.subst_from_current_frame_and_normalize_erasing_regions(ty)?;
let layout = self.layout_of(ty)?;
- if layout.is_unsized() {
+ if let mir::NullOp::SizeOf | mir::NullOp::AlignOf = null_op && layout.is_unsized() {
// FIXME: This should be a span_bug (#80742)
self.tcx.sess.delay_span_bug(
self.frame().current_span(),
- &format!("Nullary MIR operator called for unsized type {}", ty),
+ format!("{null_op:?} MIR operator called for unsized type {ty}"),
);
throw_inval!(SizeOfUnsizedType(ty));
}
let val = match null_op {
mir::NullOp::SizeOf => layout.size.bytes(),
mir::NullOp::AlignOf => layout.align.abi.bytes(),
+ mir::NullOp::OffsetOf(fields) => {
+ layout.offset_of_subfield(self, fields.iter().map(|f| f.index())).bytes()
+ }
};
self.write_scalar(Scalar::from_target_usize(val, self), &dest)?;
}
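
The new `NullOp::OffsetOf(fields)` arm evaluates to the byte offset of a (possibly nested) field path within the type's layout. For illustration only, the same quantity computed by hand for made-up `#[repr(C)]` types:

    #[repr(C)]
    struct Inner {
        x: u16,
        y: u32,
    }

    #[repr(C)]
    struct Outer {
        a: u8,
        inner: Inner,
    }

    fn main() {
        let v = Outer { a: 0, inner: Inner { x: 0, y: 0 } };
        let base = &v as *const Outer as usize;
        let a_off = &v.a as *const u8 as usize - base;
        let x_off = &v.inner.x as *const u16 as usize - base;
        let y_off = &v.inner.y as *const u32 as usize - base;
        // With #[repr(C)]: `a` at 0, then 3 bytes of padding, `inner` at 4,
        // so `inner.x` is at 4 and `inner.y` at 8.
        println!("a at {a_off}, inner.x at {x_off}, inner.y at {y_off}");
    }
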
diff --git a/compiler/rustc_const_eval/src/interpret/terminator.rs b/compiler/rustc_const_eval/src/interpret/terminator.rs
index a07702f7d..586e8f063 100644
--- a/compiler/rustc_const_eval/src/interpret/terminator.rs
+++ b/compiler/rustc_const_eval/src/interpret/terminator.rs
@@ -83,8 +83,7 @@ impl<'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
(fn_val, self.fn_abi_of_fn_ptr(fn_sig_binder, extra_args)?, false)
}
ty::FnDef(def_id, substs) => {
- let instance =
- self.resolve(ty::WithOptConstParam::unknown(def_id), substs)?;
+ let instance = self.resolve(def_id, substs)?;
(
FnVal::Instance(instance),
self.fn_abi_of_instance(instance, extra_args)?,
@@ -115,7 +114,7 @@ impl<'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
}
}
- Drop { place, target, unwind } => {
+ Drop { place, target, unwind, replace: _ } => {
let frame = self.frame();
let ty = place.ty(&frame.body.local_decls, *self.tcx).ty;
let ty = self.subst_from_frame_and_normalize_erasing_regions(frame, ty)?;
diff --git a/compiler/rustc_const_eval/src/interpret/util.rs b/compiler/rustc_const_eval/src/interpret/util.rs
index bf2b4ee69..22bdd4d2c 100644
--- a/compiler/rustc_const_eval/src/interpret/util.rs
+++ b/compiler/rustc_const_eval/src/interpret/util.rs
@@ -14,7 +14,7 @@ where
T: TypeVisitable<TyCtxt<'tcx>>,
{
debug!("ensure_monomorphic_enough: ty={:?}", ty);
- if !ty.needs_subst() {
+ if !ty.has_param() {
return Ok(());
}
@@ -27,7 +27,7 @@ where
type BreakTy = FoundParam;
fn visit_ty(&mut self, ty: Ty<'tcx>) -> ControlFlow<Self::BreakTy> {
- if !ty.needs_subst() {
+ if !ty.has_param() {
return ControlFlow::Continue(());
}
@@ -36,7 +36,7 @@ where
ty::Closure(def_id, substs)
| ty::Generator(def_id, substs, ..)
| ty::FnDef(def_id, substs) => {
- let instance = ty::InstanceDef::Item(ty::WithOptConstParam::unknown(def_id));
+ let instance = ty::InstanceDef::Item(def_id);
let unused_params = self.tcx.unused_generic_params(instance);
for (index, subst) in substs.into_iter().enumerate() {
let index = index
@@ -46,7 +46,7 @@ where
// are used and require substitution.
// Just in case there are closures or generators within this subst,
// recurse.
- if unused_params.is_used(index) && subst.needs_subst() {
+ if unused_params.is_used(index) && subst.has_param() {
return subst.visit_with(self);
}
}
diff --git a/compiler/rustc_const_eval/src/interpret/validity.rs b/compiler/rustc_const_eval/src/interpret/validity.rs
index 93b5273e1..01b772899 100644
--- a/compiler/rustc_const_eval/src/interpret/validity.rs
+++ b/compiler/rustc_const_eval/src/interpret/validity.rs
@@ -38,16 +38,14 @@ macro_rules! throw_validation_failure {
msg.push_str(", but expected ");
write!(&mut msg, $($expected_fmt)*).unwrap();
)?
- let path = rustc_middle::ty::print::with_no_trimmed_paths!({
- let where_ = &$where;
- if !where_.is_empty() {
- let mut path = String::new();
- write_path(&mut path, where_);
- Some(path)
- } else {
- None
- }
- });
+ let where_ = &$where;
+ let path = if !where_.is_empty() {
+ let mut path = String::new();
+ write_path(&mut path, where_);
+ Some(path)
+ } else {
+ None
+ };
throw_ub!(ValidationFailure { path, msg })
}};
}
@@ -784,7 +782,7 @@ impl<'rt, 'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> ValueVisitor<'mir, 'tcx, M>
Abi::Scalar(scalar_layout) => {
if !scalar_layout.is_uninit_valid() {
// There is something to check here.
- let scalar = self.read_scalar(op, "initiailized scalar value")?;
+ let scalar = self.read_scalar(op, "initialized scalar value")?;
self.visit_scalar(scalar, scalar_layout)?;
}
}
@@ -794,7 +792,7 @@ impl<'rt, 'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> ValueVisitor<'mir, 'tcx, M>
// the other must be init.
if !a_layout.is_uninit_valid() && !b_layout.is_uninit_valid() {
let (a, b) =
- self.read_immediate(op, "initiailized scalar value")?.to_scalar_pair();
+ self.read_immediate(op, "initialized scalar value")?.to_scalar_pair();
self.visit_scalar(a, a_layout)?;
self.visit_scalar(b, b_layout)?;
}