author    Daniel Baumann <daniel.baumann@progress-linux.org>    2024-04-17 12:11:38 +0000
committer Daniel Baumann <daniel.baumann@progress-linux.org>    2024-04-17 12:12:43 +0000
commit    cf94bdc0742c13e2a0cac864c478b8626b266e1b (patch)
tree      044670aa50cc5e2b4229aa0b6b3df6676730c0a6 /compiler/rustc_const_eval/src/interpret
parent    Adding debian version 1.65.0+dfsg1-2. (diff)
download  rustc-cf94bdc0742c13e2a0cac864c478b8626b266e1b.tar.xz
          rustc-cf94bdc0742c13e2a0cac864c478b8626b266e1b.zip
Merging upstream version 1.66.0+dfsg1.
Signed-off-by: Daniel Baumann <daniel.baumann@progress-linux.org>
Diffstat (limited to 'compiler/rustc_const_eval/src/interpret')
12 files changed, 157 insertions(+), 121 deletions(-)
diff --git a/compiler/rustc_const_eval/src/interpret/cast.rs b/compiler/rustc_const_eval/src/interpret/cast.rs
index cbe985480..269ae15d4 100644
--- a/compiler/rustc_const_eval/src/interpret/cast.rs
+++ b/compiler/rustc_const_eval/src/interpret/cast.rs
@@ -43,9 +43,21 @@ impl<'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
                 self.write_immediate(res, dest)?;
             }
 
-            Misc => {
+            IntToInt | IntToFloat => {
                 let src = self.read_immediate(src)?;
-                let res = self.misc_cast(&src, cast_ty)?;
+                let res = self.int_to_int_or_float(&src, cast_ty)?;
+                self.write_immediate(res, dest)?;
+            }
+
+            FloatToFloat | FloatToInt => {
+                let src = self.read_immediate(src)?;
+                let res = self.float_to_float_or_int(&src, cast_ty)?;
+                self.write_immediate(res, dest)?;
+            }
+
+            FnPtrToPtr | PtrToPtr => {
+                let src = self.read_immediate(&src)?;
+                let res = self.ptr_to_ptr(&src, cast_ty)?;
                 self.write_immediate(res, dest)?;
             }
 
@@ -126,13 +138,25 @@ impl<'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
         Ok(())
     }
 
-    pub fn misc_cast(
-        &mut self,
+    /// Handles 'IntToInt' and 'IntToFloat' casts.
+    pub fn int_to_int_or_float(
+        &self,
+        src: &ImmTy<'tcx, M::Provenance>,
+        cast_ty: Ty<'tcx>,
+    ) -> InterpResult<'tcx, Immediate<M::Provenance>> {
+        assert!(src.layout.ty.is_integral() || src.layout.ty.is_char() || src.layout.ty.is_bool());
+        assert!(cast_ty.is_floating_point() || cast_ty.is_integral() || cast_ty.is_char());
+
+        Ok(self.cast_from_int_like(src.to_scalar(), src.layout, cast_ty)?.into())
+    }
+
+    /// Handles 'FloatToFloat' and 'FloatToInt' casts.
+    pub fn float_to_float_or_int(
+        &self,
         src: &ImmTy<'tcx, M::Provenance>,
         cast_ty: Ty<'tcx>,
     ) -> InterpResult<'tcx, Immediate<M::Provenance>> {
         use rustc_type_ir::sty::TyKind::*;
-        trace!("Casting {:?}: {:?} to {:?}", *src, src.layout.ty, cast_ty);
 
         match src.layout.ty.kind() {
             // Floating point
@@ -142,47 +166,42 @@ impl<'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
             Float(FloatTy::F64) => {
                 return Ok(self.cast_from_float(src.to_scalar().to_f64()?, cast_ty).into());
             }
-            // The rest is integer/pointer-"like", including fn ptr casts
-            _ => assert!(
-                src.layout.ty.is_bool()
-                    || src.layout.ty.is_char()
-                    || src.layout.ty.is_integral()
-                    || src.layout.ty.is_any_ptr(),
-                "Unexpected cast from type {:?}",
-                src.layout.ty
-            ),
+            _ => {
+                bug!("Can't cast 'Float' type into {:?}", cast_ty);
+            }
         }
+    }
 
-        // # First handle non-scalar source values.
-
+    /// Handles 'FnPtrToPtr' and 'PtrToPtr' casts.
+    pub fn ptr_to_ptr(
+        &self,
+        src: &ImmTy<'tcx, M::Provenance>,
+        cast_ty: Ty<'tcx>,
+    ) -> InterpResult<'tcx, Immediate<M::Provenance>> {
+        assert!(src.layout.ty.is_any_ptr());
+        assert!(cast_ty.is_unsafe_ptr());
         // Handle casting any ptr to raw ptr (might be a fat ptr).
-        if src.layout.ty.is_any_ptr() && cast_ty.is_unsafe_ptr() {
-            let dest_layout = self.layout_of(cast_ty)?;
-            if dest_layout.size == src.layout.size {
-                // Thin or fat pointer that just hast the ptr kind of target type changed.
-                return Ok(**src);
-            } else {
-                // Casting the metadata away from a fat ptr.
-                assert_eq!(src.layout.size, 2 * self.pointer_size());
-                assert_eq!(dest_layout.size, self.pointer_size());
-                assert!(src.layout.ty.is_unsafe_ptr());
-                return match **src {
-                    Immediate::ScalarPair(data, _) => Ok(data.into()),
-                    Immediate::Scalar(..) => span_bug!(
-                        self.cur_span(),
-                        "{:?} input to a fat-to-thin cast ({:?} -> {:?})",
-                        *src,
-                        src.layout.ty,
-                        cast_ty
-                    ),
-                    Immediate::Uninit => throw_ub!(InvalidUninitBytes(None)),
-                };
-            }
+        let dest_layout = self.layout_of(cast_ty)?;
+        if dest_layout.size == src.layout.size {
+            // Thin or fat pointer that just hast the ptr kind of target type changed.
+            return Ok(**src);
+        } else {
+            // Casting the metadata away from a fat ptr.
+            assert_eq!(src.layout.size, 2 * self.pointer_size());
+            assert_eq!(dest_layout.size, self.pointer_size());
+            assert!(src.layout.ty.is_unsafe_ptr());
+            return match **src {
+                Immediate::ScalarPair(data, _) => Ok(data.into()),
+                Immediate::Scalar(..) => span_bug!(
+                    self.cur_span(),
+                    "{:?} input to a fat-to-thin cast ({:?} -> {:?})",
+                    *src,
+                    src.layout.ty,
+                    cast_ty
+                ),
+                Immediate::Uninit => throw_ub!(InvalidUninitBytes(None)),
+            };
         }
-
-        // # The remaining source values are scalar and "int-like".
-        let scalar = src.to_scalar();
-        Ok(self.cast_from_int_like(scalar, src.layout, cast_ty)?.into())
     }
 
     pub fn pointer_expose_address_cast(
@@ -203,7 +222,7 @@ impl<'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
     }
 
     pub fn pointer_from_exposed_address_cast(
-        &mut self,
+        &self,
         src: &ImmTy<'tcx, M::Provenance>,
         cast_ty: Ty<'tcx>,
     ) -> InterpResult<'tcx, Immediate<M::Provenance>> {
@@ -220,6 +239,8 @@ impl<'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
         Ok(Scalar::from_maybe_pointer(ptr, self).into())
     }
 
+    /// Low-level cast helper function. This works directly on scalars and can take 'int-like' input
+    /// type (basically everything with a scalar layout) to int/float/char types.
     pub fn cast_from_int_like(
         &self,
         scalar: Scalar<M::Provenance>, // input value (there is no ScalarTy so we separate data+layout)
@@ -259,6 +280,7 @@ impl<'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
         })
     }
 
+    /// Low-level cast helper function. Converts an apfloat `f` into int or float types.
     fn cast_from_float<F>(&self, f: F, dest_ty: Ty<'tcx>) -> Scalar<M::Provenance>
     where
         F: Float + Into<Scalar<M::Provenance>> + FloatConvert<Single> + FloatConvert<Double>,
diff --git a/compiler/rustc_const_eval/src/interpret/eval_context.rs b/compiler/rustc_const_eval/src/interpret/eval_context.rs
index d37eaeed0..a9063ad31 100644
--- a/compiler/rustc_const_eval/src/interpret/eval_context.rs
+++ b/compiler/rustc_const_eval/src/interpret/eval_context.rs
@@ -258,6 +258,9 @@ impl<'tcx> fmt::Display for FrameInfo<'tcx> {
             {
                 write!(f, "inside closure")?;
             } else {
+                // Note: this triggers a `good_path_bug` state, which means that if we ever get here
+                // we must emit a diagnostic. We should never display a `FrameInfo` unless we
+                // actually want to emit a warning or error to the user.
                 write!(f, "inside `{}`", self.instance)?;
             }
             if !self.span.is_dummy() {
@@ -465,7 +468,7 @@ impl<'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
 
     #[inline]
     pub fn type_is_freeze(&self, ty: Ty<'tcx>) -> bool {
-        ty.is_freeze(self.tcx, self.param_env)
+        ty.is_freeze(*self.tcx, self.param_env)
     }
 
     pub fn load_mir(
@@ -683,11 +686,10 @@ impl<'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
         self.stack_mut().push(frame);
 
         // Make sure all the constants required by this frame evaluate successfully (post-monomorphization check).
-        for const_ in &body.required_consts {
-            let span = const_.span;
-            let const_ =
-                self.subst_from_current_frame_and_normalize_erasing_regions(const_.literal)?;
-            self.mir_const_to_op(&const_, None).map_err(|err| {
+        for ct in &body.required_consts {
+            let span = ct.span;
+            let ct = self.subst_from_current_frame_and_normalize_erasing_regions(ct.literal)?;
+            self.const_to_op(&ct, None).map_err(|err| {
                 // If there was an error, set the span of the current frame to this constant.
                 // Avoiding doing this when evaluation succeeds.
                 self.frame_mut().loc = Err(span);
@@ -929,11 +931,13 @@ impl<'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
     }
 
     #[must_use]
-    pub fn generate_stacktrace(&self) -> Vec<FrameInfo<'tcx>> {
+    pub fn generate_stacktrace_from_stack(
+        stack: &[Frame<'mir, 'tcx, M::Provenance, M::FrameExtra>],
+    ) -> Vec<FrameInfo<'tcx>> {
         let mut frames = Vec::new();
         // This deliberately does *not* honor `requires_caller_location` since it is used for much
         // more than just panics.
-        for frame in self.stack().iter().rev() {
+        for frame in stack.iter().rev() {
             let lint_root = frame.current_source_info().and_then(|source_info| {
                 match &frame.body.source_scopes[source_info.scope].local_data {
                     mir::ClearCrossCrate::Set(data) => Some(data.lint_root),
@@ -947,6 +951,11 @@ impl<'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
         trace!("generate stacktrace: {:#?}", frames);
         frames
     }
+
+    #[must_use]
+    pub fn generate_stacktrace(&self) -> Vec<FrameInfo<'tcx>> {
+        Self::generate_stacktrace_from_stack(self.stack())
+    }
 }
 
 #[doc(hidden)]
diff --git a/compiler/rustc_const_eval/src/interpret/intern.rs b/compiler/rustc_const_eval/src/interpret/intern.rs
index 24dbc7695..6809a42dc 100644
--- a/compiler/rustc_const_eval/src/interpret/intern.rs
+++ b/compiler/rustc_const_eval/src/interpret/intern.rs
@@ -15,7 +15,7 @@
 //! that contains allocations whose mutability we cannot identify.)
 
 use super::validity::RefTracking;
-use rustc_data_structures::fx::{FxHashMap, FxHashSet};
+use rustc_data_structures::fx::{FxIndexMap, FxIndexSet};
 use rustc_errors::ErrorGuaranteed;
 use rustc_hir as hir;
 use rustc_middle::mir::interpret::InterpResult;
@@ -37,7 +37,7 @@ pub trait CompileTimeMachine<'mir, 'tcx, T> = Machine<
     ExtraFnVal = !,
     FrameExtra = (),
     AllocExtra = (),
-    MemoryMap = FxHashMap<AllocId, (MemoryKind<T>, Allocation)>,
+    MemoryMap = FxIndexMap<AllocId, (MemoryKind<T>, Allocation)>,
 >;
 
 struct InternVisitor<'rt, 'mir, 'tcx, M: CompileTimeMachine<'mir, 'tcx, const_eval::MemoryKind>> {
@@ -47,7 +47,7 @@ struct InternVisitor<'rt, 'mir, 'tcx, M: CompileTimeMachine<'mir, 'tcx, const_ev
     ref_tracking: &'rt mut RefTracking<(MPlaceTy<'tcx>, InternMode)>,
     /// A list of all encountered allocations. After type-based interning, we traverse this list to
     /// also intern allocations that are only referenced by a raw pointer or inside a union.
-    leftover_allocations: &'rt mut FxHashSet<AllocId>,
+    leftover_allocations: &'rt mut FxIndexSet<AllocId>,
     /// The root kind of the value that we're looking at. This field is never mutated for a
     /// particular allocation. It is primarily used to make as many allocations as possible
     /// read-only so LLVM can place them in const memory.
@@ -79,7 +79,7 @@ struct IsStaticOrFn;
 /// to account for (e.g. for vtables).
 fn intern_shallow<'rt, 'mir, 'tcx, M: CompileTimeMachine<'mir, 'tcx, const_eval::MemoryKind>>(
     ecx: &'rt mut InterpCx<'mir, 'tcx, M>,
-    leftover_allocations: &'rt mut FxHashSet<AllocId>,
+    leftover_allocations: &'rt mut FxIndexSet<AllocId>,
     alloc_id: AllocId,
     mode: InternMode,
     ty: Option<Ty<'tcx>>,
@@ -114,7 +114,7 @@ fn intern_shallow<'rt, 'mir, 'tcx, M: CompileTimeMachine<'mir, 'tcx, const_eval:
     if let InternMode::Static(mutability) = mode {
         // For this, we need to take into account `UnsafeCell`. When `ty` is `None`, we assume
         // no interior mutability.
-        let frozen = ty.map_or(true, |ty| ty.is_freeze(ecx.tcx, ecx.param_env));
+        let frozen = ty.map_or(true, |ty| ty.is_freeze(*ecx.tcx, ecx.param_env));
         // For statics, allocation mutability is the combination of place mutability and
         // type mutability.
         // The entire allocation needs to be mutable if it contains an `UnsafeCell` anywhere.
@@ -332,8 +332,6 @@ pub enum InternKind {
 ///
 /// This *cannot raise an interpreter error*. Doing so is left to validation, which
 /// tracks where in the value we are and thus can show much better error messages.
-/// Any errors here would anyway be turned into `const_err` lints, whereas validation failures
-/// are hard errors.
 #[instrument(level = "debug", skip(ecx))]
 pub fn intern_const_alloc_recursive<
     'mir,
@@ -357,7 +355,7 @@ pub fn intern_const_alloc_recursive<
     // `leftover_allocations` collects *all* allocations we see, because some might not
     // be available in a typed way. They get interned at the end.
     let mut ref_tracking = RefTracking::empty();
-    let leftover_allocations = &mut FxHashSet::default();
+    let leftover_allocations = &mut FxIndexSet::default();
 
     // start with the outermost allocation
     intern_shallow(
diff --git a/compiler/rustc_const_eval/src/interpret/intrinsics/caller_location.rs b/compiler/rustc_const_eval/src/interpret/intrinsics/caller_location.rs
index 91f4f0425..0e3867557 100644
--- a/compiler/rustc_const_eval/src/interpret/intrinsics/caller_location.rs
+++ b/compiler/rustc_const_eval/src/interpret/intrinsics/caller_location.rs
@@ -4,7 +4,6 @@ use rustc_ast::Mutability;
 use rustc_hir::lang_items::LangItem;
 use rustc_middle::mir::TerminatorKind;
 use rustc_middle::ty::layout::LayoutOf;
-use rustc_middle::ty::subst::Subst;
 use rustc_span::{Span, Symbol};
 
 use crate::interpret::{
diff --git a/compiler/rustc_const_eval/src/interpret/intrinsics/type_name.rs b/compiler/rustc_const_eval/src/interpret/intrinsics/type_name.rs
index 7e4c5fcb0..ffdb8de5b 100644
--- a/compiler/rustc_const_eval/src/interpret/intrinsics/type_name.rs
+++ b/compiler/rustc_const_eval/src/interpret/intrinsics/type_name.rs
@@ -4,7 +4,7 @@ use rustc_hir::definitions::DisambiguatedDefPathData;
 use rustc_middle::mir::interpret::{Allocation, ConstAllocation};
 use rustc_middle::ty::{
     self,
-    print::{PrettyPrinter, Print, Printer},
+    print::{with_no_verbose_constants, PrettyPrinter, Print, Printer},
     subst::{GenericArg, GenericArgKind},
     Ty, TyCtxt,
 };
@@ -190,7 +190,9 @@ impl Write for AbsolutePathPrinter<'_> {
 
 /// Directly returns an `Allocation` containing an absolute path representation of the given type.
 pub(crate) fn alloc_type_name<'tcx>(tcx: TyCtxt<'tcx>, ty: Ty<'tcx>) -> ConstAllocation<'tcx> {
-    let path = AbsolutePathPrinter { tcx, path: String::new() }.print_type(ty).unwrap().path;
+    let path = with_no_verbose_constants!(
+        AbsolutePathPrinter { tcx, path: String::new() }.print_type(ty).unwrap().path
+    );
     let alloc = Allocation::from_bytes_byte_aligned_immutable(path.into_bytes());
     tcx.intern_const_alloc(alloc)
 }
diff --git a/compiler/rustc_const_eval/src/interpret/machine.rs b/compiler/rustc_const_eval/src/interpret/machine.rs
index 530e252b7..351152eba 100644
--- a/compiler/rustc_const_eval/src/interpret/machine.rs
+++ b/compiler/rustc_const_eval/src/interpret/machine.rs
@@ -426,7 +426,7 @@ pub macro compile_time_machine(<$mir: lifetime, $tcx: lifetime>) {
     type ExtraFnVal = !;
 
     type MemoryMap =
-        rustc_data_structures::fx::FxHashMap<AllocId, (MemoryKind<Self::MemoryKind>, Allocation)>;
+        rustc_data_structures::fx::FxIndexMap<AllocId, (MemoryKind<Self::MemoryKind>, Allocation)>;
     const GLOBAL_KIND: Option<Self::MemoryKind> = None; // no copying of globals from `tcx` to machine memory
 
     type AllocExtra = ();
diff --git a/compiler/rustc_const_eval/src/interpret/memory.rs b/compiler/rustc_const_eval/src/interpret/memory.rs
index ed155fbfe..e5e015c1e 100644
--- a/compiler/rustc_const_eval/src/interpret/memory.rs
+++ b/compiler/rustc_const_eval/src/interpret/memory.rs
@@ -794,7 +794,7 @@ impl<'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
         todo.extend(static_roots);
         while let Some(id) = todo.pop() {
             if reachable.insert(id) {
-                // This is a new allocation, add the allocation it points to to `todo`.
+                // This is a new allocation, add the allocation it points to `todo`.
                 if let Some((_, alloc)) = self.memory.alloc_map.get(id) {
                     todo.extend(
                         alloc.provenance().values().filter_map(|prov| prov.get_alloc_id()),
diff --git a/compiler/rustc_const_eval/src/interpret/operand.rs b/compiler/rustc_const_eval/src/interpret/operand.rs
index 0e3959b61..0c212cf59 100644
--- a/compiler/rustc_const_eval/src/interpret/operand.rs
+++ b/compiler/rustc_const_eval/src/interpret/operand.rs
@@ -534,7 +534,7 @@ impl<'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
                 // * During ConstProp, with `TooGeneric` or since the `required_consts` were not all
                 //   checked yet.
                 // * During CTFE, since promoteds in `const`/`static` initializer bodies can fail.
-                self.mir_const_to_op(&val, layout)?
+                self.const_to_op(&val, layout)?
             }
         };
         trace!("{:?}: {:?}", mir_op, *op);
@@ -549,45 +549,8 @@ impl<'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
         ops.iter().map(|op| self.eval_operand(op, None)).collect()
     }
 
-    // Used when the miri-engine runs into a constant and for extracting information from constants
-    // in patterns via the `const_eval` module
-    /// The `val` and `layout` are assumed to already be in our interpreter
-    /// "universe" (param_env).
     pub fn const_to_op(
         &self,
-        c: ty::Const<'tcx>,
-        layout: Option<TyAndLayout<'tcx>>,
-    ) -> InterpResult<'tcx, OpTy<'tcx, M::Provenance>> {
-        match c.kind() {
-            ty::ConstKind::Param(_) | ty::ConstKind::Placeholder(..) => throw_inval!(TooGeneric),
-            ty::ConstKind::Error(DelaySpanBugEmitted { reported, .. }) => {
-                throw_inval!(AlreadyReported(reported))
-            }
-            ty::ConstKind::Unevaluated(uv) => {
-                // NOTE: We evaluate to a `ValTree` here as a check to ensure
-                // we're working with valid constants, even though we never need it.
-                let instance = self.resolve(uv.def, uv.substs)?;
-                let cid = GlobalId { instance, promoted: None };
-                let _valtree = self
-                    .tcx
-                    .eval_to_valtree(self.param_env.and(cid))?
-                    .unwrap_or_else(|| bug!("unable to create ValTree for {:?}", uv));
-
-                Ok(self.eval_to_allocation(cid)?.into())
-            }
-            ty::ConstKind::Bound(..) | ty::ConstKind::Infer(..) => {
-                span_bug!(self.cur_span(), "const_to_op: Unexpected ConstKind {:?}", c)
-            }
-            ty::ConstKind::Value(valtree) => {
-                let ty = c.ty();
-                let const_val = self.tcx.valtree_to_const_val((ty, valtree));
-                self.const_val_to_op(const_val, ty, layout)
-            }
-        }
-    }
-
-    pub fn mir_const_to_op(
-        &self,
         val: &mir::ConstantKind<'tcx>,
         layout: Option<TyAndLayout<'tcx>>,
     ) -> InterpResult<'tcx, OpTy<'tcx, M::Provenance>> {
@@ -599,7 +562,36 @@ impl<'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
         // manually normalized.
         let val = self.tcx.normalize_erasing_regions(self.param_env, *val);
         match val {
-            mir::ConstantKind::Ty(ct) => self.const_to_op(ct, layout),
+            mir::ConstantKind::Ty(ct) => {
+                match ct.kind() {
+                    ty::ConstKind::Param(_) | ty::ConstKind::Placeholder(..) => {
+                        throw_inval!(TooGeneric)
+                    }
+                    ty::ConstKind::Error(DelaySpanBugEmitted { reported, .. }) => {
+                        throw_inval!(AlreadyReported(reported))
+                    }
+                    ty::ConstKind::Unevaluated(uv) => {
+                        // NOTE: We evaluate to a `ValTree` here as a check to ensure
+                        // we're working with valid constants, even though we never need it.
+                        let instance = self.resolve(uv.def, uv.substs)?;
+                        let cid = GlobalId { instance, promoted: None };
+                        let _valtree = self
+                            .tcx
+                            .eval_to_valtree(self.param_env.and(cid))?
+                            .unwrap_or_else(|| bug!("unable to create ValTree for {uv:?}"));
+
+                        Ok(self.eval_to_allocation(cid)?.into())
+                    }
+                    ty::ConstKind::Bound(..) | ty::ConstKind::Infer(..) => {
+                        span_bug!(self.cur_span(), "unexpected ConstKind in ctfe: {ct:?}")
+                    }
+                    ty::ConstKind::Value(valtree) => {
+                        let ty = ct.ty();
+                        let const_val = self.tcx.valtree_to_const_val((ty, valtree));
+                        self.const_val_to_op(const_val, ty, layout)
+                    }
+                }
+            }
             mir::ConstantKind::Val(val, ty) => self.const_val_to_op(val, ty, layout),
             mir::ConstantKind::Unevaluated(uv, _) => {
                 let instance = self.resolve(uv.def, uv.substs)?;
@@ -799,13 +791,14 @@ impl<'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
 }
 
 // Some nodes are used a lot. Make sure they don't unintentionally get bigger.
-#[cfg(all(target_arch = "x86_64", target_pointer_width = "64", not(bootstrap)))]
+#[cfg(all(target_arch = "x86_64", target_pointer_width = "64"))]
 mod size_asserts {
     use super::*;
     use rustc_data_structures::static_assert_size;
-    // These are in alphabetical order, which is easy to maintain.
+    // tidy-alphabetical-start
     static_assert_size!(Immediate, 48);
     static_assert_size!(ImmTy<'_>, 64);
     static_assert_size!(Operand, 56);
     static_assert_size!(OpTy<'_>, 80);
+    // tidy-alphabetical-end
 }
diff --git a/compiler/rustc_const_eval/src/interpret/place.rs b/compiler/rustc_const_eval/src/interpret/place.rs
index b32889290..b0625b5f4 100644
--- a/compiler/rustc_const_eval/src/interpret/place.rs
+++ b/compiler/rustc_const_eval/src/interpret/place.rs
@@ -280,7 +280,7 @@ impl<'tcx, Prov: Provenance> PlaceTy<'tcx, Prov> {
 
     #[inline(always)]
     #[cfg_attr(debug_assertions, track_caller)] // only in debug builds due to perf (see #98980)
-    pub fn assert_mem_place(self) -> MPlaceTy<'tcx, Prov> {
+    pub fn assert_mem_place(&self) -> MPlaceTy<'tcx, Prov> {
         self.try_as_mplace().unwrap()
     }
 }
@@ -640,11 +640,17 @@ where
         // avoid force_allocation.
         let src = match self.read_immediate_raw(src)? {
             Ok(src_val) => {
-                assert!(!src.layout.is_unsized(), "cannot copy unsized immediates");
-                assert!(
-                    !dest.layout.is_unsized(),
-                    "the src is sized, so the dest must also be sized"
-                );
+                // FIXME(const_prop): Const-prop can possibly evaluate an
+                // unsized copy operation when it thinks that the type is
+                // actually sized, due to a trivially false where-clause
+                // predicate like `where Self: Sized` with `Self = dyn Trait`.
+                // See #102553 for an example of such a predicate.
+                if src.layout.is_unsized() {
+                    throw_inval!(SizeOfUnsizedType(src.layout.ty));
+                }
+                if dest.layout.is_unsized() {
+                    throw_inval!(SizeOfUnsizedType(dest.layout.ty));
+                }
                 assert_eq!(src.layout.size, dest.layout.size);
                 // Yay, we got a value that we can write directly.
                 return if layout_compat {
@@ -886,12 +892,11 @@ where
 mod size_asserts {
     use super::*;
     use rustc_data_structures::static_assert_size;
-    // These are in alphabetical order, which is easy to maintain.
-    static_assert_size!(MemPlaceMeta, 24);
+    // tidy-alphabetical-start
     static_assert_size!(MemPlace, 40);
+    static_assert_size!(MemPlaceMeta, 24);
     static_assert_size!(MPlaceTy<'_>, 64);
-    #[cfg(not(bootstrap))]
     static_assert_size!(Place, 40);
-    #[cfg(not(bootstrap))]
     static_assert_size!(PlaceTy<'_>, 64);
+    // tidy-alphabetical-end
 }
diff --git a/compiler/rustc_const_eval/src/interpret/projection.rs b/compiler/rustc_const_eval/src/interpret/projection.rs
index 77da8f104..6b2e2bb8a 100644
--- a/compiler/rustc_const_eval/src/interpret/projection.rs
+++ b/compiler/rustc_const_eval/src/interpret/projection.rs
@@ -350,6 +350,11 @@ where
     ) -> InterpResult<'tcx, PlaceTy<'tcx, M::Provenance>> {
         use rustc_middle::mir::ProjectionElem::*;
         Ok(match proj_elem {
+            OpaqueCast(ty) => {
+                let mut place = base.clone();
+                place.layout = self.layout_of(ty)?;
+                place
+            }
             Field(field, _) => self.place_field(base, field.index())?,
             Downcast(_, variant) => self.place_downcast(base, variant)?,
             Deref => self.deref_operand(&self.place_to_op(base)?)?.into(),
@@ -374,6 +379,11 @@ where
     ) -> InterpResult<'tcx, OpTy<'tcx, M::Provenance>> {
         use rustc_middle::mir::ProjectionElem::*;
         Ok(match proj_elem {
+            OpaqueCast(ty) => {
+                let mut op = base.clone();
+                op.layout = self.layout_of(ty)?;
+                op
+            }
             Field(field, _) => self.operand_field(base, field.index())?,
             Downcast(_, variant) => self.operand_downcast(base, variant)?,
             Deref => self.deref_operand(base)?.into(),
diff --git a/compiler/rustc_const_eval/src/interpret/terminator.rs b/compiler/rustc_const_eval/src/interpret/terminator.rs
index 50a82aa0e..57e40e168 100644
--- a/compiler/rustc_const_eval/src/interpret/terminator.rs
+++ b/compiler/rustc_const_eval/src/interpret/terminator.rs
@@ -35,7 +35,6 @@ impl<'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
                 assert_eq!(discr.layout.ty, switch_ty);
 
                 // Branch to the `otherwise` case by default, if no match is found.
-                assert!(!targets.iter().is_empty());
                 let mut target_block = targets.otherwise();
 
                 for (const_int, target) in targets.iter() {
diff --git a/compiler/rustc_const_eval/src/interpret/validity.rs b/compiler/rustc_const_eval/src/interpret/validity.rs
index 14aaee6ac..8aa56c275 100644
--- a/compiler/rustc_const_eval/src/interpret/validity.rs
+++ b/compiler/rustc_const_eval/src/interpret/validity.rs
@@ -15,7 +15,6 @@ use rustc_middle::mir::interpret::InterpError;
 use rustc_middle::ty;
 use rustc_middle::ty::layout::{LayoutOf, TyAndLayout};
 use rustc_span::symbol::{sym, Symbol};
-use rustc_span::DUMMY_SP;
 use rustc_target::abi::{Abi, Scalar as ScalarAbi, Size, VariantIdx, Variants, WrappingRange};
 
 use std::hash::Hash;
@@ -56,7 +55,7 @@ macro_rules! throw_validation_failure {
 /// This lets you use the patterns as a kind of validation list, asserting which errors
 /// can possibly happen:
 ///
-/// ```
+/// ```ignore(illustrative)
 /// let v = try_validation!(some_fn(), some_path, {
 ///     Foo | Bar | Baz => { "some failure" },
 /// });
@@ -65,7 +64,7 @@ macro_rules! throw_validation_failure {
 /// The patterns must be of type `UndefinedBehaviorInfo`.
 /// An additional expected parameter can also be added to the failure message:
 ///
-/// ```
+/// ```ignore(illustrative)
 /// let v = try_validation!(some_fn(), some_path, {
 ///     Foo | Bar | Baz => { "some failure" } expected { "something that wasn't a failure" },
 /// });
@@ -74,7 +73,7 @@ macro_rules! throw_validation_failure {
 /// An additional nicety is that both parameters actually take format args, so you can just write
 /// the format string in directly:
 ///
-/// ```
+/// ```ignore(illustrative)
 /// let v = try_validation!(some_fn(), some_path, {
 ///     Foo | Bar | Baz => { "{:?}", some_failure } expected { "{}", expected_value },
 /// });
@@ -726,7 +725,7 @@ impl<'rt, 'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> ValueVisitor<'mir, 'tcx, M>
     ) -> InterpResult<'tcx> {
         // Special check preventing `UnsafeCell` inside unions in the inner part of constants.
         if matches!(self.ctfe_mode, Some(CtfeValidationMode::Const { inner: true, .. })) {
-            if !op.layout.ty.is_freeze(self.ecx.tcx.at(DUMMY_SP), self.ecx.param_env) {
+            if !op.layout.ty.is_freeze(*self.ecx.tcx, self.ecx.param_env) {
                 throw_validation_failure!(self.path, { "`UnsafeCell` in a `const`" });
             }
         }