author     Daniel Baumann <daniel.baumann@progress-linux.org>  2024-06-07 05:48:48 +0000
committer  Daniel Baumann <daniel.baumann@progress-linux.org>  2024-06-07 05:48:48 +0000
commit     ef24de24a82fe681581cc130f342363c47c0969a (patch)
tree       0d494f7e1a38b95c92426f58fe6eaa877303a86c /compiler/rustc_middle/src/mir
parent     Releasing progress-linux version 1.74.1+dfsg1-1~progress7.99u1. (diff)
Merging upstream version 1.75.0+dfsg1.
Signed-off-by: Daniel Baumann <daniel.baumann@progress-linux.org>
Diffstat (limited to 'compiler/rustc_middle/src/mir')
-rw-r--r--  compiler/rustc_middle/src/mir/consts.rs                                89
-rw-r--r--  compiler/rustc_middle/src/mir/coverage.rs                             115
-rw-r--r--  compiler/rustc_middle/src/mir/interpret/allocation.rs                  18
-rw-r--r--  compiler/rustc_middle/src/mir/interpret/allocation/provenance_map.rs    4
-rw-r--r--  compiler/rustc_middle/src/mir/interpret/error.rs                       39
-rw-r--r--  compiler/rustc_middle/src/mir/interpret/mod.rs                         11
-rw-r--r--  compiler/rustc_middle/src/mir/mod.rs                                  153
-rw-r--r--  compiler/rustc_middle/src/mir/mono.rs                                   2
-rw-r--r--  compiler/rustc_middle/src/mir/patch.rs                                 15
-rw-r--r--  compiler/rustc_middle/src/mir/pretty.rs                                60
-rw-r--r--  compiler/rustc_middle/src/mir/query.rs                                 52
-rw-r--r--  compiler/rustc_middle/src/mir/syntax.rs                                93
-rw-r--r--  compiler/rustc_middle/src/mir/tcx.rs                                    6
-rw-r--r--  compiler/rustc_middle/src/mir/terminator.rs                            52
-rw-r--r--  compiler/rustc_middle/src/mir/type_foldable.rs                          4
-rw-r--r--  compiler/rustc_middle/src/mir/visit.rs                                 13
16 files changed, 440 insertions, 286 deletions
diff --git a/compiler/rustc_middle/src/mir/consts.rs b/compiler/rustc_middle/src/mir/consts.rs
index 7c8a57b84..a9d09709e 100644
--- a/compiler/rustc_middle/src/mir/consts.rs
+++ b/compiler/rustc_middle/src/mir/consts.rs
@@ -3,6 +3,7 @@ use std::fmt::{self, Debug, Display, Formatter};
use rustc_hir;
use rustc_hir::def_id::{DefId, LocalDefId};
use rustc_hir::{self as hir};
+use rustc_session::RemapFileNameExt;
use rustc_span::Span;
use rustc_target::abi::{HasDataLayout, Size};
@@ -172,6 +173,24 @@ impl<'tcx> ConstValue<'tcx> {
let end = end.try_into().unwrap();
Some(data.inner().inspect_with_uninit_and_ptr_outside_interpreter(start..end))
}
+
+ /// Check if a constant may contain provenance information. This is used by MIR opts.
+ /// Can return `true` even if there is no provenance.
+ pub fn may_have_provenance(&self, tcx: TyCtxt<'tcx>, size: Size) -> bool {
+ match *self {
+ ConstValue::ZeroSized | ConstValue::Scalar(Scalar::Int(_)) => return false,
+ ConstValue::Scalar(Scalar::Ptr(..)) => return true,
+ // It's hard to find out the part of the allocation we point to;
+ // just conservatively check everything.
+ ConstValue::Slice { data, meta: _ } => !data.inner().provenance().ptrs().is_empty(),
+ ConstValue::Indirect { alloc_id, offset } => !tcx
+ .global_alloc(alloc_id)
+ .unwrap_memory()
+ .inner()
+ .provenance()
+ .range_empty(super::AllocRange::from(offset..offset + size), &tcx),
+ }
+ }
}
///////////////////////////////////////////////////////////////////////////
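The conservative answer given by `may_have_provenance` boils down to a range query over the pointers stored in an allocation. The following standalone sketch models that query with a plain sorted slice of pointer start offsets in place of the real `ProvenanceMap`; `PTR_SIZE`, `range_has_provenance`, and the example offsets are illustrative stand-ins, not compiler APIs:

    // Sorted start offsets of pointers stored in an allocation stand in for
    // ProvenanceMap::ptrs(); the query mirrors `!range_empty(offset..offset + size)`.
    fn range_has_provenance(ptr_offsets: &[usize], start: usize, size: usize) -> bool {
        const PTR_SIZE: usize = 8; // assumed pointer size for the sketch
        // A pointer starting up to PTR_SIZE - 1 bytes before `start` still overlaps the range.
        let lo = start.saturating_sub(PTR_SIZE - 1);
        let hi = start + size;
        let first = ptr_offsets.partition_point(|&o| o < lo);
        ptr_offsets[first..].iter().any(|&o| o < hi)
    }

    fn main() {
        let ptrs = [0, 16, 32]; // pointers stored at these byte offsets
        assert!(range_has_provenance(&ptrs, 12, 8)); // overlaps the pointer at offset 16
        assert!(!range_has_provenance(&ptrs, 8, 4)); // 8..12 touches no stored pointer
    }
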
@@ -213,10 +232,10 @@ impl<'tcx> Const<'tcx> {
pub fn try_to_scalar(self) -> Option<Scalar> {
match self {
Const::Ty(c) => match c.kind() {
- ty::ConstKind::Value(valtree) => match valtree {
- ty::ValTree::Leaf(scalar_int) => Some(Scalar::Int(scalar_int)),
- ty::ValTree::Branch(_) => None,
- },
+ ty::ConstKind::Value(valtree) if c.ty().is_primitive() => {
+ // A valtree of a type where leaves directly represent the scalar const value.
+ Some(valtree.unwrap_leaf().into())
+ }
_ => None,
},
Const::Val(val, _) => val.try_to_scalar(),
@@ -279,7 +298,16 @@ impl<'tcx> Const<'tcx> {
tcx: TyCtxt<'tcx>,
param_env: ty::ParamEnv<'tcx>,
) -> Option<Scalar> {
- self.eval(tcx, param_env, None).ok()?.try_to_scalar()
+ match self {
+ Const::Ty(c) if c.ty().is_primitive() => {
+ // Avoid the `valtree_to_const_val` query. Can only be done on primitive types that
+ // are valtree leaves, and *not* on references. (References should return the
+ // pointer here, which valtrees don't represent.)
+ let val = c.eval(tcx, param_env, None).ok()?;
+ Some(val.unwrap_leaf().into())
+ }
+ _ => self.eval(tcx, param_env, None).ok()?.try_to_scalar(),
+ }
}
#[inline]
@@ -476,6 +504,40 @@ impl<'tcx> Const<'tcx> {
_ => Self::Ty(c),
}
}
+
+ /// Return true if any evaluation of this constant always returns the same value,
+ /// taking into account even pointer identity tests.
+ pub fn is_deterministic(&self) -> bool {
+ // Some constants may generate fresh allocations for pointers they contain,
+ // so using the same constant twice can yield two different results:
+ // - valtrees purposefully generate new allocations
+ // - ConstValue::Slice also generates new allocations
+ match self {
+ Const::Ty(c) => match c.kind() {
+ ty::ConstKind::Param(..) => true,
+ // A valtree may be a reference. Valtree references correspond to a
+ // different allocation each time they are evaluated. Valtrees for primitive
+ // types are fine though.
+ ty::ConstKind::Value(_) => c.ty().is_primitive(),
+ ty::ConstKind::Unevaluated(..) | ty::ConstKind::Expr(..) => false,
+ // This can happen if evaluation of a constant failed. The result does not matter
+ // much since compilation is doomed.
+ ty::ConstKind::Error(..) => false,
+ // Should not appear in runtime MIR.
+ ty::ConstKind::Infer(..)
+ | ty::ConstKind::Bound(..)
+ | ty::ConstKind::Placeholder(..) => bug!(),
+ },
+ Const::Unevaluated(..) => false,
+ // If the same slice appears twice in the MIR, we cannot guarantee that we will
+ // give the same `AllocId` to the data.
+ Const::Val(ConstValue::Slice { .. }, _) => false,
+ Const::Val(
+ ConstValue::ZeroSized | ConstValue::Scalar(_) | ConstValue::Indirect { .. },
+ _,
+ ) => true,
+ }
+ }
}
/// An unevaluated (potentially generic) constant used in MIR.
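The primitive-type fast path in `try_eval_scalar` and the `is_deterministic` distinction rest on the same user-visible fact: a primitive constant is just its bits, while a reference constant is a pointer whose backing allocation is not guaranteed a stable address across uses. A small, hedged illustration (nothing here relies on a particular address outcome):

    const N: i32 = 5; // primitive: evaluates to the scalar 5
    const R: &i32 = &5; // reference: evaluates to a pointer to backing memory

    fn main() {
        assert_eq!(N, 5);
        // Two mentions of R are two evaluations of the constant; Rust does not
        // guarantee they refer to the same address, which is why constants of
        // non-primitive valtree type are not treated as deterministic above.
        let (a, b): (*const i32, *const i32) = (R, R);
        println!("same address: {}", a == b); // may print true or false
    }
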
@@ -520,3 +582,20 @@ impl<'tcx> Display for Const<'tcx> {
}
}
}
+
+///////////////////////////////////////////////////////////////////////////
+/// Const-related utilities
+
+impl<'tcx> TyCtxt<'tcx> {
+ pub fn span_as_caller_location(self, span: Span) -> ConstValue<'tcx> {
+ let topmost = span.ctxt().outer_expn().expansion_cause().unwrap_or(span);
+ let caller = self.sess.source_map().lookup_char_pos(topmost.lo());
+ self.const_caller_location(
+ rustc_span::symbol::Symbol::intern(
+ &caller.file.name.for_codegen(&self.sess).to_string_lossy(),
+ ),
+ caller.line as u32,
+ caller.col_display as u32 + 1,
+ )
+ }
+}
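`span_as_caller_location` is the compiler-side source of the file/line/column triples that `#[track_caller]` code observes through `core::panic::Location`; note the `+ 1` that turns the source map's 0-based display column into the 1-based column users see. A small user-level example of the consumer side, using only standard APIs:

    use std::panic::Location;

    #[track_caller]
    fn where_called() -> &'static Location<'static> {
        // Materialized by the compiler's caller-location machinery.
        Location::caller()
    }

    fn main() {
        let loc = where_called();
        // Columns are 1-based, matching the `col_display + 1` conversion above.
        println!("{}:{}:{}", loc.file(), loc.line(), loc.column());
    }
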
diff --git a/compiler/rustc_middle/src/mir/coverage.rs b/compiler/rustc_middle/src/mir/coverage.rs
index 9ef673922..08d377a86 100644
--- a/compiler/rustc_middle/src/mir/coverage.rs
+++ b/compiler/rustc_middle/src/mir/coverage.rs
@@ -1,5 +1,6 @@
//! Metadata from source code coverage analysis and instrumentation.
+use rustc_index::IndexVec;
use rustc_macros::HashStable;
use rustc_span::Symbol;
@@ -8,6 +9,11 @@ use std::fmt::{self, Debug, Formatter};
rustc_index::newtype_index! {
/// ID of a coverage counter. Values ascend from 0.
///
+ /// Before MIR inlining, counter IDs are local to their enclosing function.
+ /// After MIR inlining, coverage statements may have been inlined into
+ /// another function, so use the statement's source-scope to find which
+ /// function/instance its IDs are meaningful for.
+ ///
/// Note that LLVM handles counter IDs as `uint32_t`, so there is no need
/// to use a larger representation on the Rust side.
#[derive(HashStable)]
@@ -18,16 +24,16 @@ rustc_index::newtype_index! {
impl CounterId {
pub const START: Self = Self::from_u32(0);
-
- #[inline(always)]
- pub fn next_id(self) -> Self {
- Self::from_u32(self.as_u32() + 1)
- }
}
rustc_index::newtype_index! {
/// ID of a coverage-counter expression. Values ascend from 0.
///
+ /// Before MIR inlining, expression IDs are local to their enclosing function.
+ /// After MIR inlining, coverage statements may have been inlined into
+ /// another function, so use the statement's source-scope to find which
+ /// function/instance its IDs are meaningful for.
+ ///
/// Note that LLVM handles expression IDs as `uint32_t`, so there is no need
/// to use a larger representation on the Rust side.
#[derive(HashStable)]
@@ -38,26 +44,23 @@ rustc_index::newtype_index! {
impl ExpressionId {
pub const START: Self = Self::from_u32(0);
-
- #[inline(always)]
- pub fn next_id(self) -> Self {
- Self::from_u32(self.as_u32() + 1)
- }
}
-/// Operand of a coverage-counter expression.
+/// Enum that can hold a constant zero value, the ID of a physical coverage
+/// counter, or the ID of a coverage-counter expression.
///
-/// Operands can be a constant zero value, an actual coverage counter, or another
-/// expression. Counter/expression operands are referred to by ID.
+/// This was originally only used for expression operands (and named `Operand`),
+/// but the zero/counter/expression distinction is also useful for representing
+/// the value of code/gap mappings, and the true/false arms of branch mappings.
#[derive(Copy, Clone, PartialEq, Eq)]
#[derive(TyEncodable, TyDecodable, Hash, HashStable, TypeFoldable, TypeVisitable)]
-pub enum Operand {
+pub enum CovTerm {
Zero,
Counter(CounterId),
Expression(ExpressionId),
}
-impl Debug for Operand {
+impl Debug for CovTerm {
fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {
match self {
Self::Zero => write!(f, "Zero"),
@@ -69,40 +72,31 @@ impl Debug for Operand {
#[derive(Clone, PartialEq, TyEncodable, TyDecodable, Hash, HashStable, TypeFoldable, TypeVisitable)]
pub enum CoverageKind {
- Counter {
- function_source_hash: u64,
- /// ID of this counter within its enclosing function.
- /// Expressions in the same function can refer to it as an operand.
- id: CounterId,
- },
- Expression {
- /// ID of this coverage-counter expression within its enclosing function.
- /// Other expressions in the same function can refer to it as an operand.
- id: ExpressionId,
- lhs: Operand,
- op: Op,
- rhs: Operand,
- },
- Unreachable,
+ /// Marks the point in MIR control flow represented by a coverage counter.
+ ///
+ /// This is eventually lowered to `llvm.instrprof.increment` in LLVM IR.
+ ///
+ /// If this statement does not survive MIR optimizations, any mappings that
+ /// refer to this counter can have those references simplified to zero.
+ CounterIncrement { id: CounterId },
+
+ /// Marks the point in MIR control-flow represented by a coverage expression.
+ ///
+ /// If this statement does not survive MIR optimizations, any mappings that
+ /// refer to this expression can have those references simplified to zero.
+ ///
+ /// (This is only inserted for expression IDs that are directly used by
+ /// mappings. Intermediate expressions with no direct mappings are
+ /// retained/zeroed based on whether they are transitively used.)
+ ExpressionUsed { id: ExpressionId },
}
impl Debug for CoverageKind {
fn fmt(&self, fmt: &mut Formatter<'_>) -> fmt::Result {
use CoverageKind::*;
match self {
- Counter { id, .. } => write!(fmt, "Counter({:?})", id.index()),
- Expression { id, lhs, op, rhs } => write!(
- fmt,
- "Expression({:?}) = {:?} {} {:?}",
- id.index(),
- lhs,
- match op {
- Op::Add => "+",
- Op::Subtract => "-",
- },
- rhs,
- ),
- Unreachable => write!(fmt, "Unreachable"),
+ CounterIncrement { id } => write!(fmt, "CounterIncrement({:?})", id.index()),
+ ExpressionUsed { id } => write!(fmt, "ExpressionUsed({:?})", id.index()),
}
}
}
@@ -143,3 +137,38 @@ impl Op {
matches!(self, Self::Subtract)
}
}
+
+#[derive(Clone, Debug)]
+#[derive(TyEncodable, TyDecodable, Hash, HashStable, TypeFoldable, TypeVisitable)]
+pub struct Expression {
+ pub lhs: CovTerm,
+ pub op: Op,
+ pub rhs: CovTerm,
+}
+
+#[derive(Clone, Debug)]
+#[derive(TyEncodable, TyDecodable, Hash, HashStable, TypeFoldable, TypeVisitable)]
+pub struct Mapping {
+ pub code_region: CodeRegion,
+
+ /// Indicates whether this mapping uses a counter value, expression value,
+ /// or zero value.
+ ///
+ /// FIXME: When we add support for mapping kinds other than `Code`
+ /// (e.g. branch regions, expansion regions), replace this with a dedicated
+ /// mapping-kind enum.
+ pub term: CovTerm,
+}
+
+/// Stores per-function coverage information attached to a `mir::Body`,
+/// to be used in conjunction with the individual coverage statements injected
+/// into the function's basic blocks.
+#[derive(Clone, Debug)]
+#[derive(TyEncodable, TyDecodable, Hash, HashStable, TypeFoldable, TypeVisitable)]
+pub struct FunctionCoverageInfo {
+ pub function_source_hash: u64,
+ pub num_counters: usize,
+
+ pub expressions: IndexVec<ExpressionId, Expression>,
+ pub mappings: Vec<Mapping>,
+}
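To make the new `CovTerm`/`Expression` split concrete, here is a standalone sketch of how a consumer could resolve a term to a count: counters hold raw execution counts and each expression combines two terms. Plain `usize` indices and `Vec`s stand in for the rustc newtype indices and `IndexVec`; this is illustrative, not the codegen implementation:

    #[derive(Clone, Copy)]
    enum CovTerm {
        Zero,
        Counter(usize),
        Expression(usize),
    }

    enum Op { Add, Subtract }

    struct Expression { lhs: CovTerm, op: Op, rhs: CovTerm }

    fn eval(term: CovTerm, counters: &[u64], exprs: &[Expression]) -> u64 {
        match term {
            CovTerm::Zero => 0,
            CovTerm::Counter(i) => counters[i],
            CovTerm::Expression(i) => {
                let e = &exprs[i];
                let (l, r) = (eval(e.lhs, counters, exprs), eval(e.rhs, counters, exprs));
                match e.op { Op::Add => l + r, Op::Subtract => l.saturating_sub(r) }
            }
        }
    }

    fn main() {
        let counters = [10, 3]; // physical counter values after a run
        // expression 0 = counter 0 - counter 1, e.g. "times the branch was not taken"
        let exprs = [Expression { lhs: CovTerm::Counter(0), op: Op::Subtract, rhs: CovTerm::Counter(1) }];
        assert_eq!(eval(CovTerm::Expression(0), &counters, &exprs), 7);
    }
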
diff --git a/compiler/rustc_middle/src/mir/interpret/allocation.rs b/compiler/rustc_middle/src/mir/interpret/allocation.rs
index c787481bf..aded3e495 100644
--- a/compiler/rustc_middle/src/mir/interpret/allocation.rs
+++ b/compiler/rustc_middle/src/mir/interpret/allocation.rs
@@ -32,23 +32,16 @@ pub use init_mask::{InitChunk, InitChunkIter};
pub trait AllocBytes:
Clone + fmt::Debug + Eq + PartialEq + Hash + Deref<Target = [u8]> + DerefMut<Target = [u8]>
{
- /// Adjust the bytes to the specified alignment -- by default, this is a no-op.
- fn adjust_to_align(self, _align: Align) -> Self;
-
/// Create an `AllocBytes` from a slice of `u8`.
fn from_bytes<'a>(slice: impl Into<Cow<'a, [u8]>>, _align: Align) -> Self;
- /// Create a zeroed `AllocBytes` of the specified size and alignment;
- /// call the callback error handler if there is an error in allocating the memory.
+ /// Create a zeroed `AllocBytes` of the specified size and alignment.
+ /// Returns `None` if we ran out of memory on the host.
fn zeroed(size: Size, _align: Align) -> Option<Self>;
}
// Default `bytes` for `Allocation` is a `Box<[u8]>`.
impl AllocBytes for Box<[u8]> {
- fn adjust_to_align(self, _align: Align) -> Self {
- self
- }
-
fn from_bytes<'a>(slice: impl Into<Cow<'a, [u8]>>, _align: Align) -> Self {
Box::<[u8]>::from(slice.into())
}
@@ -299,6 +292,7 @@ impl<Prov: Provenance, Bytes: AllocBytes> Allocation<Prov, (), Bytes> {
}
fn uninit_inner<R>(size: Size, align: Align, fail: impl FnOnce() -> R) -> Result<Self, R> {
+ // We raise an error if we cannot create the allocation on the host.
// This results in an error that can happen non-deterministically, since the memory
// available to the compiler can change between runs. Normally queries are always
// deterministic. However, we can be non-deterministic here because all uses of const
@@ -351,10 +345,8 @@ impl<Bytes: AllocBytes> Allocation<AllocId, (), Bytes> {
extra: Extra,
mut adjust_ptr: impl FnMut(Pointer<AllocId>) -> Result<Pointer<Prov>, Err>,
) -> Result<Allocation<Prov, Extra, Bytes>, Err> {
- // Compute new pointer provenance, which also adjusts the bytes, and realign the pointer if
- // necessary.
- let mut bytes = self.bytes.adjust_to_align(self.align);
-
+ let mut bytes = self.bytes;
+ // Adjust provenance of pointers stored in this allocation.
let mut new_provenance = Vec::with_capacity(self.provenance.ptrs().len());
let ptr_size = cx.data_layout().pointer_size.bytes_usize();
let endian = cx.data_layout().endian;
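After this change, `AllocBytes` implementors only need byte construction (`from_bytes`) and fallible zero-initialization (`zeroed`); the alignment-adjustment hook is gone. A simplified standalone model of the reduced trait, with plain `usize` in place of `Size`/`Align` (the trait bounds and names here are trimmed stand-ins, not the real rustc trait):

    use std::borrow::Cow;
    use std::ops::Deref;

    trait AllocBytesModel: Deref<Target = [u8]> + Sized {
        fn from_bytes<'a>(slice: impl Into<Cow<'a, [u8]>>, _align: usize) -> Self;
        /// Returns `None` if the host runs out of memory (modelled, not triggered here).
        fn zeroed(size: usize, _align: usize) -> Option<Self>;
    }

    impl AllocBytesModel for Box<[u8]> {
        fn from_bytes<'a>(slice: impl Into<Cow<'a, [u8]>>, _align: usize) -> Self {
            Box::<[u8]>::from(slice.into())
        }
        fn zeroed(size: usize, _align: usize) -> Option<Self> {
            Some(vec![0u8; size].into_boxed_slice())
        }
    }

    fn main() {
        let bytes = <Box<[u8]> as AllocBytesModel>::from_bytes(&b"abc"[..], 1);
        assert_eq!(&*bytes, &b"abc"[..]);
        assert!(<Box<[u8]> as AllocBytesModel>::zeroed(16, 8).unwrap().iter().all(|&b| b == 0));
    }
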
diff --git a/compiler/rustc_middle/src/mir/interpret/allocation/provenance_map.rs b/compiler/rustc_middle/src/mir/interpret/allocation/provenance_map.rs
index 0243fc451..d504af6b7 100644
--- a/compiler/rustc_middle/src/mir/interpret/allocation/provenance_map.rs
+++ b/compiler/rustc_middle/src/mir/interpret/allocation/provenance_map.rs
@@ -315,7 +315,9 @@ impl<Prov: Provenance> ProvenanceMap<Prov> {
self.ptrs.insert_presorted(dest_ptrs.into());
}
if Prov::OFFSET_IS_ADDR {
- if let Some(dest_bytes) = copy.dest_bytes && !dest_bytes.is_empty() {
+ if let Some(dest_bytes) = copy.dest_bytes
+ && !dest_bytes.is_empty()
+ {
self.bytes.get_or_insert_with(Box::default).insert_presorted(dest_bytes.into());
}
} else {
diff --git a/compiler/rustc_middle/src/mir/interpret/error.rs b/compiler/rustc_middle/src/mir/interpret/error.rs
index bc464aca5..44b22e2d3 100644
--- a/compiler/rustc_middle/src/mir/interpret/error.rs
+++ b/compiler/rustc_middle/src/mir/interpret/error.rs
@@ -43,21 +43,6 @@ impl ErrorHandled {
}
}
- pub fn emit_err(&self, tcx: TyCtxt<'_>) -> ErrorGuaranteed {
- match self {
- &ErrorHandled::Reported(err, span) => {
- if !err.is_tainted_by_errors && !span.is_dummy() {
- tcx.sess.emit_err(error::ErroneousConstant { span });
- }
- err.error
- }
- &ErrorHandled::TooGeneric(span) => tcx.sess.delay_span_bug(
- span,
- "encountered TooGeneric error when monomorphic data was expected",
- ),
- }
- }
-
pub fn emit_note(&self, tcx: TyCtxt<'_>) {
match self {
&ErrorHandled::Reported(err, span) => {
@@ -231,10 +216,8 @@ pub enum InvalidProgramInfo<'tcx> {
}
/// Details of why a pointer had to be in-bounds.
-#[derive(Debug, Copy, Clone, TyEncodable, TyDecodable, HashStable)]
+#[derive(Debug, Copy, Clone)]
pub enum CheckInAllocMsg {
- /// We are dereferencing a pointer (i.e., creating a place).
- DerefTest,
/// We are accessing memory.
MemoryAccessTest,
/// We are doing pointer arithmetic.
@@ -245,7 +228,16 @@ pub enum CheckInAllocMsg {
InboundsTest,
}
-#[derive(Debug, Copy, Clone, TyEncodable, TyDecodable, HashStable)]
+/// Details of which pointer is not aligned.
+#[derive(Debug, Copy, Clone)]
+pub enum CheckAlignMsg {
+ /// The accessed pointer did not have proper alignment.
+ AccessedPtr,
+ /// The access occurred with a place that was based on a misaligned pointer.
+ BasedOn,
+}
+
+#[derive(Debug, Copy, Clone)]
pub enum InvalidMetaKind {
/// Size of a `[T]` is too big
SliceTooBig,
@@ -278,6 +270,13 @@ pub struct ScalarSizeMismatch {
pub data_size: u64,
}
+/// Information about a misaligned pointer.
+#[derive(Copy, Clone, Hash, PartialEq, Eq, Debug)]
+pub struct Misalignment {
+ pub has: Align,
+ pub required: Align,
+}
+
macro_rules! impl_into_diagnostic_arg_through_debug {
($($ty:ty),*$(,)?) => {$(
impl IntoDiagnosticArg for $ty {
@@ -339,7 +338,7 @@ pub enum UndefinedBehaviorInfo<'tcx> {
/// Using an integer as a pointer in the wrong way.
DanglingIntPointer(u64, CheckInAllocMsg),
/// Used a pointer with bad alignment.
- AlignmentCheckFailed { required: Align, has: Align },
+ AlignmentCheckFailed(Misalignment, CheckAlignMsg),
/// Writing to read-only memory.
WriteToReadOnly(AllocId),
/// Trying to access the data behind a function pointer.
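The new `Misalignment { has, required }` pair reports the alignment a pointer actually satisfies versus the alignment the access needed. A standalone sketch of how such a report can be derived from a raw address, with plain `u64` standing in for rustc's `Align`:

    /// Returns `None` when the address is properly aligned, otherwise the
    /// (has, required) pair that a `Misalignment` report would carry.
    fn misalignment(addr: u64, required: u64) -> Option<(u64, u64)> {
        assert!(required.is_power_of_two()); // alignments are always powers of two
        if addr % required == 0 {
            None
        } else {
            // Largest power of two dividing `addr`: isolate its lowest set bit.
            let has = addr & addr.wrapping_neg();
            Some((has, required))
        }
    }

    fn main() {
        assert_eq!(misalignment(24, 8), None); // 24 is 8-aligned
        assert_eq!(misalignment(20, 8), Some((4, 8))); // only 4-aligned, 8 required
    }
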
diff --git a/compiler/rustc_middle/src/mir/interpret/mod.rs b/compiler/rustc_middle/src/mir/interpret/mod.rs
index d21f82f04..e360fb3ea 100644
--- a/compiler/rustc_middle/src/mir/interpret/mod.rs
+++ b/compiler/rustc_middle/src/mir/interpret/mod.rs
@@ -142,11 +142,12 @@ use crate::ty::GenericArgKind;
use crate::ty::{self, Instance, Ty, TyCtxt};
pub use self::error::{
- struct_error, BadBytesAccess, CheckInAllocMsg, ErrorHandled, EvalToAllocationRawResult,
- EvalToConstValueResult, EvalToValTreeResult, ExpectedKind, InterpError, InterpErrorInfo,
- InterpResult, InvalidMetaKind, InvalidProgramInfo, MachineStopType, PointerKind,
- ReportedErrorInfo, ResourceExhaustionInfo, ScalarSizeMismatch, UndefinedBehaviorInfo,
- UnsupportedOpInfo, ValidationErrorInfo, ValidationErrorKind,
+ struct_error, BadBytesAccess, CheckAlignMsg, CheckInAllocMsg, ErrorHandled,
+ EvalToAllocationRawResult, EvalToConstValueResult, EvalToValTreeResult, ExpectedKind,
+ InterpError, InterpErrorInfo, InterpResult, InvalidMetaKind, InvalidProgramInfo,
+ MachineStopType, Misalignment, PointerKind, ReportedErrorInfo, ResourceExhaustionInfo,
+ ScalarSizeMismatch, UndefinedBehaviorInfo, UnsupportedOpInfo, ValidationErrorInfo,
+ ValidationErrorKind,
};
pub use self::value::Scalar;
diff --git a/compiler/rustc_middle/src/mir/mod.rs b/compiler/rustc_middle/src/mir/mod.rs
index 0bb1c66da..7054cede2 100644
--- a/compiler/rustc_middle/src/mir/mod.rs
+++ b/compiler/rustc_middle/src/mir/mod.rs
@@ -2,7 +2,7 @@
//!
//! [rustc dev guide]: https://rustc-dev-guide.rust-lang.org/mir/index.html
-use crate::mir::interpret::{AllocRange, ConstAllocation, ErrorHandled, Scalar};
+use crate::mir::interpret::{AllocRange, ConstAllocation, Scalar};
use crate::mir::visit::MirVisitable;
use crate::ty::codec::{TyDecoder, TyEncoder};
use crate::ty::fold::{FallibleTypeFolder, TypeFoldable};
@@ -17,7 +17,7 @@ use rustc_data_structures::captures::Captures;
use rustc_errors::{DiagnosticArgValue, DiagnosticMessage, ErrorGuaranteed, IntoDiagnosticArg};
use rustc_hir::def::{CtorKind, Namespace};
use rustc_hir::def_id::{DefId, CRATE_DEF_ID};
-use rustc_hir::{self, GeneratorKind, ImplicitSelfKind};
+use rustc_hir::{self, CoroutineKind, ImplicitSelfKind};
use rustc_hir::{self as hir, HirId};
use rustc_session::Session;
use rustc_target::abi::{FieldIdx, VariantIdx};
@@ -246,19 +246,19 @@ impl<'tcx> MirSource<'tcx> {
}
#[derive(Clone, TyEncodable, TyDecodable, Debug, HashStable, TypeFoldable, TypeVisitable)]
-pub struct GeneratorInfo<'tcx> {
- /// The yield type of the function, if it is a generator.
+pub struct CoroutineInfo<'tcx> {
+ /// The yield type of the function, if it is a coroutine.
pub yield_ty: Option<Ty<'tcx>>,
- /// Generator drop glue.
- pub generator_drop: Option<Body<'tcx>>,
+ /// Coroutine drop glue.
+ pub coroutine_drop: Option<Body<'tcx>>,
- /// The layout of a generator. Produced by the state transformation.
- pub generator_layout: Option<GeneratorLayout<'tcx>>,
+ /// The layout of a coroutine. Produced by the state transformation.
+ pub coroutine_layout: Option<CoroutineLayout<'tcx>>,
- /// If this is a generator then record the type of source expression that caused this generator
+ /// If this is a coroutine then record the type of source expression that caused this coroutine
/// to be created.
- pub generator_kind: GeneratorKind,
+ pub coroutine_kind: CoroutineKind,
}
/// The lowered representation of a single function.
@@ -284,7 +284,7 @@ pub struct Body<'tcx> {
/// and used for debuginfo. Indexed by a `SourceScope`.
pub source_scopes: IndexVec<SourceScope, SourceScopeData<'tcx>>,
- pub generator: Option<Box<GeneratorInfo<'tcx>>>,
+ pub coroutine: Option<Box<CoroutineInfo<'tcx>>>,
/// Declarations of locals.
///
@@ -345,6 +345,14 @@ pub struct Body<'tcx> {
pub injection_phase: Option<MirPhase>,
pub tainted_by_errors: Option<ErrorGuaranteed>,
+
+ /// Per-function coverage information added by the `InstrumentCoverage`
+ /// pass, to be used in conjunction with the coverage statements injected
+ /// into this body's blocks.
+ ///
+ /// If `-Cinstrument-coverage` is not active, or if an individual function
+ /// is not eligible for coverage, then this should always be `None`.
+ pub function_coverage_info: Option<Box<coverage::FunctionCoverageInfo>>,
}
impl<'tcx> Body<'tcx> {
@@ -357,7 +365,7 @@ impl<'tcx> Body<'tcx> {
arg_count: usize,
var_debug_info: Vec<VarDebugInfo<'tcx>>,
span: Span,
- generator_kind: Option<GeneratorKind>,
+ coroutine_kind: Option<CoroutineKind>,
tainted_by_errors: Option<ErrorGuaranteed>,
) -> Self {
// We need `arg_count` locals, and one for the return place.
@@ -374,12 +382,12 @@ impl<'tcx> Body<'tcx> {
source,
basic_blocks: BasicBlocks::new(basic_blocks),
source_scopes,
- generator: generator_kind.map(|generator_kind| {
- Box::new(GeneratorInfo {
+ coroutine: coroutine_kind.map(|coroutine_kind| {
+ Box::new(CoroutineInfo {
yield_ty: None,
- generator_drop: None,
- generator_layout: None,
- generator_kind,
+ coroutine_drop: None,
+ coroutine_layout: None,
+ coroutine_kind,
})
}),
local_decls,
@@ -392,6 +400,7 @@ impl<'tcx> Body<'tcx> {
is_polymorphic: false,
injection_phase: None,
tainted_by_errors,
+ function_coverage_info: None,
};
body.is_polymorphic = body.has_non_region_param();
body
@@ -409,7 +418,7 @@ impl<'tcx> Body<'tcx> {
source: MirSource::item(CRATE_DEF_ID.to_def_id()),
basic_blocks: BasicBlocks::new(basic_blocks),
source_scopes: IndexVec::new(),
- generator: None,
+ coroutine: None,
local_decls: IndexVec::new(),
user_type_annotations: IndexVec::new(),
arg_count: 0,
@@ -420,6 +429,7 @@ impl<'tcx> Body<'tcx> {
is_polymorphic: false,
injection_phase: None,
tainted_by_errors: None,
+ function_coverage_info: None,
};
body.is_polymorphic = body.has_non_region_param();
body
@@ -538,22 +548,22 @@ impl<'tcx> Body<'tcx> {
#[inline]
pub fn yield_ty(&self) -> Option<Ty<'tcx>> {
- self.generator.as_ref().and_then(|generator| generator.yield_ty)
+ self.coroutine.as_ref().and_then(|coroutine| coroutine.yield_ty)
}
#[inline]
- pub fn generator_layout(&self) -> Option<&GeneratorLayout<'tcx>> {
- self.generator.as_ref().and_then(|generator| generator.generator_layout.as_ref())
+ pub fn coroutine_layout(&self) -> Option<&CoroutineLayout<'tcx>> {
+ self.coroutine.as_ref().and_then(|coroutine| coroutine.coroutine_layout.as_ref())
}
#[inline]
- pub fn generator_drop(&self) -> Option<&Body<'tcx>> {
- self.generator.as_ref().and_then(|generator| generator.generator_drop.as_ref())
+ pub fn coroutine_drop(&self) -> Option<&Body<'tcx>> {
+ self.coroutine.as_ref().and_then(|coroutine| coroutine.coroutine_drop.as_ref())
}
#[inline]
- pub fn generator_kind(&self) -> Option<GeneratorKind> {
- self.generator.as_ref().map(|generator| generator.generator_kind)
+ pub fn coroutine_kind(&self) -> Option<CoroutineKind> {
+ self.coroutine.as_ref().map(|coroutine| coroutine.coroutine_kind)
}
#[inline]
@@ -569,32 +579,38 @@ impl<'tcx> Body<'tcx> {
self.injection_phase.is_some()
}
- /// *Must* be called once the full substitution for this body is known, to ensure that the body
- /// is indeed fit for code generation or consumption more generally.
- ///
- /// Sadly there's no nice way to represent an "arbitrary normalizer", so we take one for
- /// constants specifically. (`Option<GenericArgsRef>` could be used for that, but the fact
- /// that `Instance::args_for_mir_body` is private and instead instance exposes normalization
- /// functions makes it seem like exposing the generic args is not the intended strategy.)
- ///
- /// Also sadly, CTFE doesn't even know whether it runs on MIR that is already polymorphic or still monomorphic,
- /// so we cannot just immediately ICE on TooGeneric.
- ///
- /// Returns Ok(()) if everything went fine, and `Err` if a problem occurred and got reported.
- pub fn post_mono_checks(
+ /// For a `Location` in this scope, determine what the "caller location" at that point is. This
+ /// is interesting because of inlining: the `#[track_caller]` attribute of inlined functions
+ /// must be honored. Falls back to the `tracked_caller` value for `#[track_caller]` functions,
+ /// or the function's scope.
+ pub fn caller_location_span<T>(
&self,
+ mut source_info: SourceInfo,
+ caller_location: Option<T>,
tcx: TyCtxt<'tcx>,
- param_env: ty::ParamEnv<'tcx>,
- normalize_const: impl Fn(Const<'tcx>) -> Result<Const<'tcx>, ErrorHandled>,
- ) -> Result<(), ErrorHandled> {
- // For now, the only thing we have to check is is to ensure that all the constants used in
- // the body successfully evaluate.
- for &const_ in &self.required_consts {
- let c = normalize_const(const_.const_)?;
- c.eval(tcx, param_env, Some(const_.span))?;
+ from_span: impl FnOnce(Span) -> T,
+ ) -> T {
+ loop {
+ let scope_data = &self.source_scopes[source_info.scope];
+
+ if let Some((callee, callsite_span)) = scope_data.inlined {
+ // Stop inside the most nested non-`#[track_caller]` function,
+ // before ever reaching its caller (which is irrelevant).
+ if !callee.def.requires_caller_location(tcx) {
+ return from_span(source_info.span);
+ }
+ source_info.span = callsite_span;
+ }
+
+ // Skip past all of the parents with `inlined: None`.
+ match scope_data.inlined_parent_scope {
+ Some(parent) => source_info.scope = parent,
+ None => break,
+ }
}
- Ok(())
+ // No inlined `SourceScope`s, or all of them were `#[track_caller]`.
+ caller_location.unwrap_or_else(|| from_span(source_info.span))
}
}
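The inlining walk in `caller_location_span` can be pictured with a flat list of source scopes: starting at a statement's scope, follow inlined scopes outward, stop at the innermost inlined callee that is not `#[track_caller]`, and otherwise substitute the call-site span. The sketch below is a simplified stand-in (string spans, a `bool` for `requires_caller_location`, and it omits the explicit `caller_location` fallback argument):

    #[derive(Clone, Copy)]
    struct Scope {
        /// Some((callee_requires_caller_location, callsite_span)) if this scope
        /// was produced by inlining a call; None otherwise.
        inlined: Option<(bool, &'static str)>,
        inlined_parent_scope: Option<usize>,
    }

    fn caller_location_span(scopes: &[Scope], mut scope: usize, mut span: &'static str) -> &'static str {
        loop {
            let data = scopes[scope];
            if let Some((requires_caller_location, callsite_span)) = data.inlined {
                if !requires_caller_location {
                    // Innermost inlined non-#[track_caller] function: stop here.
                    return span;
                }
                // #[track_caller] callee: report its call site instead.
                span = callsite_span;
            }
            match data.inlined_parent_scope {
                Some(parent) => scope = parent,
                None => return span,
            }
        }
    }

    fn main() {
        let scopes = [
            // scope 0: the enclosing, non-inlined function
            Scope { inlined: None, inlined_parent_scope: None },
            // scope 1: a #[track_caller] callee inlined at "outer.rs:10"
            Scope { inlined: Some((true, "outer.rs:10")), inlined_parent_scope: Some(0) },
        ];
        // A statement inside the inlined #[track_caller] body reports the call site.
        assert_eq!(caller_location_span(&scopes, 1, "callee.rs:3"), "outer.rs:10");
    }
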
@@ -830,22 +846,6 @@ pub struct LocalDecl<'tcx> {
// FIXME(matthewjasper) Don't store in this in `Body`
pub local_info: ClearCrossCrate<Box<LocalInfo<'tcx>>>,
- /// `true` if this is an internal local.
- ///
- /// These locals are not based on types in the source code and are only used
- /// for a few desugarings at the moment.
- ///
- /// The generator transformation will sanity check the locals which are live
- /// across a suspension point against the type components of the generator
- /// which type checking knows are live across a suspension point. We need to
- /// flag drop flags to avoid triggering this check as they are introduced
- /// outside of type inference.
- ///
- /// This should be sound because the drop flags are fully algebraic, and
- /// therefore don't affect the auto-trait or outlives properties of the
- /// generator.
- pub internal: bool,
-
/// The type of this local.
pub ty: Ty<'tcx>,
@@ -1058,7 +1058,7 @@ impl<'tcx> LocalDecl<'tcx> {
self.source_info.span.desugaring_kind().is_some()
}
- /// Creates a new `LocalDecl` for a temporary: mutable, non-internal.
+ /// Creates a new `LocalDecl` for a temporary, mutable.
#[inline]
pub fn new(ty: Ty<'tcx>, span: Span) -> Self {
Self::with_source_info(ty, SourceInfo::outermost(span))
@@ -1070,20 +1070,12 @@ impl<'tcx> LocalDecl<'tcx> {
LocalDecl {
mutability: Mutability::Mut,
local_info: ClearCrossCrate::Set(Box::new(LocalInfo::Boring)),
- internal: false,
ty,
user_ty: None,
source_info,
}
}
- /// Converts `self` into same `LocalDecl` except tagged as internal.
- #[inline]
- pub fn internal(mut self) -> Self {
- self.internal = true;
- self
- }
-
/// Converts `self` into same `LocalDecl` except tagged as immutable.
#[inline]
pub fn immutable(mut self) -> Self {
@@ -1614,6 +1606,23 @@ impl Location {
}
}
+/// `DefLocation` represents the location of a definition - either an argument or an assignment
+/// within MIR body.
+#[derive(Copy, Clone, Debug, PartialEq, Eq)]
+pub enum DefLocation {
+ Argument,
+ Body(Location),
+}
+
+impl DefLocation {
+ pub fn dominates(self, location: Location, dominators: &Dominators<BasicBlock>) -> bool {
+ match self {
+ DefLocation::Argument => true,
+ DefLocation::Body(def) => def.successor_within_block().dominates(location, dominators),
+ }
+ }
+}
+
// Some nodes are used a lot. Make sure they don't unintentionally get bigger.
#[cfg(all(target_arch = "x86_64", target_pointer_width = "64"))]
mod size_asserts {
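For the new `DefLocation` type, the single-basic-block case makes the dominance rule easy to see: an argument is defined before any statement, while a body definition only dominates uses that come after the defining statement has executed (hence the successor in the real code). A toy, straight-line version with `usize` statement indices standing in for `mir::Location` and the dominator tree:

    #[derive(Clone, Copy)]
    enum DefLocation {
        Argument,
        Body(usize), // statement index of the defining assignment
    }

    impl DefLocation {
        fn dominates(self, use_index: usize) -> bool {
            match self {
                DefLocation::Argument => true,
                // The value exists only after the assignment: its *successor*
                // statement is what must come no later than the use.
                DefLocation::Body(def) => def + 1 <= use_index,
            }
        }
    }

    fn main() {
        let def = DefLocation::Body(2);
        assert!(def.dominates(3)); // use after the definition
        assert!(!def.dominates(2)); // the definition itself is not dominated
        assert!(DefLocation::Argument.dominates(0)); // arguments dominate everything
    }
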
diff --git a/compiler/rustc_middle/src/mir/mono.rs b/compiler/rustc_middle/src/mir/mono.rs
index 403e80bd3..91fdf0b31 100644
--- a/compiler/rustc_middle/src/mir/mono.rs
+++ b/compiler/rustc_middle/src/mir/mono.rs
@@ -10,8 +10,8 @@ use rustc_hir::ItemId;
use rustc_index::Idx;
use rustc_query_system::ich::StableHashingContext;
use rustc_session::config::OptLevel;
-use rustc_span::source_map::Span;
use rustc_span::symbol::Symbol;
+use rustc_span::Span;
use std::fmt;
use std::hash::Hash;
diff --git a/compiler/rustc_middle/src/mir/patch.rs b/compiler/rustc_middle/src/mir/patch.rs
index da486c346..eb4aa9eb9 100644
--- a/compiler/rustc_middle/src/mir/patch.rs
+++ b/compiler/rustc_middle/src/mir/patch.rs
@@ -99,7 +99,9 @@ impl<'tcx> MirPatch<'tcx> {
}
pub fn terminate_block(&mut self, reason: UnwindTerminateReason) -> BasicBlock {
- if let Some((cached_bb, cached_reason)) = self.terminate_block && reason == cached_reason {
+ if let Some((cached_bb, cached_reason)) = self.terminate_block
+ && reason == cached_reason
+ {
return cached_bb;
}
@@ -127,7 +129,7 @@ impl<'tcx> MirPatch<'tcx> {
Location { block: bb, statement_index: offset }
}
- pub fn new_internal_with_info(
+ pub fn new_local_with_info(
&mut self,
ty: Ty<'tcx>,
span: Span,
@@ -135,7 +137,7 @@ impl<'tcx> MirPatch<'tcx> {
) -> Local {
let index = self.next_local;
self.next_local += 1;
- let mut new_decl = LocalDecl::new(ty, span).internal();
+ let mut new_decl = LocalDecl::new(ty, span);
**new_decl.local_info.as_mut().assert_crate_local() = local_info;
self.new_locals.push(new_decl);
Local::new(index)
@@ -148,13 +150,6 @@ impl<'tcx> MirPatch<'tcx> {
Local::new(index)
}
- pub fn new_internal(&mut self, ty: Ty<'tcx>, span: Span) -> Local {
- let index = self.next_local;
- self.next_local += 1;
- self.new_locals.push(LocalDecl::new(ty, span).internal());
- Local::new(index)
- }
-
pub fn new_block(&mut self, data: BasicBlockData<'tcx>) -> BasicBlock {
let block = BasicBlock::new(self.patch_map.len());
debug!("MirPatch: new_block: {:?}: {:?}", block, data);
diff --git a/compiler/rustc_middle/src/mir/pretty.rs b/compiler/rustc_middle/src/mir/pretty.rs
index f032fd29d..a13248584 100644
--- a/compiler/rustc_middle/src/mir/pretty.rs
+++ b/compiler/rustc_middle/src/mir/pretty.rs
@@ -16,7 +16,7 @@ use rustc_middle::mir::interpret::{
Pointer, Provenance,
};
use rustc_middle::mir::visit::Visitor;
-use rustc_middle::mir::*;
+use rustc_middle::mir::{self, *};
use rustc_middle::ty::{self, TyCtxt};
use rustc_target::abi::Size;
@@ -130,8 +130,8 @@ fn dump_matched_mir_node<'tcx, F>(
Some(promoted) => write!(file, "::{promoted:?}`")?,
}
writeln!(file, " {disambiguator} {pass_name}")?;
- if let Some(ref layout) = body.generator_layout() {
- writeln!(file, "/* generator_layout = {layout:#?} */")?;
+ if let Some(ref layout) = body.coroutine_layout() {
+ writeln!(file, "/* coroutine_layout = {layout:#?} */")?;
}
writeln!(file)?;
extra_data(PassWhere::BeforeCFG, &mut file)?;
@@ -493,6 +493,27 @@ pub fn write_mir_intro<'tcx>(
// Add an empty line before the first block is printed.
writeln!(w)?;
+ if let Some(function_coverage_info) = &body.function_coverage_info {
+ write_function_coverage_info(function_coverage_info, w)?;
+ }
+
+ Ok(())
+}
+
+fn write_function_coverage_info(
+ function_coverage_info: &coverage::FunctionCoverageInfo,
+ w: &mut dyn io::Write,
+) -> io::Result<()> {
+ let coverage::FunctionCoverageInfo { expressions, mappings, .. } = function_coverage_info;
+
+ for (id, expression) in expressions.iter_enumerated() {
+ writeln!(w, "{INDENT}coverage {id:?} => {expression:?};")?;
+ }
+ for coverage::Mapping { term, code_region } in mappings {
+ writeln!(w, "{INDENT}coverage {term:?} => {code_region:?};")?;
+ }
+ writeln!(w)?;
+
Ok(())
}
@@ -685,10 +706,7 @@ impl Debug for Statement<'_> {
AscribeUserType(box (ref place, ref c_ty), ref variance) => {
write!(fmt, "AscribeUserType({place:?}, {variance:?}, {c_ty:?})")
}
- Coverage(box self::Coverage { ref kind, code_region: Some(ref rgn) }) => {
- write!(fmt, "Coverage::{kind:?} for {rgn:?}")
- }
- Coverage(box ref coverage) => write!(fmt, "Coverage::{:?}", coverage.kind),
+ Coverage(box mir::Coverage { ref kind }) => write!(fmt, "Coverage::{kind:?}"),
Intrinsic(box ref intrinsic) => write!(fmt, "{intrinsic}"),
ConstEvalCounter => write!(fmt, "ConstEvalCounter"),
Nop => write!(fmt, "nop"),
@@ -764,10 +782,10 @@ impl<'tcx> TerminatorKind<'tcx> {
Goto { .. } => write!(fmt, "goto"),
SwitchInt { discr, .. } => write!(fmt, "switchInt({discr:?})"),
Return => write!(fmt, "return"),
- GeneratorDrop => write!(fmt, "generator_drop"),
+ CoroutineDrop => write!(fmt, "coroutine_drop"),
UnwindResume => write!(fmt, "resume"),
UnwindTerminate(reason) => {
- write!(fmt, "abort({})", reason.as_short_str())
+ write!(fmt, "terminate({})", reason.as_short_str())
}
Yield { value, resume_arg, .. } => write!(fmt, "{resume_arg:?} = yield({value:?})"),
Unreachable => write!(fmt, "unreachable"),
@@ -847,7 +865,7 @@ impl<'tcx> TerminatorKind<'tcx> {
pub fn fmt_successor_labels(&self) -> Vec<Cow<'static, str>> {
use self::TerminatorKind::*;
match *self {
- Return | UnwindResume | UnwindTerminate(_) | Unreachable | GeneratorDrop => vec![],
+ Return | UnwindResume | UnwindTerminate(_) | Unreachable | CoroutineDrop => vec![],
Goto { .. } => vec!["".into()],
SwitchInt { ref targets, .. } => targets
.values
@@ -980,9 +998,9 @@ impl<'tcx> Debug for Rvalue<'tcx> {
ty::tls::with(|tcx| {
let variant_def = &tcx.adt_def(adt_did).variant(variant);
let args = tcx.lift(args).expect("could not lift for printing");
- let name = FmtPrinter::new(tcx, Namespace::ValueNS)
- .print_def_path(variant_def.def_id, args)?
- .into_buffer();
+ let name = FmtPrinter::print_string(tcx, Namespace::ValueNS, |cx| {
+ cx.print_def_path(variant_def.def_id, args)
+ })?;
match variant_def.ctor_kind() {
Some(CtorKind::Const) => fmt.write_str(&name),
@@ -1028,8 +1046,8 @@ impl<'tcx> Debug for Rvalue<'tcx> {
struct_fmt.finish()
}),
- AggregateKind::Generator(def_id, _, _) => ty::tls::with(|tcx| {
- let name = format!("{{generator@{:?}}}", tcx.def_span(def_id));
+ AggregateKind::Coroutine(def_id, _, _) => ty::tls::with(|tcx| {
+ let name = format!("{{coroutine@{:?}}}", tcx.def_span(def_id));
let mut struct_fmt = fmt.debug_struct(&name);
// FIXME(project-rfc-2229#48): This should be a list of capture names/places
@@ -1283,8 +1301,8 @@ impl<'tcx> Visitor<'tcx> for ExtraComments<'tcx> {
self.push(&format!("+ args: {args:#?}"));
}
- AggregateKind::Generator(def_id, args, movability) => {
- self.push("generator");
+ AggregateKind::Coroutine(def_id, args, movability) => {
+ self.push("coroutine");
self.push(&format!("+ def_id: {def_id:?}"));
self.push(&format!("+ args: {args:#?}"));
self.push(&format!("+ movability: {movability:?}"));
@@ -1695,7 +1713,7 @@ fn pretty_print_const_value_tcx<'tcx>(
(_, ty::Array(..) | ty::Tuple(..) | ty::Adt(..)) if !ty.has_non_region_param() => {
let ct = tcx.lift(ct).unwrap();
let ty = tcx.lift(ty).unwrap();
- if let Some(contents) = tcx.try_destructure_mir_constant_for_diagnostics(ct, ty) {
+ if let Some(contents) = tcx.try_destructure_mir_constant_for_user_output(ct, ty) {
let fields: Vec<(ConstValue<'_>, Ty<'_>)> = contents.fields.to_vec();
match *ty.kind() {
ty::Array(..) => {
@@ -1722,7 +1740,7 @@ fn pretty_print_const_value_tcx<'tcx>(
let args = tcx.lift(args).unwrap();
let mut cx = FmtPrinter::new(tcx, Namespace::ValueNS);
cx.print_alloc_ids = true;
- let cx = cx.print_value_path(variant_def.def_id, args)?;
+ cx.print_value_path(variant_def.def_id, args)?;
fmt.write_str(&cx.into_buffer())?;
match variant_def.ctor_kind() {
@@ -1757,14 +1775,14 @@ fn pretty_print_const_value_tcx<'tcx>(
let mut cx = FmtPrinter::new(tcx, Namespace::ValueNS);
cx.print_alloc_ids = true;
let ty = tcx.lift(ty).unwrap();
- cx = cx.pretty_print_const_scalar(scalar, ty)?;
+ cx.pretty_print_const_scalar(scalar, ty)?;
fmt.write_str(&cx.into_buffer())?;
return Ok(());
}
(ConstValue::ZeroSized, ty::FnDef(d, s)) => {
let mut cx = FmtPrinter::new(tcx, Namespace::ValueNS);
cx.print_alloc_ids = true;
- let cx = cx.print_value_path(*d, s)?;
+ cx.print_value_path(*d, s)?;
fmt.write_str(&cx.into_buffer())?;
return Ok(());
}
diff --git a/compiler/rustc_middle/src/mir/query.rs b/compiler/rustc_middle/src/mir/query.rs
index c74a9536b..0540eb0ef 100644
--- a/compiler/rustc_middle/src/mir/query.rs
+++ b/compiler/rustc_middle/src/mir/query.rs
@@ -1,5 +1,6 @@
//! Values computed by queries that use MIR.
+use crate::mir;
use crate::ty::{self, OpaqueHiddenType, Ty, TyCtxt};
use rustc_data_structures::fx::FxIndexMap;
use rustc_data_structures::unord::UnordSet;
@@ -132,11 +133,11 @@ pub struct UnsafetyCheckResult {
rustc_index::newtype_index! {
#[derive(HashStable)]
#[debug_format = "_{}"]
- pub struct GeneratorSavedLocal {}
+ pub struct CoroutineSavedLocal {}
}
#[derive(Clone, Debug, TyEncodable, TyDecodable, HashStable, TypeFoldable, TypeVisitable)]
-pub struct GeneratorSavedTy<'tcx> {
+pub struct CoroutineSavedTy<'tcx> {
pub ty: Ty<'tcx>,
/// Source info corresponding to the local in the original MIR body.
pub source_info: SourceInfo,
@@ -144,18 +145,18 @@ pub struct GeneratorSavedTy<'tcx> {
pub ignore_for_traits: bool,
}
-/// The layout of generator state.
+/// The layout of coroutine state.
#[derive(Clone, TyEncodable, TyDecodable, HashStable, TypeFoldable, TypeVisitable)]
-pub struct GeneratorLayout<'tcx> {
- /// The type of every local stored inside the generator.
- pub field_tys: IndexVec<GeneratorSavedLocal, GeneratorSavedTy<'tcx>>,
+pub struct CoroutineLayout<'tcx> {
+ /// The type of every local stored inside the coroutine.
+ pub field_tys: IndexVec<CoroutineSavedLocal, CoroutineSavedTy<'tcx>>,
/// The name for debuginfo.
- pub field_names: IndexVec<GeneratorSavedLocal, Option<Symbol>>,
+ pub field_names: IndexVec<CoroutineSavedLocal, Option<Symbol>>,
/// Which of the above fields are in each variant. Note that one field may
/// be stored in multiple variants.
- pub variant_fields: IndexVec<VariantIdx, IndexVec<FieldIdx, GeneratorSavedLocal>>,
+ pub variant_fields: IndexVec<VariantIdx, IndexVec<FieldIdx, CoroutineSavedLocal>>,
/// The source that led to each variant being created (usually, a yield or
/// await).
@@ -166,10 +167,10 @@ pub struct GeneratorLayout<'tcx> {
/// layout.
#[type_foldable(identity)]
#[type_visitable(ignore)]
- pub storage_conflicts: BitMatrix<GeneratorSavedLocal, GeneratorSavedLocal>,
+ pub storage_conflicts: BitMatrix<CoroutineSavedLocal, CoroutineSavedLocal>,
}
-impl Debug for GeneratorLayout<'_> {
+impl Debug for CoroutineLayout<'_> {
fn fmt(&self, fmt: &mut fmt::Formatter<'_>) -> fmt::Result {
/// Prints an iterator of (key, value) tuples as a map.
struct MapPrinter<'a, K, V>(Cell<Option<Box<dyn Iterator<Item = (K, V)> + 'a>>>);
@@ -184,7 +185,7 @@ impl Debug for GeneratorLayout<'_> {
}
}
- /// Prints the generator variant name.
+ /// Prints the coroutine variant name.
struct GenVariantPrinter(VariantIdx);
impl From<VariantIdx> for GenVariantPrinter {
fn from(idx: VariantIdx) -> Self {
@@ -193,7 +194,7 @@ impl Debug for GeneratorLayout<'_> {
}
impl Debug for GenVariantPrinter {
fn fmt(&self, fmt: &mut fmt::Formatter<'_>) -> fmt::Result {
- let variant_name = ty::GeneratorArgs::variant_name(self.0);
+ let variant_name = ty::CoroutineArgs::variant_name(self.0);
if fmt.alternate() {
write!(fmt, "{:9}({:?})", variant_name, self.0)
} else {
@@ -210,7 +211,7 @@ impl Debug for GeneratorLayout<'_> {
}
}
- fmt.debug_struct("GeneratorLayout")
+ fmt.debug_struct("CoroutineLayout")
.field("field_tys", &MapPrinter::new(self.field_tys.iter_enumerated()))
.field(
"variant_fields",
@@ -258,7 +259,7 @@ pub struct ConstQualifs {
///
/// The requirements are listed as being between various `RegionVid`. The 0th
/// region refers to `'static`; subsequent region vids refer to the free
-/// regions that appear in the closure (or generator's) type, in order of
+/// regions that appear in the closure (or coroutine's) type, in order of
/// appearance. (This numbering is actually defined by the `UniversalRegions`
/// struct in the NLL region checker. See for example
/// `UniversalRegions::closure_mapping`.) Note the free regions in the
@@ -445,14 +446,19 @@ pub struct DestructuredConstant<'tcx> {
pub fields: &'tcx [(ConstValue<'tcx>, Ty<'tcx>)],
}
-/// Coverage information summarized from a MIR if instrumented for source code coverage (see
-/// compiler option `-Cinstrument-coverage`). This information is generated by the
-/// `InstrumentCoverage` MIR pass and can be retrieved via the `coverageinfo` query.
+/// Summarizes coverage IDs inserted by the `InstrumentCoverage` MIR pass
+/// (for compiler option `-Cinstrument-coverage`), after MIR optimizations
+/// have had a chance to potentially remove some of them.
+///
+/// Used by the `coverage_ids_info` query.
#[derive(Clone, TyEncodable, TyDecodable, Debug, HashStable)]
-pub struct CoverageInfo {
- /// The total number of coverage region counters added to the MIR `Body`.
- pub num_counters: u32,
-
- /// The total number of coverage region counter expressions added to the MIR `Body`.
- pub num_expressions: u32,
+pub struct CoverageIdsInfo {
+ /// Coverage codegen needs to know the highest counter ID that is ever
+ /// incremented within a function, so that it can set the `num-counters`
+ /// argument of the `llvm.instrprof.increment` intrinsic.
+ ///
+ /// This may be less than the highest counter ID emitted by the
+ /// InstrumentCoverage MIR pass, if the highest-numbered counter increments
+ /// were removed by MIR optimizations.
+ pub max_counter_id: mir::coverage::CounterId,
}
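A sketch of where `max_counter_id` can come from after MIR optimizations: scan the coverage statements that survived and keep the largest `CounterIncrement` id. Flat `u32` ids and a plain slice stand in for the real MIR walk and newtype indices; the `unwrap_or(0)` fallback is an assumption of the sketch:

    #[derive(Clone, Copy)]
    enum CoverageKind {
        CounterIncrement { id: u32 },
        ExpressionUsed { id: u32 },
    }

    fn max_counter_id(statements: &[CoverageKind]) -> u32 {
        statements
            .iter()
            .filter_map(|kind| match kind {
                CoverageKind::CounterIncrement { id } => Some(*id),
                CoverageKind::ExpressionUsed { .. } => None,
            })
            .max()
            .unwrap_or(0) // assume at least the start counter exists
    }

    fn main() {
        let stmts = [
            CoverageKind::CounterIncrement { id: 0 },
            CoverageKind::ExpressionUsed { id: 0 },
            CoverageKind::CounterIncrement { id: 3 },
        ];
        // Coverage codegen derives the `num-counters` argument from this maximum.
        assert_eq!(max_counter_id(&stmts), 3);
    }
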
diff --git a/compiler/rustc_middle/src/mir/syntax.rs b/compiler/rustc_middle/src/mir/syntax.rs
index 0b95fdfa1..7b0f27f9b 100644
--- a/compiler/rustc_middle/src/mir/syntax.rs
+++ b/compiler/rustc_middle/src/mir/syntax.rs
@@ -5,7 +5,7 @@
use super::{BasicBlock, Const, Local, UserTypeProjection};
-use crate::mir::coverage::{CodeRegion, CoverageKind};
+use crate::mir::coverage::CoverageKind;
use crate::traits::Reveal;
use crate::ty::adjustment::PointerCoercion;
use crate::ty::GenericArgsRef;
@@ -15,7 +15,7 @@ use crate::ty::{Region, UserTypeAnnotationIndex};
use rustc_ast::{InlineAsmOptions, InlineAsmTemplatePiece};
use rustc_hir::def_id::DefId;
use rustc_hir::{self as hir};
-use rustc_hir::{self, GeneratorKind};
+use rustc_hir::{self, CoroutineKind};
use rustc_index::IndexVec;
use rustc_target::abi::{FieldIdx, VariantIdx};
@@ -82,10 +82,10 @@ pub enum MirPhase {
/// that Rust itself has them. Where exactly these are is generally subject to change, and so we
/// don't document this here. Runtime MIR has most retags explicit (though implicit retags
/// can still occur at `Rvalue::{Ref,AddrOf}`).
- /// - Generator bodies: In analysis MIR, locals may actually be behind a pointer that user code has
- /// access to. This occurs in generator bodies. Such locals do not behave like other locals,
+ /// - Coroutine bodies: In analysis MIR, locals may actually be behind a pointer that user code has
+ /// access to. This occurs in coroutine bodies. Such locals do not behave like other locals,
/// because they eg may be aliased in surprising ways. Runtime MIR has no such special locals -
- /// all generator bodies are lowered and so all places that look like locals really are locals.
+ /// all coroutine bodies are lowered and so all places that look like locals really are locals.
///
/// Also note that the lint pass which reports eg `200_u8 + 200_u8` as an error is run as a part
/// of analysis to runtime MIR lowering. To ensure lints are reported reliably, this means that
@@ -137,7 +137,7 @@ pub enum RuntimePhase {
/// In addition to the semantic changes, beginning with this phase, the following variants are
/// disallowed:
/// * [`TerminatorKind::Yield`]
- /// * [`TerminatorKind::GeneratorDrop`]
+ /// * [`TerminatorKind::CoroutineDrop`]
/// * [`Rvalue::Aggregate`] for any `AggregateKind` except `Array`
/// * [`PlaceElem::OpaqueCast`]
///
@@ -292,7 +292,7 @@ pub enum StatementKind<'tcx> {
/// Write the discriminant for a variant to the enum Place.
///
- /// This is permitted for both generators and ADTs. This does not necessarily write to the
+ /// This is permitted for both coroutines and ADTs. This does not necessarily write to the
/// entire place; instead, it writes to the minimum set of bytes as required by the layout for
/// the type.
SetDiscriminant { place: Box<Place<'tcx>>, variant_index: VariantIdx },
@@ -361,11 +361,16 @@ pub enum StatementKind<'tcx> {
/// Disallowed after drop elaboration.
AscribeUserType(Box<(Place<'tcx>, UserTypeProjection)>, ty::Variance),
- /// Marks the start of a "coverage region", injected with '-Cinstrument-coverage'. A
- /// `Coverage` statement carries metadata about the coverage region, used to inject a coverage
- /// map into the binary. If `Coverage::kind` is a `Counter`, the statement also generates
- /// executable code, to increment a counter variable at runtime, each time the code region is
- /// executed.
+ /// Carries control-flow-sensitive information injected by `-Cinstrument-coverage`,
+ /// such as where to generate physical coverage-counter-increments during codegen.
+ ///
+ /// Coverage statements are used in conjunction with the coverage mappings and other
+ /// information stored in the function's
+ /// [`mir::Body::function_coverage_info`](crate::mir::Body::function_coverage_info).
+ /// (For inlined MIR, take care to look up the *original function's* coverage info.)
+ ///
+ /// Interpreters and codegen backends that don't support coverage instrumentation
+ /// can usually treat this as a no-op.
Coverage(Box<Coverage>),
/// Denotes a call to an intrinsic that does not require an unwind path and always returns.
@@ -514,7 +519,6 @@ pub enum FakeReadCause {
#[derive(TypeFoldable, TypeVisitable)]
pub struct Coverage {
pub kind: CoverageKind,
- pub code_region: Option<CodeRegion>,
}
#[derive(Clone, Debug, PartialEq, TyEncodable, TyDecodable, Hash, HashStable)]
@@ -622,8 +626,8 @@ pub enum TerminatorKind<'tcx> {
/// `dest = move _0`. It might additionally do other things, like have side-effects in the
/// aliasing model.
///
- /// If the body is a generator body, this has slightly different semantics; it instead causes a
- /// `GeneratorState::Returned(_0)` to be created (as if by an `Aggregate` rvalue) and assigned
+ /// If the body is a coroutine body, this has slightly different semantics; it instead causes a
+ /// `CoroutineState::Returned(_0)` to be created (as if by an `Aggregate` rvalue) and assigned
/// to the return place.
Return,
@@ -705,14 +709,14 @@ pub enum TerminatorKind<'tcx> {
/// Marks a suspend point.
///
- /// Like `Return` terminators in generator bodies, this computes `value` and then a
- /// `GeneratorState::Yielded(value)` as if by `Aggregate` rvalue. That value is then assigned to
+ /// Like `Return` terminators in coroutine bodies, this computes `value` and then a
+ /// `CoroutineState::Yielded(value)` as if by `Aggregate` rvalue. That value is then assigned to
/// the return place of the function calling this one, and execution continues in the calling
/// function. When next invoked with the same first argument, execution of this function
/// continues at the `resume` basic block, with the second argument written to the `resume_arg`
- /// place. If the generator is dropped before then, the `drop` basic block is invoked.
+ /// place. If the coroutine is dropped before then, the `drop` basic block is invoked.
///
- /// Not permitted in bodies that are not generator bodies, or after generator lowering.
+ /// Not permitted in bodies that are not coroutine bodies, or after coroutine lowering.
///
/// **Needs clarification**: What about the evaluation order of the `resume_arg` and `value`?
Yield {
@@ -722,21 +726,21 @@ pub enum TerminatorKind<'tcx> {
resume: BasicBlock,
/// The place to store the resume argument in.
resume_arg: Place<'tcx>,
- /// Cleanup to be done if the generator is dropped at this suspend point.
+ /// Cleanup to be done if the coroutine is dropped at this suspend point.
drop: Option<BasicBlock>,
},
- /// Indicates the end of dropping a generator.
+ /// Indicates the end of dropping a coroutine.
///
- /// Semantically just a `return` (from the generators drop glue). Only permitted in the same situations
+ /// Semantically just a `return` (from the coroutines drop glue). Only permitted in the same situations
/// as `yield`.
///
- /// **Needs clarification**: Is that even correct? The generator drop code is always confusing
+ /// **Needs clarification**: Is that even correct? The coroutine drop code is always confusing
/// to me, because it's not even really in the current body.
///
/// **Needs clarification**: Are there type system constraints on these terminators? Should
/// there be a "block type" like `cleanup` blocks for them?
- GeneratorDrop,
+ CoroutineDrop,
/// A block where control flow only ever takes one real path, but borrowck needs to be more
/// conservative.
@@ -811,7 +815,7 @@ impl TerminatorKind<'_> {
TerminatorKind::Call { .. } => "Call",
TerminatorKind::Assert { .. } => "Assert",
TerminatorKind::Yield { .. } => "Yield",
- TerminatorKind::GeneratorDrop => "GeneratorDrop",
+ TerminatorKind::CoroutineDrop => "CoroutineDrop",
TerminatorKind::FalseEdge { .. } => "FalseEdge",
TerminatorKind::FalseUnwind { .. } => "FalseUnwind",
TerminatorKind::InlineAsm { .. } => "InlineAsm",
@@ -879,8 +883,8 @@ pub enum AssertKind<O> {
OverflowNeg(O),
DivisionByZero(O),
RemainderByZero(O),
- ResumedAfterReturn(GeneratorKind),
- ResumedAfterPanic(GeneratorKind),
+ ResumedAfterReturn(CoroutineKind),
+ ResumedAfterPanic(CoroutineKind),
MisalignedPointerDereference { required: O, found: O },
}
@@ -957,8 +961,8 @@ pub type AssertMessage<'tcx> = AssertKind<Operand<'tcx>>;
/// was unsized and so had metadata associated with it, then the metadata is retained if the
/// field is unsized and thrown out if it is sized.
///
-/// These projections are only legal for tuples, ADTs, closures, and generators. If the ADT or
-/// generator has more than one variant, the parent place's variant index must be set, indicating
+/// These projections are only legal for tuples, ADTs, closures, and coroutines. If the ADT or
+/// coroutine has more than one variant, the parent place's variant index must be set, indicating
/// which variant is being used. If it has just one variant, the variant index may or may not be
/// included - the single possible variant is inferred if it is not included.
/// - [`OpaqueCast`](ProjectionElem::OpaqueCast): This projection changes the place's type to the
@@ -986,18 +990,15 @@ pub type AssertMessage<'tcx> = AssertKind<Operand<'tcx>>;
/// pointee's type. The resulting address is the address that was stored in the pointer. If the
/// pointee type is unsized, the pointer additionally stored the value of the metadata.
///
-/// Computing a place may cause UB. One possibility is that the pointer used for a `Deref` may not
-/// be suitably aligned. Another possibility is that the place is not in bounds, meaning it does not
-/// point to an actual allocation.
-///
-/// However, if this is actually UB and when the UB kicks in is undecided. This is being discussed
-/// in [UCG#319]. The options include that every place must obey those rules, that only some places
-/// must obey them, or that places impose no rules of their own.
-///
-/// [UCG#319]: https://github.com/rust-lang/unsafe-code-guidelines/issues/319
-///
-/// Rust currently requires that every place obey those two rules. This is checked by MIRI and taken
-/// advantage of by codegen (via `gep inbounds`). That is possibly subject to change.
+/// The "validity invariant" of places is the same as that of raw pointers, meaning that e.g.
+/// `*ptr` on a dangling or unaligned pointer is never UB. (Later doing a load/store on that place
+/// or turning it into a reference can be UB though!) The only ways a place computation can
+/// cause UB are:
+/// - On a `Deref` projection, we do an actual load of the inner place, with all the usual
+/// consequences (the inner place must be based on an aligned pointer, it must point to allocated
+/// memory, the aliasing model must allow reads, and this must not be a data race).
+/// - For the projections that perform pointer arithmetic, the offset must be in-bounds of an
+/// allocation (i.e., the preconditions of `ptr::offset` must be met).
#[derive(Copy, Clone, PartialEq, Eq, Hash, TyEncodable, HashStable, TypeFoldable, TypeVisitable)]
pub struct Place<'tcx> {
pub local: Local,
@@ -1067,7 +1068,7 @@ pub enum ProjectionElem<V, T> {
from_end: bool,
},
- /// "Downcast" to a variant of an enum or a generator.
+ /// "Downcast" to a variant of an enum or a coroutine.
///
/// The included Symbol is the name of the variant, used for printing MIR.
Downcast(Option<Symbol>, VariantIdx),
@@ -1277,8 +1278,8 @@ pub enum Rvalue<'tcx> {
/// `dest = Foo { x: ..., y: ... }` from `dest.x = ...; dest.y = ...;` in the case that `Foo`
/// has a destructor.
///
- /// Disallowed after deaggregation for all aggregate kinds except `Array` and `Generator`. After
- /// generator lowering, `Generator` aggregate kinds are disallowed too.
+ /// Disallowed after deaggregation for all aggregate kinds except `Array` and `Coroutine`. After
+ /// coroutine lowering, `Coroutine` aggregate kinds are disallowed too.
Aggregate(Box<AggregateKind<'tcx>>, IndexVec<FieldIdx, Operand<'tcx>>),
/// Transmutes a `*mut u8` into shallow-initialized `Box<T>`.
@@ -1343,7 +1344,7 @@ pub enum AggregateKind<'tcx> {
Adt(DefId, VariantIdx, GenericArgsRef<'tcx>, Option<UserTypeAnnotationIndex>, Option<FieldIdx>),
Closure(DefId, GenericArgsRef<'tcx>),
- Generator(DefId, GenericArgsRef<'tcx>, hir::Movability),
+ Coroutine(DefId, GenericArgsRef<'tcx>, hir::Movability),
}
#[derive(Copy, Clone, Debug, PartialEq, Eq, TyEncodable, TyDecodable, Hash, HashStable)]
@@ -1353,7 +1354,7 @@ pub enum NullOp<'tcx> {
/// Returns the minimum alignment of a type
AlignOf,
/// Returns the offset of a field
- OffsetOf(&'tcx List<FieldIdx>),
+ OffsetOf(&'tcx List<(VariantIdx, FieldIdx)>),
}
#[derive(Copy, Clone, Debug, PartialEq, Eq, PartialOrd, Ord, Hash)]
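The in-bounds requirement mentioned in the new place-computation docs is the same one documented for `ptr::offset`/`ptr::add`: the arithmetic itself must stay within one allocation. A short user-level illustration with standard APIs; `wrapping_add` moves the obligation to the eventual access instead of the arithmetic:

    fn main() {
        let xs = [10u8, 20, 30, 40];
        let base = xs.as_ptr();

        // In bounds: offsets 0..=4 (one past the end) are allowed for `add`.
        let third = unsafe { base.add(2) };
        assert_eq!(unsafe { *third }, 30);

        // `wrapping_add` never has UB by itself, even far out of bounds,
        // but the resulting pointer must not be read through.
        let way_out = base.wrapping_add(1_000_000);
        let _ = way_out;
    }
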
diff --git a/compiler/rustc_middle/src/mir/tcx.rs b/compiler/rustc_middle/src/mir/tcx.rs
index 7df25fc5c..6ab2da23a 100644
--- a/compiler/rustc_middle/src/mir/tcx.rs
+++ b/compiler/rustc_middle/src/mir/tcx.rs
@@ -11,7 +11,7 @@ use rustc_target::abi::{FieldIdx, VariantIdx};
#[derive(Copy, Clone, Debug, TypeFoldable, TypeVisitable)]
pub struct PlaceTy<'tcx> {
pub ty: Ty<'tcx>,
- /// Downcast to a particular variant of an enum or a generator, if included.
+ /// Downcast to a particular variant of an enum or a coroutine, if included.
pub variant_index: Option<VariantIdx>,
}
@@ -205,8 +205,8 @@ impl<'tcx> Rvalue<'tcx> {
}
AggregateKind::Adt(did, _, args, _, _) => tcx.type_of(did).instantiate(tcx, args),
AggregateKind::Closure(did, args) => Ty::new_closure(tcx, did, args),
- AggregateKind::Generator(did, args, movability) => {
- Ty::new_generator(tcx, did, args, movability)
+ AggregateKind::Coroutine(did, args, movability) => {
+ Ty::new_coroutine(tcx, did, args, movability)
}
},
Rvalue::ShallowInitBox(_, ty) => Ty::new_box(tcx, ty),
diff --git a/compiler/rustc_middle/src/mir/terminator.rs b/compiler/rustc_middle/src/mir/terminator.rs
index 02aab4a89..9dfbe1733 100644
--- a/compiler/rustc_middle/src/mir/terminator.rs
+++ b/compiler/rustc_middle/src/mir/terminator.rs
@@ -3,12 +3,10 @@ use rustc_hir::LangItem;
use smallvec::SmallVec;
use super::{BasicBlock, InlineAsmOperand, Operand, SourceInfo, TerminatorKind, UnwindAction};
-pub use rustc_ast::Mutability;
use rustc_macros::HashStable;
use std::iter;
use std::slice;
-pub use super::query::*;
use super::*;
impl SwitchTargets {
@@ -28,6 +26,15 @@ impl SwitchTargets {
Self { values: smallvec![value], targets: smallvec![then, else_] }
}
+ /// Inverse of `SwitchTargets::static_if`.
+ pub fn as_static_if(&self) -> Option<(u128, BasicBlock, BasicBlock)> {
+ if let &[value] = &self.values[..] && let &[then, else_] = &self.targets[..] {
+ Some((value, then, else_))
+ } else {
+ None
+ }
+ }
+
/// Returns the fallback target that is jumped to when none of the values match the operand.
pub fn otherwise(&self) -> BasicBlock {
*self.targets.last().unwrap()
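A round-trip illustration of the new helper (not part of the patch). `SwitchTargets` lives inside `rustc_middle`, so this sketch uses a simplified stand-in built on plain `Vec`s; the invariant it models is the real one: N values plus N+1 targets, with the last target acting as the `otherwise` fallback.

    type BasicBlock = usize; // stand-in for rustc's interned index type

    struct SwitchTargets {
        values: Vec<u128>,
        targets: Vec<BasicBlock>,
    }

    impl SwitchTargets {
        /// Two-way branch: jump to `then` if the operand equals `value`,
        /// otherwise to `else_`.
        fn static_if(value: u128, then: BasicBlock, else_: BasicBlock) -> Self {
            Self { values: vec![value], targets: vec![then, else_] }
        }

        /// Inverse of `static_if`: recovers the triple only for that shape.
        fn as_static_if(&self) -> Option<(u128, BasicBlock, BasicBlock)> {
            if let (&[value], &[then, else_]) = (&self.values[..], &self.targets[..]) {
                Some((value, then, else_))
            } else {
                None
            }
        }
    }

    fn main() {
        let targets = SwitchTargets::static_if(0, 1, 2);
        assert_eq!(targets.as_static_if(), Some((0, 1, 2)));
        println!("round trip ok");
    }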
@@ -139,10 +146,17 @@ impl<O> AssertKind<O> {
Overflow(op, _, _) => bug!("{:?} cannot overflow", op),
DivisionByZero(_) => "attempt to divide by zero",
RemainderByZero(_) => "attempt to calculate the remainder with a divisor of zero",
- ResumedAfterReturn(GeneratorKind::Gen) => "generator resumed after completion",
- ResumedAfterReturn(GeneratorKind::Async(_)) => "`async fn` resumed after completion",
- ResumedAfterPanic(GeneratorKind::Gen) => "generator resumed after panicking",
- ResumedAfterPanic(GeneratorKind::Async(_)) => "`async fn` resumed after panicking",
+ ResumedAfterReturn(CoroutineKind::Coroutine) => "coroutine resumed after completion",
+ ResumedAfterReturn(CoroutineKind::Async(_)) => "`async fn` resumed after completion",
+ ResumedAfterReturn(CoroutineKind::Gen(_)) => {
+ "`gen fn` should just keep returning `None` after completion"
+ }
+ ResumedAfterPanic(CoroutineKind::Coroutine) => "coroutine resumed after panicking",
+ ResumedAfterPanic(CoroutineKind::Async(_)) => "`async fn` resumed after panicking",
+ ResumedAfterPanic(CoroutineKind::Gen(_)) => {
+ "`gen fn` should just keep returning `None` after panicking"
+ }
+
BoundsCheck { .. } | MisalignedPointerDereference { .. } => {
bug!("Unexpected AssertKind")
}
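For reference (not part of the patch): the `Async` message above is the one users actually hit when a completed `async fn` future is polled again. A self-contained way to trigger it, with a hand-rolled no-op waker so no async runtime is needed (the helper names are invented for the example):

    use std::future::Future;
    use std::ptr;
    use std::task::{Context, Poll, RawWaker, RawWakerVTable, Waker};

    unsafe fn vt_clone(_: *const ()) -> RawWaker {
        RawWaker::new(ptr::null(), &VTABLE)
    }
    unsafe fn vt_noop(_: *const ()) {}
    static VTABLE: RawWakerVTable = RawWakerVTable::new(vt_clone, vt_noop, vt_noop, vt_noop);

    fn noop_waker() -> Waker {
        // Safety: every vtable entry is a no-op and the data pointer is never used.
        unsafe { Waker::from_raw(RawWaker::new(ptr::null(), &VTABLE)) }
    }

    async fn done() {}

    fn main() {
        let waker = noop_waker();
        let mut cx = Context::from_waker(&waker);
        let mut fut = Box::pin(done());

        // First poll: the async fn runs to completion.
        assert!(matches!(fut.as_mut().poll(&mut cx), Poll::Ready(())));

        // Second poll: the lowered coroutine hits the assert above and panics
        // with "`async fn` resumed after completion".
        let _ = fut.as_mut().poll(&mut cx);
    }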
@@ -228,10 +242,18 @@ impl<O> AssertKind<O> {
OverflowNeg(_) => middle_assert_overflow_neg,
DivisionByZero(_) => middle_assert_divide_by_zero,
RemainderByZero(_) => middle_assert_remainder_by_zero,
- ResumedAfterReturn(GeneratorKind::Async(_)) => middle_assert_async_resume_after_return,
- ResumedAfterReturn(GeneratorKind::Gen) => middle_assert_generator_resume_after_return,
- ResumedAfterPanic(GeneratorKind::Async(_)) => middle_assert_async_resume_after_panic,
- ResumedAfterPanic(GeneratorKind::Gen) => middle_assert_generator_resume_after_panic,
+ ResumedAfterReturn(CoroutineKind::Async(_)) => middle_assert_async_resume_after_return,
+ ResumedAfterReturn(CoroutineKind::Gen(_)) => {
+ bug!("gen blocks can be resumed after they return and will keep returning `None`")
+ }
+ ResumedAfterReturn(CoroutineKind::Coroutine) => {
+ middle_assert_coroutine_resume_after_return
+ }
+ ResumedAfterPanic(CoroutineKind::Async(_)) => middle_assert_async_resume_after_panic,
+ ResumedAfterPanic(CoroutineKind::Gen(_)) => middle_assert_gen_resume_after_panic,
+ ResumedAfterPanic(CoroutineKind::Coroutine) => {
+ middle_assert_coroutine_resume_after_panic
+ }
MisalignedPointerDereference { .. } => middle_assert_misaligned_ptr_deref,
}
@@ -331,7 +353,7 @@ impl<'tcx> TerminatorKind<'tcx> {
}
UnwindResume
| UnwindTerminate(_)
- | GeneratorDrop
+ | CoroutineDrop
| Return
| Unreachable
| Call { target: None, unwind: _, .. }
@@ -373,7 +395,7 @@ impl<'tcx> TerminatorKind<'tcx> {
}
UnwindResume
| UnwindTerminate(_)
- | GeneratorDrop
+ | CoroutineDrop
| Return
| Unreachable
| Call { target: None, unwind: _, .. }
@@ -392,7 +414,7 @@ impl<'tcx> TerminatorKind<'tcx> {
| TerminatorKind::UnwindTerminate(_)
| TerminatorKind::Return
| TerminatorKind::Unreachable
- | TerminatorKind::GeneratorDrop
+ | TerminatorKind::CoroutineDrop
| TerminatorKind::Yield { .. }
| TerminatorKind::SwitchInt { .. }
| TerminatorKind::FalseEdge { .. } => None,
@@ -411,7 +433,7 @@ impl<'tcx> TerminatorKind<'tcx> {
| TerminatorKind::UnwindTerminate(_)
| TerminatorKind::Return
| TerminatorKind::Unreachable
- | TerminatorKind::GeneratorDrop
+ | TerminatorKind::CoroutineDrop
| TerminatorKind::Yield { .. }
| TerminatorKind::SwitchInt { .. }
| TerminatorKind::FalseEdge { .. } => None,
@@ -493,7 +515,7 @@ impl<'tcx> TerminatorKind<'tcx> {
pub fn edges(&self) -> TerminatorEdges<'_, 'tcx> {
use TerminatorKind::*;
match *self {
- Return | UnwindResume | UnwindTerminate(_) | GeneratorDrop | Unreachable => {
+ Return | UnwindResume | UnwindTerminate(_) | CoroutineDrop | Unreachable => {
TerminatorEdges::None
}
diff --git a/compiler/rustc_middle/src/mir/type_foldable.rs b/compiler/rustc_middle/src/mir/type_foldable.rs
index 8d427fdb6..d5c81b6cd 100644
--- a/compiler/rustc_middle/src/mir/type_foldable.rs
+++ b/compiler/rustc_middle/src/mir/type_foldable.rs
@@ -19,8 +19,8 @@ TrivialTypeTraversalImpls! {
hir::Movability,
BasicBlock,
SwitchTargets,
- GeneratorKind,
- GeneratorSavedLocal,
+ CoroutineKind,
+ CoroutineSavedLocal,
}
TrivialTypeTraversalImpls! {
diff --git a/compiler/rustc_middle/src/mir/visit.rs b/compiler/rustc_middle/src/mir/visit.rs
index f2745b32c..d47cfd571 100644
--- a/compiler/rustc_middle/src/mir/visit.rs
+++ b/compiler/rustc_middle/src/mir/visit.rs
@@ -184,6 +184,8 @@ macro_rules! make_mir_visitor {
visit_place_fns!($($mutability)?);
+ /// This is called for every constant in the MIR body and every `required_consts`
+ /// (i.e., including consts that have been dead-code-eliminated).
fn visit_constant(
&mut self,
constant: & $($mutability)? ConstOperand<'tcx>,
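As a usage sketch for the hook documented above (not part of the patch, and only meaningful in-tree or in a `rustc_driver`-based tool where `rustc_middle` is available; `ConstCollector` and `collect_consts` are invented names):

    use rustc_middle::mir::visit::Visitor;
    use rustc_middle::mir::{Body, ConstOperand, Location};

    struct ConstCollector<'tcx> {
        consts: Vec<ConstOperand<'tcx>>,
    }

    impl<'tcx> Visitor<'tcx> for ConstCollector<'tcx> {
        // Invoked for every constant in the body, including `required_consts`
        // entries whose uses have been optimized away.
        fn visit_constant(&mut self, constant: &ConstOperand<'tcx>, _location: Location) {
            self.consts.push(constant.clone());
        }
    }

    fn collect_consts<'tcx>(body: &Body<'tcx>) -> Vec<ConstOperand<'tcx>> {
        let mut collector = ConstCollector { consts: Vec::new() };
        collector.visit_body(body);
        collector.consts
    }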
@@ -471,7 +473,7 @@ macro_rules! make_mir_visitor {
TerminatorKind::Goto { .. } |
TerminatorKind::UnwindResume |
TerminatorKind::UnwindTerminate(_) |
- TerminatorKind::GeneratorDrop |
+ TerminatorKind::CoroutineDrop |
TerminatorKind::Unreachable |
TerminatorKind::FalseEdge { .. } |
TerminatorKind::FalseUnwind { .. } => {}
@@ -733,12 +735,12 @@ macro_rules! make_mir_visitor {
) => {
self.visit_args(closure_args, location);
}
- AggregateKind::Generator(
+ AggregateKind::Coroutine(
_,
- generator_args,
+ coroutine_args,
_movability,
) => {
- self.visit_args(generator_args, location);
+ self.visit_args(coroutine_args, location);
}
}
@@ -815,7 +817,6 @@ macro_rules! make_mir_visitor {
ty,
user_ty,
source_info,
- internal: _,
local_info: _,
} = local_decl;
@@ -991,7 +992,7 @@ macro_rules! extra_body_methods {
macro_rules! super_body {
($self:ident, $body:ident, $($mutability:ident, $invalidate:tt)?) => {
let span = $body.span;
- if let Some(gen) = &$($mutability)? $body.generator {
+ if let Some(gen) = &$($mutability)? $body.coroutine {
if let Some(yield_ty) = $(& $mutability)? gen.yield_ty {
$self.visit_ty(
yield_ty,