Diffstat (limited to 'compiler/rustc_mir_build/src/thir')
-rw-r--r--   compiler/rustc_mir_build/src/thir/constant.rs                   52
-rw-r--r--   compiler/rustc_mir_build/src/thir/cx/block.rs                  126
-rw-r--r--   compiler/rustc_mir_build/src/thir/cx/expr.rs                  1117
-rw-r--r--   compiler/rustc_mir_build/src/thir/cx/mod.rs                    101
-rw-r--r--   compiler/rustc_mir_build/src/thir/mod.rs                        13
-rw-r--r--   compiler/rustc_mir_build/src/thir/pattern/check_match.rs      1162
-rw-r--r--   compiler/rustc_mir_build/src/thir/pattern/const_to_pat.rs      601
-rw-r--r--   compiler/rustc_mir_build/src/thir/pattern/deconstruct_pat.rs  1711
-rw-r--r--   compiler/rustc_mir_build/src/thir/pattern/mod.rs               802
-rw-r--r--   compiler/rustc_mir_build/src/thir/pattern/usefulness.rs        978
-rw-r--r--   compiler/rustc_mir_build/src/thir/util.rs                       31
11 files changed, 6694 insertions, 0 deletions
diff --git a/compiler/rustc_mir_build/src/thir/constant.rs b/compiler/rustc_mir_build/src/thir/constant.rs
new file mode 100644
index 000000000..a7e4403a2
--- /dev/null
+++ b/compiler/rustc_mir_build/src/thir/constant.rs
@@ -0,0 +1,52 @@
+use rustc_ast as ast;
+use rustc_middle::mir::interpret::{LitToConstError, LitToConstInput};
+use rustc_middle::ty::{self, ParamEnv, ScalarInt, TyCtxt};
+
+pub(crate) fn lit_to_const<'tcx>(
+ tcx: TyCtxt<'tcx>,
+ lit_input: LitToConstInput<'tcx>,
+) -> Result<ty::Const<'tcx>, LitToConstError> {
+ let LitToConstInput { lit, ty, neg } = lit_input;
+
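+    // Note (illustrative): `trunc` truncates the literal's value to the bit width of `ty`
+    // and packs it into a `ScalarInt`. For example, for `-1i8` the parsed literal is `1`
+    // with `neg == true`; the negated value (`u128::MAX`) is passed in below and truncates
+    // to `0xff` at 8 bits.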
+ let trunc = |n| {
+ let param_ty = ParamEnv::reveal_all().and(ty);
+ let width = tcx.layout_of(param_ty).map_err(|_| LitToConstError::Reported)?.size;
+ trace!("trunc {} with size {} and shift {}", n, width.bits(), 128 - width.bits());
+ let result = width.truncate(n);
+ trace!("trunc result: {}", result);
+
+ Ok(ScalarInt::try_from_uint(result, width)
+ .unwrap_or_else(|| bug!("expected to create ScalarInt from uint {:?}", result)))
+ };
+
+ let valtree = match (lit, &ty.kind()) {
+ (ast::LitKind::Str(s, _), ty::Ref(_, inner_ty, _)) if inner_ty.is_str() => {
+ let str_bytes = s.as_str().as_bytes();
+ ty::ValTree::from_raw_bytes(tcx, str_bytes)
+ }
+ (ast::LitKind::ByteStr(data), ty::Ref(_, inner_ty, _))
+ if matches!(inner_ty.kind(), ty::Slice(_)) =>
+ {
+ let bytes = data as &[u8];
+ ty::ValTree::from_raw_bytes(tcx, bytes)
+ }
+ (ast::LitKind::ByteStr(data), ty::Ref(_, inner_ty, _)) if inner_ty.is_array() => {
+ let bytes = data as &[u8];
+ ty::ValTree::from_raw_bytes(tcx, bytes)
+ }
+ (ast::LitKind::Byte(n), ty::Uint(ty::UintTy::U8)) => {
+ ty::ValTree::from_scalar_int((*n).into())
+ }
+ (ast::LitKind::Int(n, _), ty::Uint(_)) | (ast::LitKind::Int(n, _), ty::Int(_)) => {
+ let scalar_int =
+ trunc(if neg { (*n as i128).overflowing_neg().0 as u128 } else { *n })?;
+ ty::ValTree::from_scalar_int(scalar_int)
+ }
+ (ast::LitKind::Bool(b), ty::Bool) => ty::ValTree::from_scalar_int((*b).into()),
+ (ast::LitKind::Char(c), ty::Char) => ty::ValTree::from_scalar_int((*c).into()),
+ (ast::LitKind::Err(_), _) => return Err(LitToConstError::Reported),
+ _ => return Err(LitToConstError::TypeError),
+ };
+
+ Ok(ty::Const::from_value(tcx, valtree, ty))
+}
diff --git a/compiler/rustc_mir_build/src/thir/cx/block.rs b/compiler/rustc_mir_build/src/thir/cx/block.rs
new file mode 100644
index 000000000..dccaa61ed
--- /dev/null
+++ b/compiler/rustc_mir_build/src/thir/cx/block.rs
@@ -0,0 +1,126 @@
+use crate::thir::cx::Cx;
+
+use rustc_hir as hir;
+use rustc_middle::middle::region;
+use rustc_middle::thir::*;
+use rustc_middle::ty;
+
+use rustc_index::vec::Idx;
+use rustc_middle::ty::CanonicalUserTypeAnnotation;
+
+impl<'tcx> Cx<'tcx> {
+ pub(crate) fn mirror_block(&mut self, block: &'tcx hir::Block<'tcx>) -> Block {
+        // We have to eagerly lower the "spine" of the statements
+        // in order to get the lexical scoping correct.
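+        // Note (illustrative): each `let` statement gets a `Remainder` scope covering the
+        // rest of the block; see the handling of `hir::StmtKind::Local` in `mirror_stmts` below.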
+ let stmts = self.mirror_stmts(block.hir_id.local_id, block.stmts);
+ let opt_destruction_scope =
+ self.region_scope_tree.opt_destruction_scope(block.hir_id.local_id);
+ Block {
+ targeted_by_break: block.targeted_by_break,
+ region_scope: region::Scope {
+ id: block.hir_id.local_id,
+ data: region::ScopeData::Node,
+ },
+ opt_destruction_scope,
+ span: block.span,
+ stmts,
+ expr: block.expr.map(|expr| self.mirror_expr(expr)),
+ safety_mode: match block.rules {
+ hir::BlockCheckMode::DefaultBlock => BlockSafety::Safe,
+ hir::BlockCheckMode::UnsafeBlock(hir::UnsafeSource::CompilerGenerated) => {
+ BlockSafety::BuiltinUnsafe
+ }
+ hir::BlockCheckMode::UnsafeBlock(hir::UnsafeSource::UserProvided) => {
+ BlockSafety::ExplicitUnsafe(block.hir_id)
+ }
+ },
+ }
+ }
+
+ fn mirror_stmts(
+ &mut self,
+ block_id: hir::ItemLocalId,
+ stmts: &'tcx [hir::Stmt<'tcx>],
+ ) -> Box<[StmtId]> {
+ stmts
+ .iter()
+ .enumerate()
+ .filter_map(|(index, stmt)| {
+ let hir_id = stmt.hir_id;
+ let opt_dxn_ext = self.region_scope_tree.opt_destruction_scope(hir_id.local_id);
+ match stmt.kind {
+ hir::StmtKind::Expr(ref expr) | hir::StmtKind::Semi(ref expr) => {
+ let stmt = Stmt {
+ kind: StmtKind::Expr {
+ scope: region::Scope {
+ id: hir_id.local_id,
+ data: region::ScopeData::Node,
+ },
+ expr: self.mirror_expr(expr),
+ },
+ opt_destruction_scope: opt_dxn_ext,
+ };
+ Some(self.thir.stmts.push(stmt))
+ }
+ hir::StmtKind::Item(..) => {
+ // ignore for purposes of the MIR
+ None
+ }
+ hir::StmtKind::Local(ref local) => {
+ let remainder_scope = region::Scope {
+ id: block_id,
+ data: region::ScopeData::Remainder(region::FirstStatementIndex::new(
+ index,
+ )),
+ };
+
+ let else_block = local.els.map(|els| self.mirror_block(els));
+
+ let mut pattern = self.pattern_from_hir(local.pat);
+ debug!(?pattern);
+
+ if let Some(ty) = &local.ty {
+ if let Some(&user_ty) =
+ self.typeck_results.user_provided_types().get(ty.hir_id)
+ {
+ debug!("mirror_stmts: user_ty={:?}", user_ty);
+ let annotation = CanonicalUserTypeAnnotation {
+ user_ty,
+ span: ty.span,
+ inferred_ty: self.typeck_results.node_type(ty.hir_id),
+ };
+ pattern = Pat {
+ ty: pattern.ty,
+ span: pattern.span,
+ kind: Box::new(PatKind::AscribeUserType {
+ ascription: Ascription {
+ annotation,
+ variance: ty::Variance::Covariant,
+ },
+ subpattern: pattern,
+ }),
+ };
+ }
+ }
+
+ let stmt = Stmt {
+ kind: StmtKind::Let {
+ remainder_scope,
+ init_scope: region::Scope {
+ id: hir_id.local_id,
+ data: region::ScopeData::Node,
+ },
+ pattern,
+ initializer: local.init.map(|init| self.mirror_expr(init)),
+ else_block,
+ lint_level: LintLevel::Explicit(local.hir_id),
+ },
+ opt_destruction_scope: opt_dxn_ext,
+ };
+ Some(self.thir.stmts.push(stmt))
+ }
+ }
+ })
+ .collect()
+ }
+}
diff --git a/compiler/rustc_mir_build/src/thir/cx/expr.rs b/compiler/rustc_mir_build/src/thir/cx/expr.rs
new file mode 100644
index 000000000..985601712
--- /dev/null
+++ b/compiler/rustc_mir_build/src/thir/cx/expr.rs
@@ -0,0 +1,1117 @@
+use crate::thir::cx::region::Scope;
+use crate::thir::cx::Cx;
+use crate::thir::util::UserAnnotatedTyHelpers;
+use rustc_data_structures::stack::ensure_sufficient_stack;
+use rustc_hir as hir;
+use rustc_hir::def::{CtorKind, CtorOf, DefKind, Res};
+use rustc_index::vec::Idx;
+use rustc_middle::hir::place::Place as HirPlace;
+use rustc_middle::hir::place::PlaceBase as HirPlaceBase;
+use rustc_middle::hir::place::ProjectionKind as HirProjectionKind;
+use rustc_middle::middle::region;
+use rustc_middle::mir::{self, BinOp, BorrowKind, Field, UnOp};
+use rustc_middle::thir::*;
+use rustc_middle::ty::adjustment::{
+ Adjust, Adjustment, AutoBorrow, AutoBorrowMutability, PointerCast,
+};
+use rustc_middle::ty::subst::{InternalSubsts, SubstsRef};
+use rustc_middle::ty::{
+ self, AdtKind, InlineConstSubsts, InlineConstSubstsParts, ScalarInt, Ty, UpvarSubsts, UserType,
+};
+use rustc_span::def_id::DefId;
+use rustc_span::Span;
+use rustc_target::abi::VariantIdx;
+
+impl<'tcx> Cx<'tcx> {
+ pub(crate) fn mirror_expr(&mut self, expr: &'tcx hir::Expr<'tcx>) -> ExprId {
+ // `mirror_expr` is recursing very deep. Make sure the stack doesn't overflow.
+ ensure_sufficient_stack(|| self.mirror_expr_inner(expr))
+ }
+
+ pub(crate) fn mirror_exprs(&mut self, exprs: &'tcx [hir::Expr<'tcx>]) -> Box<[ExprId]> {
+ exprs.iter().map(|expr| self.mirror_expr_inner(expr)).collect()
+ }
+
+ #[instrument(level = "trace", skip(self, hir_expr))]
+ pub(super) fn mirror_expr_inner(&mut self, hir_expr: &'tcx hir::Expr<'tcx>) -> ExprId {
+ let temp_lifetime =
+ self.rvalue_scopes.temporary_scope(self.region_scope_tree, hir_expr.hir_id.local_id);
+ let expr_scope =
+ region::Scope { id: hir_expr.hir_id.local_id, data: region::ScopeData::Node };
+
+ trace!(?hir_expr.hir_id, ?hir_expr.span);
+
+ let mut expr = self.make_mirror_unadjusted(hir_expr);
+
+ let adjustment_span = match self.adjustment_span {
+ Some((hir_id, span)) if hir_id == hir_expr.hir_id => Some(span),
+ _ => None,
+ };
+
+ // Now apply adjustments, if any.
+ for adjustment in self.typeck_results.expr_adjustments(hir_expr) {
+ trace!(?expr, ?adjustment);
+ let span = expr.span;
+ expr =
+ self.apply_adjustment(hir_expr, expr, adjustment, adjustment_span.unwrap_or(span));
+ }
+
+ // Next, wrap this up in the expr's scope.
+ expr = Expr {
+ temp_lifetime,
+ ty: expr.ty,
+ span: hir_expr.span,
+ kind: ExprKind::Scope {
+ region_scope: expr_scope,
+ value: self.thir.exprs.push(expr),
+ lint_level: LintLevel::Explicit(hir_expr.hir_id),
+ },
+ };
+
+ // Finally, create a destruction scope, if any.
+ if let Some(region_scope) =
+ self.region_scope_tree.opt_destruction_scope(hir_expr.hir_id.local_id)
+ {
+ expr = Expr {
+ temp_lifetime,
+ ty: expr.ty,
+ span: hir_expr.span,
+ kind: ExprKind::Scope {
+ region_scope,
+ value: self.thir.exprs.push(expr),
+ lint_level: LintLevel::Inherited,
+ },
+ };
+ }
+
+ // OK, all done!
+ self.thir.exprs.push(expr)
+ }
+
+ fn apply_adjustment(
+ &mut self,
+ hir_expr: &'tcx hir::Expr<'tcx>,
+ mut expr: Expr<'tcx>,
+ adjustment: &Adjustment<'tcx>,
+ mut span: Span,
+ ) -> Expr<'tcx> {
+ let Expr { temp_lifetime, .. } = expr;
+
+ // Adjust the span from the block, to the last expression of the
+ // block. This is a better span when returning a mutable reference
+ // with too short a lifetime. The error message will use the span
+ // from the assignment to the return place, which should only point
+ // at the returned value, not the entire function body.
+ //
+ // fn return_short_lived<'a>(x: &'a mut i32) -> &'static mut i32 {
+ // x
+ // // ^ error message points at this expression.
+ // }
+ let mut adjust_span = |expr: &mut Expr<'tcx>| {
+ if let ExprKind::Block { body } = &expr.kind {
+ if let Some(last_expr) = body.expr {
+ span = self.thir[last_expr].span;
+ expr.span = span;
+ }
+ }
+ };
+
+ let kind = match adjustment.kind {
+ Adjust::Pointer(PointerCast::Unsize) => {
+ adjust_span(&mut expr);
+ ExprKind::Pointer { cast: PointerCast::Unsize, source: self.thir.exprs.push(expr) }
+ }
+ Adjust::Pointer(cast) => ExprKind::Pointer { cast, source: self.thir.exprs.push(expr) },
+ Adjust::NeverToAny => ExprKind::NeverToAny { source: self.thir.exprs.push(expr) },
+ Adjust::Deref(None) => {
+ adjust_span(&mut expr);
+ ExprKind::Deref { arg: self.thir.exprs.push(expr) }
+ }
+ Adjust::Deref(Some(deref)) => {
+                // We don't need to call adjust_span here since
+                // deref coercions always start with a built-in deref.
+ let call = deref.method_call(self.tcx(), expr.ty);
+
+ expr = Expr {
+ temp_lifetime,
+ ty: self
+ .tcx
+ .mk_ref(deref.region, ty::TypeAndMut { ty: expr.ty, mutbl: deref.mutbl }),
+ span,
+ kind: ExprKind::Borrow {
+ borrow_kind: deref.mutbl.to_borrow_kind(),
+ arg: self.thir.exprs.push(expr),
+ },
+ };
+
+ let expr = Box::new([self.thir.exprs.push(expr)]);
+
+ self.overloaded_place(hir_expr, adjustment.target, Some(call), expr, deref.span)
+ }
+ Adjust::Borrow(AutoBorrow::Ref(_, m)) => ExprKind::Borrow {
+ borrow_kind: m.to_borrow_kind(),
+ arg: self.thir.exprs.push(expr),
+ },
+ Adjust::Borrow(AutoBorrow::RawPtr(mutability)) => {
+ ExprKind::AddressOf { mutability, arg: self.thir.exprs.push(expr) }
+ }
+ };
+
+ Expr { temp_lifetime, ty: adjustment.target, span, kind }
+ }
+
+ /// Lowers a cast expression.
+ ///
+ /// Dealing with user type annotations is left to the caller.
+ fn mirror_expr_cast(
+ &mut self,
+ source: &'tcx hir::Expr<'tcx>,
+ temp_lifetime: Option<Scope>,
+ span: Span,
+ ) -> ExprKind<'tcx> {
+ let tcx = self.tcx;
+
+ // Check to see if this cast is a "coercion cast", where the cast is actually done
+ // using a coercion (or is a no-op).
+ if self.typeck_results().is_coercion_cast(source.hir_id) {
+ // Convert the lexpr to a vexpr.
+ ExprKind::Use { source: self.mirror_expr(source) }
+ } else if self.typeck_results().expr_ty(source).is_region_ptr() {
+ // Special cased so that we can type check that the element
+ // type of the source matches the pointed to type of the
+ // destination.
+ ExprKind::Pointer {
+ source: self.mirror_expr(source),
+ cast: PointerCast::ArrayToPointer,
+ }
+ } else {
+            // Check whether this is casting an enum variant discriminant.
+            // To prevent cycles, we refer to the discriminant initializer,
+            // which is always an integer and thus doesn't need to know the
+            // enum's layout (or its tag type) to compute it during const eval.
+ // Example:
+ // enum Foo {
+ // A,
+ // B = A as isize + 4,
+ // }
+ // The correct solution would be to add symbolic computations to miri,
+ // so we wouldn't have to compute and store the actual value
+
+ let hir::ExprKind::Path(ref qpath) = source.kind else {
+ return ExprKind::Cast { source: self.mirror_expr(source)};
+ };
+
+ let res = self.typeck_results().qpath_res(qpath, source.hir_id);
+ let ty = self.typeck_results().node_type(source.hir_id);
+ let ty::Adt(adt_def, substs) = ty.kind() else {
+ return ExprKind::Cast { source: self.mirror_expr(source)};
+ };
+
+ let Res::Def(DefKind::Ctor(CtorOf::Variant, CtorKind::Const), variant_ctor_id) = res else {
+ return ExprKind::Cast { source: self.mirror_expr(source)};
+ };
+
+ let idx = adt_def.variant_index_with_ctor_id(variant_ctor_id);
+ let (discr_did, discr_offset) = adt_def.discriminant_def_for_variant(idx);
+
+ use rustc_middle::ty::util::IntTypeExt;
+ let ty = adt_def.repr().discr_type();
+ let discr_ty = ty.to_ty(tcx);
+
+ let param_env_ty = self.param_env.and(discr_ty);
+ let size = tcx
+ .layout_of(param_env_ty)
+ .unwrap_or_else(|e| {
+ panic!("could not compute layout for {:?}: {:?}", param_env_ty, e)
+ })
+ .size;
+
+ let lit = ScalarInt::try_from_uint(discr_offset as u128, size).unwrap();
+ let kind = ExprKind::NonHirLiteral { lit, user_ty: None };
+ let offset = self.thir.exprs.push(Expr { temp_lifetime, ty: discr_ty, span, kind });
+
+ let source = match discr_did {
+ // in case we are offsetting from a computed discriminant
+ // and not the beginning of discriminants (which is always `0`)
+ Some(did) => {
+ let kind = ExprKind::NamedConst { def_id: did, substs, user_ty: None };
+ let lhs =
+ self.thir.exprs.push(Expr { temp_lifetime, ty: discr_ty, span, kind });
+ let bin = ExprKind::Binary { op: BinOp::Add, lhs, rhs: offset };
+ self.thir.exprs.push(Expr {
+ temp_lifetime,
+ ty: discr_ty,
+ span: span,
+ kind: bin,
+ })
+ }
+ None => offset,
+ };
+
+ ExprKind::Cast { source }
+ }
+ }
+
+ fn make_mirror_unadjusted(&mut self, expr: &'tcx hir::Expr<'tcx>) -> Expr<'tcx> {
+ let tcx = self.tcx;
+ let expr_ty = self.typeck_results().expr_ty(expr);
+ let expr_span = expr.span;
+ let temp_lifetime =
+ self.rvalue_scopes.temporary_scope(self.region_scope_tree, expr.hir_id.local_id);
+
+ let kind = match expr.kind {
+ // Here comes the interesting stuff:
+ hir::ExprKind::MethodCall(segment, ref args, fn_span) => {
+ // Rewrite a.b(c) into UFCS form like Trait::b(a, c)
+ let expr = self.method_callee(expr, segment.ident.span, None);
+ // When we apply adjustments to the receiver, use the span of
+ // the overall method call for better diagnostics. args[0]
+ // is guaranteed to exist, since a method call always has a receiver.
+ let old_adjustment_span = self.adjustment_span.replace((args[0].hir_id, expr_span));
+ tracing::info!("Using method span: {:?}", expr.span);
+ let args = self.mirror_exprs(args);
+ self.adjustment_span = old_adjustment_span;
+ ExprKind::Call {
+ ty: expr.ty,
+ fun: self.thir.exprs.push(expr),
+ args,
+ from_hir_call: true,
+ fn_span,
+ }
+ }
+
+ hir::ExprKind::Call(ref fun, ref args) => {
+ if self.typeck_results().is_method_call(expr) {
+ // The callee is something implementing Fn, FnMut, or FnOnce.
+ // Find the actual method implementation being called and
+ // build the appropriate UFCS call expression with the
+ // callee-object as expr parameter.
+
+ // rewrite f(u, v) into FnOnce::call_once(f, (u, v))
+
+ let method = self.method_callee(expr, fun.span, None);
+
+ let arg_tys = args.iter().map(|e| self.typeck_results().expr_ty_adjusted(e));
+ let tupled_args = Expr {
+ ty: tcx.mk_tup(arg_tys),
+ temp_lifetime,
+ span: expr.span,
+ kind: ExprKind::Tuple { fields: self.mirror_exprs(args) },
+ };
+ let tupled_args = self.thir.exprs.push(tupled_args);
+
+ ExprKind::Call {
+ ty: method.ty,
+ fun: self.thir.exprs.push(method),
+ args: Box::new([self.mirror_expr(fun), tupled_args]),
+ from_hir_call: true,
+ fn_span: expr.span,
+ }
+ } else {
+ let adt_data =
+ if let hir::ExprKind::Path(hir::QPath::Resolved(_, ref path)) = fun.kind {
+ // Tuple-like ADTs are represented as ExprKind::Call. We convert them here.
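+                        // Illustrative example: `Some(4)` is an `ExprKind::Call` in HIR whose
+                        // callee path resolves to the `Some` constructor; it becomes
+                        // `ExprKind::Adt` below.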
+ expr_ty.ty_adt_def().and_then(|adt_def| match path.res {
+ Res::Def(DefKind::Ctor(_, CtorKind::Fn), ctor_id) => {
+ Some((adt_def, adt_def.variant_index_with_ctor_id(ctor_id)))
+ }
+ Res::SelfCtor(..) => Some((adt_def, VariantIdx::new(0))),
+ _ => None,
+ })
+ } else {
+ None
+ };
+ if let Some((adt_def, index)) = adt_data {
+ let substs = self.typeck_results().node_substs(fun.hir_id);
+ let user_provided_types = self.typeck_results().user_provided_types();
+ let user_ty =
+ user_provided_types.get(fun.hir_id).copied().map(|mut u_ty| {
+ if let UserType::TypeOf(ref mut did, _) = &mut u_ty.value {
+ *did = adt_def.did();
+ }
+ u_ty
+ });
+ debug!("make_mirror_unadjusted: (call) user_ty={:?}", user_ty);
+
+ let field_refs = args
+ .iter()
+ .enumerate()
+ .map(|(idx, e)| FieldExpr {
+ name: Field::new(idx),
+ expr: self.mirror_expr(e),
+ })
+ .collect();
+ ExprKind::Adt(Box::new(Adt {
+ adt_def,
+ substs,
+ variant_index: index,
+ fields: field_refs,
+ user_ty,
+ base: None,
+ }))
+ } else {
+ ExprKind::Call {
+ ty: self.typeck_results().node_type(fun.hir_id),
+ fun: self.mirror_expr(fun),
+ args: self.mirror_exprs(args),
+ from_hir_call: true,
+ fn_span: expr.span,
+ }
+ }
+ }
+ }
+
+ hir::ExprKind::AddrOf(hir::BorrowKind::Ref, mutbl, ref arg) => {
+ ExprKind::Borrow { borrow_kind: mutbl.to_borrow_kind(), arg: self.mirror_expr(arg) }
+ }
+
+ hir::ExprKind::AddrOf(hir::BorrowKind::Raw, mutability, ref arg) => {
+ ExprKind::AddressOf { mutability, arg: self.mirror_expr(arg) }
+ }
+
+ hir::ExprKind::Block(ref blk, _) => ExprKind::Block { body: self.mirror_block(blk) },
+
+ hir::ExprKind::Assign(ref lhs, ref rhs, _) => {
+ ExprKind::Assign { lhs: self.mirror_expr(lhs), rhs: self.mirror_expr(rhs) }
+ }
+
+ hir::ExprKind::AssignOp(op, ref lhs, ref rhs) => {
+ if self.typeck_results().is_method_call(expr) {
+ let lhs = self.mirror_expr(lhs);
+ let rhs = self.mirror_expr(rhs);
+ self.overloaded_operator(expr, Box::new([lhs, rhs]))
+ } else {
+ ExprKind::AssignOp {
+ op: bin_op(op.node),
+ lhs: self.mirror_expr(lhs),
+ rhs: self.mirror_expr(rhs),
+ }
+ }
+ }
+
+ hir::ExprKind::Lit(ref lit) => ExprKind::Literal { lit, neg: false },
+
+ hir::ExprKind::Binary(op, ref lhs, ref rhs) => {
+ if self.typeck_results().is_method_call(expr) {
+ let lhs = self.mirror_expr(lhs);
+ let rhs = self.mirror_expr(rhs);
+ self.overloaded_operator(expr, Box::new([lhs, rhs]))
+ } else {
+ // FIXME overflow
+ match op.node {
+ hir::BinOpKind::And => ExprKind::LogicalOp {
+ op: LogicalOp::And,
+ lhs: self.mirror_expr(lhs),
+ rhs: self.mirror_expr(rhs),
+ },
+ hir::BinOpKind::Or => ExprKind::LogicalOp {
+ op: LogicalOp::Or,
+ lhs: self.mirror_expr(lhs),
+ rhs: self.mirror_expr(rhs),
+ },
+ _ => {
+ let op = bin_op(op.node);
+ ExprKind::Binary {
+ op,
+ lhs: self.mirror_expr(lhs),
+ rhs: self.mirror_expr(rhs),
+ }
+ }
+ }
+ }
+ }
+
+ hir::ExprKind::Index(ref lhs, ref index) => {
+ if self.typeck_results().is_method_call(expr) {
+ let lhs = self.mirror_expr(lhs);
+ let index = self.mirror_expr(index);
+ self.overloaded_place(expr, expr_ty, None, Box::new([lhs, index]), expr.span)
+ } else {
+ ExprKind::Index { lhs: self.mirror_expr(lhs), index: self.mirror_expr(index) }
+ }
+ }
+
+ hir::ExprKind::Unary(hir::UnOp::Deref, ref arg) => {
+ if self.typeck_results().is_method_call(expr) {
+ let arg = self.mirror_expr(arg);
+ self.overloaded_place(expr, expr_ty, None, Box::new([arg]), expr.span)
+ } else {
+ ExprKind::Deref { arg: self.mirror_expr(arg) }
+ }
+ }
+
+ hir::ExprKind::Unary(hir::UnOp::Not, ref arg) => {
+ if self.typeck_results().is_method_call(expr) {
+ let arg = self.mirror_expr(arg);
+ self.overloaded_operator(expr, Box::new([arg]))
+ } else {
+ ExprKind::Unary { op: UnOp::Not, arg: self.mirror_expr(arg) }
+ }
+ }
+
+ hir::ExprKind::Unary(hir::UnOp::Neg, ref arg) => {
+ if self.typeck_results().is_method_call(expr) {
+ let arg = self.mirror_expr(arg);
+ self.overloaded_operator(expr, Box::new([arg]))
+ } else if let hir::ExprKind::Lit(ref lit) = arg.kind {
+ ExprKind::Literal { lit, neg: true }
+ } else {
+ ExprKind::Unary { op: UnOp::Neg, arg: self.mirror_expr(arg) }
+ }
+ }
+
+ hir::ExprKind::Struct(ref qpath, ref fields, ref base) => match expr_ty.kind() {
+ ty::Adt(adt, substs) => match adt.adt_kind() {
+ AdtKind::Struct | AdtKind::Union => {
+ let user_provided_types = self.typeck_results().user_provided_types();
+ let user_ty = user_provided_types.get(expr.hir_id).copied();
+ debug!("make_mirror_unadjusted: (struct/union) user_ty={:?}", user_ty);
+ ExprKind::Adt(Box::new(Adt {
+ adt_def: *adt,
+ variant_index: VariantIdx::new(0),
+ substs,
+ user_ty,
+ fields: self.field_refs(fields),
+ base: base.as_ref().map(|base| FruInfo {
+ base: self.mirror_expr(base),
+ field_types: self.typeck_results().fru_field_types()[expr.hir_id]
+ .iter()
+ .copied()
+ .collect(),
+ }),
+ }))
+ }
+ AdtKind::Enum => {
+ let res = self.typeck_results().qpath_res(qpath, expr.hir_id);
+ match res {
+ Res::Def(DefKind::Variant, variant_id) => {
+ assert!(base.is_none());
+
+ let index = adt.variant_index_with_id(variant_id);
+ let user_provided_types =
+ self.typeck_results().user_provided_types();
+ let user_ty = user_provided_types.get(expr.hir_id).copied();
+ debug!("make_mirror_unadjusted: (variant) user_ty={:?}", user_ty);
+ ExprKind::Adt(Box::new(Adt {
+ adt_def: *adt,
+ variant_index: index,
+ substs,
+ user_ty,
+ fields: self.field_refs(fields),
+ base: None,
+ }))
+ }
+ _ => {
+ span_bug!(expr.span, "unexpected res: {:?}", res);
+ }
+ }
+ }
+ },
+ _ => {
+ span_bug!(expr.span, "unexpected type for struct literal: {:?}", expr_ty);
+ }
+ },
+
+ hir::ExprKind::Closure { .. } => {
+ let closure_ty = self.typeck_results().expr_ty(expr);
+ let (def_id, substs, movability) = match *closure_ty.kind() {
+ ty::Closure(def_id, substs) => (def_id, UpvarSubsts::Closure(substs), None),
+ ty::Generator(def_id, substs, movability) => {
+ (def_id, UpvarSubsts::Generator(substs), Some(movability))
+ }
+ _ => {
+ span_bug!(expr.span, "closure expr w/o closure type: {:?}", closure_ty);
+ }
+ };
+ let def_id = def_id.expect_local();
+
+ let upvars = self
+ .typeck_results
+ .closure_min_captures_flattened(def_id)
+ .zip(substs.upvar_tys())
+ .map(|(captured_place, ty)| {
+ let upvars = self.capture_upvar(expr, captured_place, ty);
+ self.thir.exprs.push(upvars)
+ })
+ .collect();
+
+ // Convert the closure fake reads, if any, from hir `Place` to ExprRef
+ let fake_reads = match self.typeck_results.closure_fake_reads.get(&def_id) {
+ Some(fake_reads) => fake_reads
+ .iter()
+ .map(|(place, cause, hir_id)| {
+ let expr = self.convert_captured_hir_place(expr, place.clone());
+ (self.thir.exprs.push(expr), *cause, *hir_id)
+ })
+ .collect(),
+ None => Vec::new(),
+ };
+
+ ExprKind::Closure { closure_id: def_id, substs, upvars, movability, fake_reads }
+ }
+
+ hir::ExprKind::Path(ref qpath) => {
+ let res = self.typeck_results().qpath_res(qpath, expr.hir_id);
+ self.convert_path_expr(expr, res)
+ }
+
+ hir::ExprKind::InlineAsm(ref asm) => ExprKind::InlineAsm {
+ template: asm.template,
+ operands: asm
+ .operands
+ .iter()
+ .map(|(op, _op_sp)| match *op {
+ hir::InlineAsmOperand::In { reg, ref expr } => {
+ InlineAsmOperand::In { reg, expr: self.mirror_expr(expr) }
+ }
+ hir::InlineAsmOperand::Out { reg, late, ref expr } => {
+ InlineAsmOperand::Out {
+ reg,
+ late,
+ expr: expr.as_ref().map(|expr| self.mirror_expr(expr)),
+ }
+ }
+ hir::InlineAsmOperand::InOut { reg, late, ref expr } => {
+ InlineAsmOperand::InOut { reg, late, expr: self.mirror_expr(expr) }
+ }
+ hir::InlineAsmOperand::SplitInOut {
+ reg,
+ late,
+ ref in_expr,
+ ref out_expr,
+ } => InlineAsmOperand::SplitInOut {
+ reg,
+ late,
+ in_expr: self.mirror_expr(in_expr),
+ out_expr: out_expr.as_ref().map(|expr| self.mirror_expr(expr)),
+ },
+ hir::InlineAsmOperand::Const { ref anon_const } => {
+ let anon_const_def_id = tcx.hir().local_def_id(anon_const.hir_id);
+ let value = mir::ConstantKind::from_anon_const(
+ tcx,
+ anon_const_def_id,
+ self.param_env,
+ );
+ let span = tcx.hir().span(anon_const.hir_id);
+
+ InlineAsmOperand::Const { value, span }
+ }
+ hir::InlineAsmOperand::SymFn { ref anon_const } => {
+ let anon_const_def_id = tcx.hir().local_def_id(anon_const.hir_id);
+ let value = mir::ConstantKind::from_anon_const(
+ tcx,
+ anon_const_def_id,
+ self.param_env,
+ );
+ let span = tcx.hir().span(anon_const.hir_id);
+
+ InlineAsmOperand::SymFn { value, span }
+ }
+ hir::InlineAsmOperand::SymStatic { path: _, def_id } => {
+ InlineAsmOperand::SymStatic { def_id }
+ }
+ })
+ .collect(),
+ options: asm.options,
+ line_spans: asm.line_spans,
+ },
+
+ hir::ExprKind::ConstBlock(ref anon_const) => {
+ let ty = self.typeck_results().node_type(anon_const.hir_id);
+ let did = tcx.hir().local_def_id(anon_const.hir_id).to_def_id();
+ let typeck_root_def_id = tcx.typeck_root_def_id(did);
+ let parent_substs =
+ tcx.erase_regions(InternalSubsts::identity_for_item(tcx, typeck_root_def_id));
+ let substs =
+ InlineConstSubsts::new(tcx, InlineConstSubstsParts { parent_substs, ty })
+ .substs;
+
+ ExprKind::ConstBlock { did, substs }
+ }
+ // Now comes the rote stuff:
+ hir::ExprKind::Repeat(ref v, _) => {
+ let ty = self.typeck_results().expr_ty(expr);
+ let ty::Array(_, count) = ty.kind() else {
+ span_bug!(expr.span, "unexpected repeat expr ty: {:?}", ty);
+ };
+
+ ExprKind::Repeat { value: self.mirror_expr(v), count: *count }
+ }
+ hir::ExprKind::Ret(ref v) => {
+ ExprKind::Return { value: v.as_ref().map(|v| self.mirror_expr(v)) }
+ }
+ hir::ExprKind::Break(dest, ref value) => match dest.target_id {
+ Ok(target_id) => ExprKind::Break {
+ label: region::Scope { id: target_id.local_id, data: region::ScopeData::Node },
+ value: value.as_ref().map(|value| self.mirror_expr(value)),
+ },
+ Err(err) => bug!("invalid loop id for break: {}", err),
+ },
+ hir::ExprKind::Continue(dest) => match dest.target_id {
+ Ok(loop_id) => ExprKind::Continue {
+ label: region::Scope { id: loop_id.local_id, data: region::ScopeData::Node },
+ },
+ Err(err) => bug!("invalid loop id for continue: {}", err),
+ },
+ hir::ExprKind::Let(let_expr) => ExprKind::Let {
+ expr: self.mirror_expr(let_expr.init),
+ pat: self.pattern_from_hir(let_expr.pat),
+ },
+ hir::ExprKind::If(cond, then, else_opt) => ExprKind::If {
+ if_then_scope: region::Scope {
+ id: then.hir_id.local_id,
+ data: region::ScopeData::IfThen,
+ },
+ cond: self.mirror_expr(cond),
+ then: self.mirror_expr(then),
+ else_opt: else_opt.map(|el| self.mirror_expr(el)),
+ },
+ hir::ExprKind::Match(ref discr, ref arms, _) => ExprKind::Match {
+ scrutinee: self.mirror_expr(discr),
+ arms: arms.iter().map(|a| self.convert_arm(a)).collect(),
+ },
+ hir::ExprKind::Loop(ref body, ..) => {
+ let block_ty = self.typeck_results().node_type(body.hir_id);
+ let temp_lifetime = self
+ .rvalue_scopes
+ .temporary_scope(self.region_scope_tree, body.hir_id.local_id);
+ let block = self.mirror_block(body);
+ let body = self.thir.exprs.push(Expr {
+ ty: block_ty,
+ temp_lifetime,
+ span: block.span,
+ kind: ExprKind::Block { body: block },
+ });
+ ExprKind::Loop { body }
+ }
+ hir::ExprKind::Field(ref source, ..) => ExprKind::Field {
+ lhs: self.mirror_expr(source),
+ variant_index: VariantIdx::new(0),
+ name: Field::new(tcx.field_index(expr.hir_id, self.typeck_results)),
+ },
+ hir::ExprKind::Cast(ref source, ref cast_ty) => {
+ // Check for a user-given type annotation on this `cast`
+ let user_provided_types = self.typeck_results.user_provided_types();
+ let user_ty = user_provided_types.get(cast_ty.hir_id);
+
+ debug!(
+ "cast({:?}) has ty w/ hir_id {:?} and user provided ty {:?}",
+ expr, cast_ty.hir_id, user_ty,
+ );
+
+ let cast = self.mirror_expr_cast(*source, temp_lifetime, expr.span);
+
+ if let Some(user_ty) = user_ty {
+ // NOTE: Creating a new Expr and wrapping a Cast inside of it may be
+ // inefficient, revisit this when performance becomes an issue.
+ let cast_expr = self.thir.exprs.push(Expr {
+ temp_lifetime,
+ ty: expr_ty,
+ span: expr.span,
+ kind: cast,
+ });
+ debug!("make_mirror_unadjusted: (cast) user_ty={:?}", user_ty);
+
+ ExprKind::ValueTypeAscription { source: cast_expr, user_ty: Some(*user_ty) }
+ } else {
+ cast
+ }
+ }
+ hir::ExprKind::Type(ref source, ref ty) => {
+ let user_provided_types = self.typeck_results.user_provided_types();
+ let user_ty = user_provided_types.get(ty.hir_id).copied();
+ debug!("make_mirror_unadjusted: (type) user_ty={:?}", user_ty);
+ let mirrored = self.mirror_expr(source);
+ if source.is_syntactic_place_expr() {
+ ExprKind::PlaceTypeAscription { source: mirrored, user_ty }
+ } else {
+ ExprKind::ValueTypeAscription { source: mirrored, user_ty }
+ }
+ }
+ hir::ExprKind::DropTemps(ref source) => {
+ ExprKind::Use { source: self.mirror_expr(source) }
+ }
+ hir::ExprKind::Box(ref value) => ExprKind::Box { value: self.mirror_expr(value) },
+ hir::ExprKind::Array(ref fields) => {
+ ExprKind::Array { fields: self.mirror_exprs(fields) }
+ }
+ hir::ExprKind::Tup(ref fields) => ExprKind::Tuple { fields: self.mirror_exprs(fields) },
+
+ hir::ExprKind::Yield(ref v, _) => ExprKind::Yield { value: self.mirror_expr(v) },
+ hir::ExprKind::Err => unreachable!(),
+ };
+
+ Expr { temp_lifetime, ty: expr_ty, span: expr.span, kind }
+ }
+
+ fn user_substs_applied_to_res(
+ &mut self,
+ hir_id: hir::HirId,
+ res: Res,
+ ) -> Option<ty::CanonicalUserType<'tcx>> {
+ debug!("user_substs_applied_to_res: res={:?}", res);
+ let user_provided_type = match res {
+ // A reference to something callable -- e.g., a fn, method, or
+ // a tuple-struct or tuple-variant. This has the type of a
+ // `Fn` but with the user-given substitutions.
+ Res::Def(DefKind::Fn, _)
+ | Res::Def(DefKind::AssocFn, _)
+ | Res::Def(DefKind::Ctor(_, CtorKind::Fn), _)
+ | Res::Def(DefKind::Const, _)
+ | Res::Def(DefKind::AssocConst, _) => {
+ self.typeck_results().user_provided_types().get(hir_id).copied()
+ }
+
+ // A unit struct/variant which is used as a value (e.g.,
+ // `None`). This has the type of the enum/struct that defines
+ // this variant -- but with the substitutions given by the
+ // user.
+ Res::Def(DefKind::Ctor(_, CtorKind::Const), _) => {
+ self.user_substs_applied_to_ty_of_hir_id(hir_id)
+ }
+
+ // `Self` is used in expression as a tuple struct constructor or a unit struct constructor
+ Res::SelfCtor(_) => self.user_substs_applied_to_ty_of_hir_id(hir_id),
+
+ _ => bug!("user_substs_applied_to_res: unexpected res {:?} at {:?}", res, hir_id),
+ };
+ debug!("user_substs_applied_to_res: user_provided_type={:?}", user_provided_type);
+ user_provided_type
+ }
+
+ fn method_callee(
+ &mut self,
+ expr: &hir::Expr<'_>,
+ span: Span,
+ overloaded_callee: Option<(DefId, SubstsRef<'tcx>)>,
+ ) -> Expr<'tcx> {
+ let temp_lifetime =
+ self.rvalue_scopes.temporary_scope(self.region_scope_tree, expr.hir_id.local_id);
+ let (def_id, substs, user_ty) = match overloaded_callee {
+ Some((def_id, substs)) => (def_id, substs, None),
+ None => {
+ let (kind, def_id) =
+ self.typeck_results().type_dependent_def(expr.hir_id).unwrap_or_else(|| {
+ span_bug!(expr.span, "no type-dependent def for method callee")
+ });
+ let user_ty = self.user_substs_applied_to_res(expr.hir_id, Res::Def(kind, def_id));
+ debug!("method_callee: user_ty={:?}", user_ty);
+ (def_id, self.typeck_results().node_substs(expr.hir_id), user_ty)
+ }
+ };
+ let ty = self.tcx().mk_fn_def(def_id, substs);
+ Expr { temp_lifetime, ty, span, kind: ExprKind::ZstLiteral { user_ty } }
+ }
+
+ fn convert_arm(&mut self, arm: &'tcx hir::Arm<'tcx>) -> ArmId {
+ let arm = Arm {
+ pattern: self.pattern_from_hir(&arm.pat),
+ guard: arm.guard.as_ref().map(|g| match g {
+ hir::Guard::If(ref e) => Guard::If(self.mirror_expr(e)),
+ hir::Guard::IfLet(ref l) => {
+ Guard::IfLet(self.pattern_from_hir(l.pat), self.mirror_expr(l.init))
+ }
+ }),
+ body: self.mirror_expr(arm.body),
+ lint_level: LintLevel::Explicit(arm.hir_id),
+ scope: region::Scope { id: arm.hir_id.local_id, data: region::ScopeData::Node },
+ span: arm.span,
+ };
+ self.thir.arms.push(arm)
+ }
+
+ fn convert_path_expr(&mut self, expr: &'tcx hir::Expr<'tcx>, res: Res) -> ExprKind<'tcx> {
+ let substs = self.typeck_results().node_substs(expr.hir_id);
+ match res {
+ // A regular function, constructor function or a constant.
+ Res::Def(DefKind::Fn, _)
+ | Res::Def(DefKind::AssocFn, _)
+ | Res::Def(DefKind::Ctor(_, CtorKind::Fn), _)
+ | Res::SelfCtor(_) => {
+ let user_ty = self.user_substs_applied_to_res(expr.hir_id, res);
+ ExprKind::ZstLiteral { user_ty }
+ }
+
+ Res::Def(DefKind::ConstParam, def_id) => {
+ let hir_id = self.tcx.hir().local_def_id_to_hir_id(def_id.expect_local());
+ let item_id = self.tcx.hir().get_parent_node(hir_id);
+ let item_def_id = self.tcx.hir().local_def_id(item_id);
+ let generics = self.tcx.generics_of(item_def_id);
+ let index = generics.param_def_id_to_index[&def_id];
+ let name = self.tcx.hir().name(hir_id);
+ let param = ty::ParamConst::new(index, name);
+
+ ExprKind::ConstParam { param, def_id }
+ }
+
+ Res::Def(DefKind::Const, def_id) | Res::Def(DefKind::AssocConst, def_id) => {
+ let user_ty = self.user_substs_applied_to_res(expr.hir_id, res);
+ ExprKind::NamedConst { def_id, substs, user_ty: user_ty }
+ }
+
+ Res::Def(DefKind::Ctor(_, CtorKind::Const), def_id) => {
+ let user_provided_types = self.typeck_results.user_provided_types();
+ let user_provided_type = user_provided_types.get(expr.hir_id).copied();
+ debug!("convert_path_expr: user_provided_type={:?}", user_provided_type);
+ let ty = self.typeck_results().node_type(expr.hir_id);
+ match ty.kind() {
+ // A unit struct/variant which is used as a value.
+ // We return a completely different ExprKind here to account for this special case.
+ ty::Adt(adt_def, substs) => ExprKind::Adt(Box::new(Adt {
+ adt_def: *adt_def,
+ variant_index: adt_def.variant_index_with_ctor_id(def_id),
+ substs,
+ user_ty: user_provided_type,
+ fields: Box::new([]),
+ base: None,
+ })),
+ _ => bug!("unexpected ty: {:?}", ty),
+ }
+ }
+
+ // We encode uses of statics as a `*&STATIC` where the `&STATIC` part is
+ // a constant reference (or constant raw pointer for `static mut`) in MIR
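+        // Illustrative example: a use of `FOO` with `static FOO: u32 = 1;` becomes `*&FOO`,
+        // i.e. a `Deref` of a `StaticRef` (or of a `ThreadLocalRef` for thread-local statics),
+        // as built just below.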
+ Res::Def(DefKind::Static(_), id) => {
+ let ty = self.tcx.static_ptr_ty(id);
+ let temp_lifetime = self
+ .rvalue_scopes
+ .temporary_scope(self.region_scope_tree, expr.hir_id.local_id);
+ let kind = if self.tcx.is_thread_local_static(id) {
+ ExprKind::ThreadLocalRef(id)
+ } else {
+ let alloc_id = self.tcx.create_static_alloc(id);
+ ExprKind::StaticRef { alloc_id, ty, def_id: id }
+ };
+ ExprKind::Deref {
+ arg: self.thir.exprs.push(Expr { ty, temp_lifetime, span: expr.span, kind }),
+ }
+ }
+
+ Res::Local(var_hir_id) => self.convert_var(var_hir_id),
+
+ _ => span_bug!(expr.span, "res `{:?}` not yet implemented", res),
+ }
+ }
+
+ fn convert_var(&mut self, var_hir_id: hir::HirId) -> ExprKind<'tcx> {
+        // We want upvars here, not captures.
+        // Captures will be handled in MIR.
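+        // Illustrative example: in `|| a.0`, the variable `a` is the upvar referenced here;
+        // the precise captured place `a.0` is resolved later, during MIR building.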
+ let is_upvar = self
+ .tcx
+ .upvars_mentioned(self.body_owner)
+ .map_or(false, |upvars| upvars.contains_key(&var_hir_id));
+
+ debug!(
+ "convert_var({:?}): is_upvar={}, body_owner={:?}",
+ var_hir_id, is_upvar, self.body_owner
+ );
+
+ if is_upvar {
+ ExprKind::UpvarRef {
+ closure_def_id: self.body_owner,
+ var_hir_id: LocalVarId(var_hir_id),
+ }
+ } else {
+ ExprKind::VarRef { id: LocalVarId(var_hir_id) }
+ }
+ }
+
+ fn overloaded_operator(
+ &mut self,
+ expr: &'tcx hir::Expr<'tcx>,
+ args: Box<[ExprId]>,
+ ) -> ExprKind<'tcx> {
+ let fun = self.method_callee(expr, expr.span, None);
+ let fun = self.thir.exprs.push(fun);
+ ExprKind::Call {
+ ty: self.thir[fun].ty,
+ fun,
+ args,
+ from_hir_call: false,
+ fn_span: expr.span,
+ }
+ }
+
+ fn overloaded_place(
+ &mut self,
+ expr: &'tcx hir::Expr<'tcx>,
+ place_ty: Ty<'tcx>,
+ overloaded_callee: Option<(DefId, SubstsRef<'tcx>)>,
+ args: Box<[ExprId]>,
+ span: Span,
+ ) -> ExprKind<'tcx> {
+ // For an overloaded *x or x[y] expression of type T, the method
+ // call returns an &T and we must add the deref so that the types
+ // line up (this is because `*x` and `x[y]` represent places):
+
+ // Reconstruct the output assuming it's a reference with the
+ // same region and mutability as the receiver. This holds for
+        // `Deref(Mut)::deref(_mut)` and `Index(Mut)::index(_mut)`.
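+        // Illustrative example: `v[i]` with `v: Vec<u32>` is lowered as `*Index::index(&v, i)`,
+        // and `*rc` with `rc: Rc<String>` as `*Deref::deref(&rc)`.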
+ let ty::Ref(region, _, mutbl) = *self.thir[args[0]].ty.kind() else {
+ span_bug!(span, "overloaded_place: receiver is not a reference");
+ };
+ let ref_ty = self.tcx.mk_ref(region, ty::TypeAndMut { ty: place_ty, mutbl });
+
+ // construct the complete expression `foo()` for the overloaded call,
+ // which will yield the &T type
+ let temp_lifetime =
+ self.rvalue_scopes.temporary_scope(self.region_scope_tree, expr.hir_id.local_id);
+ let fun = self.method_callee(expr, span, overloaded_callee);
+ let fun = self.thir.exprs.push(fun);
+ let fun_ty = self.thir[fun].ty;
+ let ref_expr = self.thir.exprs.push(Expr {
+ temp_lifetime,
+ ty: ref_ty,
+ span,
+ kind: ExprKind::Call { ty: fun_ty, fun, args, from_hir_call: false, fn_span: span },
+ });
+
+ // construct and return a deref wrapper `*foo()`
+ ExprKind::Deref { arg: ref_expr }
+ }
+
+ fn convert_captured_hir_place(
+ &mut self,
+ closure_expr: &'tcx hir::Expr<'tcx>,
+ place: HirPlace<'tcx>,
+ ) -> Expr<'tcx> {
+ let temp_lifetime = self
+ .rvalue_scopes
+ .temporary_scope(self.region_scope_tree, closure_expr.hir_id.local_id);
+ let var_ty = place.base_ty;
+
+        // The result of capture analysis in `rustc_typeck/check/upvar.rs` represents a captured
+        // path as it's seen for use within the closure and not at the time of closure creation.
+        //
+        // That is, we expect to see it start from a captured upvar and not something that is
+        // local to the closure's parent.
+ let var_hir_id = match place.base {
+ HirPlaceBase::Upvar(upvar_id) => upvar_id.var_path.hir_id,
+ base => bug!("Expected an upvar, found {:?}", base),
+ };
+
+ let mut captured_place_expr = Expr {
+ temp_lifetime,
+ ty: var_ty,
+ span: closure_expr.span,
+ kind: self.convert_var(var_hir_id),
+ };
+
+ for proj in place.projections.iter() {
+ let kind = match proj.kind {
+ HirProjectionKind::Deref => {
+ ExprKind::Deref { arg: self.thir.exprs.push(captured_place_expr) }
+ }
+ HirProjectionKind::Field(field, variant_index) => ExprKind::Field {
+ lhs: self.thir.exprs.push(captured_place_expr),
+ variant_index,
+ name: Field::new(field as usize),
+ },
+ HirProjectionKind::Index | HirProjectionKind::Subslice => {
+ // We don't capture these projections, so we can ignore them here
+ continue;
+ }
+ };
+
+ captured_place_expr =
+ Expr { temp_lifetime, ty: proj.ty, span: closure_expr.span, kind };
+ }
+
+ captured_place_expr
+ }
+
+ fn capture_upvar(
+ &mut self,
+ closure_expr: &'tcx hir::Expr<'tcx>,
+ captured_place: &'tcx ty::CapturedPlace<'tcx>,
+ upvar_ty: Ty<'tcx>,
+ ) -> Expr<'tcx> {
+ let upvar_capture = captured_place.info.capture_kind;
+ let captured_place_expr =
+ self.convert_captured_hir_place(closure_expr, captured_place.place.clone());
+ let temp_lifetime = self
+ .rvalue_scopes
+ .temporary_scope(self.region_scope_tree, closure_expr.hir_id.local_id);
+
+ match upvar_capture {
+ ty::UpvarCapture::ByValue => captured_place_expr,
+ ty::UpvarCapture::ByRef(upvar_borrow) => {
+ let borrow_kind = match upvar_borrow {
+ ty::BorrowKind::ImmBorrow => BorrowKind::Shared,
+ ty::BorrowKind::UniqueImmBorrow => BorrowKind::Unique,
+ ty::BorrowKind::MutBorrow => BorrowKind::Mut { allow_two_phase_borrow: false },
+ };
+ Expr {
+ temp_lifetime,
+ ty: upvar_ty,
+ span: closure_expr.span,
+ kind: ExprKind::Borrow {
+ borrow_kind,
+ arg: self.thir.exprs.push(captured_place_expr),
+ },
+ }
+ }
+ }
+ }
+
+ /// Converts a list of named fields (i.e., for struct-like struct/enum ADTs) into FieldExpr.
+ fn field_refs(&mut self, fields: &'tcx [hir::ExprField<'tcx>]) -> Box<[FieldExpr]> {
+ fields
+ .iter()
+ .map(|field| FieldExpr {
+ name: Field::new(self.tcx.field_index(field.hir_id, self.typeck_results)),
+ expr: self.mirror_expr(field.expr),
+ })
+ .collect()
+ }
+}
+
+trait ToBorrowKind {
+ fn to_borrow_kind(&self) -> BorrowKind;
+}
+
+impl ToBorrowKind for AutoBorrowMutability {
+ fn to_borrow_kind(&self) -> BorrowKind {
+ use rustc_middle::ty::adjustment::AllowTwoPhase;
+ match *self {
+ AutoBorrowMutability::Mut { allow_two_phase_borrow } => BorrowKind::Mut {
+ allow_two_phase_borrow: match allow_two_phase_borrow {
+ AllowTwoPhase::Yes => true,
+ AllowTwoPhase::No => false,
+ },
+ },
+ AutoBorrowMutability::Not => BorrowKind::Shared,
+ }
+ }
+}
+
+impl ToBorrowKind for hir::Mutability {
+ fn to_borrow_kind(&self) -> BorrowKind {
+ match *self {
+ hir::Mutability::Mut => BorrowKind::Mut { allow_two_phase_borrow: false },
+ hir::Mutability::Not => BorrowKind::Shared,
+ }
+ }
+}
+
+fn bin_op(op: hir::BinOpKind) -> BinOp {
+ match op {
+ hir::BinOpKind::Add => BinOp::Add,
+ hir::BinOpKind::Sub => BinOp::Sub,
+ hir::BinOpKind::Mul => BinOp::Mul,
+ hir::BinOpKind::Div => BinOp::Div,
+ hir::BinOpKind::Rem => BinOp::Rem,
+ hir::BinOpKind::BitXor => BinOp::BitXor,
+ hir::BinOpKind::BitAnd => BinOp::BitAnd,
+ hir::BinOpKind::BitOr => BinOp::BitOr,
+ hir::BinOpKind::Shl => BinOp::Shl,
+ hir::BinOpKind::Shr => BinOp::Shr,
+ hir::BinOpKind::Eq => BinOp::Eq,
+ hir::BinOpKind::Lt => BinOp::Lt,
+ hir::BinOpKind::Le => BinOp::Le,
+ hir::BinOpKind::Ne => BinOp::Ne,
+ hir::BinOpKind::Ge => BinOp::Ge,
+ hir::BinOpKind::Gt => BinOp::Gt,
+ _ => bug!("no equivalent for ast binop {:?}", op),
+ }
+}
diff --git a/compiler/rustc_mir_build/src/thir/cx/mod.rs b/compiler/rustc_mir_build/src/thir/cx/mod.rs
new file mode 100644
index 000000000..f7351a4ca
--- /dev/null
+++ b/compiler/rustc_mir_build/src/thir/cx/mod.rs
@@ -0,0 +1,101 @@
+//! This module contains the functionality to convert from the wacky tcx data
+//! structures into the THIR. The `builder` is generally ignorant of the tcx,
+//! etc., and instead goes through the `Cx` for most of its work.
+
+use crate::thir::pattern::pat_from_hir;
+use crate::thir::util::UserAnnotatedTyHelpers;
+
+use rustc_data_structures::steal::Steal;
+use rustc_errors::ErrorGuaranteed;
+use rustc_hir as hir;
+use rustc_hir::def_id::{DefId, LocalDefId};
+use rustc_hir::HirId;
+use rustc_hir::Node;
+use rustc_middle::middle::region;
+use rustc_middle::thir::*;
+use rustc_middle::ty::{self, RvalueScopes, TyCtxt};
+use rustc_span::Span;
+
+pub(crate) fn thir_body<'tcx>(
+ tcx: TyCtxt<'tcx>,
+ owner_def: ty::WithOptConstParam<LocalDefId>,
+) -> Result<(&'tcx Steal<Thir<'tcx>>, ExprId), ErrorGuaranteed> {
+ let hir = tcx.hir();
+ let body = hir.body(hir.body_owned_by(owner_def.did));
+ let mut cx = Cx::new(tcx, owner_def);
+ if let Some(reported) = cx.typeck_results.tainted_by_errors {
+ return Err(reported);
+ }
+ let expr = cx.mirror_expr(&body.value);
+ Ok((tcx.alloc_steal_thir(cx.thir), expr))
+}
+
+pub(crate) fn thir_tree<'tcx>(
+ tcx: TyCtxt<'tcx>,
+ owner_def: ty::WithOptConstParam<LocalDefId>,
+) -> String {
+ match thir_body(tcx, owner_def) {
+ Ok((thir, _)) => format!("{:#?}", thir.steal()),
+ Err(_) => "error".into(),
+ }
+}
+
+struct Cx<'tcx> {
+ tcx: TyCtxt<'tcx>,
+ thir: Thir<'tcx>,
+
+ pub(crate) param_env: ty::ParamEnv<'tcx>,
+
+ pub(crate) region_scope_tree: &'tcx region::ScopeTree,
+ pub(crate) typeck_results: &'tcx ty::TypeckResults<'tcx>,
+ pub(crate) rvalue_scopes: &'tcx RvalueScopes,
+
+ /// When applying adjustments to the expression
+ /// with the given `HirId`, use the given `Span`,
+ /// instead of the usual span. This is used to
+ /// assign the span of an overall method call
+ /// (e.g. `my_val.foo()`) to the adjustment expressions
+ /// for the receiver.
+ adjustment_span: Option<(HirId, Span)>,
+
+ /// The `DefId` of the owner of this body.
+ body_owner: DefId,
+}
+
+impl<'tcx> Cx<'tcx> {
+ fn new(tcx: TyCtxt<'tcx>, def: ty::WithOptConstParam<LocalDefId>) -> Cx<'tcx> {
+ let typeck_results = tcx.typeck_opt_const_arg(def);
+ Cx {
+ tcx,
+ thir: Thir::new(),
+ param_env: tcx.param_env(def.did),
+ region_scope_tree: tcx.region_scope_tree(def.did),
+ typeck_results,
+ rvalue_scopes: &typeck_results.rvalue_scopes,
+ body_owner: def.did.to_def_id(),
+ adjustment_span: None,
+ }
+ }
+
+ #[tracing::instrument(level = "debug", skip(self))]
+ pub(crate) fn pattern_from_hir(&mut self, p: &hir::Pat<'_>) -> Pat<'tcx> {
+ let p = match self.tcx.hir().get(p.hir_id) {
+ Node::Pat(p) => p,
+ node => bug!("pattern became {:?}", node),
+ };
+ pat_from_hir(self.tcx, self.param_env, self.typeck_results(), p)
+ }
+}
+
+impl<'tcx> UserAnnotatedTyHelpers<'tcx> for Cx<'tcx> {
+ fn tcx(&self) -> TyCtxt<'tcx> {
+ self.tcx
+ }
+
+ fn typeck_results(&self) -> &ty::TypeckResults<'tcx> {
+ self.typeck_results
+ }
+}
+
+mod block;
+mod expr;
diff --git a/compiler/rustc_mir_build/src/thir/mod.rs b/compiler/rustc_mir_build/src/thir/mod.rs
new file mode 100644
index 000000000..e0e6ac266
--- /dev/null
+++ b/compiler/rustc_mir_build/src/thir/mod.rs
@@ -0,0 +1,13 @@
+//! The MIR is built from some typed high-level IR
+//! (THIR). This section defines the THIR along with a trait for
+//! accessing it. The intention is to allow MIR construction to be
+//! unit-tested and separated from the Rust source and compiler data
+//! structures.
+
+pub(crate) mod constant;
+
+pub(crate) mod cx;
+
+pub(crate) mod pattern;
+
+mod util;
diff --git a/compiler/rustc_mir_build/src/thir/pattern/check_match.rs b/compiler/rustc_mir_build/src/thir/pattern/check_match.rs
new file mode 100644
index 000000000..063c07647
--- /dev/null
+++ b/compiler/rustc_mir_build/src/thir/pattern/check_match.rs
@@ -0,0 +1,1162 @@
+use super::deconstruct_pat::{Constructor, DeconstructedPat};
+use super::usefulness::{
+ compute_match_usefulness, MatchArm, MatchCheckCtxt, Reachability, UsefulnessReport,
+};
+use super::{PatCtxt, PatternError};
+
+use rustc_arena::TypedArena;
+use rustc_ast::Mutability;
+use rustc_errors::{
+ error_code, pluralize, struct_span_err, Applicability, Diagnostic, DiagnosticBuilder,
+ ErrorGuaranteed, MultiSpan,
+};
+use rustc_hir as hir;
+use rustc_hir::def::*;
+use rustc_hir::def_id::DefId;
+use rustc_hir::intravisit::{self, Visitor};
+use rustc_hir::{HirId, Pat};
+use rustc_middle::ty::{self, AdtDef, Ty, TyCtxt};
+use rustc_session::lint::builtin::{
+ BINDINGS_WITH_VARIANT_NAME, IRREFUTABLE_LET_PATTERNS, UNREACHABLE_PATTERNS,
+};
+use rustc_session::Session;
+use rustc_span::source_map::Spanned;
+use rustc_span::{BytePos, Span};
+
+pub(crate) fn check_match(tcx: TyCtxt<'_>, def_id: DefId) {
+ let body_id = match def_id.as_local() {
+ None => return,
+ Some(def_id) => tcx.hir().body_owned_by(def_id),
+ };
+
+ let pattern_arena = TypedArena::default();
+ let mut visitor = MatchVisitor {
+ tcx,
+ typeck_results: tcx.typeck_body(body_id),
+ param_env: tcx.param_env(def_id),
+ pattern_arena: &pattern_arena,
+ };
+ visitor.visit_body(tcx.hir().body(body_id));
+}
+
+fn create_e0004(
+ sess: &Session,
+ sp: Span,
+ error_message: String,
+) -> DiagnosticBuilder<'_, ErrorGuaranteed> {
+ struct_span_err!(sess, sp, E0004, "{}", &error_message)
+}
+
+#[derive(PartialEq)]
+enum RefutableFlag {
+ Irrefutable,
+ Refutable,
+}
+use RefutableFlag::*;
+
+struct MatchVisitor<'a, 'p, 'tcx> {
+ tcx: TyCtxt<'tcx>,
+ typeck_results: &'a ty::TypeckResults<'tcx>,
+ param_env: ty::ParamEnv<'tcx>,
+ pattern_arena: &'p TypedArena<DeconstructedPat<'p, 'tcx>>,
+}
+
+impl<'tcx> Visitor<'tcx> for MatchVisitor<'_, '_, 'tcx> {
+ fn visit_expr(&mut self, ex: &'tcx hir::Expr<'tcx>) {
+ intravisit::walk_expr(self, ex);
+ match &ex.kind {
+ hir::ExprKind::Match(scrut, arms, source) => {
+ self.check_match(scrut, arms, *source, ex.span)
+ }
+ hir::ExprKind::Let(hir::Let { pat, init, span, .. }) => {
+ self.check_let(pat, init, *span)
+ }
+ _ => {}
+ }
+ }
+
+ fn visit_local(&mut self, loc: &'tcx hir::Local<'tcx>) {
+ intravisit::walk_local(self, loc);
+ let els = loc.els;
+ if let Some(init) = loc.init && els.is_some() {
+ self.check_let(&loc.pat, init, loc.span);
+ }
+
+ let (msg, sp) = match loc.source {
+ hir::LocalSource::Normal => ("local binding", Some(loc.span)),
+ hir::LocalSource::AsyncFn => ("async fn binding", None),
+ hir::LocalSource::AwaitDesugar => ("`await` future binding", None),
+ hir::LocalSource::AssignDesugar(_) => ("destructuring assignment binding", None),
+ };
+ if els.is_none() {
+ self.check_irrefutable(&loc.pat, msg, sp);
+ }
+ }
+
+ fn visit_param(&mut self, param: &'tcx hir::Param<'tcx>) {
+ intravisit::walk_param(self, param);
+ self.check_irrefutable(&param.pat, "function argument", None);
+ }
+}
+
+impl PatCtxt<'_, '_> {
+ fn report_inlining_errors(&self) {
+ for error in &self.errors {
+ match *error {
+ PatternError::StaticInPattern(span) => {
+ self.span_e0158(span, "statics cannot be referenced in patterns")
+ }
+ PatternError::AssocConstInPattern(span) => {
+ self.span_e0158(span, "associated consts cannot be referenced in patterns")
+ }
+ PatternError::ConstParamInPattern(span) => {
+ self.span_e0158(span, "const parameters cannot be referenced in patterns")
+ }
+ PatternError::NonConstPath(span) => {
+ rustc_middle::mir::interpret::struct_error(
+ self.tcx.at(span),
+ "runtime values cannot be referenced in patterns",
+ )
+ .emit();
+ }
+ }
+ }
+ }
+
+ fn span_e0158(&self, span: Span, text: &str) {
+ struct_span_err!(self.tcx.sess, span, E0158, "{}", text).emit();
+ }
+}
+
+impl<'p, 'tcx> MatchVisitor<'_, 'p, 'tcx> {
+ fn check_patterns(&self, pat: &Pat<'_>, rf: RefutableFlag) {
+ pat.walk_always(|pat| check_borrow_conflicts_in_at_patterns(self, pat));
+ check_for_bindings_named_same_as_variants(self, pat, rf);
+ }
+
+ fn lower_pattern(
+ &self,
+ cx: &mut MatchCheckCtxt<'p, 'tcx>,
+ pat: &'tcx hir::Pat<'tcx>,
+ have_errors: &mut bool,
+ ) -> &'p DeconstructedPat<'p, 'tcx> {
+ let mut patcx = PatCtxt::new(self.tcx, self.param_env, self.typeck_results);
+ patcx.include_lint_checks();
+ let pattern = patcx.lower_pattern(pat);
+ let pattern: &_ = cx.pattern_arena.alloc(DeconstructedPat::from_pat(cx, &pattern));
+ if !patcx.errors.is_empty() {
+ *have_errors = true;
+ patcx.report_inlining_errors();
+ }
+ pattern
+ }
+
+ fn new_cx(&self, hir_id: HirId) -> MatchCheckCtxt<'p, 'tcx> {
+ MatchCheckCtxt {
+ tcx: self.tcx,
+ param_env: self.param_env,
+ module: self.tcx.parent_module(hir_id).to_def_id(),
+ pattern_arena: &self.pattern_arena,
+ }
+ }
+
+ fn check_let(&mut self, pat: &'tcx hir::Pat<'tcx>, scrutinee: &hir::Expr<'_>, span: Span) {
+ self.check_patterns(pat, Refutable);
+ let mut cx = self.new_cx(scrutinee.hir_id);
+ let tpat = self.lower_pattern(&mut cx, pat, &mut false);
+ self.check_let_reachability(&mut cx, pat.hir_id, tpat, span);
+ }
+
+ fn check_match(
+ &mut self,
+ scrut: &hir::Expr<'_>,
+ hir_arms: &'tcx [hir::Arm<'tcx>],
+ source: hir::MatchSource,
+ expr_span: Span,
+ ) {
+ let mut cx = self.new_cx(scrut.hir_id);
+
+ for arm in hir_arms {
+ // Check the arm for some things unrelated to exhaustiveness.
+ self.check_patterns(&arm.pat, Refutable);
+ if let Some(hir::Guard::IfLet(ref let_expr)) = arm.guard {
+ self.check_patterns(let_expr.pat, Refutable);
+ let tpat = self.lower_pattern(&mut cx, let_expr.pat, &mut false);
+ self.check_let_reachability(&mut cx, let_expr.pat.hir_id, tpat, tpat.span());
+ }
+ }
+
+ let mut have_errors = false;
+
+ let arms: Vec<_> = hir_arms
+ .iter()
+ .map(|hir::Arm { pat, guard, .. }| MatchArm {
+ pat: self.lower_pattern(&mut cx, pat, &mut have_errors),
+ hir_id: pat.hir_id,
+ has_guard: guard.is_some(),
+ })
+ .collect();
+
+ // Bail out early if lowering failed.
+ if have_errors {
+ return;
+ }
+
+ let scrut_ty = self.typeck_results.expr_ty_adjusted(scrut);
+ let report = compute_match_usefulness(&cx, &arms, scrut.hir_id, scrut_ty);
+
+ match source {
+ // Don't report arm reachability of desugared `match $iter.into_iter() { iter => .. }`
+ // when the iterator is an uninhabited type. unreachable_code will trigger instead.
+ hir::MatchSource::ForLoopDesugar if arms.len() == 1 => {}
+ hir::MatchSource::ForLoopDesugar | hir::MatchSource::Normal => {
+ report_arm_reachability(&cx, &report)
+ }
+            // Unreachable patterns in try and await expressions occur when one of
+            // the arms is of an uninhabited type, which is OK.
+ hir::MatchSource::AwaitDesugar | hir::MatchSource::TryDesugar => {}
+ }
+
+ // Check if the match is exhaustive.
+ let witnesses = report.non_exhaustiveness_witnesses;
+ if !witnesses.is_empty() {
+ if source == hir::MatchSource::ForLoopDesugar && hir_arms.len() == 2 {
+ // the for loop pattern is not irrefutable
+ let pat = hir_arms[1].pat.for_loop_some().unwrap();
+ self.check_irrefutable(pat, "`for` loop binding", None);
+ } else {
+ non_exhaustive_match(&cx, scrut_ty, scrut.span, witnesses, hir_arms, expr_span);
+ }
+ }
+ }
+
+ fn check_let_reachability(
+ &mut self,
+ cx: &mut MatchCheckCtxt<'p, 'tcx>,
+ pat_id: HirId,
+ pat: &'p DeconstructedPat<'p, 'tcx>,
+ span: Span,
+ ) {
+ if self.check_let_chain(cx, pat_id) {
+ return;
+ }
+
+ if is_let_irrefutable(cx, pat_id, pat) {
+ irrefutable_let_pattern(cx.tcx, pat_id, span);
+ }
+ }
+
+ fn check_let_chain(&mut self, cx: &mut MatchCheckCtxt<'p, 'tcx>, pat_id: HirId) -> bool {
+ let hir = self.tcx.hir();
+ let parent = hir.get_parent_node(pat_id);
+
+ // First, figure out if the given pattern is part of a let chain,
+ // and if so, obtain the top node of the chain.
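+        // Illustrative example: for `if let A(x) = a && let B(y) = b && c { .. }`, the chain is
+        // encoded as `(let .. && let ..) && c`, and `top` ends up at the outermost `&&`.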
+ let mut top = parent;
+ let mut part_of_chain = false;
+ loop {
+ let new_top = hir.get_parent_node(top);
+ if let hir::Node::Expr(
+ hir::Expr {
+ kind: hir::ExprKind::Binary(Spanned { node: hir::BinOpKind::And, .. }, lhs, rhs),
+ ..
+ },
+ ..,
+ ) = hir.get(new_top)
+ {
+ // If this isn't the first iteration, we need to check
+ // if there is a let expr before us in the chain, so
+ // that we avoid doubly checking the let chain.
+
+ // The way a chain of &&s is encoded is ((let ... && let ...) && let ...) && let ...
+ // as && is left-to-right associative. Thus, we need to check rhs.
+ if part_of_chain && matches!(rhs.kind, hir::ExprKind::Let(..)) {
+ return true;
+ }
+                // If there is a let on the lhs and the node we came from is the rhs, we don't do any checking either.
+ if !part_of_chain && matches!(lhs.kind, hir::ExprKind::Let(..)) && rhs.hir_id == top
+ {
+ return true;
+ }
+ } else {
+ // We've reached the top.
+ break;
+ }
+
+            // Since this function is called within a let context, it is reasonable to assume
+            // that any parent `&&` implies a let chain.
+ part_of_chain = true;
+ top = new_top;
+ }
+ if !part_of_chain {
+ return false;
+ }
+
+ // Second, obtain the refutabilities of all exprs in the chain,
+ // and record chain members that aren't let exprs.
+ let mut chain_refutabilities = Vec::new();
+ let hir::Node::Expr(top_expr) = hir.get(top) else {
+ // We ensure right above that it's an Expr
+ unreachable!()
+ };
+ let mut cur_expr = top_expr;
+ loop {
+ let mut add = |expr: &hir::Expr<'tcx>| {
+ let refutability = match expr.kind {
+ hir::ExprKind::Let(hir::Let { pat, init, span, .. }) => {
+ let mut ncx = self.new_cx(init.hir_id);
+ let tpat = self.lower_pattern(&mut ncx, pat, &mut false);
+
+ let refutable = !is_let_irrefutable(&mut ncx, pat.hir_id, tpat);
+ Some((*span, refutable))
+ }
+ _ => None,
+ };
+ chain_refutabilities.push(refutability);
+ };
+ if let hir::Expr {
+ kind: hir::ExprKind::Binary(Spanned { node: hir::BinOpKind::And, .. }, lhs, rhs),
+ ..
+ } = cur_expr
+ {
+ add(rhs);
+ cur_expr = lhs;
+ } else {
+ add(cur_expr);
+ break;
+ }
+ }
+ chain_refutabilities.reverse();
+
+ // Third, emit the actual warnings.
+
+ if chain_refutabilities.iter().all(|r| matches!(*r, Some((_, false)))) {
+ // The entire chain is made up of irrefutable `let` statements
+ let let_source = let_source_parent(self.tcx, top, None);
+ irrefutable_let_patterns(
+ cx.tcx,
+ top,
+ let_source,
+ chain_refutabilities.len(),
+ top_expr.span,
+ );
+ return true;
+ }
+ let lint_affix = |affix: &[Option<(Span, bool)>], kind, suggestion| {
+ let span_start = affix[0].unwrap().0;
+ let span_end = affix.last().unwrap().unwrap().0;
+ let span = span_start.to(span_end);
+ let cnt = affix.len();
+ cx.tcx.struct_span_lint_hir(IRREFUTABLE_LET_PATTERNS, top, span, |lint| {
+ let s = pluralize!(cnt);
+ let mut diag = lint.build(&format!("{kind} irrefutable pattern{s} in let chain"));
+ diag.note(&format!(
+ "{these} pattern{s} will always match",
+ these = pluralize!("this", cnt),
+ ));
+ diag.help(&format!(
+ "consider moving {} {suggestion}",
+ if cnt > 1 { "them" } else { "it" }
+ ));
+ diag.emit()
+ });
+ };
+ if let Some(until) = chain_refutabilities.iter().position(|r| !matches!(*r, Some((_, false)))) && until > 0 {
+ // The chain has a non-zero prefix of irrefutable `let` statements.
+
+            // If the let source is a `while let`, there is no alternative place to move the
+            // prefix to, so we shouldn't lint.
+ let let_source = let_source_parent(self.tcx, top, None);
+ if !matches!(let_source, LetSource::WhileLet) {
+ // Emit the lint
+ let prefix = &chain_refutabilities[..until];
+ lint_affix(prefix, "leading", "outside of the construct");
+ }
+ }
+ if let Some(from) = chain_refutabilities.iter().rposition(|r| !matches!(*r, Some((_, false)))) && from != (chain_refutabilities.len() - 1) {
+ // The chain has a non-empty suffix of irrefutable `let` statements
+ let suffix = &chain_refutabilities[from + 1..];
+ lint_affix(suffix, "trailing", "into the body");
+ }
+ true
+ }
+
+ fn check_irrefutable(&self, pat: &'tcx Pat<'tcx>, origin: &str, sp: Option<Span>) {
+ let mut cx = self.new_cx(pat.hir_id);
+
+ let pattern = self.lower_pattern(&mut cx, pat, &mut false);
+ let pattern_ty = pattern.ty();
+ let arms = vec![MatchArm { pat: pattern, hir_id: pat.hir_id, has_guard: false }];
+ let report = compute_match_usefulness(&cx, &arms, pat.hir_id, pattern_ty);
+
+ // Note: we ignore whether the pattern is unreachable (i.e. whether the type is empty). We
+ // only care about exhaustiveness here.
+ let witnesses = report.non_exhaustiveness_witnesses;
+ if witnesses.is_empty() {
+ // The pattern is irrefutable.
+ self.check_patterns(pat, Irrefutable);
+ return;
+ }
+
+ let joined_patterns = joined_uncovered_patterns(&cx, &witnesses);
+
+ let mut bindings = vec![];
+
+ let mut err = struct_span_err!(
+ self.tcx.sess,
+ pat.span,
+ E0005,
+ "refutable pattern in {}: {} not covered",
+ origin,
+ joined_patterns
+ );
+ let suggest_if_let = match &pat.kind {
+ hir::PatKind::Path(hir::QPath::Resolved(None, path))
+ if path.segments.len() == 1 && path.segments[0].args.is_none() =>
+ {
+ const_not_var(&mut err, cx.tcx, pat, path);
+ false
+ }
+ _ => {
+ pat.walk(&mut |pat: &hir::Pat<'_>| {
+ match pat.kind {
+ hir::PatKind::Binding(_, _, ident, _) => {
+ bindings.push(ident);
+ }
+ _ => {}
+ }
+ true
+ });
+
+ err.span_label(pat.span, pattern_not_covered_label(&witnesses, &joined_patterns));
+ true
+ }
+ };
+
+ if let (Some(span), true) = (sp, suggest_if_let) {
+ err.note(
+ "`let` bindings require an \"irrefutable pattern\", like a `struct` or \
+ an `enum` with only one variant",
+ );
+ if self.tcx.sess.source_map().is_span_accessible(span) {
+ let semi_span = span.shrink_to_hi().with_lo(span.hi() - BytePos(1));
+ let start_span = span.shrink_to_lo();
+ let end_span = semi_span.shrink_to_lo();
+ err.multipart_suggestion(
+ &format!(
+ "you might want to use `if let` to ignore the variant{} that {} matched",
+ pluralize!(witnesses.len()),
+ match witnesses.len() {
+ 1 => "isn't",
+ _ => "aren't",
+ },
+ ),
+ vec![
+ match &bindings[..] {
+ [] => (start_span, "if ".to_string()),
+ [binding] => (start_span, format!("let {} = if ", binding)),
+ bindings => (
+ start_span,
+ format!(
+ "let ({}) = if ",
+ bindings
+ .iter()
+ .map(|ident| ident.to_string())
+ .collect::<Vec<_>>()
+ .join(", ")
+ ),
+ ),
+ },
+ match &bindings[..] {
+ [] => (semi_span, " { todo!() }".to_string()),
+ [binding] => {
+ (end_span, format!(" {{ {} }} else {{ todo!() }}", binding))
+ }
+ bindings => (
+ end_span,
+ format!(
+ " {{ ({}) }} else {{ todo!() }}",
+ bindings
+ .iter()
+ .map(|ident| ident.to_string())
+ .collect::<Vec<_>>()
+ .join(", ")
+ ),
+ ),
+ },
+ ],
+ Applicability::HasPlaceholders,
+ );
+ if !bindings.is_empty() && cx.tcx.sess.is_nightly_build() {
+ err.span_suggestion_verbose(
+ semi_span.shrink_to_lo(),
+ &format!(
+ "alternatively, on nightly, you might want to use \
+ `#![feature(let_else)]` to handle the variant{} that {} matched",
+ pluralize!(witnesses.len()),
+ match witnesses.len() {
+ 1 => "isn't",
+ _ => "aren't",
+ },
+ ),
+ " else { todo!() }".to_string(),
+ Applicability::HasPlaceholders,
+ );
+ }
+ }
+ err.note(
+ "for more information, visit \
+ https://doc.rust-lang.org/book/ch18-02-refutability.html",
+ );
+ }
+
+ adt_defined_here(&cx, &mut err, pattern_ty, &witnesses);
+ err.note(&format!("the matched value is of type `{}`", pattern_ty));
+ err.emit();
+ }
+}
+
+/// A path pattern was interpreted as a constant, not a new variable.
+/// This caused an irrefutable match failure in e.g. `let`.
+fn const_not_var(err: &mut Diagnostic, tcx: TyCtxt<'_>, pat: &Pat<'_>, path: &hir::Path<'_>) {
+ let descr = path.res.descr();
+ err.span_label(
+ pat.span,
+ format!("interpreted as {} {} pattern, not a new variable", path.res.article(), descr,),
+ );
+
+ err.span_suggestion(
+ pat.span,
+ "introduce a variable instead",
+ format!("{}_var", path.segments[0].ident).to_lowercase(),
+ // Cannot use `MachineApplicable` as it's not really *always* correct
+ // because there may be such an identifier in scope or the user maybe
+ // really wanted to match against the constant. This is quite unlikely however.
+ Applicability::MaybeIncorrect,
+ );
+
+ if let Some(span) = tcx.hir().res_span(path.res) {
+ err.span_label(span, format!("{} defined here", descr));
+ }
+}
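+// An illustrative sketch of the situation reported above (hypothetical names, not from the
+// compiler's test suite):
+//
+// ```compile_fail,E0005
+// const MAX: u8 = 255;
+// fn f(x: u8) {
+//     let MAX = x; // `MAX` is interpreted as a constant pattern, not a new binding
+// }
+// ```
+//
+// Here the suggestion above would propose a lowercased binding name such as `max_var`.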
+
+fn check_for_bindings_named_same_as_variants(
+ cx: &MatchVisitor<'_, '_, '_>,
+ pat: &Pat<'_>,
+ rf: RefutableFlag,
+) {
+ pat.walk_always(|p| {
+ if let hir::PatKind::Binding(_, _, ident, None) = p.kind
+ && let Some(ty::BindByValue(hir::Mutability::Not)) =
+ cx.typeck_results.extract_binding_mode(cx.tcx.sess, p.hir_id, p.span)
+ && let pat_ty = cx.typeck_results.pat_ty(p).peel_refs()
+ && let ty::Adt(edef, _) = pat_ty.kind()
+ && edef.is_enum()
+ && edef.variants().iter().any(|variant| {
+ variant.ident(cx.tcx) == ident && variant.ctor_kind == CtorKind::Const
+ })
+ {
+ let variant_count = edef.variants().len();
+ cx.tcx.struct_span_lint_hir(
+ BINDINGS_WITH_VARIANT_NAME,
+ p.hir_id,
+ p.span,
+ |lint| {
+ let ty_path = cx.tcx.def_path_str(edef.did());
+ let mut err = lint.build(&format!(
+ "pattern binding `{}` is named the same as one \
+ of the variants of the type `{}`",
+ ident, ty_path
+ ));
+ err.code(error_code!(E0170));
+ // If this is an irrefutable pattern, and there's > 1 variant,
+ // then we can't actually match on this. Applying the below
+ // suggestion would produce code that breaks on `check_irrefutable`.
+ if rf == Refutable || variant_count == 1 {
+ err.span_suggestion(
+ p.span,
+ "to match on the variant, qualify the path",
+ format!("{}::{}", ty_path, ident),
+ Applicability::MachineApplicable,
+ );
+ }
+ err.emit();
+ },
+ )
+ }
+ });
+}
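+// A minimal sketch of the case this lint catches (hypothetical enum; the variant is a unit
+// variant that is not in scope by name, so the pattern is parsed as a fresh binding):
+//
+// ```
+// enum Suit { Hearts, Spades }
+// fn is_red(s: Suit) -> bool {
+//     match s {
+//         Hearts => true, // binds a new variable named `Hearts` and matches everything;
+//                         // linted, with a suggestion to qualify it as `Suit::Hearts`
+//     }
+// }
+// ```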
+
+/// Checks for common cases of "catchall" patterns that may not be intended as such.
+fn pat_is_catchall(pat: &DeconstructedPat<'_, '_>) -> bool {
+ use Constructor::*;
+ match pat.ctor() {
+ Wildcard => true,
+ Single => pat.iter_fields().all(|pat| pat_is_catchall(pat)),
+ _ => false,
+ }
+}
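+// For example (sketch): `_`, a plain binding like `x`, and a tuple of such patterns like
+// `(_, y)` all count as catchalls here, while `Some(_)` does not (its constructor is a
+// variant, not a wildcard).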
+
+fn unreachable_pattern(tcx: TyCtxt<'_>, span: Span, id: HirId, catchall: Option<Span>) {
+ tcx.struct_span_lint_hir(UNREACHABLE_PATTERNS, id, span, |lint| {
+ let mut err = lint.build("unreachable pattern");
+ if let Some(catchall) = catchall {
+ // We had a catchall pattern, hint at that.
+ err.span_label(span, "unreachable pattern");
+ err.span_label(catchall, "matches any value");
+ }
+ err.emit();
+ });
+}
+
+fn irrefutable_let_pattern(tcx: TyCtxt<'_>, id: HirId, span: Span) {
+ let source = let_source(tcx, id);
+ irrefutable_let_patterns(tcx, id, source, 1, span);
+}
+
+fn irrefutable_let_patterns(
+ tcx: TyCtxt<'_>,
+ id: HirId,
+ source: LetSource,
+ count: usize,
+ span: Span,
+) {
+ macro_rules! emit_diag {
+ (
+ $lint:expr,
+ $source_name:expr,
+            $note_suffix:expr,
+            $help_suffix:expr
+ ) => {{
+ let s = pluralize!(count);
+ let these = pluralize!("this", count);
+ let mut diag = $lint.build(&format!("irrefutable {} pattern{s}", $source_name));
+            diag.note(&format!("{these} pattern{s} will always match, so the {}", $note_suffix));
+            diag.help(concat!("consider ", $help_suffix));
+ diag.emit()
+ }};
+ }
+
+ let span = match source {
+ LetSource::LetElse(span) => span,
+ _ => span,
+ };
+ tcx.struct_span_lint_hir(IRREFUTABLE_LET_PATTERNS, id, span, |lint| match source {
+ LetSource::GenericLet => {
+ emit_diag!(lint, "`let`", "`let` is useless", "removing `let`");
+ }
+ LetSource::IfLet => {
+ emit_diag!(
+ lint,
+ "`if let`",
+ "`if let` is useless",
+ "replacing the `if let` with a `let`"
+ );
+ }
+ LetSource::IfLetGuard => {
+ emit_diag!(
+ lint,
+ "`if let` guard",
+ "guard is useless",
+ "removing the guard and adding a `let` inside the match arm"
+ );
+ }
+ LetSource::LetElse(..) => {
+ emit_diag!(
+ lint,
+ "`let...else`",
+ "`else` clause is useless",
+ "removing the `else` clause"
+ );
+ }
+ LetSource::WhileLet => {
+ emit_diag!(
+ lint,
+ "`while let`",
+ "loop will never exit",
+ "instead using a `loop { ... }` with a `let` inside it"
+ );
+ }
+ });
+}
+
+fn is_let_irrefutable<'p, 'tcx>(
+ cx: &mut MatchCheckCtxt<'p, 'tcx>,
+ pat_id: HirId,
+ pat: &'p DeconstructedPat<'p, 'tcx>,
+) -> bool {
+ let arms = [MatchArm { pat, hir_id: pat_id, has_guard: false }];
+ let report = compute_match_usefulness(&cx, &arms, pat_id, pat.ty());
+
+ // Report if the pattern is unreachable, which can only occur when the type is uninhabited.
+ // This also reports unreachable sub-patterns though, so we can't just replace it with an
+ // `is_uninhabited` check.
+ report_arm_reachability(&cx, &report);
+
+ // If the list of witnesses is empty, the match is exhaustive,
+ // i.e. the `if let` pattern is irrefutable.
+ report.non_exhaustiveness_witnesses.is_empty()
+}
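+// For example (sketch): in `if let (a, b) = pair { .. }` the pattern is irrefutable (no
+// uncovered witnesses, so this returns `true` and the caller lints it), while in
+// `if let Some(x) = opt { .. }` it is refutable because the witness `None` is not covered.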
+
+/// Report unreachable arms, if any.
+fn report_arm_reachability<'p, 'tcx>(
+ cx: &MatchCheckCtxt<'p, 'tcx>,
+ report: &UsefulnessReport<'p, 'tcx>,
+) {
+ use Reachability::*;
+ let mut catchall = None;
+ for (arm, is_useful) in report.arm_usefulness.iter() {
+ match is_useful {
+ Unreachable => unreachable_pattern(cx.tcx, arm.pat.span(), arm.hir_id, catchall),
+ Reachable(unreachables) if unreachables.is_empty() => {}
+ // The arm is reachable, but contains unreachable subpatterns (from or-patterns).
+ Reachable(unreachables) => {
+ let mut unreachables = unreachables.clone();
+ // Emit lints in the order in which they occur in the file.
+ unreachables.sort_unstable();
+ for span in unreachables {
+ unreachable_pattern(cx.tcx, span, arm.hir_id, None);
+ }
+ }
+ }
+ if !arm.has_guard && catchall.is_none() && pat_is_catchall(arm.pat) {
+ catchall = Some(arm.pat.span());
+ }
+ }
+}
+
+/// Report that a match is not exhaustive.
+fn non_exhaustive_match<'p, 'tcx>(
+ cx: &MatchCheckCtxt<'p, 'tcx>,
+ scrut_ty: Ty<'tcx>,
+ sp: Span,
+ witnesses: Vec<DeconstructedPat<'p, 'tcx>>,
+ arms: &[hir::Arm<'tcx>],
+ expr_span: Span,
+) {
+ let is_empty_match = arms.is_empty();
+ let non_empty_enum = match scrut_ty.kind() {
+ ty::Adt(def, _) => def.is_enum() && !def.variants().is_empty(),
+ _ => false,
+ };
+ // In the case of an empty match, replace the '`_` not covered' diagnostic with something more
+ // informative.
+ let mut err;
+ let pattern;
+ let mut patterns_len = 0;
+ if is_empty_match && !non_empty_enum {
+ err = create_e0004(
+ cx.tcx.sess,
+ sp,
+ format!("non-exhaustive patterns: type `{}` is non-empty", scrut_ty),
+ );
+ pattern = "_".to_string();
+ } else {
+ let joined_patterns = joined_uncovered_patterns(cx, &witnesses);
+ err = create_e0004(
+ cx.tcx.sess,
+ sp,
+ format!("non-exhaustive patterns: {} not covered", joined_patterns),
+ );
+ err.span_label(sp, pattern_not_covered_label(&witnesses, &joined_patterns));
+ patterns_len = witnesses.len();
+ pattern = if witnesses.len() < 4 {
+ witnesses
+ .iter()
+ .map(|witness| witness.to_pat(cx).to_string())
+ .collect::<Vec<String>>()
+ .join(" | ")
+ } else {
+ "_".to_string()
+ };
+ };
+
+ let is_variant_list_non_exhaustive = match scrut_ty.kind() {
+ ty::Adt(def, _) if def.is_variant_list_non_exhaustive() && !def.did().is_local() => true,
+ _ => false,
+ };
+
+ adt_defined_here(cx, &mut err, scrut_ty, &witnesses);
+ err.note(&format!(
+ "the matched value is of type `{}`{}",
+ scrut_ty,
+ if is_variant_list_non_exhaustive { ", which is marked as non-exhaustive" } else { "" }
+ ));
+ if (scrut_ty == cx.tcx.types.usize || scrut_ty == cx.tcx.types.isize)
+ && !is_empty_match
+ && witnesses.len() == 1
+ && matches!(witnesses[0].ctor(), Constructor::NonExhaustive)
+ {
+ err.note(&format!(
+ "`{}` does not have a fixed maximum value, so a wildcard `_` is necessary to match \
+ exhaustively",
+ scrut_ty,
+ ));
+ if cx.tcx.sess.is_nightly_build() {
+ err.help(&format!(
+ "add `#![feature(precise_pointer_size_matching)]` to the crate attributes to \
+ enable precise `{}` matching",
+ scrut_ty,
+ ));
+ }
+ }
+ if let ty::Ref(_, sub_ty, _) = scrut_ty.kind() {
+ if cx.tcx.is_ty_uninhabited_from(cx.module, *sub_ty, cx.param_env) {
+ err.note("references are always considered inhabited");
+ }
+ }
+
+ let mut suggestion = None;
+ let sm = cx.tcx.sess.source_map();
+ match arms {
+ [] if sp.eq_ctxt(expr_span) => {
+ // Get the span for the empty match body `{}`.
+ let (indentation, more) = if let Some(snippet) = sm.indentation_before(sp) {
+ (format!("\n{}", snippet), " ")
+ } else {
+ (" ".to_string(), "")
+ };
+ suggestion = Some((
+ sp.shrink_to_hi().with_hi(expr_span.hi()),
+ format!(
+ " {{{indentation}{more}{pattern} => todo!(),{indentation}}}",
+ indentation = indentation,
+ more = more,
+ pattern = pattern,
+ ),
+ ));
+ }
+ [only] => {
+ let (pre_indentation, is_multiline) = if let Some(snippet) = sm.indentation_before(only.span)
+ && let Ok(with_trailing) = sm.span_extend_while(only.span, |c| c.is_whitespace() || c == ',')
+ && sm.is_multiline(with_trailing)
+ {
+ (format!("\n{}", snippet), true)
+ } else {
+ (" ".to_string(), false)
+ };
+ let comma = if matches!(only.body.kind, hir::ExprKind::Block(..))
+ && only.span.eq_ctxt(only.body.span)
+ && is_multiline
+ {
+ ""
+ } else {
+ ","
+ };
+ suggestion = Some((
+ only.span.shrink_to_hi(),
+ format!("{}{}{} => todo!()", comma, pre_indentation, pattern),
+ ));
+ }
+ [.., prev, last] if prev.span.eq_ctxt(last.span) => {
+ if let Ok(snippet) = sm.span_to_snippet(prev.span.between(last.span)) {
+ let comma = if matches!(last.body.kind, hir::ExprKind::Block(..))
+ && last.span.eq_ctxt(last.body.span)
+ {
+ ""
+ } else {
+ ","
+ };
+ suggestion = Some((
+ last.span.shrink_to_hi(),
+ format!(
+ "{}{}{} => todo!()",
+ comma,
+ snippet.strip_prefix(',').unwrap_or(&snippet),
+ pattern
+ ),
+ ));
+ }
+ }
+ _ => {}
+ }
+
+ let msg = format!(
+ "ensure that all possible cases are being handled by adding a match arm with a wildcard \
+ pattern{}{}",
+ if patterns_len > 1 && patterns_len < 4 && suggestion.is_some() {
+ ", a match arm with multiple or-patterns"
+ } else {
+ // we are either not suggesting anything, or suggesting `_`
+ ""
+ },
+ match patterns_len {
+ // non-exhaustive enum case
+ 0 if suggestion.is_some() => " as shown",
+ 0 => "",
+ 1 if suggestion.is_some() => " or an explicit pattern as shown",
+ 1 => " or an explicit pattern",
+ _ if suggestion.is_some() => " as shown, or multiple match arms",
+ _ => " or multiple match arms",
+ },
+ );
+ if let Some((span, sugg)) = suggestion {
+ err.span_suggestion_verbose(span, &msg, sugg, Applicability::HasPlaceholders);
+ } else {
+ err.help(&msg);
+ }
+ err.emit();
+}
+
+pub(crate) fn joined_uncovered_patterns<'p, 'tcx>(
+ cx: &MatchCheckCtxt<'p, 'tcx>,
+ witnesses: &[DeconstructedPat<'p, 'tcx>],
+) -> String {
+ const LIMIT: usize = 3;
+ let pat_to_str = |pat: &DeconstructedPat<'p, 'tcx>| pat.to_pat(cx).to_string();
+ match witnesses {
+ [] => bug!(),
+ [witness] => format!("`{}`", witness.to_pat(cx)),
+ [head @ .., tail] if head.len() < LIMIT => {
+ let head: Vec<_> = head.iter().map(pat_to_str).collect();
+ format!("`{}` and `{}`", head.join("`, `"), tail.to_pat(cx))
+ }
+ _ => {
+ let (head, tail) = witnesses.split_at(LIMIT);
+ let head: Vec<_> = head.iter().map(pat_to_str).collect();
+ format!("`{}` and {} more", head.join("`, `"), tail.len())
+ }
+ }
+}
+
+pub(crate) fn pattern_not_covered_label(
+ witnesses: &[DeconstructedPat<'_, '_>],
+ joined_patterns: &str,
+) -> String {
+ format!("pattern{} {} not covered", rustc_errors::pluralize!(witnesses.len()), joined_patterns)
+}
+
+/// Point at the definition of non-covered `enum` variants.
+fn adt_defined_here<'p, 'tcx>(
+ cx: &MatchCheckCtxt<'p, 'tcx>,
+ err: &mut Diagnostic,
+ ty: Ty<'tcx>,
+ witnesses: &[DeconstructedPat<'p, 'tcx>],
+) {
+ let ty = ty.peel_refs();
+ if let ty::Adt(def, _) = ty.kind() {
+ let mut spans = vec![];
+ if witnesses.len() < 5 {
+ for sp in maybe_point_at_variant(cx, *def, witnesses.iter()) {
+ spans.push(sp);
+ }
+ }
+ let def_span = cx
+ .tcx
+ .hir()
+ .get_if_local(def.did())
+ .and_then(|node| node.ident())
+ .map(|ident| ident.span)
+ .unwrap_or_else(|| cx.tcx.def_span(def.did()));
+ let mut span: MultiSpan =
+ if spans.is_empty() { def_span.into() } else { spans.clone().into() };
+
+ span.push_span_label(def_span, "");
+ for pat in spans {
+ span.push_span_label(pat, "not covered");
+ }
+ err.span_note(span, &format!("`{}` defined here", ty));
+ }
+}
+
+fn maybe_point_at_variant<'a, 'p: 'a, 'tcx: 'a>(
+ cx: &MatchCheckCtxt<'p, 'tcx>,
+ def: AdtDef<'tcx>,
+ patterns: impl Iterator<Item = &'a DeconstructedPat<'p, 'tcx>>,
+) -> Vec<Span> {
+ use Constructor::*;
+ let mut covered = vec![];
+ for pattern in patterns {
+ if let Variant(variant_index) = pattern.ctor() {
+ if let ty::Adt(this_def, _) = pattern.ty().kind() && this_def.did() != def.did() {
+ continue;
+ }
+ let sp = def.variant(*variant_index).ident(cx.tcx).span;
+ if covered.contains(&sp) {
+ // Don't point at variants that have already been covered due to other patterns to avoid
+ // visual clutter.
+ continue;
+ }
+ covered.push(sp);
+ }
+ covered.extend(maybe_point_at_variant(cx, def, pattern.iter_fields()));
+ }
+ covered
+}
+
+/// Check whether a by-value binding is actually by-move, i.e. whether the binding's type is not
+/// `Copy`.
+fn is_binding_by_move(cx: &MatchVisitor<'_, '_, '_>, hir_id: HirId, span: Span) -> bool {
+ !cx.typeck_results.node_type(hir_id).is_copy_modulo_regions(cx.tcx.at(span), cx.param_env)
+}
+
+/// Check that there are no borrow or move conflicts in `binding @ subpat` patterns.
+///
+/// For example, this would reject:
+/// - `ref x @ Some(ref mut y)`,
+/// - `ref mut x @ Some(ref y)`,
+/// - `ref mut x @ Some(ref mut y)`,
+/// - `ref mut? x @ Some(y)`, and
+/// - `x @ Some(ref mut? y)`.
+///
+/// This analysis is *not* subsumed by NLL.
+fn check_borrow_conflicts_in_at_patterns(cx: &MatchVisitor<'_, '_, '_>, pat: &Pat<'_>) {
+ // Extract `sub` in `binding @ sub`.
+ let (name, sub) = match &pat.kind {
+ hir::PatKind::Binding(.., name, Some(sub)) => (*name, sub),
+ _ => return,
+ };
+ let binding_span = pat.span.with_hi(name.span.hi());
+
+ let typeck_results = cx.typeck_results;
+ let sess = cx.tcx.sess;
+
+ // Get the binding move, extract the mutability if by-ref.
+ let mut_outer = match typeck_results.extract_binding_mode(sess, pat.hir_id, pat.span) {
+ Some(ty::BindByValue(_)) if is_binding_by_move(cx, pat.hir_id, pat.span) => {
+ // We have `x @ pat` where `x` is by-move. Reject all borrows in `pat`.
+ let mut conflicts_ref = Vec::new();
+ sub.each_binding(|_, hir_id, span, _| {
+ match typeck_results.extract_binding_mode(sess, hir_id, span) {
+ Some(ty::BindByValue(_)) | None => {}
+ Some(ty::BindByReference(_)) => conflicts_ref.push(span),
+ }
+ });
+ if !conflicts_ref.is_empty() {
+ let occurs_because = format!(
+ "move occurs because `{}` has type `{}` which does not implement the `Copy` trait",
+ name,
+ typeck_results.node_type(pat.hir_id),
+ );
+ sess.struct_span_err(pat.span, "borrow of moved value")
+ .span_label(binding_span, format!("value moved into `{}` here", name))
+ .span_label(binding_span, occurs_because)
+ .span_labels(conflicts_ref, "value borrowed here after move")
+ .emit();
+ }
+ return;
+ }
+ Some(ty::BindByValue(_)) | None => return,
+ Some(ty::BindByReference(m)) => m,
+ };
+
+ // We now have `ref $mut_outer binding @ sub` (semantically).
+ // Recurse into each binding in `sub` and find mutability or move conflicts.
+ let mut conflicts_move = Vec::new();
+ let mut conflicts_mut_mut = Vec::new();
+ let mut conflicts_mut_ref = Vec::new();
+ sub.each_binding(|_, hir_id, span, name| {
+ match typeck_results.extract_binding_mode(sess, hir_id, span) {
+ Some(ty::BindByReference(mut_inner)) => match (mut_outer, mut_inner) {
+ (Mutability::Not, Mutability::Not) => {} // Both sides are `ref`.
+ (Mutability::Mut, Mutability::Mut) => conflicts_mut_mut.push((span, name)), // 2x `ref mut`.
+ _ => conflicts_mut_ref.push((span, name)), // `ref` + `ref mut` in either direction.
+ },
+ Some(ty::BindByValue(_)) if is_binding_by_move(cx, hir_id, span) => {
+ conflicts_move.push((span, name)) // `ref mut?` + by-move conflict.
+ }
+ Some(ty::BindByValue(_)) | None => {} // `ref mut?` + by-copy is fine.
+ }
+ });
+
+ // Report errors if any.
+ if !conflicts_mut_mut.is_empty() {
+ // Report mutability conflicts for e.g. `ref mut x @ Some(ref mut y)`.
+ let mut err = sess
+ .struct_span_err(pat.span, "cannot borrow value as mutable more than once at a time");
+ err.span_label(binding_span, format!("first mutable borrow, by `{}`, occurs here", name));
+ for (span, name) in conflicts_mut_mut {
+ err.span_label(span, format!("another mutable borrow, by `{}`, occurs here", name));
+ }
+ for (span, name) in conflicts_mut_ref {
+ err.span_label(span, format!("also borrowed as immutable, by `{}`, here", name));
+ }
+ for (span, name) in conflicts_move {
+ err.span_label(span, format!("also moved into `{}` here", name));
+ }
+ err.emit();
+ } else if !conflicts_mut_ref.is_empty() {
+ // Report mutability conflicts for e.g. `ref x @ Some(ref mut y)` or the converse.
+ let (primary, also) = match mut_outer {
+ Mutability::Mut => ("mutable", "immutable"),
+ Mutability::Not => ("immutable", "mutable"),
+ };
+ let msg =
+ format!("cannot borrow value as {} because it is also borrowed as {}", also, primary);
+ let mut err = sess.struct_span_err(pat.span, &msg);
+ err.span_label(binding_span, format!("{} borrow, by `{}`, occurs here", primary, name));
+ for (span, name) in conflicts_mut_ref {
+ err.span_label(span, format!("{} borrow, by `{}`, occurs here", also, name));
+ }
+ for (span, name) in conflicts_move {
+ err.span_label(span, format!("also moved into `{}` here", name));
+ }
+ err.emit();
+ } else if !conflicts_move.is_empty() {
+ // Report by-ref and by-move conflicts, e.g. `ref x @ y`.
+ let mut err =
+ sess.struct_span_err(pat.span, "cannot move out of value because it is borrowed");
+ err.span_label(binding_span, format!("value borrowed, by `{}`, here", name));
+ for (span, name) in conflicts_move {
+ err.span_label(span, format!("value moved into `{}` here", name));
+ }
+ err.emit();
+ }
+}
+
+#[derive(Clone, Copy, Debug)]
+pub enum LetSource {
+ GenericLet,
+ IfLet,
+ IfLetGuard,
+ LetElse(Span),
+ WhileLet,
+}
+
+fn let_source(tcx: TyCtxt<'_>, pat_id: HirId) -> LetSource {
+ let hir = tcx.hir();
+
+ let parent = hir.get_parent_node(pat_id);
+ let_source_parent(tcx, parent, Some(pat_id))
+}
+
+fn let_source_parent(tcx: TyCtxt<'_>, parent: HirId, pat_id: Option<HirId>) -> LetSource {
+ let hir = tcx.hir();
+
+ let parent_node = hir.get(parent);
+
+ match parent_node {
+ hir::Node::Arm(hir::Arm {
+ guard: Some(hir::Guard::IfLet(&hir::Let { pat: hir::Pat { hir_id, .. }, .. })),
+ ..
+ }) if Some(*hir_id) == pat_id => {
+ return LetSource::IfLetGuard;
+ }
+ _ => {}
+ }
+
+ let parent_parent = hir.get_parent_node(parent);
+ let parent_parent_node = hir.get(parent_parent);
+ if let hir::Node::Stmt(hir::Stmt { kind: hir::StmtKind::Local(_), span, .. }) =
+ parent_parent_node
+ {
+ return LetSource::LetElse(*span);
+ }
+
+ let parent_parent_parent = hir.get_parent_node(parent_parent);
+ let parent_parent_parent_parent = hir.get_parent_node(parent_parent_parent);
+ let parent_parent_parent_parent_node = hir.get(parent_parent_parent_parent);
+
+ if let hir::Node::Expr(hir::Expr {
+ kind: hir::ExprKind::Loop(_, _, hir::LoopSource::While, _),
+ ..
+ }) = parent_parent_parent_parent_node
+ {
+ return LetSource::WhileLet;
+ }
+
+ if let hir::Node::Expr(hir::Expr { kind: hir::ExprKind::If(..), .. }) = parent_parent_node {
+ return LetSource::IfLet;
+ }
+
+ LetSource::GenericLet
+}
diff --git a/compiler/rustc_mir_build/src/thir/pattern/const_to_pat.rs b/compiler/rustc_mir_build/src/thir/pattern/const_to_pat.rs
new file mode 100644
index 000000000..d6dd0f017
--- /dev/null
+++ b/compiler/rustc_mir_build/src/thir/pattern/const_to_pat.rs
@@ -0,0 +1,601 @@
+use rustc_hir as hir;
+use rustc_index::vec::Idx;
+use rustc_infer::infer::{InferCtxt, TyCtxtInferExt};
+use rustc_middle::mir::{self, Field};
+use rustc_middle::thir::{FieldPat, Pat, PatKind};
+use rustc_middle::ty::print::with_no_trimmed_paths;
+use rustc_middle::ty::{self, AdtDef, Ty, TyCtxt};
+use rustc_session::lint;
+use rustc_span::Span;
+use rustc_trait_selection::traits::predicate_for_trait_def;
+use rustc_trait_selection::traits::query::evaluate_obligation::InferCtxtExt;
+use rustc_trait_selection::traits::{self, ObligationCause, PredicateObligation};
+
+use std::cell::Cell;
+
+use super::PatCtxt;
+
+impl<'a, 'tcx> PatCtxt<'a, 'tcx> {
+ /// Converts an evaluated constant to a pattern (if possible).
+ /// This means aggregate values (like structs and enums) are converted
+ /// to a pattern that matches the value (as if you'd compared via structural equality).
+ #[instrument(level = "debug", skip(self))]
+ pub(super) fn const_to_pat(
+ &self,
+ cv: mir::ConstantKind<'tcx>,
+ id: hir::HirId,
+ span: Span,
+ mir_structural_match_violation: bool,
+ ) -> Pat<'tcx> {
+ let pat = self.tcx.infer_ctxt().enter(|infcx| {
+ let mut convert = ConstToPat::new(self, id, span, infcx);
+ convert.to_pat(cv, mir_structural_match_violation)
+ });
+
+ debug!(?pat);
+ pat
+ }
+}
+
+struct ConstToPat<'a, 'tcx> {
+ id: hir::HirId,
+ span: Span,
+ param_env: ty::ParamEnv<'tcx>,
+
+ // This tracks if we emitted some hard error for a given const value, so that
+ // we will not subsequently issue an irrelevant lint for the same const
+ // value.
+ saw_const_match_error: Cell<bool>,
+
+ // This tracks if we emitted some diagnostic for a given const value, so that
+ // we will not subsequently issue an irrelevant lint for the same const
+ // value.
+ saw_const_match_lint: Cell<bool>,
+
+ // For backcompat we need to keep allowing non-structurally-eq types behind references.
+ // See also all the `cant-hide-behind` tests.
+ behind_reference: Cell<bool>,
+
+ // inference context used for checking `T: Structural` bounds.
+ infcx: InferCtxt<'a, 'tcx>,
+
+ include_lint_checks: bool,
+
+ treat_byte_string_as_slice: bool,
+}
+
+mod fallback_to_const_ref {
+ #[derive(Debug)]
+ /// This error type signals that we encountered a non-struct-eq situation behind a reference.
+ /// We bubble this up in order to get back to the reference destructuring and make that emit
+ /// a const pattern instead of a deref pattern. This allows us to simply call `PartialEq::eq`
+ /// on such patterns (since that function takes a reference) and not have to jump through any
+ /// hoops to get a reference to the value.
+ pub(super) struct FallbackToConstRef(());
+
+ pub(super) fn fallback_to_const_ref<'a, 'tcx>(
+ c2p: &super::ConstToPat<'a, 'tcx>,
+ ) -> FallbackToConstRef {
+ assert!(c2p.behind_reference.get());
+ FallbackToConstRef(())
+ }
+}
+use fallback_to_const_ref::{fallback_to_const_ref, FallbackToConstRef};
+
+impl<'a, 'tcx> ConstToPat<'a, 'tcx> {
+ fn new(
+ pat_ctxt: &PatCtxt<'_, 'tcx>,
+ id: hir::HirId,
+ span: Span,
+ infcx: InferCtxt<'a, 'tcx>,
+ ) -> Self {
+ trace!(?pat_ctxt.typeck_results.hir_owner);
+ ConstToPat {
+ id,
+ span,
+ infcx,
+ param_env: pat_ctxt.param_env,
+ include_lint_checks: pat_ctxt.include_lint_checks,
+ saw_const_match_error: Cell::new(false),
+ saw_const_match_lint: Cell::new(false),
+ behind_reference: Cell::new(false),
+ treat_byte_string_as_slice: pat_ctxt
+ .typeck_results
+ .treat_byte_string_as_slice
+ .contains(&id.local_id),
+ }
+ }
+
+ fn tcx(&self) -> TyCtxt<'tcx> {
+ self.infcx.tcx
+ }
+
+ fn adt_derive_msg(&self, adt_def: AdtDef<'tcx>) -> String {
+ let path = self.tcx().def_path_str(adt_def.did());
+ format!(
+ "to use a constant of type `{}` in a pattern, \
+ `{}` must be annotated with `#[derive(PartialEq, Eq)]`",
+ path, path,
+ )
+ }
+
+ fn search_for_structural_match_violation(&self, ty: Ty<'tcx>) -> Option<String> {
+ traits::search_for_structural_match_violation(self.span, self.tcx(), ty).map(|non_sm_ty| {
+ with_no_trimmed_paths!(match non_sm_ty.kind() {
+ ty::Adt(adt, _) => self.adt_derive_msg(*adt),
+ ty::Dynamic(..) => {
+ "trait objects cannot be used in patterns".to_string()
+ }
+ ty::Opaque(..) => {
+ "opaque types cannot be used in patterns".to_string()
+ }
+ ty::Closure(..) => {
+ "closures cannot be used in patterns".to_string()
+ }
+ ty::Generator(..) | ty::GeneratorWitness(..) => {
+ "generators cannot be used in patterns".to_string()
+ }
+ ty::Float(..) => {
+ "floating-point numbers cannot be used in patterns".to_string()
+ }
+ ty::FnPtr(..) => {
+ "function pointers cannot be used in patterns".to_string()
+ }
+ ty::RawPtr(..) => {
+ "raw pointers cannot be used in patterns".to_string()
+ }
+ _ => {
+ bug!("use of a value of `{non_sm_ty}` inside a pattern")
+ }
+ })
+ })
+ }
+
+ fn type_marked_structural(&self, ty: Ty<'tcx>) -> bool {
+ ty.is_structural_eq_shallow(self.infcx.tcx)
+ }
+
+ fn to_pat(
+ &mut self,
+ cv: mir::ConstantKind<'tcx>,
+ mir_structural_match_violation: bool,
+ ) -> Pat<'tcx> {
+ trace!(self.treat_byte_string_as_slice);
+ // This method is just a wrapper handling a validity check; the heavy lifting is
+ // performed by the recursive `recur` method, which is not meant to be
+ // invoked except by this method.
+ //
+        // Once `indirect_structural_match` is a full-fledged error, this
+        // level of indirection can be eliminated.
+
+ let inlined_const_as_pat = self.recur(cv, mir_structural_match_violation).unwrap();
+
+ if self.include_lint_checks && !self.saw_const_match_error.get() {
+ // If we were able to successfully convert the const to some pat,
+ // double-check that all types in the const implement `Structural`.
+
+ let structural = self.search_for_structural_match_violation(cv.ty());
+ debug!(
+ "search_for_structural_match_violation cv.ty: {:?} returned: {:?}",
+ cv.ty(),
+ structural
+ );
+
+ // This can occur because const qualification treats all associated constants as
+ // opaque, whereas `search_for_structural_match_violation` tries to monomorphize them
+ // before it runs.
+ //
+ // FIXME(#73448): Find a way to bring const qualification into parity with
+ // `search_for_structural_match_violation`.
+ if structural.is_none() && mir_structural_match_violation {
+ warn!("MIR const-checker found novel structural match violation. See #73448.");
+ return inlined_const_as_pat;
+ }
+
+ if let Some(msg) = structural {
+ if !self.type_may_have_partial_eq_impl(cv.ty()) {
+ // span_fatal avoids ICE from resolution of non-existent method (rare case).
+ self.tcx().sess.span_fatal(self.span, &msg);
+ } else if mir_structural_match_violation && !self.saw_const_match_lint.get() {
+ self.tcx().struct_span_lint_hir(
+ lint::builtin::INDIRECT_STRUCTURAL_MATCH,
+ self.id,
+ self.span,
+ |lint| {
+ lint.build(&msg).emit();
+ },
+ );
+ } else {
+ debug!(
+ "`search_for_structural_match_violation` found one, but `CustomEq` was \
+ not in the qualifs for that `const`"
+ );
+ }
+ }
+ }
+
+ inlined_const_as_pat
+ }
+
+ fn type_may_have_partial_eq_impl(&self, ty: Ty<'tcx>) -> bool {
+ // double-check there even *is* a semantic `PartialEq` to dispatch to.
+ //
+        // (If there isn't, then we can safely issue a hard
+        // error, because such code has never worked: the compiler
+        // has always used `PartialEq::eq` in this scenario.)
+ let partial_eq_trait_id =
+ self.tcx().require_lang_item(hir::LangItem::PartialEq, Some(self.span));
+ let obligation: PredicateObligation<'_> = predicate_for_trait_def(
+ self.tcx(),
+ self.param_env,
+ ObligationCause::misc(self.span, self.id),
+ partial_eq_trait_id,
+ 0,
+ ty,
+ &[],
+ );
+ // FIXME: should this call a `predicate_must_hold` variant instead?
+
+ let has_impl = self.infcx.predicate_may_hold(&obligation);
+
+ // Note: To fix rust-lang/rust#65466, we could just remove this type
+ // walk hack for function pointers, and unconditionally error
+ // if `PartialEq` is not implemented. However, that breaks stable
+ // code at the moment, because types like `for <'a> fn(&'a ())` do
+ // not *yet* implement `PartialEq`. So for now we leave this here.
+ has_impl
+ || ty.walk().any(|t| match t.unpack() {
+ ty::subst::GenericArgKind::Lifetime(_) => false,
+ ty::subst::GenericArgKind::Type(t) => t.is_fn_ptr(),
+ ty::subst::GenericArgKind::Const(_) => false,
+ })
+ }
+
+ fn field_pats(
+ &self,
+ vals: impl Iterator<Item = mir::ConstantKind<'tcx>>,
+ ) -> Result<Vec<FieldPat<'tcx>>, FallbackToConstRef> {
+ vals.enumerate()
+ .map(|(idx, val)| {
+ let field = Field::new(idx);
+ Ok(FieldPat { field, pattern: self.recur(val, false)? })
+ })
+ .collect()
+ }
+
+ // Recursive helper for `to_pat`; invoke that (instead of calling this directly).
+ #[instrument(skip(self), level = "debug")]
+ fn recur(
+ &self,
+ cv: mir::ConstantKind<'tcx>,
+ mir_structural_match_violation: bool,
+ ) -> Result<Pat<'tcx>, FallbackToConstRef> {
+ let id = self.id;
+ let span = self.span;
+ let tcx = self.tcx();
+ let param_env = self.param_env;
+
+ let kind = match cv.ty().kind() {
+ ty::Float(_) => {
+ if self.include_lint_checks {
+ tcx.struct_span_lint_hir(
+ lint::builtin::ILLEGAL_FLOATING_POINT_LITERAL_PATTERN,
+ id,
+ span,
+ |lint| {
+ lint.build("floating-point types cannot be used in patterns").emit();
+ },
+ );
+ }
+ PatKind::Constant { value: cv }
+ }
+ ty::Adt(adt_def, _) if adt_def.is_union() => {
+                // Matching on union fields is unsafe; we can't hide it in constants.
+ self.saw_const_match_error.set(true);
+ let msg = "cannot use unions in constant patterns";
+ if self.include_lint_checks {
+ tcx.sess.span_err(span, msg);
+ } else {
+ tcx.sess.delay_span_bug(span, msg);
+ }
+ PatKind::Wild
+ }
+ ty::Adt(..)
+ if !self.type_may_have_partial_eq_impl(cv.ty())
+ // FIXME(#73448): Find a way to bring const qualification into parity with
+ // `search_for_structural_match_violation` and then remove this condition.
+ && self.search_for_structural_match_violation(cv.ty()).is_some() =>
+ {
+ // Obtain the actual type that isn't annotated. If we just looked at `cv.ty` we
+ // could get `Option<NonStructEq>`, even though `Option` is annotated with derive.
+ let msg = self.search_for_structural_match_violation(cv.ty()).unwrap();
+ self.saw_const_match_error.set(true);
+ if self.include_lint_checks {
+ tcx.sess.span_err(self.span, &msg);
+ } else {
+ tcx.sess.delay_span_bug(self.span, &msg);
+ }
+ PatKind::Wild
+ }
+ // If the type is not structurally comparable, just emit the constant directly,
+ // causing the pattern match code to treat it opaquely.
+            // FIXME: This code doesn't emit errors itself; the caller emits the errors.
+ // So instead of specific errors, you just get blanket errors about the whole
+ // const type. See
+ // https://github.com/rust-lang/rust/pull/70743#discussion_r404701963 for
+ // details.
+ // Backwards compatibility hack because we can't cause hard errors on these
+ // types, so we compare them via `PartialEq::eq` at runtime.
+ ty::Adt(..) if !self.type_marked_structural(cv.ty()) && self.behind_reference.get() => {
+ if self.include_lint_checks
+ && !self.saw_const_match_error.get()
+ && !self.saw_const_match_lint.get()
+ {
+ self.saw_const_match_lint.set(true);
+ tcx.struct_span_lint_hir(
+ lint::builtin::INDIRECT_STRUCTURAL_MATCH,
+ id,
+ span,
+ |lint| {
+ let msg = format!(
+ "to use a constant of type `{}` in a pattern, \
+ `{}` must be annotated with `#[derive(PartialEq, Eq)]`",
+ cv.ty(),
+ cv.ty(),
+ );
+ lint.build(&msg).emit();
+ },
+ );
+ }
+ // Since we are behind a reference, we can just bubble the error up so we get a
+ // constant at reference type, making it easy to let the fallback call
+ // `PartialEq::eq` on it.
+ return Err(fallback_to_const_ref(self));
+ }
+ ty::Adt(adt_def, _) if !self.type_marked_structural(cv.ty()) => {
+ debug!(
+ "adt_def {:?} has !type_marked_structural for cv.ty: {:?}",
+ adt_def,
+ cv.ty()
+ );
+ let path = tcx.def_path_str(adt_def.did());
+ let msg = format!(
+ "to use a constant of type `{}` in a pattern, \
+ `{}` must be annotated with `#[derive(PartialEq, Eq)]`",
+ path, path,
+ );
+ self.saw_const_match_error.set(true);
+ if self.include_lint_checks {
+ tcx.sess.span_err(span, &msg);
+ } else {
+ tcx.sess.delay_span_bug(span, &msg);
+ }
+ PatKind::Wild
+ }
+ ty::Adt(adt_def, substs) if adt_def.is_enum() => {
+ let destructured = tcx.destructure_mir_constant(param_env, cv);
+
+ PatKind::Variant {
+ adt_def: *adt_def,
+ substs,
+ variant_index: destructured
+ .variant
+ .expect("destructed const of adt without variant id"),
+ subpatterns: self.field_pats(destructured.fields.iter().copied())?,
+ }
+ }
+ ty::Tuple(_) | ty::Adt(_, _) => {
+ let destructured = tcx.destructure_mir_constant(param_env, cv);
+ PatKind::Leaf { subpatterns: self.field_pats(destructured.fields.iter().copied())? }
+ }
+ ty::Array(..) => PatKind::Array {
+ prefix: tcx
+ .destructure_mir_constant(param_env, cv)
+ .fields
+ .iter()
+ .map(|val| self.recur(*val, false))
+ .collect::<Result<_, _>>()?,
+ slice: None,
+ suffix: Vec::new(),
+ },
+ ty::Ref(_, pointee_ty, ..) => match *pointee_ty.kind() {
+ // These are not allowed and will error elsewhere anyway.
+ ty::Dynamic(..) => {
+ self.saw_const_match_error.set(true);
+ let msg = format!("`{}` cannot be used in patterns", cv.ty());
+ if self.include_lint_checks {
+ tcx.sess.span_err(span, &msg);
+ } else {
+ tcx.sess.delay_span_bug(span, &msg);
+ }
+ PatKind::Wild
+ }
+                // `&str` is represented as `ConstValue::Slice`; let's keep using this
+ // optimization for now.
+ ty::Str => PatKind::Constant { value: cv },
+ // `b"foo"` produces a `&[u8; 3]`, but you can't use constants of array type when
+                // matching against references; you can only use byte string literals.
+                // The typechecker has a special case for byte string literals and treats them
+ // as slices. This means we turn `&[T; N]` constants into slice patterns, which
+ // has no negative effects on pattern matching, even if we're actually matching on
+ // arrays.
+ ty::Array(..) if !self.treat_byte_string_as_slice => {
+ let old = self.behind_reference.replace(true);
+ let array = tcx.deref_mir_constant(self.param_env.and(cv));
+ let val = PatKind::Deref {
+ subpattern: Pat {
+ kind: Box::new(PatKind::Array {
+ prefix: tcx
+ .destructure_mir_constant(param_env, array)
+ .fields
+ .iter()
+ .map(|val| self.recur(*val, false))
+ .collect::<Result<_, _>>()?,
+ slice: None,
+ suffix: vec![],
+ }),
+ span,
+ ty: *pointee_ty,
+ },
+ };
+ self.behind_reference.set(old);
+ val
+ }
+ ty::Array(elem_ty, _) |
+                // Cannot merge this with the catch-all branch below, because `const_deref`
+                // changes the type from slice to array, and we need to keep the original type
+                // in the pattern.
+ ty::Slice(elem_ty) => {
+ let old = self.behind_reference.replace(true);
+ let array = tcx.deref_mir_constant(self.param_env.and(cv));
+ let val = PatKind::Deref {
+ subpattern: Pat {
+ kind: Box::new(PatKind::Slice {
+ prefix: tcx
+ .destructure_mir_constant(param_env, array)
+ .fields
+ .iter()
+ .map(|val| self.recur(*val, false))
+ .collect::<Result<_, _>>()?,
+ slice: None,
+ suffix: vec![],
+ }),
+ span,
+ ty: tcx.mk_slice(elem_ty),
+ },
+ };
+ self.behind_reference.set(old);
+ val
+ }
+ // Backwards compatibility hack: support references to non-structural types.
+ // We'll lower
+ // this pattern to a `PartialEq::eq` comparison and `PartialEq::eq` takes a
+ // reference. This makes the rest of the matching logic simpler as it doesn't have
+ // to figure out how to get a reference again.
+ ty::Adt(adt_def, _) if !self.type_marked_structural(*pointee_ty) => {
+ if self.behind_reference.get() {
+ if self.include_lint_checks
+ && !self.saw_const_match_error.get()
+ && !self.saw_const_match_lint.get()
+ {
+ self.saw_const_match_lint.set(true);
+ let msg = self.adt_derive_msg(adt_def);
+ self.tcx().struct_span_lint_hir(
+ lint::builtin::INDIRECT_STRUCTURAL_MATCH,
+ self.id,
+ self.span,
+ |lint| {lint.build(&msg).emit();},
+ );
+ }
+ PatKind::Constant { value: cv }
+ } else {
+ if !self.saw_const_match_error.get() {
+ self.saw_const_match_error.set(true);
+ let msg = self.adt_derive_msg(adt_def);
+ if self.include_lint_checks {
+ tcx.sess.span_err(span, &msg);
+ } else {
+ tcx.sess.delay_span_bug(span, &msg);
+ }
+ }
+ PatKind::Wild
+ }
+ }
+ // All other references are converted into deref patterns and then recursively
+ // convert the dereferenced constant to a pattern that is the sub-pattern of the
+ // deref pattern.
+ _ => {
+ if !pointee_ty.is_sized(tcx.at(span), param_env) {
+ // `tcx.deref_mir_constant()` below will ICE with an unsized type
+ // (except slices, which are handled in a separate arm above).
+ let msg = format!("cannot use unsized non-slice type `{}` in constant patterns", pointee_ty);
+ if self.include_lint_checks {
+ tcx.sess.span_err(span, &msg);
+ } else {
+ tcx.sess.delay_span_bug(span, &msg);
+ }
+ PatKind::Wild
+ } else {
+ let old = self.behind_reference.replace(true);
+ // In case there are structural-match violations somewhere in this subpattern,
+ // we fall back to a const pattern. If we do not do this, we may end up with
+ // a !structural-match constant that is not of reference type, which makes it
+ // very hard to invoke `PartialEq::eq` on it as a fallback.
+ let val = match self.recur(tcx.deref_mir_constant(self.param_env.and(cv)), false) {
+ Ok(subpattern) => PatKind::Deref { subpattern },
+ Err(_) => PatKind::Constant { value: cv },
+ };
+ self.behind_reference.set(old);
+ val
+ }
+ }
+ },
+ ty::Bool | ty::Char | ty::Int(_) | ty::Uint(_) | ty::FnDef(..) => {
+ PatKind::Constant { value: cv }
+ }
+ ty::RawPtr(pointee) if pointee.ty.is_sized(tcx.at(span), param_env) => {
+ PatKind::Constant { value: cv }
+ }
+ // FIXME: these can have very surprising behaviour where optimization levels or other
+ // compilation choices change the runtime behaviour of the match.
+ // See https://github.com/rust-lang/rust/issues/70861 for examples.
+ ty::FnPtr(..) | ty::RawPtr(..) => {
+ if self.include_lint_checks
+ && !self.saw_const_match_error.get()
+ && !self.saw_const_match_lint.get()
+ {
+ self.saw_const_match_lint.set(true);
+ let msg = "function pointers and unsized pointers in patterns behave \
+ unpredictably and should not be relied upon. \
+ See https://github.com/rust-lang/rust/issues/70861 for details.";
+ tcx.struct_span_lint_hir(
+ lint::builtin::POINTER_STRUCTURAL_MATCH,
+ id,
+ span,
+ |lint| {
+ lint.build(msg).emit();
+ },
+ );
+ }
+ PatKind::Constant { value: cv }
+ }
+ _ => {
+ self.saw_const_match_error.set(true);
+ let msg = format!("`{}` cannot be used in patterns", cv.ty());
+ if self.include_lint_checks {
+ tcx.sess.span_err(span, &msg);
+ } else {
+ tcx.sess.delay_span_bug(span, &msg);
+ }
+ PatKind::Wild
+ }
+ };
+
+ if self.include_lint_checks
+ && !self.saw_const_match_error.get()
+ && !self.saw_const_match_lint.get()
+ && mir_structural_match_violation
+ // FIXME(#73448): Find a way to bring const qualification into parity with
+ // `search_for_structural_match_violation` and then remove this condition.
+ && self.search_for_structural_match_violation(cv.ty()).is_some()
+ {
+ self.saw_const_match_lint.set(true);
+ // Obtain the actual type that isn't annotated. If we just looked at `cv.ty` we
+ // could get `Option<NonStructEq>`, even though `Option` is annotated with derive.
+ let msg = self.search_for_structural_match_violation(cv.ty()).unwrap().replace(
+ "in a pattern,",
+ "in a pattern, the constant's initializer must be trivial or",
+ );
+ tcx.struct_span_lint_hir(
+ lint::builtin::NONTRIVIAL_STRUCTURAL_MATCH,
+ id,
+ span,
+ |lint| {
+ lint.build(&msg).emit();
+ },
+ );
+ }
+
+ Ok(Pat { span, ty: cv.ty(), kind: Box::new(kind) })
+ }
+}
diff --git a/compiler/rustc_mir_build/src/thir/pattern/deconstruct_pat.rs b/compiler/rustc_mir_build/src/thir/pattern/deconstruct_pat.rs
new file mode 100644
index 000000000..8d6f8efb6
--- /dev/null
+++ b/compiler/rustc_mir_build/src/thir/pattern/deconstruct_pat.rs
@@ -0,0 +1,1711 @@
+//! [`super::usefulness`] explains most of what is happening in this file. As explained there,
+//! values and patterns are made from constructors applied to fields. This file defines a
+//! `Constructor` enum, a `Fields` struct, and various operations to manipulate them and convert
+//! them from/to patterns.
+//!
+//! There's one idea that is not detailed in [`super::usefulness`] because the details are not
+//! needed there: _constructor splitting_.
+//!
+//! # Constructor splitting
+//!
+//! The idea is as follows: given a constructor `c` and a matrix, we want to specialize in turn
+//! with all the value constructors that are covered by `c`, and compute usefulness for each.
+//! Instead of listing all those constructors (which is intractable), we group those value
+//! constructors together as much as possible. Example:
+//!
+//! ```compile_fail,E0004
+//! match (0, false) {
+//! (0 ..=100, true) => {} // `p_1`
+//! (50..=150, false) => {} // `p_2`
+//! (0 ..=200, _) => {} // `q`
+//! }
+//! ```
+//!
+//! The naive approach would try all numbers in the range `0..=200`. But we can be a lot more
+//! clever: `0` and `1` for example will match the exact same rows, and return equivalent
+//! witnesses. In fact all of `0..50` would. We can thus restrict our exploration to 4
+//! constructors: `0..50`, `50..=100`, `101..=150` and `151..=200`. That is enough and infinitely
+//! more tractable.
+//!
+//! We capture this idea in a function `split(p_1 ... p_n, c)` which returns a list of constructors
+//! `c'` covered by `c`. Given such a `c'`, we require that all value ctors `c''` covered by `c'`
+//! return an equivalent set of witnesses after specializing and computing usefulness.
+//! In the example above, witnesses for specializing by `c''` covered by `0..50` will only differ
+//! in their first element.
+//!
+//! We usually also ask that the `c'` together cover all of the original `c`. However, we allow
+//! skipping some constructors as long as it doesn't change whether the resulting list of witnesses
+//! is empty or not. We use this in the wildcard `_` case.
+//!
+//! Splitting is implemented in the [`Constructor::split`] function. We don't do splitting for
+//! or-patterns; instead we just try the alternatives one-by-one. For details on splitting
+//! wildcards, see [`SplitWildcard`]; for integer ranges, see [`SplitIntRange`]; for slices, see
+//! [`SplitVarLenSlice`].
+
+use self::Constructor::*;
+use self::SliceKind::*;
+
+use super::compare_const_vals;
+use super::usefulness::{MatchCheckCtxt, PatCtxt};
+
+use rustc_data_structures::captures::Captures;
+use rustc_index::vec::Idx;
+
+use rustc_hir::{HirId, RangeEnd};
+use rustc_middle::mir::{self, Field};
+use rustc_middle::thir::{FieldPat, Pat, PatKind, PatRange};
+use rustc_middle::ty::layout::IntegerExt;
+use rustc_middle::ty::{self, Ty, TyCtxt, VariantDef};
+use rustc_middle::{middle::stability::EvalResult, mir::interpret::ConstValue};
+use rustc_session::lint;
+use rustc_span::{Span, DUMMY_SP};
+use rustc_target::abi::{Integer, Size, VariantIdx};
+
+use smallvec::{smallvec, SmallVec};
+use std::cell::Cell;
+use std::cmp::{self, max, min, Ordering};
+use std::fmt;
+use std::iter::{once, IntoIterator};
+use std::ops::RangeInclusive;
+
+/// Recursively expand this pattern into its subpatterns. Only useful for or-patterns.
+fn expand_or_pat<'p, 'tcx>(pat: &'p Pat<'tcx>) -> Vec<&'p Pat<'tcx>> {
+ fn expand<'p, 'tcx>(pat: &'p Pat<'tcx>, vec: &mut Vec<&'p Pat<'tcx>>) {
+ if let PatKind::Or { pats } = pat.kind.as_ref() {
+ for pat in pats {
+ expand(pat, vec);
+ }
+ } else {
+ vec.push(pat)
+ }
+ }
+
+ let mut pats = Vec::new();
+ expand(pat, &mut pats);
+ pats
+}
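+// For example (sketch): expanding the or-pattern `0 | (1 | 2)` yields the three subpatterns
+// `0`, `1` and `2`; a pattern that is not an or-pattern is returned as a single-element list.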
+
+/// An inclusive interval, used for precise integer exhaustiveness checking.
+/// `IntRange`s always store a contiguous range. This means that values are
+/// encoded such that `0` encodes the minimum value for the integer,
+/// regardless of the signedness.
+/// For example, the pattern `-128..=127i8` is encoded as `0..=255`.
+/// This makes comparisons and arithmetic on interval endpoints much more
+/// straightforward. See `signed_bias` for details.
+///
+/// `IntRange` is never used to encode an empty range or a "range" that wraps
+/// around the (offset) space: i.e., `range.lo <= range.hi`.
+#[derive(Clone, PartialEq, Eq)]
+pub(super) struct IntRange {
+ range: RangeInclusive<u128>,
+ /// Keeps the bias used for encoding the range. It depends on the type of the range and
+ /// possibly the pointer size of the current architecture. The algorithm ensures we never
+ /// compare `IntRange`s with different types/architectures.
+ bias: u128,
+}
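+// A standalone sketch of the bias encoding described above (illustrative only; it uses plain
+// integer arithmetic instead of the compiler's `Size`/`ScalarInt` machinery):
+//
+// ```
+// fn encode_i8(x: i8) -> u128 {
+//     let bias: u128 = 1 << 7; // the `signed_bias` of `i8`
+//     (x as u8 as u128) ^ bias
+// }
+//
+// fn main() {
+//     // `-128..=127` maps onto `0..=255`, preserving order.
+//     assert_eq!(encode_i8(-128), 0);
+//     assert_eq!(encode_i8(0), 128);
+//     assert_eq!(encode_i8(127), 255);
+// }
+// ```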
+
+impl IntRange {
+ #[inline]
+ fn is_integral(ty: Ty<'_>) -> bool {
+ matches!(ty.kind(), ty::Char | ty::Int(_) | ty::Uint(_) | ty::Bool)
+ }
+
+ fn is_singleton(&self) -> bool {
+ self.range.start() == self.range.end()
+ }
+
+ fn boundaries(&self) -> (u128, u128) {
+ (*self.range.start(), *self.range.end())
+ }
+
+ #[inline]
+ fn integral_size_and_signed_bias(tcx: TyCtxt<'_>, ty: Ty<'_>) -> Option<(Size, u128)> {
+ match *ty.kind() {
+ ty::Bool => Some((Size::from_bytes(1), 0)),
+ ty::Char => Some((Size::from_bytes(4), 0)),
+ ty::Int(ity) => {
+ let size = Integer::from_int_ty(&tcx, ity).size();
+ Some((size, 1u128 << (size.bits() as u128 - 1)))
+ }
+ ty::Uint(uty) => Some((Integer::from_uint_ty(&tcx, uty).size(), 0)),
+ _ => None,
+ }
+ }
+
+ #[inline]
+ fn from_constant<'tcx>(
+ tcx: TyCtxt<'tcx>,
+ param_env: ty::ParamEnv<'tcx>,
+ value: mir::ConstantKind<'tcx>,
+ ) -> Option<IntRange> {
+ let ty = value.ty();
+ if let Some((target_size, bias)) = Self::integral_size_and_signed_bias(tcx, ty) {
+ let val = (|| {
+ match value {
+ mir::ConstantKind::Val(ConstValue::Scalar(scalar), _) => {
+ // For this specific pattern we can skip a lot of effort and go
+ // straight to the result, after doing a bit of checking. (We
+ // could remove this branch and just fall through, which
+ // is more general but much slower.)
+ if let Ok(Ok(bits)) = scalar.to_bits_or_ptr_internal(target_size) {
+ return Some(bits);
+ } else {
+ return None;
+ }
+ }
+ mir::ConstantKind::Ty(c) => match c.kind() {
+ ty::ConstKind::Value(_) => bug!(
+ "encountered ConstValue in mir::ConstantKind::Ty, whereas this is expected to be in ConstantKind::Val"
+ ),
+ _ => {}
+ },
+ _ => {}
+ }
+
+ // This is a more general form of the previous case.
+ value.try_eval_bits(tcx, param_env, ty)
+ })()?;
+ let val = val ^ bias;
+ Some(IntRange { range: val..=val, bias })
+ } else {
+ None
+ }
+ }
+
+ #[inline]
+ fn from_range<'tcx>(
+ tcx: TyCtxt<'tcx>,
+ lo: u128,
+ hi: u128,
+ ty: Ty<'tcx>,
+ end: &RangeEnd,
+ ) -> Option<IntRange> {
+ if Self::is_integral(ty) {
+ // Perform a shift if the underlying types are signed,
+ // which makes the interval arithmetic simpler.
+ let bias = IntRange::signed_bias(tcx, ty);
+ let (lo, hi) = (lo ^ bias, hi ^ bias);
+ let offset = (*end == RangeEnd::Excluded) as u128;
+ if lo > hi || (lo == hi && *end == RangeEnd::Excluded) {
+ // This should have been caught earlier by E0030.
+ bug!("malformed range pattern: {}..={}", lo, (hi - offset));
+ }
+ Some(IntRange { range: lo..=(hi - offset), bias })
+ } else {
+ None
+ }
+ }
+
+ // The return value of `signed_bias` should be XORed with an endpoint to encode/decode it.
+ fn signed_bias(tcx: TyCtxt<'_>, ty: Ty<'_>) -> u128 {
+ match *ty.kind() {
+ ty::Int(ity) => {
+ let bits = Integer::from_int_ty(&tcx, ity).size().bits() as u128;
+ 1u128 << (bits - 1)
+ }
+ _ => 0,
+ }
+ }
+
+ fn is_subrange(&self, other: &Self) -> bool {
+ other.range.start() <= self.range.start() && self.range.end() <= other.range.end()
+ }
+
+ fn intersection(&self, other: &Self) -> Option<Self> {
+ let (lo, hi) = self.boundaries();
+ let (other_lo, other_hi) = other.boundaries();
+ if lo <= other_hi && other_lo <= hi {
+ Some(IntRange { range: max(lo, other_lo)..=min(hi, other_hi), bias: self.bias })
+ } else {
+ None
+ }
+ }
+
+ fn suspicious_intersection(&self, other: &Self) -> bool {
+ // `false` in the following cases:
+ // 1 ---- // 1 ---------- // 1 ---- // 1 ----
+ // 2 ---------- // 2 ---- // 2 ---- // 2 ----
+ //
+ // The following are currently `false`, but could be `true` in the future (#64007):
+ // 1 --------- // 1 ---------
+ // 2 ---------- // 2 ----------
+ //
+ // `true` in the following cases:
+ // 1 ------- // 1 -------
+ // 2 -------- // 2 -------
+ let (lo, hi) = self.boundaries();
+ let (other_lo, other_hi) = other.boundaries();
+ (lo == other_hi || hi == other_lo) && !self.is_singleton() && !other.is_singleton()
+ }
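+    // For example (sketch), this is what makes `lint_overlapping_range_endpoints` below flag:
+    //
+    // ```
+    // match 0u8 {
+    //     0..=10 => {}  // overlaps with the next arm exactly on `10`
+    //     10..=20 => {}
+    //     _ => {}
+    // }
+    // ```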
+
+ /// Only used for displaying the range properly.
+ fn to_pat<'tcx>(&self, tcx: TyCtxt<'tcx>, ty: Ty<'tcx>) -> Pat<'tcx> {
+ let (lo, hi) = self.boundaries();
+
+ let bias = self.bias;
+ let (lo, hi) = (lo ^ bias, hi ^ bias);
+
+ let env = ty::ParamEnv::empty().and(ty);
+ let lo_const = mir::ConstantKind::from_bits(tcx, lo, env);
+ let hi_const = mir::ConstantKind::from_bits(tcx, hi, env);
+
+ let kind = if lo == hi {
+ PatKind::Constant { value: lo_const }
+ } else {
+ PatKind::Range(PatRange { lo: lo_const, hi: hi_const, end: RangeEnd::Included })
+ };
+
+ Pat { ty, span: DUMMY_SP, kind: Box::new(kind) }
+ }
+
+ /// Lint on likely incorrect range patterns (#63987)
+ pub(super) fn lint_overlapping_range_endpoints<'a, 'p: 'a, 'tcx: 'a>(
+ &self,
+ pcx: &PatCtxt<'_, 'p, 'tcx>,
+ pats: impl Iterator<Item = &'a DeconstructedPat<'p, 'tcx>>,
+ column_count: usize,
+ hir_id: HirId,
+ ) {
+ if self.is_singleton() {
+ return;
+ }
+
+ if column_count != 1 {
+ // FIXME: for now, only check for overlapping ranges on simple range
+ // patterns. Otherwise with the current logic the following is detected
+ // as overlapping:
+ // ```
+ // match (0u8, true) {
+ // (0 ..= 125, false) => {}
+ // (125 ..= 255, true) => {}
+ // _ => {}
+ // }
+ // ```
+ return;
+ }
+
+ let overlaps: Vec<_> = pats
+ .filter_map(|pat| Some((pat.ctor().as_int_range()?, pat.span())))
+ .filter(|(range, _)| self.suspicious_intersection(range))
+ .map(|(range, span)| (self.intersection(&range).unwrap(), span))
+ .collect();
+
+ if !overlaps.is_empty() {
+ pcx.cx.tcx.struct_span_lint_hir(
+ lint::builtin::OVERLAPPING_RANGE_ENDPOINTS,
+ hir_id,
+ pcx.span,
+ |lint| {
+ let mut err = lint.build("multiple patterns overlap on their endpoints");
+ for (int_range, span) in overlaps {
+ err.span_label(
+ span,
+ &format!(
+ "this range overlaps on `{}`...",
+ int_range.to_pat(pcx.cx.tcx, pcx.ty)
+ ),
+ );
+ }
+ err.span_label(pcx.span, "... with this range");
+ err.note("you likely meant to write mutually exclusive ranges");
+ err.emit();
+ },
+ );
+ }
+ }
+
+ /// See `Constructor::is_covered_by`
+ fn is_covered_by(&self, other: &Self) -> bool {
+ if self.intersection(other).is_some() {
+ // Constructor splitting should ensure that all intersections we encounter are actually
+ // inclusions.
+ assert!(self.is_subrange(other));
+ true
+ } else {
+ false
+ }
+ }
+}
+
+/// Note: this is often not what we want: e.g. `false` is converted into the range `0..=0` and
+/// would be displayed as such. To render properly, convert to a pattern first.
+impl fmt::Debug for IntRange {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ let (lo, hi) = self.boundaries();
+ let bias = self.bias;
+ let (lo, hi) = (lo ^ bias, hi ^ bias);
+ write!(f, "{}", lo)?;
+ write!(f, "{}", RangeEnd::Included)?;
+ write!(f, "{}", hi)
+ }
+}
+
+/// Represents a border between 2 integers. Because the intervals spanning borders must be able to
+/// cover every integer, we need to be able to represent 2^128 + 1 such borders.
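+///
+/// For example, the inclusive range `5..=9` is delimited by the borders `JustBefore(5)` and
+/// `JustBefore(10)`, while a range ending at `u128::MAX` is delimited above by `AfterMax`, since
+/// there is no `u128` value just after it.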
+#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord)]
+enum IntBorder {
+ JustBefore(u128),
+ AfterMax,
+}
+
+/// A range of integers that is partitioned into disjoint subranges. This does constructor
+/// splitting for integer ranges as explained at the top of the file.
+///
+/// This is fed multiple ranges, and returns an output that covers the input, but is split so that
+/// the only intersections between an output range and a seen range are inclusions. No output range
+/// straddles the boundary of one of the inputs.
+///
+/// The following input:
+/// ```text
+/// |-------------------------| // `self`
+/// |------| |----------| |----|
+/// |-------| |-------|
+/// ```
+/// would be iterated over as follows:
+/// ```text
+/// ||---|--||-|---|---|---|--|
+/// ```
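+///
+/// For instance, splitting `0..=10` relative to the seen ranges `0..=2` and `5..=7` yields the
+/// disjoint pieces `0..=2`, `3..=4`, `5..=7` and `8..=10`: each piece is either included in or
+/// disjoint from every seen range.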
+#[derive(Debug, Clone)]
+struct SplitIntRange {
+ /// The range we are splitting
+ range: IntRange,
+ /// The borders of ranges we have seen. They are all contained within `range`. This is kept
+ /// sorted.
+ borders: Vec<IntBorder>,
+}
+
+impl SplitIntRange {
+ fn new(range: IntRange) -> Self {
+ SplitIntRange { range, borders: Vec::new() }
+ }
+
+ /// Convert a range into the pair of borders that delimit it. Internal helper for `split` and
+ /// `iter`.
+ fn to_borders(r: IntRange) -> [IntBorder; 2] {
+ use IntBorder::*;
+ let (lo, hi) = r.boundaries();
+ let lo = JustBefore(lo);
+ let hi = match hi.checked_add(1) {
+ Some(m) => JustBefore(m),
+ None => AfterMax,
+ };
+ [lo, hi]
+ }
+
+ /// Add ranges relative to which we split.
+ fn split(&mut self, ranges: impl Iterator<Item = IntRange>) {
+ let this_range = &self.range;
+ let included_ranges = ranges.filter_map(|r| this_range.intersection(&r));
+ let included_borders = included_ranges.flat_map(|r| {
+ let borders = Self::to_borders(r);
+ once(borders[0]).chain(once(borders[1]))
+ });
+ self.borders.extend(included_borders);
+ self.borders.sort_unstable();
+ }
+
+ /// Iterate over the contained ranges.
+ fn iter<'a>(&'a self) -> impl Iterator<Item = IntRange> + Captures<'a> {
+ use IntBorder::*;
+
+ let self_range = Self::to_borders(self.range.clone());
+ // Start with the start of the range.
+ let mut prev_border = self_range[0];
+ self.borders
+ .iter()
+ .copied()
+ // End with the end of the range.
+ .chain(once(self_range[1]))
+ // List pairs of adjacent borders.
+ .map(move |border| {
+ let ret = (prev_border, border);
+ prev_border = border;
+ ret
+ })
+ // Skip duplicates.
+ .filter(|(prev_border, border)| prev_border != border)
+ // Finally, convert to ranges.
+ .map(move |(prev_border, border)| {
+ let range = match (prev_border, border) {
+ (JustBefore(n), JustBefore(m)) if n < m => n..=(m - 1),
+ (JustBefore(n), AfterMax) => n..=u128::MAX,
+ _ => unreachable!(), // Ruled out by the sorting and filtering we did
+ };
+ IntRange { range, bias: self.range.bias }
+ })
+ }
+}
+
+#[derive(Copy, Clone, Debug, PartialEq, Eq)]
+enum SliceKind {
+ /// Patterns of length `n` (`[x, y]`).
+ FixedLen(usize),
+ /// Patterns using the `..` notation (`[x, .., y]`).
+ /// `VarLen(i, j)` captures any array constructor of length `>= i + j`, where `i` is the prefix
+ /// length and `j` the suffix length.
+ /// In the case where `array_len` is `Some(_)`, this indicates that we only care about the
+ /// first `i` and the last `j` values of the array, and everything in between is a wildcard `_`.
+ VarLen(usize, usize),
+}
+
+impl SliceKind {
+ fn arity(self) -> usize {
+ match self {
+ FixedLen(length) => length,
+ VarLen(prefix, suffix) => prefix + suffix,
+ }
+ }
+
+ /// Whether this pattern includes patterns of length `other_len`.
+ fn covers_length(self, other_len: usize) -> bool {
+ match self {
+ FixedLen(len) => len == other_len,
+ VarLen(prefix, suffix) => prefix + suffix <= other_len,
+ }
+ }
+}
+
+/// A constructor for array and slice patterns.
+#[derive(Copy, Clone, Debug, PartialEq, Eq)]
+pub(super) struct Slice {
+ /// `None` if the matched value is a slice, `Some(n)` if it is an array of size `n`.
+ array_len: Option<usize>,
+ /// The kind of pattern it is: fixed-length `[x, y]` or variable length `[x, .., y]`.
+ kind: SliceKind,
+}
+
+impl Slice {
+ fn new(array_len: Option<usize>, kind: SliceKind) -> Self {
+ let kind = match (array_len, kind) {
+ // If the middle `..` is empty, we effectively have a fixed-length pattern.
+ (Some(len), VarLen(prefix, suffix)) if prefix + suffix >= len => FixedLen(len),
+ _ => kind,
+ };
+ Slice { array_len, kind }
+ }
+
+ fn arity(self) -> usize {
+ self.kind.arity()
+ }
+
+ /// See `Constructor::is_covered_by`
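+ /// For example, the fixed-length pattern `[_, _]` (arity 2) is covered by the variable-length
+ /// pattern `[_, ..]` (`VarLen(1, 0)`), since `1 + 0 <= 2`.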
+ fn is_covered_by(self, other: Self) -> bool {
+ other.kind.covers_length(self.arity())
+ }
+}
+
+/// This computes constructor splitting for variable-length slices, as explained at the top of the
+/// file.
+///
+/// A slice pattern `[x, .., y]` behaves like the infinite or-pattern `[x, y] | [x, _, y] | [x, _,
+/// _, y] | ...`. The corresponding value constructors are fixed-length array constructors above a
+/// given minimum length. We obviously can't list this infinitude of constructors. Thankfully,
+/// it turns out that for each finite set of slice patterns, all sufficiently large array lengths
+/// are equivalent.
+///
+/// Let's look at an example, where we are trying to split the last pattern:
+/// ```
+/// # fn foo(x: &[bool]) {
+/// match x {
+/// [true, true, ..] => {}
+/// [.., false, false] => {}
+/// [..] => {}
+/// }
+/// # }
+/// ```
+/// Here are the results of specialization for the first few lengths:
+/// ```
+/// # fn foo(x: &[bool]) { match x {
+/// // length 0
+/// [] => {}
+/// // length 1
+/// [_] => {}
+/// // length 2
+/// [true, true] => {}
+/// [false, false] => {}
+/// [_, _] => {}
+/// // length 3
+/// [true, true, _ ] => {}
+/// [_, false, false] => {}
+/// [_, _, _ ] => {}
+/// // length 4
+/// [true, true, _, _ ] => {}
+/// [_, _, false, false] => {}
+/// [_, _, _, _ ] => {}
+/// // length 5
+/// [true, true, _, _, _ ] => {}
+/// [_, _, _, false, false] => {}
+/// [_, _, _, _, _ ] => {}
+/// # _ => {}
+/// # }}
+/// ```
+///
+/// If we went above length 5, we would simply be inserting more columns full of wildcards in the
+/// middle. This means that the set of witnesses for length `l >= 5` is equivalent to the set for
+/// any other `l' >= 5`: simply add or remove wildcards in the middle to convert between them.
+///
+/// This applies to any set of slice patterns: there will be a length `L` above which all lengths
+/// behave the same. This is exactly what we need for constructor splitting. Therefore a
+/// variable-length slice can be split into a variable-length slice of minimal length `L`, and many
+/// fixed-length slices of lengths `< L`.
+///
+/// For each variable-length pattern `p` with a prefix of length `plâ‚š` and suffix of length `slâ‚š`,
+/// only the first `plâ‚š` and the last `slâ‚š` elements are examined. Therefore, as long as `L` is
+/// positive (to avoid concerns about empty types), all elements after the maximum prefix length
+/// and before the maximum suffix length are not examined by any variable-length pattern, and
+/// therefore can be added/removed without affecting them - creating equivalent patterns from any
+/// sufficiently-large length.
+///
+/// Of course, if fixed-length patterns exist, we must be sure that our length is large enough to
+/// miss them all, so we can pick `L = max(max(FIXED_LEN)+1, max(PREFIX_LEN) + max(SUFFIX_LEN))`.
+///
+/// `max_slice` below will be made to have arity `L`.
+#[derive(Debug)]
+struct SplitVarLenSlice {
+ /// If the type is an array, this is its size.
+ array_len: Option<usize>,
+ /// The arity of the input slice.
+ arity: usize,
+ /// The smallest slice bigger than any slice seen. `max_slice.arity()` is the length `L`
+ /// described above.
+ max_slice: SliceKind,
+}
+
+impl SplitVarLenSlice {
+ fn new(prefix: usize, suffix: usize, array_len: Option<usize>) -> Self {
+ SplitVarLenSlice { array_len, arity: prefix + suffix, max_slice: VarLen(prefix, suffix) }
+ }
+
+ /// Pass a set of slices relative to which to split this one.
+ fn split(&mut self, slices: impl Iterator<Item = SliceKind>) {
+ let VarLen(max_prefix_len, max_suffix_len) = &mut self.max_slice else {
+ // No need to split
+ return;
+ };
+ // We grow `self.max_slice` to be larger than all slices encountered, as described above.
+ // For diagnostics, we keep the prefix and suffix lengths separate, but grow them so that
+ // `L = max_prefix_len + max_suffix_len`.
+ let mut max_fixed_len = 0;
+ for slice in slices {
+ match slice {
+ FixedLen(len) => {
+ max_fixed_len = cmp::max(max_fixed_len, len);
+ }
+ VarLen(prefix, suffix) => {
+ *max_prefix_len = cmp::max(*max_prefix_len, prefix);
+ *max_suffix_len = cmp::max(*max_suffix_len, suffix);
+ }
+ }
+ }
+ // We want `L = max(L, max_fixed_len + 1)`, modulo the fact that we keep prefix and
+ // suffix separate.
+ if max_fixed_len + 1 >= *max_prefix_len + *max_suffix_len {
+ // The subtraction can't overflow thanks to the above check.
+ // The new `max_prefix_len` is larger than its previous value.
+ *max_prefix_len = max_fixed_len + 1 - *max_suffix_len;
+ }
+
+ // We cap the arity of `max_slice` at the array size.
+ match self.array_len {
+ Some(len) if self.max_slice.arity() >= len => self.max_slice = FixedLen(len),
+ _ => {}
+ }
+ }
+
+ /// Iterate over the partition of this slice.
+ fn iter<'a>(&'a self) -> impl Iterator<Item = Slice> + Captures<'a> {
+ let smaller_lengths = match self.array_len {
+ // The only admissible fixed-length slice is one of the array size. Whether `max_slice`
+ // is fixed-length or variable-length, it will be the only relevant slice to output
+ // here.
+ Some(_) => 0..0, // empty range
+ // We cover all arities in the range `(self.arity..infinity)`. We split that range into
+ // two: lengths smaller than `max_slice.arity()` are treated independently as
+ // fixed-length slices, and lengths above are captured by `max_slice`.
+ None => self.arity..self.max_slice.arity(),
+ };
+ smaller_lengths
+ .map(FixedLen)
+ .chain(once(self.max_slice))
+ .map(move |kind| Slice::new(self.array_len, kind))
+ }
+}
+
+/// A value can be decomposed into a constructor applied to some fields. This struct represents
+/// the constructor. See also `Fields`.
+///
+/// `pat_constructor` retrieves the constructor corresponding to a pattern.
+/// `specialize_constructor` returns the list of fields corresponding to a pattern, given a
+/// constructor. `Constructor::apply` reconstructs the pattern from a pair of `Constructor` and
+/// `Fields`.
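+///
+/// For example, the value `Some(5)` decomposes into the `Variant` constructor for `Some` applied
+/// to the single field `5`, and the tuple `(false, 42)` decomposes into the `Single` constructor
+/// applied to the fields `false` and `42`.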
+#[derive(Clone, Debug, PartialEq)]
+pub(super) enum Constructor<'tcx> {
+ /// The constructor for patterns that have a single constructor, like tuples, struct patterns
+ /// and fixed-length arrays.
+ Single,
+ /// Enum variants.
+ Variant(VariantIdx),
+ /// Ranges of integer literal values (`2`, `2..=5` or `2..5`).
+ IntRange(IntRange),
+ /// Ranges of floating-point literal values (`2.0..=5.2`).
+ FloatRange(mir::ConstantKind<'tcx>, mir::ConstantKind<'tcx>, RangeEnd),
+ /// String literals. Strings are not quite the same as `&[u8]` so we treat them separately.
+ Str(mir::ConstantKind<'tcx>),
+ /// Array and slice patterns.
+ Slice(Slice),
+ /// Constants that must not be matched structurally. They are treated as black
+ /// boxes for the purposes of exhaustiveness: we must not inspect them, and they
+ /// don't count towards making a match exhaustive.
+ Opaque,
+ /// Fake extra constructor for enums that aren't allowed to be matched exhaustively. Also used
+ /// for those types for which we cannot list constructors explicitly, like `f64` and `str`.
+ NonExhaustive,
+ /// Stands for constructors that are not seen in the matrix, as explained in the documentation
+ /// for [`SplitWildcard`]. The carried `bool` is used for the `non_exhaustive_omitted_patterns`
+ /// lint.
+ Missing { nonexhaustive_enum_missing_real_variants: bool },
+ /// Wildcard pattern.
+ Wildcard,
+ /// Or-pattern.
+ Or,
+}
+
+impl<'tcx> Constructor<'tcx> {
+ pub(super) fn is_wildcard(&self) -> bool {
+ matches!(self, Wildcard)
+ }
+
+ pub(super) fn is_non_exhaustive(&self) -> bool {
+ matches!(self, NonExhaustive)
+ }
+
+ fn as_int_range(&self) -> Option<&IntRange> {
+ match self {
+ IntRange(range) => Some(range),
+ _ => None,
+ }
+ }
+
+ fn as_slice(&self) -> Option<Slice> {
+ match self {
+ Slice(slice) => Some(*slice),
+ _ => None,
+ }
+ }
+
+ /// Checks if the `Constructor` is a variant and `TyCtxt::eval_stability` returns
+ /// `EvalResult::Deny { .. }`.
+ ///
+ /// This means that the variant has a stdlib unstable feature marking it.
+ pub(super) fn is_unstable_variant(&self, pcx: &PatCtxt<'_, '_, 'tcx>) -> bool {
+ if let Constructor::Variant(idx) = self && let ty::Adt(adt, _) = pcx.ty.kind() {
+ let variant_def_id = adt.variant(*idx).def_id;
+ // Filter variants that depend on a disabled unstable feature.
+ return matches!(
+ pcx.cx.tcx.eval_stability(variant_def_id, None, DUMMY_SP, None),
+ EvalResult::Deny { .. }
+ );
+ }
+ false
+ }
+
+ /// Checks if the `Constructor` is a `Constructor::Variant` with a `#[doc(hidden)]`
+ /// attribute from a type not local to the current crate.
+ pub(super) fn is_doc_hidden_variant(&self, pcx: &PatCtxt<'_, '_, 'tcx>) -> bool {
+ if let Constructor::Variant(idx) = self && let ty::Adt(adt, _) = pcx.ty.kind() {
+ let variant_def_id = adt.variants()[*idx].def_id;
+ return pcx.cx.tcx.is_doc_hidden(variant_def_id) && !variant_def_id.is_local();
+ }
+ false
+ }
+
+ fn variant_index_for_adt(&self, adt: ty::AdtDef<'tcx>) -> VariantIdx {
+ match *self {
+ Variant(idx) => idx,
+ Single => {
+ assert!(!adt.is_enum());
+ VariantIdx::new(0)
+ }
+ _ => bug!("bad constructor {:?} for adt {:?}", self, adt),
+ }
+ }
+
+ /// The number of fields for this constructor. This must be kept in sync with
+ /// `Fields::wildcards`.
+ pub(super) fn arity(&self, pcx: &PatCtxt<'_, '_, 'tcx>) -> usize {
+ match self {
+ Single | Variant(_) => match pcx.ty.kind() {
+ ty::Tuple(fs) => fs.len(),
+ ty::Ref(..) => 1,
+ ty::Adt(adt, ..) => {
+ if adt.is_box() {
+ // The only legal patterns of type `Box` (outside `std`) are `_` and box
+ // patterns. If we're here we can assume this is a box pattern.
+ 1
+ } else {
+ let variant = &adt.variant(self.variant_index_for_adt(*adt));
+ Fields::list_variant_nonhidden_fields(pcx.cx, pcx.ty, variant).count()
+ }
+ }
+ _ => bug!("Unexpected type for `Single` constructor: {:?}", pcx.ty),
+ },
+ Slice(slice) => slice.arity(),
+ Str(..)
+ | FloatRange(..)
+ | IntRange(..)
+ | NonExhaustive
+ | Opaque
+ | Missing { .. }
+ | Wildcard => 0,
+ Or => bug!("The `Or` constructor doesn't have a fixed arity"),
+ }
+ }
+
+ /// Some constructors (namely `Wildcard`, `IntRange` and `Slice`) stand for a set of actual
+ /// constructors (like variants, integers or fixed-size slices). When specializing for these
+ /// constructors, we want to be specializing for the actual underlying constructors.
+ /// Naively, we would simply return the list of constructors they correspond to. Instead, we are
+ /// more clever: if there are constructors that we know will behave the same wrt the current
+ /// matrix, we keep them grouped. For example, all slices of a sufficiently large length
+ /// will either be all useful or all non-useful with a given matrix.
+ ///
+ /// See the branches for details on how the splitting is done.
+ ///
+ /// This function may discard some irrelevant constructors if this preserves behavior and
+ /// diagnostics. E.g. for the `_` case, we ignore the constructors already present in the
+ /// matrix, unless all of them are.
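+ ///
+ /// For example, splitting the range `0..=255` (for `u8`) relative to a matrix that contains the
+ /// pattern `0..=10` yields `0..=10` and `11..=255`; each output range behaves uniformly with
+ /// respect to that matrix.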
+ pub(super) fn split<'a>(
+ &self,
+ pcx: &PatCtxt<'_, '_, 'tcx>,
+ ctors: impl Iterator<Item = &'a Constructor<'tcx>> + Clone,
+ ) -> SmallVec<[Self; 1]>
+ where
+ 'tcx: 'a,
+ {
+ match self {
+ Wildcard => {
+ let mut split_wildcard = SplitWildcard::new(pcx);
+ split_wildcard.split(pcx, ctors);
+ split_wildcard.into_ctors(pcx)
+ }
+ // Fast-track if the range is trivial. In particular, we don't do the overlapping
+ // ranges check.
+ IntRange(ctor_range) if !ctor_range.is_singleton() => {
+ let mut split_range = SplitIntRange::new(ctor_range.clone());
+ let int_ranges = ctors.filter_map(|ctor| ctor.as_int_range());
+ split_range.split(int_ranges.cloned());
+ split_range.iter().map(IntRange).collect()
+ }
+ &Slice(Slice { kind: VarLen(self_prefix, self_suffix), array_len }) => {
+ let mut split_self = SplitVarLenSlice::new(self_prefix, self_suffix, array_len);
+ let slices = ctors.filter_map(|c| c.as_slice()).map(|s| s.kind);
+ split_self.split(slices);
+ split_self.iter().map(Slice).collect()
+ }
+ // Any other constructor can be used unchanged.
+ _ => smallvec![self.clone()],
+ }
+ }
+
+ /// Returns whether `self` is covered by `other`, i.e. whether `self` is a subset of `other`.
+ /// For the simple cases, this is simply checking for equality. For the "grouped" constructors,
+ /// this checks for inclusion.
+ // We inline because this has a single call site in `Matrix::specialize_constructor`.
+ #[inline]
+ pub(super) fn is_covered_by<'p>(&self, pcx: &PatCtxt<'_, 'p, 'tcx>, other: &Self) -> bool {
+ // This must be kept in sync with `is_covered_by_any`.
+ match (self, other) {
+ // Wildcards cover anything
+ (_, Wildcard) => true,
+ // The missing ctors are not covered by anything in the matrix except wildcards.
+ (Missing { .. } | Wildcard, _) => false,
+
+ (Single, Single) => true,
+ (Variant(self_id), Variant(other_id)) => self_id == other_id,
+
+ (IntRange(self_range), IntRange(other_range)) => self_range.is_covered_by(other_range),
+ (
+ FloatRange(self_from, self_to, self_end),
+ FloatRange(other_from, other_to, other_end),
+ ) => {
+ match (
+ compare_const_vals(pcx.cx.tcx, *self_to, *other_to, pcx.cx.param_env),
+ compare_const_vals(pcx.cx.tcx, *self_from, *other_from, pcx.cx.param_env),
+ ) {
+ (Some(to), Some(from)) => {
+ (from == Ordering::Greater || from == Ordering::Equal)
+ && (to == Ordering::Less
+ || (other_end == self_end && to == Ordering::Equal))
+ }
+ _ => false,
+ }
+ }
+ (Str(self_val), Str(other_val)) => {
+ // FIXME Once valtrees are available we can directly use the bytes
+ // in the `Str` variant of the valtree for the comparison here.
+ self_val == other_val
+ }
+ (Slice(self_slice), Slice(other_slice)) => self_slice.is_covered_by(*other_slice),
+
+ // We are trying to inspect an opaque constant. Thus we skip the row.
+ (Opaque, _) | (_, Opaque) => false,
+ // Only a wildcard pattern can match the special extra constructor.
+ (NonExhaustive, _) => false,
+
+ _ => span_bug!(
+ pcx.span,
+ "trying to compare incompatible constructors {:?} and {:?}",
+ self,
+ other
+ ),
+ }
+ }
+
+ /// Faster version of `is_covered_by` when applied to many constructors. `used_ctors` is
+ /// assumed to be built from `matrix.head_ctors()` with wildcards filtered out, and `self` is
+ /// assumed to have been split from a wildcard.
+ fn is_covered_by_any<'p>(
+ &self,
+ pcx: &PatCtxt<'_, 'p, 'tcx>,
+ used_ctors: &[Constructor<'tcx>],
+ ) -> bool {
+ if used_ctors.is_empty() {
+ return false;
+ }
+
+ // This must be kept in sync with `is_covered_by`.
+ match self {
+ // If `self` is `Single`, `used_ctors` cannot contain anything other than `Single`s.
+ Single => !used_ctors.is_empty(),
+ Variant(vid) => used_ctors.iter().any(|c| matches!(c, Variant(i) if i == vid)),
+ IntRange(range) => used_ctors
+ .iter()
+ .filter_map(|c| c.as_int_range())
+ .any(|other| range.is_covered_by(other)),
+ Slice(slice) => used_ctors
+ .iter()
+ .filter_map(|c| c.as_slice())
+ .any(|other| slice.is_covered_by(other)),
+ // This constructor is never covered by anything else
+ NonExhaustive => false,
+ Str(..) | FloatRange(..) | Opaque | Missing { .. } | Wildcard | Or => {
+ span_bug!(pcx.span, "found unexpected ctor in all_ctors: {:?}", self)
+ }
+ }
+ }
+}
+
+/// A wildcard constructor that we split relative to the constructors in the matrix, as explained
+/// at the top of the file.
+///
+/// A constructor that is not present in the matrix rows will only be covered by the rows that have
+/// wildcards. Thus we can group all of those constructors together; we call them "missing
+/// constructors". Splitting a wildcard would therefore list all present constructors individually
+/// (or grouped if they are integers or slices), and then all missing constructors together as a
+/// group.
+///
+/// However, we can go further: since any constructor will match the wildcard rows, and having more
+/// rows can only reduce the number of usefulness witnesses, we can skip the present constructors
+/// and only try the missing ones.
+/// This will not preserve the whole list of witnesses, but will preserve whether the list is empty
+/// or not. In fact this is quite natural from the point of view of diagnostics too. This is done
+/// in `into_ctors`: in some cases we only return `Missing`.
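+///
+/// For example, when an `Option<u8>` scrutinee is matched only against `None`, splitting the
+/// wildcard yields the single `Missing` constructor, standing in for the absent `Some(_)`.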
+#[derive(Debug)]
+pub(super) struct SplitWildcard<'tcx> {
+ /// Constructors seen in the matrix.
+ matrix_ctors: Vec<Constructor<'tcx>>,
+ /// All the constructors for this type
+ all_ctors: SmallVec<[Constructor<'tcx>; 1]>,
+}
+
+impl<'tcx> SplitWildcard<'tcx> {
+ pub(super) fn new<'p>(pcx: &PatCtxt<'_, 'p, 'tcx>) -> Self {
+ debug!("SplitWildcard::new({:?})", pcx.ty);
+ let cx = pcx.cx;
+ let make_range = |start, end| {
+ IntRange(
+ // `unwrap()` is ok because we know the type is an integer.
+ IntRange::from_range(cx.tcx, start, end, pcx.ty, &RangeEnd::Included).unwrap(),
+ )
+ };
+ // This determines the set of all possible constructors for the type `pcx.ty`. For numbers,
+ // arrays and slices we use ranges and variable-length slices when appropriate.
+ //
+ // If the `exhaustive_patterns` feature is enabled, we make sure to omit constructors that
+ // are statically impossible. E.g., for `Option<!>`, we do not include `Some(_)` in the
+ // returned list of constructors.
+ // Invariant: this is empty if and only if the type is uninhabited (as determined by
+ // `cx.is_uninhabited()`).
+ let all_ctors = match pcx.ty.kind() {
+ ty::Bool => smallvec![make_range(0, 1)],
+ ty::Array(sub_ty, len) if len.try_eval_usize(cx.tcx, cx.param_env).is_some() => {
+ let len = len.eval_usize(cx.tcx, cx.param_env) as usize;
+ if len != 0 && cx.is_uninhabited(*sub_ty) {
+ smallvec![]
+ } else {
+ smallvec![Slice(Slice::new(Some(len), VarLen(0, 0)))]
+ }
+ }
+ // Treat arrays of a constant but unknown length like slices.
+ ty::Array(sub_ty, _) | ty::Slice(sub_ty) => {
+ let kind = if cx.is_uninhabited(*sub_ty) { FixedLen(0) } else { VarLen(0, 0) };
+ smallvec![Slice(Slice::new(None, kind))]
+ }
+ ty::Adt(def, substs) if def.is_enum() => {
+ // If the enum is declared as `#[non_exhaustive]`, we treat it as if it had an
+ // additional "unknown" constructor.
+ // There is no point in enumerating all possible variants, because the user can't
+ // actually match against them all themselves. So we always return only the fictitious
+ // constructor.
+ // E.g., in an example like:
+ //
+ // ```
+ // let err: io::ErrorKind = ...;
+ // match err {
+ // io::ErrorKind::NotFound => {},
+ // }
+ // ```
+ //
+ // we don't want to show every possible IO error, but instead have only `_` as the
+ // witness.
+ let is_declared_nonexhaustive = cx.is_foreign_non_exhaustive_enum(pcx.ty);
+
+ let is_exhaustive_pat_feature = cx.tcx.features().exhaustive_patterns;
+
+ // If `exhaustive_patterns` is disabled and our scrutinee is an empty enum, we treat it
+ // as though it had an "unknown" constructor to avoid exposing its emptiness. The
+ // exception is if the pattern is at the top level, because we want empty matches to be
+ // considered exhaustive.
+ let is_secretly_empty =
+ def.variants().is_empty() && !is_exhaustive_pat_feature && !pcx.is_top_level;
+
+ let mut ctors: SmallVec<[_; 1]> = def
+ .variants()
+ .iter_enumerated()
+ .filter(|(_, v)| {
+ // If `exhaustive_patterns` is enabled, we exclude variants known to be
+ // uninhabited.
+ let is_uninhabited = is_exhaustive_pat_feature
+ && v.uninhabited_from(cx.tcx, substs, def.adt_kind(), cx.param_env)
+ .contains(cx.tcx, cx.module);
+ !is_uninhabited
+ })
+ .map(|(idx, _)| Variant(idx))
+ .collect();
+
+ if is_secretly_empty || is_declared_nonexhaustive {
+ ctors.push(NonExhaustive);
+ }
+ ctors
+ }
+ ty::Char => {
+ smallvec![
+ // The valid Unicode Scalar Value ranges.
+ make_range('\u{0000}' as u128, '\u{D7FF}' as u128),
+ make_range('\u{E000}' as u128, '\u{10FFFF}' as u128),
+ ]
+ }
+ ty::Int(_) | ty::Uint(_)
+ if pcx.ty.is_ptr_sized_integral()
+ && !cx.tcx.features().precise_pointer_size_matching =>
+ {
+ // `usize`/`isize` are not allowed to be matched exhaustively unless the
+ // `precise_pointer_size_matching` feature is enabled. So we treat those types like
+ // `#[non_exhaustive]` enums by returning a special unmatchable constructor.
+ smallvec![NonExhaustive]
+ }
+ &ty::Int(ity) => {
+ let bits = Integer::from_int_ty(&cx.tcx, ity).size().bits() as u128;
+ let min = 1u128 << (bits - 1);
+ let max = min - 1;
+ smallvec![make_range(min, max)]
+ }
+ &ty::Uint(uty) => {
+ let size = Integer::from_uint_ty(&cx.tcx, uty).size();
+ let max = size.truncate(u128::MAX);
+ smallvec![make_range(0, max)]
+ }
+ // If `exhaustive_patterns` is disabled and our scrutinee is the never type, we cannot
+ // expose its emptiness. The exception is if the pattern is at the top level, because we
+ // want empty matches to be considered exhaustive.
+ ty::Never if !cx.tcx.features().exhaustive_patterns && !pcx.is_top_level => {
+ smallvec![NonExhaustive]
+ }
+ ty::Never => smallvec![],
+ _ if cx.is_uninhabited(pcx.ty) => smallvec![],
+ ty::Adt(..) | ty::Tuple(..) | ty::Ref(..) => smallvec![Single],
+ // This type is one for which we cannot list constructors, like `str` or `f64`.
+ _ => smallvec![NonExhaustive],
+ };
+
+ SplitWildcard { matrix_ctors: Vec::new(), all_ctors }
+ }
+
+ /// Pass a set of constructors relative to which to split this one. Don't call twice, it won't
+ /// do what you want.
+ pub(super) fn split<'a>(
+ &mut self,
+ pcx: &PatCtxt<'_, '_, 'tcx>,
+ ctors: impl Iterator<Item = &'a Constructor<'tcx>> + Clone,
+ ) where
+ 'tcx: 'a,
+ {
+ // Since `all_ctors` never contains wildcards, this won't recurse further.
+ self.all_ctors =
+ self.all_ctors.iter().flat_map(|ctor| ctor.split(pcx, ctors.clone())).collect();
+ self.matrix_ctors = ctors.filter(|c| !c.is_wildcard()).cloned().collect();
+ }
+
+ /// Whether there are any value constructors for this type that are not present in the matrix.
+ fn any_missing(&self, pcx: &PatCtxt<'_, '_, 'tcx>) -> bool {
+ self.iter_missing(pcx).next().is_some()
+ }
+
+ /// Iterate over the constructors for this type that are not present in the matrix.
+ pub(super) fn iter_missing<'a, 'p>(
+ &'a self,
+ pcx: &'a PatCtxt<'a, 'p, 'tcx>,
+ ) -> impl Iterator<Item = &'a Constructor<'tcx>> + Captures<'p> {
+ self.all_ctors.iter().filter(move |ctor| !ctor.is_covered_by_any(pcx, &self.matrix_ctors))
+ }
+
+ /// Return the set of constructors resulting from splitting the wildcard. As explained at the
+ /// top of the file, if any constructors are missing we can ignore the present ones.
+ fn into_ctors(self, pcx: &PatCtxt<'_, '_, 'tcx>) -> SmallVec<[Constructor<'tcx>; 1]> {
+ if self.any_missing(pcx) {
+ // Some constructors are missing, thus we can specialize with the special `Missing`
+ // constructor, which stands for those constructors that are not seen in the matrix,
+ // and matches the same rows as any of them (namely the wildcard rows). See the top of
+ // the file for details.
+ // However, when all constructors are missing we can also specialize with the full
+ // `Wildcard` constructor. The difference will depend on what we want in diagnostics.
+
+ // If some constructors are missing, we typically want to report those constructors,
+ // e.g.:
+ // ```
+ // enum Direction { N, S, E, W }
+ // let Direction::N = ...;
+ // ```
+ // we can report 3 witnesses: `S`, `E`, and `W`.
+ //
+ // However, if the user didn't actually specify a constructor
+ // in this arm, e.g., in
+ // ```
+ // let x: (Direction, Direction, bool) = ...;
+ // let (_, _, false) = x;
+ // ```
+ // we don't want to show all 16 possible witnesses `(<direction-1>, <direction-2>,
+ // true)` - we are satisfied with `(_, _, true)`. So if all constructors are missing we
+ // prefer to report just a wildcard `_`.
+ //
+ // The exception is: if we are at the top-level, for example in an empty match, we
+ // sometimes prefer reporting the list of constructors instead of just `_`.
+ let report_when_all_missing = pcx.is_top_level && !IntRange::is_integral(pcx.ty);
+ let ctor = if !self.matrix_ctors.is_empty() || report_when_all_missing {
+ if pcx.is_non_exhaustive {
+ Missing {
+ nonexhaustive_enum_missing_real_variants: self
+ .iter_missing(pcx)
+ .any(|c| !(c.is_non_exhaustive() || c.is_unstable_variant(pcx))),
+ }
+ } else {
+ Missing { nonexhaustive_enum_missing_real_variants: false }
+ }
+ } else {
+ Wildcard
+ };
+ return smallvec![ctor];
+ }
+
+ // All the constructors are present in the matrix, so we just go through them all.
+ self.all_ctors
+ }
+}
+
+/// A value can be decomposed into a constructor applied to some fields. This struct represents
+/// those fields, generalized to allow patterns in each field. See also `Constructor`.
+///
+/// This is constructed for a constructor using [`Fields::wildcards()`]. The idea is that
+/// [`Fields::wildcards()`] constructs a list of fields where all entries are wildcards, and then
+/// given a pattern we fill some of the fields with its subpatterns.
+/// In the following example `Fields::wildcards` returns `[_, _, _, _]`. Then in
+/// `extract_pattern_arguments` we fill some of the entries, and the result is
+/// `[Some(0), _, _, _]`.
+/// ```compile_fail,E0004
+/// # fn foo() -> [Option<u8>; 4] { [None; 4] }
+/// let x: [Option<u8>; 4] = foo();
+/// match x {
+/// [Some(0), ..] => {}
+/// }
+/// ```
+///
+/// Note that the number of fields of a constructor may not match the fields declared in the
+/// original struct/variant. This happens if a private or `non_exhaustive` field is uninhabited,
+/// because the code mustn't observe that it is uninhabited. In that case that field is not
+/// included in `fields`. For that reason, when you have a `mir::Field` you must use
+/// `index_with_declared_idx`.
+#[derive(Debug, Clone, Copy)]
+pub(super) struct Fields<'p, 'tcx> {
+ fields: &'p [DeconstructedPat<'p, 'tcx>],
+}
+
+impl<'p, 'tcx> Fields<'p, 'tcx> {
+ fn empty() -> Self {
+ Fields { fields: &[] }
+ }
+
+ fn singleton(cx: &MatchCheckCtxt<'p, 'tcx>, field: DeconstructedPat<'p, 'tcx>) -> Self {
+ let field: &_ = cx.pattern_arena.alloc(field);
+ Fields { fields: std::slice::from_ref(field) }
+ }
+
+ pub(super) fn from_iter(
+ cx: &MatchCheckCtxt<'p, 'tcx>,
+ fields: impl IntoIterator<Item = DeconstructedPat<'p, 'tcx>>,
+ ) -> Self {
+ let fields: &[_] = cx.pattern_arena.alloc_from_iter(fields);
+ Fields { fields }
+ }
+
+ fn wildcards_from_tys(
+ cx: &MatchCheckCtxt<'p, 'tcx>,
+ tys: impl IntoIterator<Item = Ty<'tcx>>,
+ ) -> Self {
+ Fields::from_iter(cx, tys.into_iter().map(DeconstructedPat::wildcard))
+ }
+
+ // In the cases of either a `#[non_exhaustive]` field list or a non-public field, we hide
+ // uninhabited fields in order not to reveal the uninhabitedness of the whole variant.
+ // This lists the fields we keep along with their types.
+ fn list_variant_nonhidden_fields<'a>(
+ cx: &'a MatchCheckCtxt<'p, 'tcx>,
+ ty: Ty<'tcx>,
+ variant: &'a VariantDef,
+ ) -> impl Iterator<Item = (Field, Ty<'tcx>)> + Captures<'a> + Captures<'p> {
+ let ty::Adt(adt, substs) = ty.kind() else { bug!() };
+ // Whether we must not match the fields of this variant exhaustively.
+ let is_non_exhaustive = variant.is_field_list_non_exhaustive() && !adt.did().is_local();
+
+ variant.fields.iter().enumerate().filter_map(move |(i, field)| {
+ let ty = field.ty(cx.tcx, substs);
+ // `field.ty()` doesn't normalize after substituting.
+ let ty = cx.tcx.normalize_erasing_regions(cx.param_env, ty);
+ let is_visible = adt.is_enum() || field.vis.is_accessible_from(cx.module, cx.tcx);
+ let is_uninhabited = cx.is_uninhabited(ty);
+
+ if is_uninhabited && (!is_visible || is_non_exhaustive) {
+ None
+ } else {
+ Some((Field::new(i), ty))
+ }
+ })
+ }
+
+ /// Creates a new list of wildcard fields for a given constructor. The result must have a
+ /// length of `constructor.arity()`.
+ #[instrument(level = "trace")]
+ pub(super) fn wildcards(pcx: &PatCtxt<'_, 'p, 'tcx>, constructor: &Constructor<'tcx>) -> Self {
+ let ret = match constructor {
+ Single | Variant(_) => match pcx.ty.kind() {
+ ty::Tuple(fs) => Fields::wildcards_from_tys(pcx.cx, fs.iter()),
+ ty::Ref(_, rty, _) => Fields::wildcards_from_tys(pcx.cx, once(*rty)),
+ ty::Adt(adt, substs) => {
+ if adt.is_box() {
+ // The only legal patterns of type `Box` (outside `std`) are `_` and box
+ // patterns. If we're here we can assume this is a box pattern.
+ Fields::wildcards_from_tys(pcx.cx, once(substs.type_at(0)))
+ } else {
+ let variant = &adt.variant(constructor.variant_index_for_adt(*adt));
+ let tys = Fields::list_variant_nonhidden_fields(pcx.cx, pcx.ty, variant)
+ .map(|(_, ty)| ty);
+ Fields::wildcards_from_tys(pcx.cx, tys)
+ }
+ }
+ _ => bug!("Unexpected type for `Single` constructor: {:?}", pcx),
+ },
+ Slice(slice) => match *pcx.ty.kind() {
+ ty::Slice(ty) | ty::Array(ty, _) => {
+ let arity = slice.arity();
+ Fields::wildcards_from_tys(pcx.cx, (0..arity).map(|_| ty))
+ }
+ _ => bug!("bad slice pattern {:?} {:?}", constructor, pcx),
+ },
+ Str(..)
+ | FloatRange(..)
+ | IntRange(..)
+ | NonExhaustive
+ | Opaque
+ | Missing { .. }
+ | Wildcard => Fields::empty(),
+ Or => {
+ bug!("called `Fields::wildcards` on an `Or` ctor")
+ }
+ };
+ debug!(?ret);
+ ret
+ }
+
+ /// Returns the list of patterns.
+ pub(super) fn iter_patterns<'a>(
+ &'a self,
+ ) -> impl Iterator<Item = &'p DeconstructedPat<'p, 'tcx>> + Captures<'a> {
+ self.fields.iter()
+ }
+}
+
+/// Values and patterns can be represented as a constructor applied to some fields. This represents
+/// a pattern in this form.
+/// This also keeps track of whether the pattern has been found reachable during analysis. For this
+/// reason we should be careful not to clone patterns whose reachability we still care about. Use
+/// `clone_and_forget_reachability` if you're sure.
+pub(crate) struct DeconstructedPat<'p, 'tcx> {
+ ctor: Constructor<'tcx>,
+ fields: Fields<'p, 'tcx>,
+ ty: Ty<'tcx>,
+ span: Span,
+ reachable: Cell<bool>,
+}
+
+impl<'p, 'tcx> DeconstructedPat<'p, 'tcx> {
+ pub(super) fn wildcard(ty: Ty<'tcx>) -> Self {
+ Self::new(Wildcard, Fields::empty(), ty, DUMMY_SP)
+ }
+
+ pub(super) fn new(
+ ctor: Constructor<'tcx>,
+ fields: Fields<'p, 'tcx>,
+ ty: Ty<'tcx>,
+ span: Span,
+ ) -> Self {
+ DeconstructedPat { ctor, fields, ty, span, reachable: Cell::new(false) }
+ }
+
+ /// Construct a pattern that matches everything that starts with this constructor.
+ /// For example, if `ctor` is a `Constructor::Variant` for `Option::Some`, we get the pattern
+ /// `Some(_)`.
+ pub(super) fn wild_from_ctor(pcx: &PatCtxt<'_, 'p, 'tcx>, ctor: Constructor<'tcx>) -> Self {
+ let fields = Fields::wildcards(pcx, &ctor);
+ DeconstructedPat::new(ctor, fields, pcx.ty, DUMMY_SP)
+ }
+
+ /// Clone this value. This method emphasizes that cloning loses reachability information and
+ /// should be done carefully.
+ pub(super) fn clone_and_forget_reachability(&self) -> Self {
+ DeconstructedPat::new(self.ctor.clone(), self.fields, self.ty, self.span)
+ }
+
+ pub(crate) fn from_pat(cx: &MatchCheckCtxt<'p, 'tcx>, pat: &Pat<'tcx>) -> Self {
+ let mkpat = |pat| DeconstructedPat::from_pat(cx, pat);
+ let ctor;
+ let fields;
+ match pat.kind.as_ref() {
+ PatKind::AscribeUserType { subpattern, .. } => return mkpat(subpattern),
+ PatKind::Binding { subpattern: Some(subpat), .. } => return mkpat(subpat),
+ PatKind::Binding { subpattern: None, .. } | PatKind::Wild => {
+ ctor = Wildcard;
+ fields = Fields::empty();
+ }
+ PatKind::Deref { subpattern } => {
+ ctor = Single;
+ fields = Fields::singleton(cx, mkpat(subpattern));
+ }
+ PatKind::Leaf { subpatterns } | PatKind::Variant { subpatterns, .. } => {
+ match pat.ty.kind() {
+ ty::Tuple(fs) => {
+ ctor = Single;
+ let mut wilds: SmallVec<[_; 2]> =
+ fs.iter().map(DeconstructedPat::wildcard).collect();
+ for pat in subpatterns {
+ wilds[pat.field.index()] = mkpat(&pat.pattern);
+ }
+ fields = Fields::from_iter(cx, wilds);
+ }
+ ty::Adt(adt, substs) if adt.is_box() => {
+ // The only legal patterns of type `Box` (outside `std`) are `_` and box
+ // patterns. If we're here we can assume this is a box pattern.
+ // FIXME(Nadrieril): A `Box` can in theory be matched either with `Box(_,
+ // _)` or a box pattern. As a hack to avoid an ICE with the former, we
+ // ignore other fields than the first one. This will trigger an error later
+ // anyway.
+ // See https://github.com/rust-lang/rust/issues/82772 ,
+ // explanation: https://github.com/rust-lang/rust/pull/82789#issuecomment-796921977
+ // The problem is that we can't know from the type whether we'll match
+ // normally or through box-patterns. We'll have to figure out a proper
+ // solution when we introduce generalized deref patterns. Also need to
+ // prevent mixing of those two options.
+ let pat = subpatterns.into_iter().find(|pat| pat.field.index() == 0);
+ let pat = if let Some(pat) = pat {
+ mkpat(&pat.pattern)
+ } else {
+ DeconstructedPat::wildcard(substs.type_at(0))
+ };
+ ctor = Single;
+ fields = Fields::singleton(cx, pat);
+ }
+ ty::Adt(adt, _) => {
+ ctor = match pat.kind.as_ref() {
+ PatKind::Leaf { .. } => Single,
+ PatKind::Variant { variant_index, .. } => Variant(*variant_index),
+ _ => bug!(),
+ };
+ let variant = &adt.variant(ctor.variant_index_for_adt(*adt));
+ // For each field in the variant, we store the relevant index into `self.fields` if any.
+ let mut field_id_to_id: Vec<Option<usize>> =
+ (0..variant.fields.len()).map(|_| None).collect();
+ let tys = Fields::list_variant_nonhidden_fields(cx, pat.ty, variant)
+ .enumerate()
+ .map(|(i, (field, ty))| {
+ field_id_to_id[field.index()] = Some(i);
+ ty
+ });
+ let mut wilds: SmallVec<[_; 2]> =
+ tys.map(DeconstructedPat::wildcard).collect();
+ for pat in subpatterns {
+ if let Some(i) = field_id_to_id[pat.field.index()] {
+ wilds[i] = mkpat(&pat.pattern);
+ }
+ }
+ fields = Fields::from_iter(cx, wilds);
+ }
+ _ => bug!("pattern has unexpected type: pat: {:?}, ty: {:?}", pat, pat.ty),
+ }
+ }
+ PatKind::Constant { value } => {
+ if let Some(int_range) = IntRange::from_constant(cx.tcx, cx.param_env, *value) {
+ ctor = IntRange(int_range);
+ fields = Fields::empty();
+ } else {
+ match pat.ty.kind() {
+ ty::Float(_) => {
+ ctor = FloatRange(*value, *value, RangeEnd::Included);
+ fields = Fields::empty();
+ }
+ ty::Ref(_, t, _) if t.is_str() => {
+ // We want a `&str` constant to behave like a `Deref` pattern, to be compatible
+ // with other `Deref` patterns. This could have been done in `const_to_pat`,
+ // but that causes issues with the rest of the matching code.
+ // So here, the constructor for a `"foo"` pattern is `&` (represented by
+ // `Single`), and has one field. That field has constructor `Str(value)` and no
+ // fields.
+ // Note: `t` is `str`, not `&str`.
+ let subpattern =
+ DeconstructedPat::new(Str(*value), Fields::empty(), *t, pat.span);
+ ctor = Single;
+ fields = Fields::singleton(cx, subpattern)
+ }
+ // All constants that can be structurally matched have already been expanded
+ // into the corresponding `Pat`s by `const_to_pat`. Constants that remain are
+ // opaque.
+ _ => {
+ ctor = Opaque;
+ fields = Fields::empty();
+ }
+ }
+ }
+ }
+ &PatKind::Range(PatRange { lo, hi, end }) => {
+ let ty = lo.ty();
+ ctor = if let Some(int_range) = IntRange::from_range(
+ cx.tcx,
+ lo.eval_bits(cx.tcx, cx.param_env, lo.ty()),
+ hi.eval_bits(cx.tcx, cx.param_env, hi.ty()),
+ ty,
+ &end,
+ ) {
+ IntRange(int_range)
+ } else {
+ FloatRange(lo, hi, end)
+ };
+ fields = Fields::empty();
+ }
+ PatKind::Array { prefix, slice, suffix } | PatKind::Slice { prefix, slice, suffix } => {
+ let array_len = match pat.ty.kind() {
+ ty::Array(_, length) => Some(length.eval_usize(cx.tcx, cx.param_env) as usize),
+ ty::Slice(_) => None,
+ _ => span_bug!(pat.span, "bad ty {:?} for slice pattern", pat.ty),
+ };
+ let kind = if slice.is_some() {
+ VarLen(prefix.len(), suffix.len())
+ } else {
+ FixedLen(prefix.len() + suffix.len())
+ };
+ ctor = Slice(Slice::new(array_len, kind));
+ fields = Fields::from_iter(cx, prefix.iter().chain(suffix).map(mkpat));
+ }
+ PatKind::Or { .. } => {
+ ctor = Or;
+ let pats = expand_or_pat(pat);
+ fields = Fields::from_iter(cx, pats.into_iter().map(mkpat));
+ }
+ }
+ DeconstructedPat::new(ctor, fields, pat.ty, pat.span)
+ }
+
+ pub(crate) fn to_pat(&self, cx: &MatchCheckCtxt<'p, 'tcx>) -> Pat<'tcx> {
+ let is_wildcard = |pat: &Pat<'_>| {
+ matches!(*pat.kind, PatKind::Binding { subpattern: None, .. } | PatKind::Wild)
+ };
+ let mut subpatterns = self.iter_fields().map(|p| p.to_pat(cx));
+ let pat = match &self.ctor {
+ Single | Variant(_) => match self.ty.kind() {
+ ty::Tuple(..) => PatKind::Leaf {
+ subpatterns: subpatterns
+ .enumerate()
+ .map(|(i, p)| FieldPat { field: Field::new(i), pattern: p })
+ .collect(),
+ },
+ ty::Adt(adt_def, _) if adt_def.is_box() => {
+ // Without `box_patterns`, the only legal pattern of type `Box` is `_` (outside
+ // of `std`). So this branch is only reachable when the feature is enabled and
+ // the pattern is a box pattern.
+ PatKind::Deref { subpattern: subpatterns.next().unwrap() }
+ }
+ ty::Adt(adt_def, substs) => {
+ let variant_index = self.ctor.variant_index_for_adt(*adt_def);
+ let variant = &adt_def.variant(variant_index);
+ let subpatterns = Fields::list_variant_nonhidden_fields(cx, self.ty, variant)
+ .zip(subpatterns)
+ .map(|((field, _ty), pattern)| FieldPat { field, pattern })
+ .collect();
+
+ if adt_def.is_enum() {
+ PatKind::Variant { adt_def: *adt_def, substs, variant_index, subpatterns }
+ } else {
+ PatKind::Leaf { subpatterns }
+ }
+ }
+ // Note: given the expansion of `&str` patterns done in `expand_pattern`, we should
+ // be careful to reconstruct the correct constant pattern here. However a string
+ // literal pattern will never be reported as a non-exhaustiveness witness, so we
+ // ignore this issue.
+ ty::Ref(..) => PatKind::Deref { subpattern: subpatterns.next().unwrap() },
+ _ => bug!("unexpected ctor for type {:?} {:?}", self.ctor, self.ty),
+ },
+ Slice(slice) => {
+ match slice.kind {
+ FixedLen(_) => PatKind::Slice {
+ prefix: subpatterns.collect(),
+ slice: None,
+ suffix: vec![],
+ },
+ VarLen(prefix, _) => {
+ let mut subpatterns = subpatterns.peekable();
+ let mut prefix: Vec<_> = subpatterns.by_ref().take(prefix).collect();
+ if slice.array_len.is_some() {
+ // Improves diagnostics a bit: if the type is a known-size array, instead
+ // of reporting `[x, _, .., _, y]`, we prefer to report `[x, .., y]`.
+ // This is incorrect if the size is not known, since `[_, ..]` captures
+ // arrays of lengths `>= 1` whereas `[..]` captures any length.
+ while !prefix.is_empty() && is_wildcard(prefix.last().unwrap()) {
+ prefix.pop();
+ }
+ while subpatterns.peek().is_some()
+ && is_wildcard(subpatterns.peek().unwrap())
+ {
+ subpatterns.next();
+ }
+ }
+ let suffix: Vec<_> = subpatterns.collect();
+ let wild = Pat::wildcard_from_ty(self.ty);
+ PatKind::Slice { prefix, slice: Some(wild), suffix }
+ }
+ }
+ }
+ &Str(value) => PatKind::Constant { value },
+ &FloatRange(lo, hi, end) => PatKind::Range(PatRange { lo, hi, end }),
+ IntRange(range) => return range.to_pat(cx.tcx, self.ty),
+ Wildcard | NonExhaustive => PatKind::Wild,
+ Missing { .. } => bug!(
+ "trying to convert a `Missing` constructor into a `Pat`; this is probably a bug,
+ `Missing` should have been processed in `apply_constructors`"
+ ),
+ Opaque | Or => {
+ bug!("can't convert to pattern: {:?}", self)
+ }
+ };
+
+ Pat { ty: self.ty, span: DUMMY_SP, kind: Box::new(pat) }
+ }
+
+ pub(super) fn is_or_pat(&self) -> bool {
+ matches!(self.ctor, Or)
+ }
+
+ pub(super) fn ctor(&self) -> &Constructor<'tcx> {
+ &self.ctor
+ }
+ pub(super) fn ty(&self) -> Ty<'tcx> {
+ self.ty
+ }
+ pub(super) fn span(&self) -> Span {
+ self.span
+ }
+
+ pub(super) fn iter_fields<'a>(
+ &'a self,
+ ) -> impl Iterator<Item = &'p DeconstructedPat<'p, 'tcx>> + Captures<'a> {
+ self.fields.iter_patterns()
+ }
+
+ /// Specialize this pattern with a constructor.
+ /// `other_ctor` can be different from `self.ctor`, but must be covered by it.
+ pub(super) fn specialize<'a>(
+ &'a self,
+ pcx: &PatCtxt<'_, 'p, 'tcx>,
+ other_ctor: &Constructor<'tcx>,
+ ) -> SmallVec<[&'p DeconstructedPat<'p, 'tcx>; 2]> {
+ match (&self.ctor, other_ctor) {
+ (Wildcard, _) => {
+ // We return a wildcard for each field of `other_ctor`.
+ Fields::wildcards(pcx, other_ctor).iter_patterns().collect()
+ }
+ (Slice(self_slice), Slice(other_slice))
+ if self_slice.arity() != other_slice.arity() =>
+ {
+ // The only tricky case: two slices of different arity. Since `self_slice` covers
+ // `other_slice`, `self_slice` must be `VarLen`, i.e. of the form
+ // `[prefix, .., suffix]`. Moreover `other_slice` is guaranteed to have a larger
+ // arity. So we fill the middle part with enough wildcards to reach the length of
+ // the new, larger slice.
+ match self_slice.kind {
+ FixedLen(_) => bug!("{:?} doesn't cover {:?}", self_slice, other_slice),
+ VarLen(prefix, suffix) => {
+ let (ty::Slice(inner_ty) | ty::Array(inner_ty, _)) = *self.ty.kind() else {
+ bug!("bad slice pattern {:?} {:?}", self.ctor, self.ty);
+ };
+ let prefix = &self.fields.fields[..prefix];
+ let suffix = &self.fields.fields[self_slice.arity() - suffix..];
+ let wildcard: &_ =
+ pcx.cx.pattern_arena.alloc(DeconstructedPat::wildcard(inner_ty));
+ let extra_wildcards = other_slice.arity() - self_slice.arity();
+ let extra_wildcards = (0..extra_wildcards).map(|_| wildcard);
+ prefix.iter().chain(extra_wildcards).chain(suffix).collect()
+ }
+ }
+ }
+ _ => self.fields.iter_patterns().collect(),
+ }
+ }
+
+ /// We keep track for each pattern if it was ever reachable during the analysis. This is used
+ /// with `unreachable_spans` to report unreachable subpatterns arising from or-patterns.
+ pub(super) fn set_reachable(&self) {
+ self.reachable.set(true)
+ }
+ pub(super) fn is_reachable(&self) -> bool {
+ self.reachable.get()
+ }
+
+ /// Report the spans of subpatterns that were not reachable, if any.
+ pub(super) fn unreachable_spans(&self) -> Vec<Span> {
+ let mut spans = Vec::new();
+ self.collect_unreachable_spans(&mut spans);
+ spans
+ }
+
+ fn collect_unreachable_spans(&self, spans: &mut Vec<Span>) {
+ // We don't look at subpatterns if we already reported the whole pattern as unreachable.
+ if !self.is_reachable() {
+ spans.push(self.span);
+ } else {
+ for p in self.iter_fields() {
+ p.collect_unreachable_spans(spans);
+ }
+ }
+ }
+}
+
+/// This is mostly copied from the `Pat` impl. This is best effort and not good enough for a
+/// `Display` impl.
+impl<'p, 'tcx> fmt::Debug for DeconstructedPat<'p, 'tcx> {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ // Printing lists is a chore.
+ let mut first = true;
+ let mut start_or_continue = |s| {
+ if first {
+ first = false;
+ ""
+ } else {
+ s
+ }
+ };
+ let mut start_or_comma = || start_or_continue(", ");
+
+ match &self.ctor {
+ Single | Variant(_) => match self.ty.kind() {
+ ty::Adt(def, _) if def.is_box() => {
+ // Without `box_patterns`, the only legal pattern of type `Box` is `_` (outside
+ // of `std`). So this branch is only reachable when the feature is enabled and
+ // the pattern is a box pattern.
+ let subpattern = self.iter_fields().next().unwrap();
+ write!(f, "box {:?}", subpattern)
+ }
+ ty::Adt(..) | ty::Tuple(..) => {
+ let variant = match self.ty.kind() {
+ ty::Adt(adt, _) => Some(adt.variant(self.ctor.variant_index_for_adt(*adt))),
+ ty::Tuple(_) => None,
+ _ => unreachable!(),
+ };
+
+ if let Some(variant) = variant {
+ write!(f, "{}", variant.name)?;
+ }
+
+ // Without `cx`, we can't know which field corresponds to which, so we can't
+ // get the names of the fields. Instead we just display everything as a tuple
+ // struct, which should be good enough.
+ write!(f, "(")?;
+ for p in self.iter_fields() {
+ write!(f, "{}", start_or_comma())?;
+ write!(f, "{:?}", p)?;
+ }
+ write!(f, ")")
+ }
+ // Note: given the expansion of `&str` patterns done in `expand_pattern`, we should
+ // be careful to detect strings here. However a string literal pattern will never
+ // be reported as a non-exhaustiveness witness, so we can ignore this issue.
+ ty::Ref(_, _, mutbl) => {
+ let subpattern = self.iter_fields().next().unwrap();
+ write!(f, "&{}{:?}", mutbl.prefix_str(), subpattern)
+ }
+ _ => write!(f, "_"),
+ },
+ Slice(slice) => {
+ let mut subpatterns = self.fields.iter_patterns();
+ write!(f, "[")?;
+ match slice.kind {
+ FixedLen(_) => {
+ for p in subpatterns {
+ write!(f, "{}{:?}", start_or_comma(), p)?;
+ }
+ }
+ VarLen(prefix_len, _) => {
+ for p in subpatterns.by_ref().take(prefix_len) {
+ write!(f, "{}{:?}", start_or_comma(), p)?;
+ }
+ write!(f, "{}", start_or_comma())?;
+ write!(f, "..")?;
+ for p in subpatterns {
+ write!(f, "{}{:?}", start_or_comma(), p)?;
+ }
+ }
+ }
+ write!(f, "]")
+ }
+ &FloatRange(lo, hi, end) => {
+ write!(f, "{}", lo)?;
+ write!(f, "{}", end)?;
+ write!(f, "{}", hi)
+ }
+ IntRange(range) => write!(f, "{:?}", range), // Best-effort, will render e.g. `false` as `0..=0`
+ Wildcard | Missing { .. } | NonExhaustive => write!(f, "_ : {:?}", self.ty),
+ Or => {
+ for pat in self.iter_fields() {
+ write!(f, "{}{:?}", start_or_continue(" | "), pat)?;
+ }
+ Ok(())
+ }
+ Str(value) => write!(f, "{}", value),
+ Opaque => write!(f, "<constant pattern>"),
+ }
+ }
+}
diff --git a/compiler/rustc_mir_build/src/thir/pattern/mod.rs b/compiler/rustc_mir_build/src/thir/pattern/mod.rs
new file mode 100644
index 000000000..a13748a2d
--- /dev/null
+++ b/compiler/rustc_mir_build/src/thir/pattern/mod.rs
@@ -0,0 +1,802 @@
+//! Validation of patterns/matches.
+
+mod check_match;
+mod const_to_pat;
+mod deconstruct_pat;
+mod usefulness;
+
+pub(crate) use self::check_match::check_match;
+
+use crate::thir::util::UserAnnotatedTyHelpers;
+
+use rustc_errors::struct_span_err;
+use rustc_hir as hir;
+use rustc_hir::def::{CtorOf, DefKind, Res};
+use rustc_hir::pat_util::EnumerateAndAdjustIterator;
+use rustc_hir::RangeEnd;
+use rustc_index::vec::Idx;
+use rustc_middle::mir::interpret::{
+ ConstValue, ErrorHandled, LitToConstError, LitToConstInput, Scalar,
+};
+use rustc_middle::mir::{self, UserTypeProjection};
+use rustc_middle::mir::{BorrowKind, Field, Mutability};
+use rustc_middle::thir::{Ascription, BindingMode, FieldPat, LocalVarId, Pat, PatKind, PatRange};
+use rustc_middle::ty::subst::{GenericArg, SubstsRef};
+use rustc_middle::ty::CanonicalUserTypeAnnotation;
+use rustc_middle::ty::{self, AdtDef, ConstKind, DefIdTree, Region, Ty, TyCtxt, UserType};
+use rustc_span::{Span, Symbol};
+
+use std::cmp::Ordering;
+
+#[derive(Clone, Debug)]
+pub(crate) enum PatternError {
+ AssocConstInPattern(Span),
+ ConstParamInPattern(Span),
+ StaticInPattern(Span),
+ NonConstPath(Span),
+}
+
+pub(crate) struct PatCtxt<'a, 'tcx> {
+ pub(crate) tcx: TyCtxt<'tcx>,
+ pub(crate) param_env: ty::ParamEnv<'tcx>,
+ pub(crate) typeck_results: &'a ty::TypeckResults<'tcx>,
+ pub(crate) errors: Vec<PatternError>,
+ include_lint_checks: bool,
+}
+
+pub(crate) fn pat_from_hir<'a, 'tcx>(
+ tcx: TyCtxt<'tcx>,
+ param_env: ty::ParamEnv<'tcx>,
+ typeck_results: &'a ty::TypeckResults<'tcx>,
+ pat: &'tcx hir::Pat<'tcx>,
+) -> Pat<'tcx> {
+ let mut pcx = PatCtxt::new(tcx, param_env, typeck_results);
+ let result = pcx.lower_pattern(pat);
+ if !pcx.errors.is_empty() {
+ let msg = format!("encountered errors lowering pattern: {:?}", pcx.errors);
+ tcx.sess.delay_span_bug(pat.span, &msg);
+ }
+ debug!("pat_from_hir({:?}) = {:?}", pat, result);
+ result
+}
+
+impl<'a, 'tcx> PatCtxt<'a, 'tcx> {
+ pub(crate) fn new(
+ tcx: TyCtxt<'tcx>,
+ param_env: ty::ParamEnv<'tcx>,
+ typeck_results: &'a ty::TypeckResults<'tcx>,
+ ) -> Self {
+ PatCtxt { tcx, param_env, typeck_results, errors: vec![], include_lint_checks: false }
+ }
+
+ pub(crate) fn include_lint_checks(&mut self) -> &mut Self {
+ self.include_lint_checks = true;
+ self
+ }
+
+ pub(crate) fn lower_pattern(&mut self, pat: &'tcx hir::Pat<'tcx>) -> Pat<'tcx> {
+ // When implicit dereferences have been inserted in this pattern, the unadjusted lowered
+ // pattern has the type that results *after* dereferencing. For example, in this code:
+ //
+ // ```
+ // match &&Some(0i32) {
+ // Some(n) => { ... },
+ // _ => { ... },
+ // }
+ // ```
+ //
+ // the type assigned to `Some(n)` in `unadjusted_pat` would be `Option<i32>` (this is
+ // determined in rustc_typeck::check::match). The adjustments would be
+ //
+ // `vec![&&Option<i32>, &Option<i32>]`.
+ //
+ // Applying the adjustments, we want to instead output `&&Some(n)` (as a THIR pattern). So
+ // we wrap the unadjusted pattern in `PatKind::Deref` repeatedly, consuming the
+ // adjustments in *reverse order* (last-in-first-out, so that the last `Deref` inserted
+ // gets the least-dereferenced type).
+ let unadjusted_pat = self.lower_pattern_unadjusted(pat);
+ self.typeck_results.pat_adjustments().get(pat.hir_id).unwrap_or(&vec![]).iter().rev().fold(
+ unadjusted_pat,
+ |pat, ref_ty| {
+ debug!("{:?}: wrapping pattern with type {:?}", pat, ref_ty);
+ Pat {
+ span: pat.span,
+ ty: *ref_ty,
+ kind: Box::new(PatKind::Deref { subpattern: pat }),
+ }
+ },
+ )
+ }
+
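+ /// Lower one endpoint of a range pattern. If the lowered literal carries a user-type
+ /// ascription (as happens when the endpoint is an associated constant), peel it off and return
+ /// it separately so the caller can re-attach it to the whole range pattern.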
+ fn lower_range_expr(
+ &mut self,
+ expr: &'tcx hir::Expr<'tcx>,
+ ) -> (PatKind<'tcx>, Option<Ascription<'tcx>>) {
+ match self.lower_lit(expr) {
+ PatKind::AscribeUserType { ascription, subpattern: Pat { kind: box kind, .. } } => {
+ (kind, Some(ascription))
+ }
+ kind => (kind, None),
+ }
+ }
+
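+ /// Build the `PatKind` for a range pattern from its two constant endpoints, emitting an error
+ /// and falling back to a wildcard when the range is empty.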
+ fn lower_pattern_range(
+ &mut self,
+ ty: Ty<'tcx>,
+ lo: mir::ConstantKind<'tcx>,
+ hi: mir::ConstantKind<'tcx>,
+ end: RangeEnd,
+ span: Span,
+ ) -> PatKind<'tcx> {
+ assert_eq!(lo.ty(), ty);
+ assert_eq!(hi.ty(), ty);
+ let cmp = compare_const_vals(self.tcx, lo, hi, self.param_env);
+ match (end, cmp) {
+ // `x..y` where `x < y`.
+ // Non-empty because the range includes at least `x`.
+ (RangeEnd::Excluded, Some(Ordering::Less)) => PatKind::Range(PatRange { lo, hi, end }),
+ // `x..y` where `x >= y`. The range is empty => error.
+ (RangeEnd::Excluded, _) => {
+ struct_span_err!(
+ self.tcx.sess,
+ span,
+ E0579,
+ "lower range bound must be less than upper"
+ )
+ .emit();
+ PatKind::Wild
+ }
+ // `x..=y` where `x == y`.
+ (RangeEnd::Included, Some(Ordering::Equal)) => PatKind::Constant { value: lo },
+ // `x..=y` where `x < y`.
+ (RangeEnd::Included, Some(Ordering::Less)) => PatKind::Range(PatRange { lo, hi, end }),
+ // `x..=y` where `x > y` hence the range is empty => error.
+ (RangeEnd::Included, _) => {
+ let mut err = struct_span_err!(
+ self.tcx.sess,
+ span,
+ E0030,
+ "lower range bound must be less than or equal to upper"
+ );
+ err.span_label(span, "lower bound larger than upper bound");
+ if self.tcx.sess.teach(&err.get_code().unwrap()) {
+ err.note(
+ "When matching against a range, the compiler \
+ verifies that the range is non-empty. Range \
+ patterns include both end-points, so this is \
+ equivalent to requiring the start of the range \
+ to be less than or equal to the end of the range.",
+ );
+ }
+ err.emit();
+ PatKind::Wild
+ }
+ }
+ }
+
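+ /// When one end of a range pattern is omitted (e.g. `..=5` or `3..`), fill it in with the
+ /// numeric minimum or maximum value of the type, so that both endpoints are available as
+ /// constants. Returns `None` if an endpoint is not a constant or the type has no numeric
+ /// bounds.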
+ fn normalize_range_pattern_ends(
+ &self,
+ ty: Ty<'tcx>,
+ lo: Option<&PatKind<'tcx>>,
+ hi: Option<&PatKind<'tcx>>,
+ ) -> Option<(mir::ConstantKind<'tcx>, mir::ConstantKind<'tcx>)> {
+ match (lo, hi) {
+ (Some(PatKind::Constant { value: lo }), Some(PatKind::Constant { value: hi })) => {
+ Some((*lo, *hi))
+ }
+ (Some(PatKind::Constant { value: lo }), None) => {
+ let hi = ty.numeric_max_val(self.tcx)?;
+ Some((*lo, mir::ConstantKind::from_const(hi, self.tcx)))
+ }
+ (None, Some(PatKind::Constant { value: hi })) => {
+ let lo = ty.numeric_min_val(self.tcx)?;
+ Some((mir::ConstantKind::from_const(lo, self.tcx), *hi))
+ }
+ _ => None,
+ }
+ }
+
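+ /// Lower a single pattern without applying the implicit deref adjustments; see
+ /// [`Self::lower_pattern`] for how those are layered on top.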
+ fn lower_pattern_unadjusted(&mut self, pat: &'tcx hir::Pat<'tcx>) -> Pat<'tcx> {
+ let mut ty = self.typeck_results.node_type(pat.hir_id);
+
+ let kind = match pat.kind {
+ hir::PatKind::Wild => PatKind::Wild,
+
+ hir::PatKind::Lit(value) => self.lower_lit(value),
+
+ hir::PatKind::Range(ref lo_expr, ref hi_expr, end) => {
+ let (lo_expr, hi_expr) = (lo_expr.as_deref(), hi_expr.as_deref());
+ let lo_span = lo_expr.map_or(pat.span, |e| e.span);
+ let lo = lo_expr.map(|e| self.lower_range_expr(e));
+ let hi = hi_expr.map(|e| self.lower_range_expr(e));
+
+ let (lp, hp) = (lo.as_ref().map(|x| &x.0), hi.as_ref().map(|x| &x.0));
+ let mut kind = match self.normalize_range_pattern_ends(ty, lp, hp) {
+ Some((lc, hc)) => self.lower_pattern_range(ty, lc, hc, end, lo_span),
+ None => {
+ let msg = &format!(
+ "found bad range pattern `{:?}` outside of error recovery",
+ (&lo, &hi),
+ );
+ self.tcx.sess.delay_span_bug(pat.span, msg);
+ PatKind::Wild
+ }
+ };
+
+ // If we are handling a range with associated constants (e.g.
+ // `Foo::<'a>::A..=Foo::B`), we need to put the ascriptions for the associated
+ // constants somewhere. Have them on the range pattern.
+ for end in &[lo, hi] {
+ if let Some((_, Some(ascription))) = end {
+ let subpattern = Pat { span: pat.span, ty, kind: Box::new(kind) };
+ kind =
+ PatKind::AscribeUserType { ascription: ascription.clone(), subpattern };
+ }
+ }
+
+ kind
+ }
+
+ hir::PatKind::Path(ref qpath) => {
+ return self.lower_path(qpath, pat.hir_id, pat.span);
+ }
+
+ hir::PatKind::Ref(ref subpattern, _) | hir::PatKind::Box(ref subpattern) => {
+ PatKind::Deref { subpattern: self.lower_pattern(subpattern) }
+ }
+
+ hir::PatKind::Slice(ref prefix, ref slice, ref suffix) => {
+ self.slice_or_array_pattern(pat.span, ty, prefix, slice, suffix)
+ }
+
+ hir::PatKind::Tuple(ref pats, ddpos) => {
+ let ty::Tuple(ref tys) = ty.kind() else {
+ span_bug!(pat.span, "unexpected type for tuple pattern: {:?}", ty);
+ };
+ let subpatterns = self.lower_tuple_subpats(pats, tys.len(), ddpos);
+ PatKind::Leaf { subpatterns }
+ }
+
+ hir::PatKind::Binding(_, id, ident, ref sub) => {
+ let bm = *self
+ .typeck_results
+ .pat_binding_modes()
+ .get(pat.hir_id)
+ .expect("missing binding mode");
+ let (mutability, mode) = match bm {
+ ty::BindByValue(mutbl) => (mutbl, BindingMode::ByValue),
+ ty::BindByReference(hir::Mutability::Mut) => (
+ Mutability::Not,
+ BindingMode::ByRef(BorrowKind::Mut { allow_two_phase_borrow: false }),
+ ),
+ ty::BindByReference(hir::Mutability::Not) => {
+ (Mutability::Not, BindingMode::ByRef(BorrowKind::Shared))
+ }
+ };
+
+ // A `ref x` pattern is the same node used for `x`, and as such it has
+ // `x`'s type, which is `&T`, where we want `T` (the type being matched).
+ let var_ty = ty;
+ if let ty::BindByReference(_) = bm {
+ if let ty::Ref(_, rty, _) = ty.kind() {
+ ty = *rty;
+ } else {
+ bug!("`ref {}` has wrong type {}", ident, ty);
+ }
+ };
+
+ PatKind::Binding {
+ mutability,
+ mode,
+ name: ident.name,
+ var: LocalVarId(id),
+ ty: var_ty,
+ subpattern: self.lower_opt_pattern(sub),
+ is_primary: id == pat.hir_id,
+ }
+ }
+
+ hir::PatKind::TupleStruct(ref qpath, ref pats, ddpos) => {
+ let res = self.typeck_results.qpath_res(qpath, pat.hir_id);
+ let ty::Adt(adt_def, _) = ty.kind() else {
+ span_bug!(pat.span, "tuple struct pattern not applied to an ADT {:?}", ty);
+ };
+ let variant_def = adt_def.variant_of_res(res);
+ let subpatterns = self.lower_tuple_subpats(pats, variant_def.fields.len(), ddpos);
+ self.lower_variant_or_leaf(res, pat.hir_id, pat.span, ty, subpatterns)
+ }
+
+ hir::PatKind::Struct(ref qpath, ref fields, _) => {
+ let res = self.typeck_results.qpath_res(qpath, pat.hir_id);
+ let subpatterns = fields
+ .iter()
+ .map(|field| FieldPat {
+ field: Field::new(self.tcx.field_index(field.hir_id, self.typeck_results)),
+ pattern: self.lower_pattern(&field.pat),
+ })
+ .collect();
+
+ self.lower_variant_or_leaf(res, pat.hir_id, pat.span, ty, subpatterns)
+ }
+
+ hir::PatKind::Or(ref pats) => PatKind::Or { pats: self.lower_patterns(pats) },
+ };
+
+ Pat { span: pat.span, ty, kind: Box::new(kind) }
+ }
+
+ fn lower_tuple_subpats(
+ &mut self,
+ pats: &'tcx [hir::Pat<'tcx>],
+ expected_len: usize,
+ gap_pos: Option<usize>,
+ ) -> Vec<FieldPat<'tcx>> {
+ pats.iter()
+ .enumerate_and_adjust(expected_len, gap_pos)
+ .map(|(i, subpattern)| FieldPat {
+ field: Field::new(i),
+ pattern: self.lower_pattern(subpattern),
+ })
+ .collect()
+ }
+
+ fn lower_patterns(&mut self, pats: &'tcx [hir::Pat<'tcx>]) -> Vec<Pat<'tcx>> {
+ pats.iter().map(|p| self.lower_pattern(p)).collect()
+ }
+
+ fn lower_opt_pattern(&mut self, pat: &'tcx Option<&'tcx hir::Pat<'tcx>>) -> Option<Pat<'tcx>> {
+ pat.as_ref().map(|p| self.lower_pattern(p))
+ }
+
+ fn slice_or_array_pattern(
+ &mut self,
+ span: Span,
+ ty: Ty<'tcx>,
+ prefix: &'tcx [hir::Pat<'tcx>],
+ slice: &'tcx Option<&'tcx hir::Pat<'tcx>>,
+ suffix: &'tcx [hir::Pat<'tcx>],
+ ) -> PatKind<'tcx> {
+ let prefix = self.lower_patterns(prefix);
+ let slice = self.lower_opt_pattern(slice);
+ let suffix = self.lower_patterns(suffix);
+ match ty.kind() {
+ // Matching a slice, `[T]`.
+ ty::Slice(..) => PatKind::Slice { prefix, slice, suffix },
+ // Fixed-length array, `[T; len]`.
+ ty::Array(_, len) => {
+ let len = len.eval_usize(self.tcx, self.param_env);
+ assert!(len >= prefix.len() as u64 + suffix.len() as u64);
+ PatKind::Array { prefix, slice, suffix }
+ }
+ _ => span_bug!(span, "bad slice pattern type {:?}", ty),
+ }
+ }
+
+ fn lower_variant_or_leaf(
+ &mut self,
+ res: Res,
+ hir_id: hir::HirId,
+ span: Span,
+ ty: Ty<'tcx>,
+ subpatterns: Vec<FieldPat<'tcx>>,
+ ) -> PatKind<'tcx> {
+ let res = match res {
+ Res::Def(DefKind::Ctor(CtorOf::Variant, ..), variant_ctor_id) => {
+ let variant_id = self.tcx.parent(variant_ctor_id);
+ Res::Def(DefKind::Variant, variant_id)
+ }
+ res => res,
+ };
+
+ let mut kind = match res {
+ Res::Def(DefKind::Variant, variant_id) => {
+ let enum_id = self.tcx.parent(variant_id);
+ let adt_def = self.tcx.adt_def(enum_id);
+ if adt_def.is_enum() {
+ let substs = match ty.kind() {
+ ty::Adt(_, substs) | ty::FnDef(_, substs) => substs,
+ ty::Error(_) => {
+ // Avoid ICE (#50585)
+ return PatKind::Wild;
+ }
+ _ => bug!("inappropriate type for def: {:?}", ty),
+ };
+ PatKind::Variant {
+ adt_def,
+ substs,
+ variant_index: adt_def.variant_index_with_id(variant_id),
+ subpatterns,
+ }
+ } else {
+ PatKind::Leaf { subpatterns }
+ }
+ }
+
+ Res::Def(
+ DefKind::Struct
+ | DefKind::Ctor(CtorOf::Struct, ..)
+ | DefKind::Union
+ | DefKind::TyAlias
+ | DefKind::AssocTy,
+ _,
+ )
+ | Res::SelfTy { .. }
+ | Res::SelfCtor(..) => PatKind::Leaf { subpatterns },
+ _ => {
+ let pattern_error = match res {
+ Res::Def(DefKind::ConstParam, _) => PatternError::ConstParamInPattern(span),
+ Res::Def(DefKind::Static(_), _) => PatternError::StaticInPattern(span),
+ _ => PatternError::NonConstPath(span),
+ };
+ self.errors.push(pattern_error);
+ PatKind::Wild
+ }
+ };
+
+ if let Some(user_ty) = self.user_substs_applied_to_ty_of_hir_id(hir_id) {
+ debug!("lower_variant_or_leaf: kind={:?} user_ty={:?} span={:?}", kind, user_ty, span);
+ let annotation = CanonicalUserTypeAnnotation {
+ user_ty,
+ span,
+ inferred_ty: self.typeck_results.node_type(hir_id),
+ };
+ kind = PatKind::AscribeUserType {
+ subpattern: Pat { span, ty, kind: Box::new(kind) },
+ ascription: Ascription { annotation, variance: ty::Variance::Covariant },
+ };
+ }
+
+ kind
+ }
+
+ /// Takes a HIR Path. If the path is a constant, evaluates it and feeds
+ /// it to `const_to_pat`. Any other path (like enum variants without fields)
+ /// is converted to the corresponding pattern via `lower_variant_or_leaf`.
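+ /// For example (illustrative; `ZERO` is a hypothetical user-defined constant):
+ /// ```ignore (illustrative)
+ /// const ZERO: u32 = 0;
+ /// match x {
+ ///     ZERO => {}     // constant path: evaluated, then fed to `const_to_pat`
+ ///     u32::MAX => {} // associated constant: likewise, with `is_associated_const` set
+ ///     _ => {}
+ /// }
+ /// ```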
+ #[instrument(skip(self), level = "debug")]
+ fn lower_path(&mut self, qpath: &hir::QPath<'_>, id: hir::HirId, span: Span) -> Pat<'tcx> {
+ let ty = self.typeck_results.node_type(id);
+ let res = self.typeck_results.qpath_res(qpath, id);
+
+ let pat_from_kind = |kind| Pat { span, ty, kind: Box::new(kind) };
+
+ let (def_id, is_associated_const) = match res {
+ Res::Def(DefKind::Const, def_id) => (def_id, false),
+ Res::Def(DefKind::AssocConst, def_id) => (def_id, true),
+
+ _ => return pat_from_kind(self.lower_variant_or_leaf(res, id, span, ty, vec![])),
+ };
+
+ // Use `Reveal::All` here because patterns are always monomorphic even if their function
+ // isn't.
+ let param_env_reveal_all = self.param_env.with_reveal_all_normalized(self.tcx);
+ let substs = self.typeck_results.node_substs(id);
+ let instance = match ty::Instance::resolve(self.tcx, param_env_reveal_all, def_id, substs) {
+ Ok(Some(i)) => i,
+ Ok(None) => {
+ // If there's no error, this should be an associated const that we cannot resolve.
+ debug_assert!(is_associated_const);
+
+ self.errors.push(PatternError::AssocConstInPattern(span));
+
+ return pat_from_kind(PatKind::Wild);
+ }
+
+ Err(_) => {
+ self.tcx.sess.span_err(span, "could not evaluate constant pattern");
+ return pat_from_kind(PatKind::Wild);
+ }
+ };
+
+ // `mir_const_qualif` must be called with the `DefId` of the item where the const is
+ // defined, not where it is declared. The difference is significant for associated
+ // constants.
+ let mir_structural_match_violation = self.tcx.mir_const_qualif(instance.def_id()).custom_eq;
+ debug!("mir_structural_match_violation({:?}) -> {}", qpath, mir_structural_match_violation);
+
+ match self.tcx.const_eval_instance(param_env_reveal_all, instance, Some(span)) {
+ Ok(literal) => {
+ let const_ = mir::ConstantKind::Val(literal, ty);
+ let pattern = self.const_to_pat(const_, id, span, mir_structural_match_violation);
+
+ if !is_associated_const {
+ return pattern;
+ }
+
+ let user_provided_types = self.typeck_results().user_provided_types();
+ if let Some(&user_ty) = user_provided_types.get(id) {
+ let annotation = CanonicalUserTypeAnnotation {
+ user_ty,
+ span,
+ inferred_ty: self.typeck_results().node_type(id),
+ };
+ Pat {
+ span,
+ kind: Box::new(PatKind::AscribeUserType {
+ subpattern: pattern,
+ ascription: Ascription {
+ annotation,
+ // Note that we use `Contravariant` here. See the
+ // `variance` field documentation for details.
+ variance: ty::Variance::Contravariant,
+ },
+ }),
+ ty: const_.ty(),
+ }
+ } else {
+ pattern
+ }
+ }
+ Err(ErrorHandled::TooGeneric) => {
+ // While the `Reported | Linted` cases will have had diagnostics emitted already,
+ // that is not true for the `TooGeneric` case, so we need to give the user more information.
+ self.tcx.sess.span_err(span, "constant pattern depends on a generic parameter");
+ pat_from_kind(PatKind::Wild)
+ }
+ Err(_) => {
+ self.tcx.sess.span_err(span, "could not evaluate constant pattern");
+ pat_from_kind(PatKind::Wild)
+ }
+ }
+ }
+
+ /// Converts inline const patterns.
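+ /// For example (illustrative; inline const patterns are behind an unstable feature gate,
+ /// `inline_const_pat` at the time of writing):
+ /// ```ignore (illustrative)
+ /// match x {
+ ///     const { 1 + 1 } => {}
+ ///     _ => {}
+ /// }
+ /// ```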
+ fn lower_inline_const(
+ &mut self,
+ anon_const: &'tcx hir::AnonConst,
+ id: hir::HirId,
+ span: Span,
+ ) -> PatKind<'tcx> {
+ let anon_const_def_id = self.tcx.hir().local_def_id(anon_const.hir_id);
+ let value = mir::ConstantKind::from_inline_const(self.tcx, anon_const_def_id);
+
+ // Evaluate early like we do in `lower_path`.
+ let value = value.eval(self.tcx, self.param_env);
+
+ match value {
+ mir::ConstantKind::Ty(c) => {
+ match c.kind() {
+ ConstKind::Param(_) => {
+ self.errors.push(PatternError::ConstParamInPattern(span));
+ return PatKind::Wild;
+ }
+ ConstKind::Unevaluated(_) => {
+ // If we land here it means the const can't be evaluated because it's `TooGeneric`.
+ self.tcx
+ .sess
+ .span_err(span, "constant pattern depends on a generic parameter");
+ return PatKind::Wild;
+ }
+ _ => bug!("Expected either ConstKind::Param or ConstKind::Unevaluated"),
+ }
+ }
+ mir::ConstantKind::Val(_, _) => *self.const_to_pat(value, id, span, false).kind,
+ }
+ }
+
+ /// Converts literals, paths and negation of literals to patterns.
+ /// The special case for negation exists to allow things like `-128_i8`
+ /// which would overflow if we tried to evaluate `128_i8` and then negate
+ /// afterwards.
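+ /// For example (illustrative):
+ /// ```ignore (illustrative)
+ /// match x {
+ ///     -128_i8 => {} // lowered as one negated literal; evaluating `128_i8` first would overflow
+ ///     _ => {}
+ /// }
+ /// ```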
+ fn lower_lit(&mut self, expr: &'tcx hir::Expr<'tcx>) -> PatKind<'tcx> {
+ let (lit, neg) = match expr.kind {
+ hir::ExprKind::Path(ref qpath) => {
+ return *self.lower_path(qpath, expr.hir_id, expr.span).kind;
+ }
+ hir::ExprKind::ConstBlock(ref anon_const) => {
+ return self.lower_inline_const(anon_const, expr.hir_id, expr.span);
+ }
+ hir::ExprKind::Lit(ref lit) => (lit, false),
+ hir::ExprKind::Unary(hir::UnOp::Neg, ref expr) => {
+ let hir::ExprKind::Lit(ref lit) = expr.kind else {
+ span_bug!(expr.span, "not a literal: {:?}", expr);
+ };
+ (lit, true)
+ }
+ _ => span_bug!(expr.span, "not a literal: {:?}", expr),
+ };
+
+ let lit_input =
+ LitToConstInput { lit: &lit.node, ty: self.typeck_results.expr_ty(expr), neg };
+ match self.tcx.at(expr.span).lit_to_mir_constant(lit_input) {
+ Ok(constant) => *self.const_to_pat(constant, expr.hir_id, lit.span, false).kind,
+ Err(LitToConstError::Reported) => PatKind::Wild,
+ Err(LitToConstError::TypeError) => bug!("lower_lit: had type error"),
+ }
+ }
+}
+
+impl<'tcx> UserAnnotatedTyHelpers<'tcx> for PatCtxt<'_, 'tcx> {
+ fn tcx(&self) -> TyCtxt<'tcx> {
+ self.tcx
+ }
+
+ fn typeck_results(&self) -> &ty::TypeckResults<'tcx> {
+ self.typeck_results
+ }
+}
+
+pub(crate) trait PatternFoldable<'tcx>: Sized {
+ fn fold_with<F: PatternFolder<'tcx>>(&self, folder: &mut F) -> Self {
+ self.super_fold_with(folder)
+ }
+
+ fn super_fold_with<F: PatternFolder<'tcx>>(&self, folder: &mut F) -> Self;
+}
+
+pub(crate) trait PatternFolder<'tcx>: Sized {
+ fn fold_pattern(&mut self, pattern: &Pat<'tcx>) -> Pat<'tcx> {
+ pattern.super_fold_with(self)
+ }
+
+ fn fold_pattern_kind(&mut self, kind: &PatKind<'tcx>) -> PatKind<'tcx> {
+ kind.super_fold_with(self)
+ }
+}
+
+impl<'tcx, T: PatternFoldable<'tcx>> PatternFoldable<'tcx> for Box<T> {
+ fn super_fold_with<F: PatternFolder<'tcx>>(&self, folder: &mut F) -> Self {
+ let content: T = (**self).fold_with(folder);
+ Box::new(content)
+ }
+}
+
+impl<'tcx, T: PatternFoldable<'tcx>> PatternFoldable<'tcx> for Vec<T> {
+ fn super_fold_with<F: PatternFolder<'tcx>>(&self, folder: &mut F) -> Self {
+ self.iter().map(|t| t.fold_with(folder)).collect()
+ }
+}
+
+impl<'tcx, T: PatternFoldable<'tcx>> PatternFoldable<'tcx> for Option<T> {
+ fn super_fold_with<F: PatternFolder<'tcx>>(&self, folder: &mut F) -> Self {
+ self.as_ref().map(|t| t.fold_with(folder))
+ }
+}
+
+macro_rules! ClonePatternFoldableImpls {
+ (<$lt_tcx:tt> $($ty:ty),+) => {
+ $(
+ impl<$lt_tcx> PatternFoldable<$lt_tcx> for $ty {
+ fn super_fold_with<F: PatternFolder<$lt_tcx>>(&self, _: &mut F) -> Self {
+ Clone::clone(self)
+ }
+ }
+ )+
+ }
+}
+
+ClonePatternFoldableImpls! { <'tcx>
+ Span, Field, Mutability, Symbol, LocalVarId, usize, ty::Const<'tcx>,
+ Region<'tcx>, Ty<'tcx>, BindingMode, AdtDef<'tcx>,
+ SubstsRef<'tcx>, &'tcx GenericArg<'tcx>, UserType<'tcx>,
+ UserTypeProjection, CanonicalUserTypeAnnotation<'tcx>
+}
+
+impl<'tcx> PatternFoldable<'tcx> for FieldPat<'tcx> {
+ fn super_fold_with<F: PatternFolder<'tcx>>(&self, folder: &mut F) -> Self {
+ FieldPat { field: self.field.fold_with(folder), pattern: self.pattern.fold_with(folder) }
+ }
+}
+
+impl<'tcx> PatternFoldable<'tcx> for Pat<'tcx> {
+ fn fold_with<F: PatternFolder<'tcx>>(&self, folder: &mut F) -> Self {
+ folder.fold_pattern(self)
+ }
+
+ fn super_fold_with<F: PatternFolder<'tcx>>(&self, folder: &mut F) -> Self {
+ Pat {
+ ty: self.ty.fold_with(folder),
+ span: self.span.fold_with(folder),
+ kind: self.kind.fold_with(folder),
+ }
+ }
+}
+
+impl<'tcx> PatternFoldable<'tcx> for PatKind<'tcx> {
+ fn fold_with<F: PatternFolder<'tcx>>(&self, folder: &mut F) -> Self {
+ folder.fold_pattern_kind(self)
+ }
+
+ fn super_fold_with<F: PatternFolder<'tcx>>(&self, folder: &mut F) -> Self {
+ match *self {
+ PatKind::Wild => PatKind::Wild,
+ PatKind::AscribeUserType {
+ ref subpattern,
+ ascription: Ascription { ref annotation, variance },
+ } => PatKind::AscribeUserType {
+ subpattern: subpattern.fold_with(folder),
+ ascription: Ascription { annotation: annotation.fold_with(folder), variance },
+ },
+ PatKind::Binding { mutability, name, mode, var, ty, ref subpattern, is_primary } => {
+ PatKind::Binding {
+ mutability: mutability.fold_with(folder),
+ name: name.fold_with(folder),
+ mode: mode.fold_with(folder),
+ var: var.fold_with(folder),
+ ty: ty.fold_with(folder),
+ subpattern: subpattern.fold_with(folder),
+ is_primary,
+ }
+ }
+ PatKind::Variant { adt_def, substs, variant_index, ref subpatterns } => {
+ PatKind::Variant {
+ adt_def: adt_def.fold_with(folder),
+ substs: substs.fold_with(folder),
+ variant_index,
+ subpatterns: subpatterns.fold_with(folder),
+ }
+ }
+ PatKind::Leaf { ref subpatterns } => {
+ PatKind::Leaf { subpatterns: subpatterns.fold_with(folder) }
+ }
+ PatKind::Deref { ref subpattern } => {
+ PatKind::Deref { subpattern: subpattern.fold_with(folder) }
+ }
+ PatKind::Constant { value } => PatKind::Constant { value },
+ PatKind::Range(range) => PatKind::Range(range),
+ PatKind::Slice { ref prefix, ref slice, ref suffix } => PatKind::Slice {
+ prefix: prefix.fold_with(folder),
+ slice: slice.fold_with(folder),
+ suffix: suffix.fold_with(folder),
+ },
+ PatKind::Array { ref prefix, ref slice, ref suffix } => PatKind::Array {
+ prefix: prefix.fold_with(folder),
+ slice: slice.fold_with(folder),
+ suffix: suffix.fold_with(folder),
+ },
+ PatKind::Or { ref pats } => PatKind::Or { pats: pats.fold_with(folder) },
+ }
+ }
+}
+
+#[instrument(skip(tcx), level = "debug")]
+pub(crate) fn compare_const_vals<'tcx>(
+ tcx: TyCtxt<'tcx>,
+ a: mir::ConstantKind<'tcx>,
+ b: mir::ConstantKind<'tcx>,
+ param_env: ty::ParamEnv<'tcx>,
+) -> Option<Ordering> {
+ assert_eq!(a.ty(), b.ty());
+
+ let ty = a.ty();
+
+ // This code is hot when compiling matches with many ranges. So we
+ // special-case extraction of evaluated scalars for speed, for types where
+ // raw data comparisons are appropriate. E.g. `unicode-normalization` has
+ // many ranges such as '\u{037A}'..='\u{037F}', and chars can be compared
+ // in this way.
+ match ty.kind() {
+ ty::Float(_) | ty::Int(_) => {} // require special handling, see below
+ _ => match (a, b) {
+ (
+ mir::ConstantKind::Val(ConstValue::Scalar(Scalar::Int(a)), _a_ty),
+ mir::ConstantKind::Val(ConstValue::Scalar(Scalar::Int(b)), _b_ty),
+ ) => return Some(a.cmp(&b)),
+ _ => {}
+ },
+ }
+
+ let a = a.eval_bits(tcx, param_env, ty);
+ let b = b.eval_bits(tcx, param_env, ty);
+
+ use rustc_apfloat::Float;
+ match *ty.kind() {
+ ty::Float(ty::FloatTy::F32) => {
+ let a = rustc_apfloat::ieee::Single::from_bits(a);
+ let b = rustc_apfloat::ieee::Single::from_bits(b);
+ a.partial_cmp(&b)
+ }
+ ty::Float(ty::FloatTy::F64) => {
+ let a = rustc_apfloat::ieee::Double::from_bits(a);
+ let b = rustc_apfloat::ieee::Double::from_bits(b);
+ a.partial_cmp(&b)
+ }
+ ty::Int(ity) => {
+ use rustc_middle::ty::layout::IntegerExt;
+ let size = rustc_target::abi::Integer::from_int_ty(&tcx, ity).size();
+ let a = size.sign_extend(a);
+ let b = size.sign_extend(b);
+ Some((a as i128).cmp(&(b as i128)))
+ }
+ _ => Some(a.cmp(&b)),
+ }
+}
diff --git a/compiler/rustc_mir_build/src/thir/pattern/usefulness.rs b/compiler/rustc_mir_build/src/thir/pattern/usefulness.rs
new file mode 100644
index 000000000..0a660ef30
--- /dev/null
+++ b/compiler/rustc_mir_build/src/thir/pattern/usefulness.rs
@@ -0,0 +1,978 @@
+//! Note: tests specific to this file can be found in:
+//!
+//! - `ui/pattern/usefulness`
+//! - `ui/or-patterns`
+//! - `ui/consts/const_in_pattern`
+//! - `ui/rfc-2008-non-exhaustive`
+//! - `ui/half-open-range-patterns`
+//! - probably many others
+//!
+//! I (Nadrieril) prefer to put new tests in `ui/pattern/usefulness` unless there's a specific
+//! reason not to, for example if they depend on a particular feature like `or_patterns`.
+//!
+//! -----
+//!
+//! This file includes the logic for exhaustiveness and reachability checking for pattern-matching.
+//! Specifically, given a list of patterns for a type, we can tell whether:
+//! (a) each pattern is reachable (reachability)
+//! (b) the patterns cover every possible value for the type (exhaustiveness)
+//!
+//! The algorithm implemented here is a modified version of the one described in [this
+//! paper](http://moscova.inria.fr/~maranget/papers/warn/index.html). We have however generalized
+//! it to accommodate the variety of patterns that Rust supports. We thus explain our version here,
+//! without being as rigorous.
+//!
+//!
+//! # Summary
+//!
+//! The core of the algorithm is the notion of "usefulness". A pattern `q` is said to be *useful*
+//! relative to another pattern `p` of the same type if there is a value that is matched by `q` and
+//! not matched by `p`. This generalizes to many `p`s: `q` is useful w.r.t. a list of patterns
+//! `p_1 .. p_n` if there is a value that is matched by `q` and by none of the `p_i`. We write
+//! `usefulness(p_1 .. p_n, q)` for a function that returns a list of such values. The aim of this
+//! file is to compute it efficiently.
+//!
+//! This is enough to compute reachability: a pattern in a `match` expression is reachable iff it
+//! is useful w.r.t. the patterns above it:
+//! ```rust
+//! # fn foo(x: Option<i32>) {
+//! match x {
+//! Some(_) => {},
+//! None => {}, // reachable: `None` is matched by this but not the branch above
+//! Some(0) => {}, // unreachable: all the values this matches are already matched by
+//! // `Some(_)` above
+//! }
+//! # }
+//! ```
+//!
+//! This is also enough to compute exhaustiveness: a match is exhaustive iff the wildcard `_`
+//! pattern is _not_ useful w.r.t. the patterns in the match. The values returned by `usefulness`
+//! are used to tell the user which values are missing.
+//! ```compile_fail,E0004
+//! # fn foo(x: Option<i32>) {
+//! match x {
+//! Some(0) => {},
+//! None => {},
+//! // not exhaustive: `_` is useful because it matches `Some(1)`
+//! }
+//! # }
+//! ```
+//!
+//! The entrypoint of this file is the [`compute_match_usefulness`] function, which computes
+//! reachability for each match branch and exhaustiveness for the whole match.
+//!
+//!
+//! # Constructors and fields
+//!
+//! Note: we will often abbreviate "constructor" as "ctor".
+//!
+//! The idea that powers everything that is done in this file is the following: a (matchable)
+//! value is made from a constructor applied to a number of subvalues. Examples of constructors are
+//! `Some`, `None`, `(,)` (the 2-tuple constructor), `Foo {..}` (the constructor for a struct
+//! `Foo`), and `2` (the constructor for the number `2`). This is natural when we think of
+//! pattern-matching, and this is the basis for what follows.
+//!
+//! Some of the ctors listed above might feel weird: `None` and `2` don't take any arguments.
+//! That's ok: those are ctors that take a list of 0 arguments; they are the simplest case of
+//! ctors. We treat `2` as a ctor because `u64` and other number types behave exactly like a huge
+//! `enum`, with one variant for each number. This allows us to see any matchable value as made up
+//! from a tree of ctors, each having a set number of children. For example: `Foo { bar: None,
+//! baz: Ok(0) }` is made from 4 different ctors, namely `Foo{..}`, `None`, `Ok` and `0`.
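+//! Drawn as a tree (an informal sketch, not a rendering the compiler actually produces), that is:
+//! ```ignore (illustrative)
+//! Foo{..}
+//! |-- bar: None
+//! `-- baz: Ok
+//!     `-- 0
+//! ```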
+//!
+//! This idea can be extended to patterns: they are also made from constructors applied to fields.
+//! A pattern for a given type is allowed to use all the ctors for values of that type (which we
+//! call "value constructors"), but there are also pattern-only ctors. The most important one is
+//! the wildcard (`_`), and the others are integer ranges (`0..=10`), variable-length slices (`[x,
+//! ..]`), and or-patterns (`Ok(0) | Err(_)`). Examples of valid patterns are `42`, `Some(_)`, `Foo
+//! { bar: Some(0) | None, baz: _ }`. Note that a binder in a pattern (e.g. `Some(x)`) matches the
+//! same values as a wildcard (e.g. `Some(_)`), so we treat both as wildcards.
+//!
+//! From this deconstruction we can compute whether a given value matches a given pattern; we
+//! simply look at ctors one at a time. Given a pattern `p` and a value `v`, we want to compute
+//! `matches!(v, p)`. It's mostly straightforward: we compare the head ctors and when they match
+//! we compare their fields recursively. A few representative examples:
+//!
+//! - `matches!(v, _) := true`
+//! - `matches!((v0, v1), (p0, p1)) := matches!(v0, p0) && matches!(v1, p1)`
+//! - `matches!(Foo { bar: v0, baz: v1 }, Foo { bar: p0, baz: p1 }) := matches!(v0, p0) && matches!(v1, p1)`
+//! - `matches!(Ok(v0), Ok(p0)) := matches!(v0, p0)`
+//! - `matches!(Ok(v0), Err(p0)) := false` (incompatible variants)
+//! - `matches!(v, 1..=100) := matches!(v, 1) || ... || matches!(v, 100)`
+//! - `matches!([v0], [p0, .., p1]) := false` (incompatible lengths)
+//! - `matches!([v0, v1, v2], [p0, .., p1]) := matches!(v0, p0) && matches!(v2, p1)`
+//! - `matches!(v, p0 | p1) := matches!(v, p0) || matches!(v, p1)`
+//!
+//! Constructors, fields and relevant operations are defined in the [`super::deconstruct_pat`] module.
+//!
+//! Note: this constructors/fields distinction may not straightforwardly apply to every Rust type.
+//! For example a value of type `Rc<u64>` can't be deconstructed that way, and `&str` has an
+//! infinitude of constructors. There are also subtleties with visibility of fields and
+//! uninhabitedness and various other things. The constructors idea can be extended to handle most
+//! of these subtleties though; caveats are documented where relevant throughout the code.
+//!
+//! Whether constructors cover each other is computed by [`Constructor::is_covered_by`].
+//!
+//!
+//! # Specialization
+//!
+//! Recall that we wish to compute `usefulness(p_1 .. p_n, q)`: given a list of patterns `p_1 ..
+//! p_n` and a pattern `q`, all of the same type, we want to find a list of values (called
+//! "witnesses") that are matched by `q` and by none of the `p_i`. We obviously don't just
+//! enumerate all possible values. From the discussion above we see that we can proceed
+//! ctor-by-ctor: for each value ctor of the given type, we ask "is there a value that starts with
+//! this constructor and matches `q` and none of the `p_i`?". As we saw above, there's a lot we can
+//! say from knowing only the first constructor of our candidate value.
+//!
+//! Let's take the following example:
+//! ```compile_fail,E0004
+//! # enum Enum { Variant1(()), Variant2(Option<bool>, u32)}
+//! # fn foo(x: Enum) {
+//! match x {
+//! Enum::Variant1(_) => {} // `p1`
+//! Enum::Variant2(None, 0) => {} // `p2`
+//! Enum::Variant2(Some(_), 0) => {} // `q`
+//! }
+//! # }
+//! ```
+//!
+//! We can easily see that if our candidate value `v` starts with `Variant1` it will not match `q`.
+//! If `v = Variant2(v0, v1)` however, whether or not it matches `p2` and `q` will depend on `v0`
+//! and `v1`. In fact, such a `v` will be a witness of usefulness of `q` exactly when the tuple
+//! `(v0, v1)` is a witness of usefulness of `q'` in the following reduced match:
+//!
+//! ```compile_fail,E0004
+//! # fn foo(x: (Option<bool>, u32)) {
+//! match x {
+//! (None, 0) => {} // `p2'`
+//! (Some(_), 0) => {} // `q'`
+//! }
+//! # }
+//! ```
+//!
+//! This motivates a new step in computing usefulness, that we call _specialization_.
+//! Specialization consists of filtering a list of patterns for those that match a constructor, and
+//! then looking into the constructor's fields. This enables usefulness to be computed recursively.
+//!
+//! Instead of acting on a single pattern in each row, we will consider a list of patterns for each
+//! row, and we call such a list a _pattern-stack_. The idea is that we will specialize the
+//! leftmost pattern, which amounts to popping the constructor and pushing its fields, which feels
+//! like a stack. We write a pattern-stack simply as `[p_1 ... p_n]`.
+//! Here's a sequence of specializations of a list of pattern-stacks, to illustrate what's
+//! happening:
+//! ```ignore (illustrative)
+//! [Enum::Variant1(_)]
+//! [Enum::Variant2(None, 0)]
+//! [Enum::Variant2(Some(_), 0)]
+//! //==>> specialize with `Variant2`
+//! [None, 0]
+//! [Some(_), 0]
+//! //==>> specialize with `Some`
+//! [_, 0]
+//! //==>> specialize with `true` (say the type was `bool`)
+//! [0]
+//! //==>> specialize with `0`
+//! []
+//! ```
+//!
+//! The function `specialize(c, p)` takes a value constructor `c` and a pattern `p`, and returns 0
+//! or more pattern-stacks. If `c` does not match the head constructor of `p`, it returns nothing;
+//! otherwise it returns the fields of the constructor. This only returns more than one
+//! pattern-stack if `p` has a pattern-only constructor.
+//!
+//! - Specializing for the wrong constructor returns nothing
+//!
+//! `specialize(None, Some(p0)) := []`
+//!
+//! - Specializing for the correct constructor returns a single row with the fields
+//!
+//! `specialize(Variant1, Variant1(p0, p1, p2)) := [[p0, p1, p2]]`
+//!
+//! `specialize(Foo{..}, Foo { bar: p0, baz: p1 }) := [[p0, p1]]`
+//!
+//! - For or-patterns, we specialize each branch and concatenate the results
+//!
+//! `specialize(c, p0 | p1) := specialize(c, p0) ++ specialize(c, p1)`
+//!
+//! - We treat the other pattern constructors as if they were a large or-pattern of all the
+//! possibilities:
+//!
+//! `specialize(c, _) := specialize(c, Variant1(_) | Variant2(_, _) | ...)`
+//!
+//! `specialize(c, 1..=100) := specialize(c, 1 | ... | 100)`
+//!
+//! `specialize(c, [p0, .., p1]) := specialize(c, [p0, p1] | [p0, _, p1] | [p0, _, _, p1] | ...)`
+//!
+//! - If `c` is a pattern-only constructor, `specialize` is defined on a case-by-case basis. See
+//! the discussion about constructor splitting in [`super::deconstruct_pat`].
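+//!
+//! As a minimal sketch (the names and types below are made up for illustration; the real
+//! implementation lives in [`Fields`] and [`super::deconstruct_pat`]), the value-constructor
+//! rules above could be written as:
+//! ```ignore (illustrative)
+//! #[derive(Clone)]
+//! enum Pat {
+//!     Wild,
+//!     Ctor { name: &'static str, fields: Vec<Pat> },
+//!     Or(Vec<Pat>),
+//! }
+//!
+//! // Specialize a single pattern `p` with the value constructor `name` of the given arity.
+//! // Returns zero or more rows of field patterns. Pattern-only ctors (ranges, slices, ...)
+//! // are omitted here; the real code handles them case-by-case.
+//! fn specialize(name: &'static str, arity: usize, p: &Pat) -> Vec<Vec<Pat>> {
+//!     match p {
+//!         // A wildcard matches any ctor: expand it into `arity` wildcards.
+//!         Pat::Wild => vec![vec![Pat::Wild; arity]],
+//!         // Same ctor: a single row made of its fields. Different ctor: no rows.
+//!         Pat::Ctor { name: n, fields } if *n == name => vec![fields.clone()],
+//!         Pat::Ctor { .. } => vec![],
+//!         // Or-pattern: specialize each alternative and concatenate the results.
+//!         Pat::Or(alts) => alts.iter().flat_map(|alt| specialize(name, arity, alt)).collect(),
+//!     }
+//! }
+//! ```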
+//!
+//!
+//! We then extend this function to work with pattern-stacks as input, by acting on the first
+//! column and keeping the other columns untouched.
+//!
+//! Specialization for the whole matrix is done in [`Matrix::specialize_constructor`]. Note that
+//! or-patterns in the first column are expanded before being stored in the matrix. Specialization
+//! for a single patstack is done from a combination of [`Constructor::is_covered_by`] and
+//! [`PatStack::pop_head_constructor`]. The internals of how it's done mostly live in the
+//! [`Fields`] struct.
+//!
+//!
+//! # Computing usefulness
+//!
+//! We now have all we need to compute usefulness. The inputs to usefulness are a list of
+//! pattern-stacks `p_1 ... p_n` (one per row), and a new pattern-stack `q`. The paper and this
+//! file call the list of patstacks a _matrix_. They must all have the same number of columns and
+//! the patterns in a given column must all have the same type. `usefulness` returns a (possibly
+//! empty) list of witnesses of usefulness. These witnesses will also be pattern-stacks.
+//!
+//! - base case: `n_columns == 0`.
+//! Since a pattern-stack functions like a tuple of patterns, an empty one functions like the
+//! unit type. Thus `q` is useful iff there are no rows above it, i.e. if `n == 0`.
+//!
+//! - inductive case: `n_columns > 0`.
+//! We need a way to list the constructors we want to try. We will be more clever in the next
+//! section but for now assume we list all value constructors for the type of the first column.
+//!
+//! - for each such ctor `c`:
+//!
+//! - for each `q'` returned by `specialize(c, q)`:
+//!
+//! - we compute `usefulness(specialize(c, p_1) ... specialize(c, p_n), q')`
+//!
+//! - for each witness found, we revert specialization by pushing the constructor `c` on top.
+//!
+//! - We return the concatenation of all the witnesses found, if any.
+//!
+//! Example:
+//! ```ignore (illustrative)
+//! [Some(true)] // p_1
+//! [None] // p_2
+//! [Some(_)] // q
+//! //==>> try `None`: `specialize(None, q)` returns nothing
+//! //==>> try `Some`: `specialize(Some, q)` returns a single row
+//! [true] // p_1'
+//! [_] // q'
+//! //==>> try `true`: `specialize(true, q')` returns a single row
+//! [] // p_1''
+//! [] // q''
+//! //==>> base case; `n != 0` so `q''` is not useful.
+//! //==>> go back up a step
+//! [true] // p_1'
+//! [_] // q'
+//! //==>> try `false`: `specialize(false, q')` returns a single row
+//! [] // q''
+//! //==>> base case; `n == 0` so `q''` is useful. We return the single witness `[]`
+//! witnesses:
+//! []
+//! //==>> undo the specialization with `false`
+//! witnesses:
+//! [false]
+//! //==>> undo the specialization with `Some`
+//! witnesses:
+//! [Some(false)]
+//! //==>> we have tried all the constructors. The output is the single witness `[Some(false)]`.
+//! ```
+//!
+//! This computation is done in [`is_useful`]. In practice we don't care about the list of
+//! witnesses when computing reachability; we only need to know whether any exist. We do keep the
+//! witnesses when computing exhaustiveness to report them to the user.
+//!
+//!
+//! # Making usefulness tractable: constructor splitting
+//!
+//! We're missing one last detail: which constructors do we list? Naively listing all value
+//! constructors cannot work for types like `u64` or `&str`, so we need to be more clever. The
+//! first obvious insight is that we only want to list constructors that are covered by the head
+//! constructor of `q`. If it's a value constructor, we only try that one. If it's a pattern-only
+//! constructor, we use the final clever idea for this algorithm: _constructor splitting_, where we
+//! group together constructors that behave the same.
+//!
+//! The details are not necessary to understand this file, so we explain them in
+//! [`super::deconstruct_pat`]. Splitting is done by the [`Constructor::split`] function.
+
+use self::ArmType::*;
+use self::Usefulness::*;
+
+use super::check_match::{joined_uncovered_patterns, pattern_not_covered_label};
+use super::deconstruct_pat::{Constructor, DeconstructedPat, Fields, SplitWildcard};
+
+use rustc_data_structures::captures::Captures;
+
+use rustc_arena::TypedArena;
+use rustc_data_structures::stack::ensure_sufficient_stack;
+use rustc_hir::def_id::DefId;
+use rustc_hir::HirId;
+use rustc_middle::ty::{self, Ty, TyCtxt};
+use rustc_session::lint::builtin::NON_EXHAUSTIVE_OMITTED_PATTERNS;
+use rustc_span::{Span, DUMMY_SP};
+
+use smallvec::{smallvec, SmallVec};
+use std::fmt;
+use std::iter::once;
+
+pub(crate) struct MatchCheckCtxt<'p, 'tcx> {
+ pub(crate) tcx: TyCtxt<'tcx>,
+ /// The module in which the match occurs. This is necessary for
+ /// checking inhabited-ness of types because whether a type is (visibly)
+ /// inhabited can depend on whether it was defined in the current module or
+ /// not. E.g., `struct Foo { _private: ! }` cannot be seen to be empty
+ /// outside its module and should not be matchable with an empty match statement.
+ pub(crate) module: DefId,
+ pub(crate) param_env: ty::ParamEnv<'tcx>,
+ pub(crate) pattern_arena: &'p TypedArena<DeconstructedPat<'p, 'tcx>>,
+}
+
+impl<'a, 'tcx> MatchCheckCtxt<'a, 'tcx> {
+ pub(super) fn is_uninhabited(&self, ty: Ty<'tcx>) -> bool {
+ if self.tcx.features().exhaustive_patterns {
+ self.tcx.is_ty_uninhabited_from(self.module, ty, self.param_env)
+ } else {
+ false
+ }
+ }
+
+ /// Returns whether the given type is an enum from another crate declared `#[non_exhaustive]`.
+ pub(super) fn is_foreign_non_exhaustive_enum(&self, ty: Ty<'tcx>) -> bool {
+ match ty.kind() {
+ ty::Adt(def, ..) => {
+ def.is_enum() && def.is_variant_list_non_exhaustive() && !def.did().is_local()
+ }
+ _ => false,
+ }
+ }
+}
+
+#[derive(Copy, Clone)]
+pub(super) struct PatCtxt<'a, 'p, 'tcx> {
+ pub(super) cx: &'a MatchCheckCtxt<'p, 'tcx>,
+ /// Type of the current column under investigation.
+ pub(super) ty: Ty<'tcx>,
+ /// Span of the current pattern under investigation.
+ pub(super) span: Span,
+ /// Whether the current pattern is the whole pattern as found in a match arm, or if it's a
+ /// subpattern.
+ pub(super) is_top_level: bool,
+ /// Whether the current pattern is from a `non_exhaustive` enum.
+ pub(super) is_non_exhaustive: bool,
+}
+
+impl<'a, 'p, 'tcx> fmt::Debug for PatCtxt<'a, 'p, 'tcx> {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ f.debug_struct("PatCtxt").field("ty", &self.ty).finish()
+ }
+}
+
+/// A row of a matrix. Rows of length 1 are very common, which is why `SmallVec<[_; 2]>`
+/// works well.
+#[derive(Clone)]
+struct PatStack<'p, 'tcx> {
+ pats: SmallVec<[&'p DeconstructedPat<'p, 'tcx>; 2]>,
+}
+
+impl<'p, 'tcx> PatStack<'p, 'tcx> {
+ fn from_pattern(pat: &'p DeconstructedPat<'p, 'tcx>) -> Self {
+ Self::from_vec(smallvec![pat])
+ }
+
+ fn from_vec(vec: SmallVec<[&'p DeconstructedPat<'p, 'tcx>; 2]>) -> Self {
+ PatStack { pats: vec }
+ }
+
+ fn is_empty(&self) -> bool {
+ self.pats.is_empty()
+ }
+
+ fn len(&self) -> usize {
+ self.pats.len()
+ }
+
+ fn head(&self) -> &'p DeconstructedPat<'p, 'tcx> {
+ self.pats[0]
+ }
+
+ fn iter(&self) -> impl Iterator<Item = &DeconstructedPat<'p, 'tcx>> {
+ self.pats.iter().copied()
+ }
+
+ // Recursively expand the first pattern into its subpatterns. Only useful if the pattern is an
+ // or-pattern. Panics if `self` is empty.
+ fn expand_or_pat<'a>(&'a self) -> impl Iterator<Item = PatStack<'p, 'tcx>> + Captures<'a> {
+ self.head().iter_fields().map(move |pat| {
+ let mut new_patstack = PatStack::from_pattern(pat);
+ new_patstack.pats.extend_from_slice(&self.pats[1..]);
+ new_patstack
+ })
+ }
+
+ /// This computes `S(self.head().ctor(), self)`. See top of the file for explanations.
+ ///
+ /// Structure patterns with a partial wild pattern (`Foo { a: 42, .. }`) have their missing
+ /// fields filled with wild patterns.
+ ///
+ /// This is roughly the inverse of `Constructor::apply`.
+ fn pop_head_constructor(
+ &self,
+ pcx: &PatCtxt<'_, 'p, 'tcx>,
+ ctor: &Constructor<'tcx>,
+ ) -> PatStack<'p, 'tcx> {
+ // We pop the head pattern and push the new fields extracted from the arguments of
+ // `self.head()`.
+ let mut new_fields: SmallVec<[_; 2]> = self.head().specialize(pcx, ctor);
+ new_fields.extend_from_slice(&self.pats[1..]);
+ PatStack::from_vec(new_fields)
+ }
+}
+
+/// Pretty-printing for matrix row.
+impl<'p, 'tcx> fmt::Debug for PatStack<'p, 'tcx> {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ write!(f, "+")?;
+ for pat in self.iter() {
+ write!(f, " {:?} +", pat)?;
+ }
+ Ok(())
+ }
+}
+
+/// A 2D matrix.
+#[derive(Clone)]
+pub(super) struct Matrix<'p, 'tcx> {
+ patterns: Vec<PatStack<'p, 'tcx>>,
+}
+
+impl<'p, 'tcx> Matrix<'p, 'tcx> {
+ fn empty() -> Self {
+ Matrix { patterns: vec![] }
+ }
+
+ /// Number of columns of this matrix. `None` if the matrix is empty.
+ pub(super) fn column_count(&self) -> Option<usize> {
+ self.patterns.get(0).map(|r| r.len())
+ }
+
+ /// Pushes a new row to the matrix. If the row starts with an or-pattern, this recursively
+ /// expands it.
+ fn push(&mut self, row: PatStack<'p, 'tcx>) {
+ if !row.is_empty() && row.head().is_or_pat() {
+ self.patterns.extend(row.expand_or_pat());
+ } else {
+ self.patterns.push(row);
+ }
+ }
+
+ /// Iterate over the first component of each row
+ fn heads<'a>(
+ &'a self,
+ ) -> impl Iterator<Item = &'p DeconstructedPat<'p, 'tcx>> + Clone + Captures<'a> {
+ self.patterns.iter().map(|r| r.head())
+ }
+
+ /// This computes `S(constructor, self)`. See top of the file for explanations.
+ fn specialize_constructor(
+ &self,
+ pcx: &PatCtxt<'_, 'p, 'tcx>,
+ ctor: &Constructor<'tcx>,
+ ) -> Matrix<'p, 'tcx> {
+ let mut matrix = Matrix::empty();
+ for row in &self.patterns {
+ if ctor.is_covered_by(pcx, row.head().ctor()) {
+ let new_row = row.pop_head_constructor(pcx, ctor);
+ matrix.push(new_row);
+ }
+ }
+ matrix
+ }
+}
+
+/// Pretty-printer for matrices of patterns, example:
+///
+/// ```text
+/// + _ + [] +
+/// + true + [First] +
+/// + true + [Second(true)] +
+/// + false + [_] +
+/// + _ + [_, _, tail @ ..] +
+/// ```
+impl<'p, 'tcx> fmt::Debug for Matrix<'p, 'tcx> {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ write!(f, "\n")?;
+
+ let Matrix { patterns: m, .. } = self;
+ let pretty_printed_matrix: Vec<Vec<String>> =
+ m.iter().map(|row| row.iter().map(|pat| format!("{:?}", pat)).collect()).collect();
+
+ let column_count = m.iter().map(|row| row.len()).next().unwrap_or(0);
+ assert!(m.iter().all(|row| row.len() == column_count));
+ let column_widths: Vec<usize> = (0..column_count)
+ .map(|col| pretty_printed_matrix.iter().map(|row| row[col].len()).max().unwrap_or(0))
+ .collect();
+
+ for row in pretty_printed_matrix {
+ write!(f, "+")?;
+ for (column, pat_str) in row.into_iter().enumerate() {
+ write!(f, " ")?;
+ write!(f, "{:1$}", pat_str, column_widths[column])?;
+ write!(f, " +")?;
+ }
+ write!(f, "\n")?;
+ }
+ Ok(())
+ }
+}
+
+/// This carries the results of computing usefulness, as described at the top of the file. When
+/// checking usefulness of a match branch, we use the `NoWitnesses` variant, which also keeps track
+/// of potentially unreachable sub-patterns (in the presence of or-patterns). When checking
+/// exhaustiveness of a whole match, we use the `WithWitnesses` variant, which carries a list of
+/// witnesses of non-exhaustiveness when there are any.
+/// Which variant to use is dictated by `ArmType`.
+#[derive(Debug)]
+enum Usefulness<'p, 'tcx> {
+ /// If we don't care about witnesses, simply remember if the pattern was useful.
+ NoWitnesses { useful: bool },
+ /// Carries a list of witnesses of non-exhaustiveness. If empty, indicates that the whole
+ /// pattern is unreachable.
+ WithWitnesses(Vec<Witness<'p, 'tcx>>),
+}
+
+impl<'p, 'tcx> Usefulness<'p, 'tcx> {
+ fn new_useful(preference: ArmType) -> Self {
+ match preference {
+ // A single (empty) witness of reachability.
+ FakeExtraWildcard => WithWitnesses(vec![Witness(vec![])]),
+ RealArm => NoWitnesses { useful: true },
+ }
+ }
+
+ fn new_not_useful(preference: ArmType) -> Self {
+ match preference {
+ FakeExtraWildcard => WithWitnesses(vec![]),
+ RealArm => NoWitnesses { useful: false },
+ }
+ }
+
+ fn is_useful(&self) -> bool {
+ match self {
+ Usefulness::NoWitnesses { useful } => *useful,
+ Usefulness::WithWitnesses(witnesses) => !witnesses.is_empty(),
+ }
+ }
+
+ /// Combine usefulnesses from two branches. This is an associative operation.
+ fn extend(&mut self, other: Self) {
+ match (&mut *self, other) {
+ (WithWitnesses(_), WithWitnesses(o)) if o.is_empty() => {}
+ (WithWitnesses(s), WithWitnesses(o)) if s.is_empty() => *self = WithWitnesses(o),
+ (WithWitnesses(s), WithWitnesses(o)) => s.extend(o),
+ (NoWitnesses { useful: s_useful }, NoWitnesses { useful: o_useful }) => {
+ *s_useful = *s_useful || o_useful
+ }
+ _ => unreachable!(),
+ }
+ }
+
+ /// After calculating the usefulness of a specialized matrix, call this to reconstruct a usefulness
+ /// that makes sense for the matrix pre-specialization. This new usefulness can then be merged
+ /// with the results of specializing with the other constructors.
+ fn apply_constructor(
+ self,
+ pcx: &PatCtxt<'_, 'p, 'tcx>,
+ matrix: &Matrix<'p, 'tcx>, // used to compute missing ctors
+ ctor: &Constructor<'tcx>,
+ ) -> Self {
+ match self {
+ NoWitnesses { .. } => self,
+ WithWitnesses(ref witnesses) if witnesses.is_empty() => self,
+ WithWitnesses(witnesses) => {
+ let new_witnesses = if let Constructor::Missing { .. } = ctor {
+ // We got the special `Missing` constructor, so each of the missing constructors
+ // gives a new pattern that is not caught by the match. We list those patterns.
+ let new_patterns = if pcx.is_non_exhaustive {
+ // Here we don't want the user to try to list all variants; we want them to add
+ // a wildcard, so we only suggest that.
+ vec![DeconstructedPat::wildcard(pcx.ty)]
+ } else {
+ let mut split_wildcard = SplitWildcard::new(pcx);
+ split_wildcard.split(pcx, matrix.heads().map(DeconstructedPat::ctor));
+
+ // This lets us know if we skipped any variants because they are marked
+ // `doc(hidden)` or behind an unstable feature gate (this only applies to stdlib types).
+ let mut hide_variant_show_wild = false;
+ // Construct for each missing constructor a "wild" version of this
+ // constructor, that matches everything that can be built with
+ // it. For example, if `ctor` is a `Constructor::Variant` for
+ // `Option::Some`, we get the pattern `Some(_)`.
+ let mut new: Vec<DeconstructedPat<'_, '_>> = split_wildcard
+ .iter_missing(pcx)
+ .filter_map(|missing_ctor| {
+ // Check if this variant is marked `doc(hidden)`
+ if missing_ctor.is_doc_hidden_variant(pcx)
+ || missing_ctor.is_unstable_variant(pcx)
+ {
+ hide_variant_show_wild = true;
+ return None;
+ }
+ Some(DeconstructedPat::wild_from_ctor(pcx, missing_ctor.clone()))
+ })
+ .collect();
+
+ if hide_variant_show_wild {
+ new.push(DeconstructedPat::wildcard(pcx.ty));
+ }
+
+ new
+ };
+
+ witnesses
+ .into_iter()
+ .flat_map(|witness| {
+ new_patterns.iter().map(move |pat| {
+ Witness(
+ witness
+ .0
+ .iter()
+ .chain(once(pat))
+ .map(DeconstructedPat::clone_and_forget_reachability)
+ .collect(),
+ )
+ })
+ })
+ .collect()
+ } else {
+ witnesses
+ .into_iter()
+ .map(|witness| witness.apply_constructor(pcx, &ctor))
+ .collect()
+ };
+ WithWitnesses(new_witnesses)
+ }
+ }
+ }
+}
+
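+/// Whether we are checking a real `match` arm, or the fake extra wildcard row added when checking
+/// exhaustiveness. This dictates which [`Usefulness`] variant is used: witnesses are only
+/// collected for the fake extra wildcard.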
+#[derive(Copy, Clone, Debug)]
+enum ArmType {
+ FakeExtraWildcard,
+ RealArm,
+}
+
+/// A witness of non-exhaustiveness for error reporting, represented
+/// as a list of patterns (in reverse order of construction) with
+/// wildcards inside to represent elements that can take any inhabitant
+/// of the type as a value.
+///
+/// A witness against a list of patterns should have the same types
+/// and length as the pattern matched against. Because Rust `match`
+/// is always against a single pattern, at the end the witness will
+/// have length 1, but in the middle of the algorithm, it can contain
+/// multiple patterns.
+///
+/// For example, if we are constructing a witness for the match against
+///
+/// ```compile_fail,E0004
+/// # #![feature(type_ascription)]
+/// struct Pair(Option<(u32, u32)>, bool);
+/// # fn foo(p: Pair) {
+/// match (p: Pair) {
+/// Pair(None, _) => {}
+/// Pair(_, false) => {}
+/// }
+/// # }
+/// ```
+///
+/// We'll perform the following steps:
+/// 1. Start with an empty witness
+/// `Witness(vec![])`
+/// 2. Push a witness `true` against the `false`
+/// `Witness(vec![true])`
+/// 3. Push a witness `Some(_)` against the `None`
+/// `Witness(vec![true, Some(_)])`
+/// 4. Apply the `Pair` constructor to the witnesses
+/// `Witness(vec![Pair(Some(_), true)])`
+///
+/// The final `Pair(Some(_), true)` is then the resulting witness.
+#[derive(Debug)]
+pub(crate) struct Witness<'p, 'tcx>(Vec<DeconstructedPat<'p, 'tcx>>);
+
+impl<'p, 'tcx> Witness<'p, 'tcx> {
+ /// Asserts that the witness contains a single pattern, and returns it.
+ fn single_pattern(self) -> DeconstructedPat<'p, 'tcx> {
+ assert_eq!(self.0.len(), 1);
+ self.0.into_iter().next().unwrap()
+ }
+
+ /// Constructs a partial witness for a pattern given a list of
+ /// patterns expanded by the specialization step.
+ ///
+ /// When a pattern P is discovered to be useful, this function is used bottom-up
+ /// to reconstruct a complete witness, e.g., a pattern P' that covers a subset
+ /// of values, V, where each value in that set is not covered by any previously
+ /// used patterns and is covered by the pattern P'. Examples:
+ ///
+ /// left_ty: tuple of 3 elements
+ /// pats: [10, 20, _] => (10, 20, _)
+ ///
+ /// left_ty: struct X { a: (bool, &'static str), b: usize }
+ /// pats: [(false, "foo"), 42] => X { a: (false, "foo"), b: 42 }
+ fn apply_constructor(mut self, pcx: &PatCtxt<'_, 'p, 'tcx>, ctor: &Constructor<'tcx>) -> Self {
+ let pat = {
+ let len = self.0.len();
+ let arity = ctor.arity(pcx);
+ let pats = self.0.drain((len - arity)..).rev();
+ let fields = Fields::from_iter(pcx.cx, pats);
+ DeconstructedPat::new(ctor.clone(), fields, pcx.ty, DUMMY_SP)
+ };
+
+ self.0.push(pat);
+
+ self
+ }
+}
+
+/// Report that a match of a `non_exhaustive` enum marked with `non_exhaustive_omitted_patterns`
+/// is not exhaustive enough.
+///
+/// NB: The partner lint for structs lives in `compiler/rustc_typeck/src/check/pat.rs`.
+fn lint_non_exhaustive_omitted_patterns<'p, 'tcx>(
+ cx: &MatchCheckCtxt<'p, 'tcx>,
+ scrut_ty: Ty<'tcx>,
+ sp: Span,
+ hir_id: HirId,
+ witnesses: Vec<DeconstructedPat<'p, 'tcx>>,
+) {
+ let joined_patterns = joined_uncovered_patterns(cx, &witnesses);
+ cx.tcx.struct_span_lint_hir(NON_EXHAUSTIVE_OMITTED_PATTERNS, hir_id, sp, |build| {
+ let mut lint = build.build("some variants are not matched explicitly");
+ lint.span_label(sp, pattern_not_covered_label(&witnesses, &joined_patterns));
+ lint.help(
+ "ensure that all variants are matched explicitly by adding the suggested match arms",
+ );
+ lint.note(&format!(
+ "the matched value is of type `{}` and the `non_exhaustive_omitted_patterns` attribute was found",
+ scrut_ty,
+ ));
+ lint.emit();
+ });
+}
+
+/// Algorithm from <http://moscova.inria.fr/~maranget/papers/warn/index.html>.
+/// The algorithm from the paper has been modified to correctly handle empty
+/// types. The changes are:
+/// (0) We don't exit early if the pattern matrix has zero rows. We just
+/// continue to recurse over columns.
+/// (1) all_constructors will only return constructors that are statically
+/// possible. E.g., it will only return `Ok` for `Result<T, !>`.
+///
+/// This finds whether a (row) vector `v` of patterns is 'useful' in relation
+/// to a set of such vectors `m` - this is defined as there being a set of
+/// inputs that will match `v` but not any of the sets in `m`.
+///
+/// All the patterns at each column of the `matrix ++ v` matrix must have the same type.
+///
+/// This is used both for reachability checking (if a pattern isn't useful in
+/// relation to preceding patterns, it is not reachable) and exhaustiveness
+/// checking (if a wildcard pattern is useful in relation to a matrix, the
+/// matrix isn't exhaustive).
+///
+/// `is_under_guard` indicates whether the pattern has a guard. If it
+/// has one, it must not be inserted into the matrix. This shouldn't be
+/// relied on for soundness.
+#[instrument(level = "debug", skip(cx, matrix, hir_id))]
+fn is_useful<'p, 'tcx>(
+ cx: &MatchCheckCtxt<'p, 'tcx>,
+ matrix: &Matrix<'p, 'tcx>,
+ v: &PatStack<'p, 'tcx>,
+ witness_preference: ArmType,
+ hir_id: HirId,
+ is_under_guard: bool,
+ is_top_level: bool,
+) -> Usefulness<'p, 'tcx> {
+ debug!(?matrix, ?v);
+ let Matrix { patterns: rows, .. } = matrix;
+
+ // The base case. We are pattern-matching on () and the return value is
+ // based on whether our matrix has a row or not.
+ // NOTE: This could potentially be optimized by checking rows.is_empty()
+ // first and then, if v is non-empty, the return value is based on whether
+ // the type of the tuple we're checking is inhabited or not.
+ if v.is_empty() {
+ let ret = if rows.is_empty() {
+ Usefulness::new_useful(witness_preference)
+ } else {
+ Usefulness::new_not_useful(witness_preference)
+ };
+ debug!(?ret);
+ return ret;
+ }
+
+ debug_assert!(rows.iter().all(|r| r.len() == v.len()));
+
+ // If the first pattern is an or-pattern, expand it.
+ let mut ret = Usefulness::new_not_useful(witness_preference);
+ if v.head().is_or_pat() {
+ debug!("expanding or-pattern");
+ // We try each or-pattern branch in turn.
+ let mut matrix = matrix.clone();
+ for v in v.expand_or_pat() {
+ debug!(?v);
+ let usefulness = ensure_sufficient_stack(|| {
+ is_useful(cx, &matrix, &v, witness_preference, hir_id, is_under_guard, false)
+ });
+ debug!(?usefulness);
+ ret.extend(usefulness);
+ // If the pattern has a guard, don't add it to the matrix.
+ if !is_under_guard {
+ // We push the already-seen patterns into the matrix in order to detect redundant
+ // branches like `Some(_) | Some(0)`.
+ matrix.push(v);
+ }
+ }
+ } else {
+ let ty = v.head().ty();
+ let is_non_exhaustive = cx.is_foreign_non_exhaustive_enum(ty);
+ debug!("v.head: {:?}, v.span: {:?}", v.head(), v.head().span());
+ let pcx = &PatCtxt { cx, ty, span: v.head().span(), is_top_level, is_non_exhaustive };
+
+ let v_ctor = v.head().ctor();
+ debug!(?v_ctor);
+ if let Constructor::IntRange(ctor_range) = &v_ctor {
+ // Lint on likely incorrect range patterns (#63987)
+ ctor_range.lint_overlapping_range_endpoints(
+ pcx,
+ matrix.heads(),
+ matrix.column_count().unwrap_or(0),
+ hir_id,
+ )
+ }
+ // We split the head constructor of `v`.
+ let split_ctors = v_ctor.split(pcx, matrix.heads().map(DeconstructedPat::ctor));
+ let is_non_exhaustive_and_wild = is_non_exhaustive && v_ctor.is_wildcard();
+ // For each constructor, we compute whether there's a value that starts with it that would
+ // witness the usefulness of `v`.
+ let start_matrix = &matrix;
+ for ctor in split_ctors {
+ debug!("specialize({:?})", ctor);
+ // We cache the result of `Fields::wildcards` because it is used a lot.
+ let spec_matrix = start_matrix.specialize_constructor(pcx, &ctor);
+ let v = v.pop_head_constructor(pcx, &ctor);
+ let usefulness = ensure_sufficient_stack(|| {
+ is_useful(cx, &spec_matrix, &v, witness_preference, hir_id, is_under_guard, false)
+ });
+ let usefulness = usefulness.apply_constructor(pcx, start_matrix, &ctor);
+
+ // When all the conditions are met we have a match with a `non_exhaustive` enum
+ // that has the potential to trigger the `non_exhaustive_omitted_patterns` lint.
+ // To understand the workings, check out `Constructor::split` and `SplitWildcard::new/into_ctors`.
+ if is_non_exhaustive_and_wild
+ // We check that the match has a wildcard pattern and that that wildcard is useful,
+ // meaning there are variants that are covered by the wildcard. Without the check
+ // for `witness_preference` the lint would trigger on `if let NonExhaustiveEnum::A = foo {}`
+ && usefulness.is_useful() && matches!(witness_preference, RealArm)
+ && matches!(
+ &ctor,
+ Constructor::Missing { nonexhaustive_enum_missing_real_variants: true }
+ )
+ {
+ let patterns = {
+ let mut split_wildcard = SplitWildcard::new(pcx);
+ split_wildcard.split(pcx, matrix.heads().map(DeconstructedPat::ctor));
+ // Construct for each missing constructor a "wild" version of this
+ // constructor, that matches everything that can be built with
+ // it. For example, if `ctor` is a `Constructor::Variant` for
+ // `Option::Some`, we get the pattern `Some(_)`.
+ split_wildcard
+ .iter_missing(pcx)
+ // Filter out the `NonExhaustive` because we want to list only real
+ // variants. Also remove any unstable feature gated variants.
+ // Because of how we computed `nonexhaustive_enum_missing_real_variants`,
+ // this will not return an empty `Vec`.
+ .filter(|c| !(c.is_non_exhaustive() || c.is_unstable_variant(pcx)))
+ .cloned()
+ .map(|missing_ctor| DeconstructedPat::wild_from_ctor(pcx, missing_ctor))
+ .collect::<Vec<_>>()
+ };
+
+ lint_non_exhaustive_omitted_patterns(pcx.cx, pcx.ty, pcx.span, hir_id, patterns);
+ }
+
+ ret.extend(usefulness);
+ }
+ }
+
+ if ret.is_useful() {
+ v.head().set_reachable();
+ }
+
+ debug!(?ret);
+ ret
+}
+
+/// The arm of a match expression.
+#[derive(Clone, Copy, Debug)]
+pub(crate) struct MatchArm<'p, 'tcx> {
+ /// The pattern must have been lowered through `check_match::MatchVisitor::lower_pattern`.
+ pub(crate) pat: &'p DeconstructedPat<'p, 'tcx>,
+ pub(crate) hir_id: HirId,
+ pub(crate) has_guard: bool,
+}
+
+/// Indicates whether or not a given arm is reachable.
+#[derive(Clone, Debug)]
+pub(crate) enum Reachability {
+ /// The arm is reachable. This additionally carries a set of or-pattern branches that have been
+ /// found to be unreachable despite the overall arm being reachable. Used only in the presence
+ /// of or-patterns, otherwise it stays empty.
+ Reachable(Vec<Span>),
+ /// The arm is unreachable.
+ Unreachable,
+}
+
+/// The output of checking a match for exhaustiveness and arm reachability.
+pub(crate) struct UsefulnessReport<'p, 'tcx> {
+ /// For each arm of the input, whether that arm is reachable after the arms above it.
+ pub(crate) arm_usefulness: Vec<(MatchArm<'p, 'tcx>, Reachability)>,
+ /// If the match is exhaustive, this is empty. If not, this contains witnesses for the lack of
+ /// exhaustiveness.
+ pub(crate) non_exhaustiveness_witnesses: Vec<DeconstructedPat<'p, 'tcx>>,
+}
+
+/// The entrypoint for the usefulness algorithm. Computes whether a match is exhaustive and which
+/// of its arms are reachable.
+///
+/// Note: the input patterns must have been lowered through
+/// `check_match::MatchVisitor::lower_pattern`.
+#[instrument(skip(cx, arms), level = "debug")]
+pub(crate) fn compute_match_usefulness<'p, 'tcx>(
+ cx: &MatchCheckCtxt<'p, 'tcx>,
+ arms: &[MatchArm<'p, 'tcx>],
+ scrut_hir_id: HirId,
+ scrut_ty: Ty<'tcx>,
+) -> UsefulnessReport<'p, 'tcx> {
+ let mut matrix = Matrix::empty();
+ let arm_usefulness: Vec<_> = arms
+ .iter()
+ .copied()
+ .map(|arm| {
+ debug!(?arm);
+ let v = PatStack::from_pattern(arm.pat);
+ is_useful(cx, &matrix, &v, RealArm, arm.hir_id, arm.has_guard, true);
+ if !arm.has_guard {
+ matrix.push(v);
+ }
+ let reachability = if arm.pat.is_reachable() {
+ Reachability::Reachable(arm.pat.unreachable_spans())
+ } else {
+ Reachability::Unreachable
+ };
+ (arm, reachability)
+ })
+ .collect();
+
+ let wild_pattern = cx.pattern_arena.alloc(DeconstructedPat::wildcard(scrut_ty));
+ let v = PatStack::from_pattern(wild_pattern);
+ let usefulness = is_useful(cx, &matrix, &v, FakeExtraWildcard, scrut_hir_id, false, true);
+ let non_exhaustiveness_witnesses = match usefulness {
+ WithWitnesses(pats) => pats.into_iter().map(|w| w.single_pattern()).collect(),
+ NoWitnesses { .. } => bug!(),
+ };
+ UsefulnessReport { arm_usefulness, non_exhaustiveness_witnesses }
+}
diff --git a/compiler/rustc_mir_build/src/thir/util.rs b/compiler/rustc_mir_build/src/thir/util.rs
new file mode 100644
index 000000000..c58ed1ac0
--- /dev/null
+++ b/compiler/rustc_mir_build/src/thir/util.rs
@@ -0,0 +1,31 @@
+use rustc_hir as hir;
+use rustc_middle::ty::{self, CanonicalUserType, TyCtxt, UserType};
+
+pub(crate) trait UserAnnotatedTyHelpers<'tcx> {
+ fn tcx(&self) -> TyCtxt<'tcx>;
+
+ fn typeck_results(&self) -> &ty::TypeckResults<'tcx>;
+
+ /// Looks up the type associated with this hir-id and applies the
+ /// user-given substitutions; the hir-id must map to a suitable
+ /// type.
+ fn user_substs_applied_to_ty_of_hir_id(
+ &self,
+ hir_id: hir::HirId,
+ ) -> Option<CanonicalUserType<'tcx>> {
+ let user_provided_types = self.typeck_results().user_provided_types();
+ let mut user_ty = *user_provided_types.get(hir_id)?;
+ debug!("user_subts_applied_to_ty_of_hir_id: user_ty={:?}", user_ty);
+ let ty = self.typeck_results().node_type(hir_id);
+ match ty.kind() {
+ ty::Adt(adt_def, ..) => {
+ if let UserType::TypeOf(ref mut did, _) = &mut user_ty.value {
+ *did = adt_def.did();
+ }
+ Some(user_ty)
+ }
+ ty::FnDef(..) => Some(user_ty),
+ _ => bug!("ty: {:?} should not have user provided type {:?} recorded ", ty, user_ty),
+ }
+ }
+}