Diffstat (limited to 'compiler/rustc_mir_build')
-rw-r--r--  compiler/rustc_mir_build/Cargo.toml | 2
-rw-r--r--  compiler/rustc_mir_build/messages.ftl | 56
-rw-r--r--  compiler/rustc_mir_build/src/build/block.rs | 158
-rw-r--r--  compiler/rustc_mir_build/src/build/cfg.rs | 13
-rw-r--r--  compiler/rustc_mir_build/src/build/custom/mod.rs | 17
-rw-r--r--  compiler/rustc_mir_build/src/build/custom/parse.rs | 48
-rw-r--r--  compiler/rustc_mir_build/src/build/custom/parse/instruction.rs | 83
-rw-r--r--  compiler/rustc_mir_build/src/build/expr/as_constant.rs | 6
-rw-r--r--  compiler/rustc_mir_build/src/build/expr/as_place.rs | 4
-rw-r--r--  compiler/rustc_mir_build/src/build/expr/as_rvalue.rs | 11
-rw-r--r--  compiler/rustc_mir_build/src/build/matches/mod.rs | 114
-rw-r--r--  compiler/rustc_mir_build/src/build/matches/simplify.rs | 8
-rw-r--r--  compiler/rustc_mir_build/src/build/matches/test.rs | 53
-rw-r--r--  compiler/rustc_mir_build/src/build/mod.rs | 36
-rw-r--r--  compiler/rustc_mir_build/src/build/scope.rs | 48
-rw-r--r--  compiler/rustc_mir_build/src/check_unsafety.rs | 95
-rw-r--r--  compiler/rustc_mir_build/src/errors.rs | 130
-rw-r--r--  compiler/rustc_mir_build/src/lib.rs | 6
-rw-r--r--  compiler/rustc_mir_build/src/lints.rs | 2
-rw-r--r--  compiler/rustc_mir_build/src/thir/constant.rs | 6
-rw-r--r--  compiler/rustc_mir_build/src/thir/cx/block.rs | 10
-rw-r--r--  compiler/rustc_mir_build/src/thir/cx/expr.rs | 135
-rw-r--r--  compiler/rustc_mir_build/src/thir/cx/mod.rs | 38
-rw-r--r--  compiler/rustc_mir_build/src/thir/pattern/check_match.rs | 235
-rw-r--r--  compiler/rustc_mir_build/src/thir/pattern/const_to_pat.rs | 25
-rw-r--r--  compiler/rustc_mir_build/src/thir/pattern/deconstruct_pat.rs | 1912
-rw-r--r--  compiler/rustc_mir_build/src/thir/pattern/mod.rs | 179
-rw-r--r--  compiler/rustc_mir_build/src/thir/pattern/usefulness.rs | 1205
-rw-r--r--  compiler/rustc_mir_build/src/thir/print.rs | 36
29 files changed, 764 insertions, 3907 deletions
diff --git a/compiler/rustc_mir_build/Cargo.toml b/compiler/rustc_mir_build/Cargo.toml
index 6dceacd75..6d681dc29 100644
--- a/compiler/rustc_mir_build/Cargo.toml
+++ b/compiler/rustc_mir_build/Cargo.toml
@@ -17,7 +17,7 @@ rustc_index = { path = "../rustc_index" }
rustc_infer = { path = "../rustc_infer" }
rustc_macros = { path = "../rustc_macros" }
rustc_middle = { path = "../rustc_middle" }
-rustc_serialize = { path = "../rustc_serialize" }
+rustc_pattern_analysis = { path = "../rustc_pattern_analysis" }
rustc_session = { path = "../rustc_session" }
rustc_span = { path = "../rustc_span" }
rustc_target = { path = "../rustc_target" }
diff --git a/compiler/rustc_mir_build/messages.ftl b/compiler/rustc_mir_build/messages.ftl
index 7dd0e7d4b..615b55343 100644
--- a/compiler/rustc_mir_build/messages.ftl
+++ b/compiler/rustc_mir_build/messages.ftl
@@ -30,12 +30,32 @@ mir_build_borrow_of_moved_value = borrow of moved value
mir_build_call_to_fn_with_requires_unsafe =
call to function `{$function}` with `#[target_feature]` is unsafe and requires unsafe block
- .note = can only be called if the required target features are available
+ .help = in order for the call to be safe, the context requires the following additional target {$missing_target_features_count ->
+ [1] feature
+ *[count] features
+ }: {$missing_target_features}
+ .note = the {$build_target_features} target {$build_target_features_count ->
+ [1] feature
+ *[count] features
+ } being enabled in the build configuration does not remove the requirement to list {$build_target_features_count ->
+ [1] it
+ *[count] them
+ } in `#[target_feature]`
.label = call to function with `#[target_feature]`
mir_build_call_to_fn_with_requires_unsafe_unsafe_op_in_unsafe_fn_allowed =
call to function `{$function}` with `#[target_feature]` is unsafe and requires unsafe function or block
- .note = can only be called if the required target features are available
+ .help = in order for the call to be safe, the context requires the following additional target {$missing_target_features_count ->
+ [1] feature
+ *[count] features
+ }: {$missing_target_features}
+ .note = the {$build_target_features} target {$build_target_features_count ->
+ [1] feature
+ *[count] features
+ } being enabled in the build configuration does not remove the requirement to list {$build_target_features_count ->
+ [1] it
+ *[count] them
+ } in `#[target_feature]`
.label = call to function with `#[target_feature]`
mir_build_call_to_unsafe_fn_requires_unsafe =
@@ -217,15 +237,6 @@ mir_build_non_const_path = runtime values cannot be referenced in patterns
mir_build_non_exhaustive_match_all_arms_guarded =
match arms with guards don't count towards exhaustivity
-mir_build_non_exhaustive_omitted_pattern = some variants are not matched explicitly
- .help = ensure that all variants are matched explicitly by adding the suggested match arms
- .note = the matched value is of type `{$scrut_ty}` and the `non_exhaustive_omitted_patterns` attribute was found
-
-mir_build_non_exhaustive_omitted_pattern_lint_on_arm = the lint level must be set on the whole match
- .help = it no longer has any effect to set the lint level on an individual match arm
- .label = remove this attribute
- .suggestion = set the lint level on the whole match
-
mir_build_non_exhaustive_patterns_type_not_empty = non-exhaustive patterns: type `{$ty}` is non-empty
.def_note = `{$peeled_ty}` defined here
.type_note = the matched value is of type `{$ty}`
@@ -240,10 +251,6 @@ mir_build_non_partial_eq_match =
mir_build_nontrivial_structural_match =
to use a constant of type `{$non_sm_ty}` in a pattern, the constant's initializer must be trivial or `{$non_sm_ty}` must be annotated with `#[derive(PartialEq, Eq)]`
-mir_build_overlapping_range_endpoints = multiple patterns overlap on their endpoints
- .range = ... with this range
- .note = you likely meant to write mutually exclusive ranges
-
mir_build_pattern_not_covered = refutable pattern in {$origin}
.pattern_ty = the matched value is of type `{$pattern_ty}`
@@ -297,13 +304,6 @@ mir_build_unconditional_recursion = function cannot return without recursing
mir_build_unconditional_recursion_call_site_label = recursive call site
-mir_build_uncovered = {$count ->
- [1] pattern `{$witness_1}`
- [2] patterns `{$witness_1}` and `{$witness_2}`
- [3] patterns `{$witness_1}`, `{$witness_2}` and `{$witness_3}`
- *[other] patterns `{$witness_1}`, `{$witness_2}`, `{$witness_3}` and {$remainder} more
- } not covered
-
mir_build_union_field_requires_unsafe =
access to union field is unsafe and requires unsafe block
.note = the field may not be properly initialized: using uninitialized data will cause undefined behavior
@@ -330,7 +330,17 @@ mir_build_unsafe_op_in_unsafe_fn_borrow_of_layout_constrained_field_requires_uns
mir_build_unsafe_op_in_unsafe_fn_call_to_fn_with_requires_unsafe =
call to function `{$function}` with `#[target_feature]` is unsafe and requires unsafe block (error E0133)
- .note = can only be called if the required target features are available
+ .help = in order for the call to be safe, the context requires the following additional target {$missing_target_features_count ->
+ [1] feature
+ *[count] features
+ }: {$missing_target_features}
+ .note = the {$build_target_features} target {$build_target_features_count ->
+ [1] feature
+ *[count] features
+ } being enabled in the build configuration does not remove the requirement to list {$build_target_features_count ->
+ [1] it
+ *[count] them
+ } in `#[target_feature]`
.label = call to function with `#[target_feature]`
mir_build_unsafe_op_in_unsafe_fn_call_to_unsafe_fn_requires_unsafe =
diff --git a/compiler/rustc_mir_build/src/build/block.rs b/compiler/rustc_mir_build/src/build/block.rs
index ab4cd2488..8cad6976c 100644
--- a/compiler/rustc_mir_build/src/build/block.rs
+++ b/compiler/rustc_mir_build/src/build/block.rs
@@ -13,42 +13,33 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
ast_block: BlockId,
source_info: SourceInfo,
) -> BlockAnd<()> {
- let Block {
- region_scope,
- opt_destruction_scope,
- span,
- ref stmts,
- expr,
- targeted_by_break,
- safety_mode,
- } = self.thir[ast_block];
+ let Block { region_scope, span, ref stmts, expr, targeted_by_break, safety_mode } =
+ self.thir[ast_block];
let expr = expr.map(|expr| &self.thir[expr]);
- self.in_opt_scope(opt_destruction_scope.map(|de| (de, source_info)), move |this| {
- this.in_scope((region_scope, source_info), LintLevel::Inherited, move |this| {
- if targeted_by_break {
- this.in_breakable_scope(None, destination, span, |this| {
- Some(this.ast_block_stmts(
- destination,
- block,
- span,
- &stmts,
- expr,
- safety_mode,
- region_scope,
- ))
- })
- } else {
- this.ast_block_stmts(
+ self.in_scope((region_scope, source_info), LintLevel::Inherited, move |this| {
+ if targeted_by_break {
+ this.in_breakable_scope(None, destination, span, |this| {
+ Some(this.ast_block_stmts(
destination,
block,
span,
- &stmts,
+ stmts,
expr,
safety_mode,
region_scope,
- )
- }
- })
+ ))
+ })
+ } else {
+ this.ast_block_stmts(
+ destination,
+ block,
+ span,
+ stmts,
+ expr,
+ safety_mode,
+ region_scope,
+ )
+ }
})
}
@@ -92,20 +83,15 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
let source_info = this.source_info(span);
for stmt in stmts {
- let Stmt { ref kind, opt_destruction_scope } = this.thir[*stmt];
+ let Stmt { ref kind } = this.thir[*stmt];
match kind {
StmtKind::Expr { scope, expr } => {
this.block_context.push(BlockFrame::Statement { ignores_expr_result: true });
+ let si = (*scope, source_info);
unpack!(
- block = this.in_opt_scope(
- opt_destruction_scope.map(|de| (de, source_info)),
- |this| {
- let si = (*scope, source_info);
- this.in_scope(si, LintLevel::Inherited, |this| {
- this.stmt_expr(block, &this.thir[*expr], Some(*scope))
- })
- }
- )
+ block = this.in_scope(si, LintLevel::Inherited, |this| {
+ this.stmt_expr(block, &this.thir[*expr], Some(*scope))
+ })
);
}
StmtKind::Let {
@@ -221,43 +207,38 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
let init = &this.thir[*initializer];
let initializer_span = init.span;
+ let scope = (*init_scope, source_info);
let failure = unpack!(
- block = this.in_opt_scope(
- opt_destruction_scope.map(|de| (de, source_info)),
- |this| {
- let scope = (*init_scope, source_info);
- this.in_scope(scope, *lint_level, |this| {
- this.declare_bindings(
- visibility_scope,
- remainder_span,
- pattern,
- None,
- Some((Some(&destination), initializer_span)),
- );
- this.visit_primary_bindings(
- pattern,
- UserTypeProjections::none(),
- &mut |this, _, _, _, node, span, _, _| {
- this.storage_live_binding(
- block,
- node,
- span,
- OutsideGuard,
- true,
- );
- },
- );
- this.ast_let_else(
+ block = this.in_scope(scope, *lint_level, |this| {
+ this.declare_bindings(
+ visibility_scope,
+ remainder_span,
+ pattern,
+ None,
+ Some((Some(&destination), initializer_span)),
+ );
+ this.visit_primary_bindings(
+ pattern,
+ UserTypeProjections::none(),
+ &mut |this, _, _, _, node, span, _, _| {
+ this.storage_live_binding(
block,
- init,
- initializer_span,
- *else_block,
- &last_remainder_scope,
- pattern,
- )
- })
- }
- )
+ node,
+ span,
+ OutsideGuard,
+ true,
+ );
+ },
+ );
+ this.ast_let_else(
+ block,
+ init,
+ initializer_span,
+ *else_block,
+ &last_remainder_scope,
+ pattern,
+ )
+ })
);
this.cfg.goto(failure, source_info, failure_entry);
@@ -298,25 +279,20 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
if let Some(init) = initializer {
let init = &this.thir[*init];
let initializer_span = init.span;
+ let scope = (*init_scope, source_info);
unpack!(
- block = this.in_opt_scope(
- opt_destruction_scope.map(|de| (de, source_info)),
- |this| {
- let scope = (*init_scope, source_info);
- this.in_scope(scope, *lint_level, |this| {
- this.declare_bindings(
- visibility_scope,
- remainder_span,
- pattern,
- None,
- Some((None, initializer_span)),
- );
- this.expr_into_pattern(block, &pattern, init)
- // irrefutable pattern
- })
- },
- )
+ block = this.in_scope(scope, *lint_level, |this| {
+ this.declare_bindings(
+ visibility_scope,
+ remainder_span,
+ pattern,
+ None,
+ Some((None, initializer_span)),
+ );
+ this.expr_into_pattern(block, &pattern, init)
+ // irrefutable pattern
+ })
)
} else {
let scope = (*init_scope, source_info);
diff --git a/compiler/rustc_mir_build/src/build/cfg.rs b/compiler/rustc_mir_build/src/build/cfg.rs
index fddcf9de7..2bd0e2897 100644
--- a/compiler/rustc_mir_build/src/build/cfg.rs
+++ b/compiler/rustc_mir_build/src/build/cfg.rs
@@ -101,6 +101,19 @@ impl<'tcx> CFG<'tcx> {
self.push(block, stmt);
}
+ /// Adds a dummy statement whose only role is to associate a span with its
+ /// enclosing block for the purposes of coverage instrumentation.
+ ///
+ /// This results in more accurate coverage reports for certain kinds of
+ /// syntax (e.g. `continue` or `if !`) that would otherwise not appear in MIR.
+ pub(crate) fn push_coverage_span_marker(&mut self, block: BasicBlock, source_info: SourceInfo) {
+ let kind = StatementKind::Coverage(Box::new(Coverage {
+ kind: coverage::CoverageKind::SpanMarker,
+ }));
+ let stmt = Statement { source_info, kind };
+ self.push(block, stmt);
+ }
+
pub(crate) fn terminate(
&mut self,
block: BasicBlock,
diff --git a/compiler/rustc_mir_build/src/build/custom/mod.rs b/compiler/rustc_mir_build/src/build/custom/mod.rs
index 3de2f45ad..ead20539e 100644
--- a/compiler/rustc_mir_build/src/build/custom/mod.rs
+++ b/compiler/rustc_mir_build/src/build/custom/mod.rs
@@ -88,11 +88,11 @@ pub(super) fn build_custom_mir<'tcx>(
};
let res: PResult<_> = try {
- pctxt.parse_args(&params)?;
+ pctxt.parse_args(params)?;
pctxt.parse_body(expr)?;
};
if let Err(err) = res {
- tcx.sess.diagnostic().span_fatal(
+ tcx.sess.dcx().span_fatal(
err.span,
format!("Could not parse {}, found: {:?}", err.expected, err.item_description),
)
@@ -162,6 +162,19 @@ impl<'tcx, 'body> ParseCtxt<'tcx, 'body> {
expected: expected.to_string(),
}
}
+
+ fn stmt_error(&self, stmt: StmtId, expected: &'static str) -> ParseError {
+ let stmt = &self.thir[stmt];
+ let span = match stmt.kind {
+ StmtKind::Expr { expr, .. } => self.thir[expr].span,
+ StmtKind::Let { span, .. } => span,
+ };
+ ParseError {
+ span,
+ item_description: format!("{:?}", stmt.kind),
+ expected: expected.to_string(),
+ }
+ }
}
type PResult<T> = Result<T, ParseError>;
diff --git a/compiler/rustc_mir_build/src/build/custom/parse.rs b/compiler/rustc_mir_build/src/build/custom/parse.rs
index e2ab2cb90..a6f9caada 100644
--- a/compiler/rustc_mir_build/src/build/custom/parse.rs
+++ b/compiler/rustc_mir_build/src/build/custom/parse.rs
@@ -27,10 +27,13 @@ macro_rules! parse_by_kind {
$expr_name:pat,
$expected:literal,
$(
- @call($name:literal, $args:ident) => $call_expr:expr,
+ @call($name:ident, $args:ident) => $call_expr:expr,
)*
$(
- $pat:pat => $expr:expr,
+ @variant($adt:ident, $variant:ident) => $variant_expr:expr,
+ )*
+ $(
+ $pat:pat $(if $guard:expr)? => $expr:expr,
)*
) => {{
let expr_id = $self.preparse($expr_id);
@@ -42,14 +45,20 @@ macro_rules! parse_by_kind {
ExprKind::Call { ty, fun: _, args: $args, .. } if {
match ty.kind() {
ty::FnDef(did, _) => {
- $self.tcx.is_diagnostic_item(rustc_span::Symbol::intern($name), *did)
+ $self.tcx.is_diagnostic_item(rustc_span::sym::$name, *did)
}
_ => false,
}
} => $call_expr,
)*
$(
- $pat => $expr,
+ ExprKind::Adt(box AdtExpr { adt_def, variant_index, .. }) if {
+ $self.tcx.is_diagnostic_item(rustc_span::sym::$adt, adt_def.did()) &&
+ adt_def.variants()[*variant_index].name == rustc_span::sym::$variant
+ } => $variant_expr,
+ )*
+ $(
+ $pat $(if $guard)? => $expr,
)*
#[allow(unreachable_patterns)]
_ => return Err($self.expr_error(expr_id, $expected))
@@ -172,7 +181,8 @@ impl<'tcx, 'body> ParseCtxt<'tcx, 'body> {
ExprKind::Block { block } => &self.thir[*block].stmts,
);
for (i, block_def) in block_defs.iter().enumerate() {
- let block = self.parse_block_def(self.statement_as_expr(*block_def)?)?;
+ let is_cleanup = self.body.basic_blocks_mut()[BasicBlock::from_usize(i)].is_cleanup;
+ let block = self.parse_block_def(self.statement_as_expr(*block_def)?, is_cleanup)?;
self.body.basic_blocks_mut()[BasicBlock::from_usize(i)] = block;
}
@@ -181,15 +191,28 @@ impl<'tcx, 'body> ParseCtxt<'tcx, 'body> {
fn parse_block_decls(&mut self, stmts: impl Iterator<Item = StmtId>) -> PResult<()> {
for stmt in stmts {
- let (var, _, _) = self.parse_let_statement(stmt)?;
- let data = BasicBlockData::new(None);
- let block = self.body.basic_blocks_mut().push(data);
- self.block_map.insert(var, block);
+ self.parse_basic_block_decl(stmt)?;
}
-
Ok(())
}
+ fn parse_basic_block_decl(&mut self, stmt: StmtId) -> PResult<()> {
+ match &self.thir[stmt].kind {
+ StmtKind::Let { pattern, initializer: Some(initializer), .. } => {
+ let (var, ..) = self.parse_var(pattern)?;
+ let mut data = BasicBlockData::new(None);
+ data.is_cleanup = parse_by_kind!(self, *initializer, _, "basic block declaration",
+ @variant(mir_basic_block, Normal) => false,
+ @variant(mir_basic_block, Cleanup) => true,
+ );
+ let block = self.body.basic_blocks_mut().push(data);
+ self.block_map.insert(var, block);
+ Ok(())
+ }
+ _ => Err(self.stmt_error(stmt, "let statement with an initializer")),
+ }
+ }
+
fn parse_local_decls(&mut self, mut stmts: impl Iterator<Item = StmtId>) -> PResult<()> {
let (ret_var, ..) = self.parse_let_statement(stmts.next().unwrap())?;
self.local_map.insert(ret_var, Local::from_u32(0));
@@ -219,7 +242,7 @@ impl<'tcx, 'body> ParseCtxt<'tcx, 'body> {
};
let span = self.thir[expr].span;
let (name, operand) = parse_by_kind!(self, expr, _, "debuginfo",
- @call("mir_debuginfo", args) => {
+ @call(mir_debuginfo, args) => {
(args[0], args[1])
},
);
@@ -281,12 +304,13 @@ impl<'tcx, 'body> ParseCtxt<'tcx, 'body> {
}
}
- fn parse_block_def(&self, expr_id: ExprId) -> PResult<BasicBlockData<'tcx>> {
+ fn parse_block_def(&self, expr_id: ExprId, is_cleanup: bool) -> PResult<BasicBlockData<'tcx>> {
let block = parse_by_kind!(self, expr_id, _, "basic block",
ExprKind::Block { block } => &self.thir[*block],
);
let mut data = BasicBlockData::new(None);
+ data.is_cleanup = is_cleanup;
for stmt_id in &*block.stmts {
let stmt = self.statement_as_expr(*stmt_id)?;
let span = self.thir[stmt].span;
diff --git a/compiler/rustc_mir_build/src/build/custom/parse/instruction.rs b/compiler/rustc_mir_build/src/build/custom/parse/instruction.rs
index fd2c57a0a..4ce7f831c 100644
--- a/compiler/rustc_mir_build/src/build/custom/parse/instruction.rs
+++ b/compiler/rustc_mir_build/src/build/custom/parse/instruction.rs
@@ -13,19 +13,19 @@ use super::{parse_by_kind, PResult, ParseCtxt};
impl<'tcx, 'body> ParseCtxt<'tcx, 'body> {
pub fn parse_statement(&self, expr_id: ExprId) -> PResult<StatementKind<'tcx>> {
parse_by_kind!(self, expr_id, _, "statement",
- @call("mir_storage_live", args) => {
+ @call(mir_storage_live, args) => {
Ok(StatementKind::StorageLive(self.parse_local(args[0])?))
},
- @call("mir_storage_dead", args) => {
+ @call(mir_storage_dead, args) => {
Ok(StatementKind::StorageDead(self.parse_local(args[0])?))
},
- @call("mir_deinit", args) => {
+ @call(mir_deinit, args) => {
Ok(StatementKind::Deinit(Box::new(self.parse_place(args[0])?)))
},
- @call("mir_retag", args) => {
+ @call(mir_retag, args) => {
Ok(StatementKind::Retag(RetagKind::Default, Box::new(self.parse_place(args[0])?)))
},
- @call("mir_set_discriminant", args) => {
+ @call(mir_set_discriminant, args) => {
let place = self.parse_place(args[0])?;
let var = self.parse_integer_literal(args[1])? as u32;
Ok(StatementKind::SetDiscriminant {
@@ -43,24 +43,30 @@ impl<'tcx, 'body> ParseCtxt<'tcx, 'body> {
pub fn parse_terminator(&self, expr_id: ExprId) -> PResult<TerminatorKind<'tcx>> {
parse_by_kind!(self, expr_id, expr, "terminator",
- @call("mir_return", _args) => {
+ @call(mir_return, _args) => {
Ok(TerminatorKind::Return)
},
- @call("mir_goto", args) => {
+ @call(mir_goto, args) => {
Ok(TerminatorKind::Goto { target: self.parse_block(args[0])? } )
},
- @call("mir_unreachable", _args) => {
+ @call(mir_unreachable, _args) => {
Ok(TerminatorKind::Unreachable)
},
- @call("mir_drop", args) => {
+ @call(mir_unwind_resume, _args) => {
+ Ok(TerminatorKind::UnwindResume)
+ },
+ @call(mir_unwind_terminate, args) => {
+ Ok(TerminatorKind::UnwindTerminate(self.parse_unwind_terminate_reason(args[0])?))
+ },
+ @call(mir_drop, args) => {
Ok(TerminatorKind::Drop {
place: self.parse_place(args[0])?,
target: self.parse_block(args[1])?,
- unwind: UnwindAction::Continue,
+ unwind: self.parse_unwind_action(args[2])?,
replace: false,
})
},
- @call("mir_call", args) => {
+ @call(mir_call, args) => {
self.parse_call(args)
},
ExprKind::Match { scrutinee, arms, .. } => {
@@ -70,6 +76,34 @@ impl<'tcx, 'body> ParseCtxt<'tcx, 'body> {
)
}
+ fn parse_unwind_terminate_reason(&self, expr_id: ExprId) -> PResult<UnwindTerminateReason> {
+ parse_by_kind!(self, expr_id, _, "unwind terminate reason",
+ @variant(mir_unwind_terminate_reason, Abi) => {
+ Ok(UnwindTerminateReason::Abi)
+ },
+ @variant(mir_unwind_terminate_reason, InCleanup) => {
+ Ok(UnwindTerminateReason::InCleanup)
+ },
+ )
+ }
+
+ fn parse_unwind_action(&self, expr_id: ExprId) -> PResult<UnwindAction> {
+ parse_by_kind!(self, expr_id, _, "unwind action",
+ @call(mir_unwind_continue, _args) => {
+ Ok(UnwindAction::Continue)
+ },
+ @call(mir_unwind_unreachable, _args) => {
+ Ok(UnwindAction::Unreachable)
+ },
+ @call(mir_unwind_terminate, args) => {
+ Ok(UnwindAction::Terminate(self.parse_unwind_terminate_reason(args[0])?))
+ },
+ @call(mir_unwind_cleanup, args) => {
+ Ok(UnwindAction::Cleanup(self.parse_block(args[0])?))
+ },
+ )
+ }
+
fn parse_match(&self, arms: &[ArmId], span: Span) -> PResult<SwitchTargets> {
let Some((otherwise, rest)) = arms.split_last() else {
return Err(ParseError {
@@ -113,6 +147,7 @@ impl<'tcx, 'body> ParseCtxt<'tcx, 'body> {
);
let destination = self.parse_place(destination)?;
let target = self.parse_block(args[1])?;
+ let unwind = self.parse_unwind_action(args[2])?;
parse_by_kind!(self, call, _, "function call",
ExprKind::Call { fun, args, from_hir_call, fn_span, .. } => {
@@ -126,7 +161,7 @@ impl<'tcx, 'body> ParseCtxt<'tcx, 'body> {
args,
destination,
target: Some(target),
- unwind: UnwindAction::Continue,
+ unwind,
call_source: if *from_hir_call { CallSource::Normal } else {
CallSource::OverloadedOperator
},
@@ -138,25 +173,25 @@ impl<'tcx, 'body> ParseCtxt<'tcx, 'body> {
fn parse_rvalue(&self, expr_id: ExprId) -> PResult<Rvalue<'tcx>> {
parse_by_kind!(self, expr_id, expr, "rvalue",
- @call("mir_discriminant", args) => self.parse_place(args[0]).map(Rvalue::Discriminant),
- @call("mir_cast_transmute", args) => {
+ @call(mir_discriminant, args) => self.parse_place(args[0]).map(Rvalue::Discriminant),
+ @call(mir_cast_transmute, args) => {
let source = self.parse_operand(args[0])?;
Ok(Rvalue::Cast(CastKind::Transmute, source, expr.ty))
},
- @call("mir_checked", args) => {
+ @call(mir_checked, args) => {
parse_by_kind!(self, args[0], _, "binary op",
ExprKind::Binary { op, lhs, rhs } => Ok(Rvalue::CheckedBinaryOp(
*op, Box::new((self.parse_operand(*lhs)?, self.parse_operand(*rhs)?))
)),
)
},
- @call("mir_offset", args) => {
+ @call(mir_offset, args) => {
let ptr = self.parse_operand(args[0])?;
let offset = self.parse_operand(args[1])?;
Ok(Rvalue::BinaryOp(BinOp::Offset, Box::new((ptr, offset))))
},
- @call("mir_len", args) => Ok(Rvalue::Len(self.parse_place(args[0])?)),
- @call("mir_copy_for_deref", args) => Ok(Rvalue::CopyForDeref(self.parse_place(args[0])?)),
+ @call(mir_len, args) => Ok(Rvalue::Len(self.parse_place(args[0])?)),
+ @call(mir_copy_for_deref, args) => Ok(Rvalue::CopyForDeref(self.parse_place(args[0])?)),
ExprKind::Borrow { borrow_kind, arg } => Ok(
Rvalue::Ref(self.tcx.lifetimes.re_erased, *borrow_kind, self.parse_place(*arg)?)
),
@@ -206,9 +241,9 @@ impl<'tcx, 'body> ParseCtxt<'tcx, 'body> {
pub fn parse_operand(&self, expr_id: ExprId) -> PResult<Operand<'tcx>> {
parse_by_kind!(self, expr_id, expr, "operand",
- @call("mir_move", args) => self.parse_place(args[0]).map(Operand::Move),
- @call("mir_static", args) => self.parse_static(args[0]),
- @call("mir_static_mut", args) => self.parse_static(args[0]),
+ @call(mir_move, args) => self.parse_place(args[0]).map(Operand::Move),
+ @call(mir_static, args) => self.parse_static(args[0]),
+ @call(mir_static_mut, args) => self.parse_static(args[0]),
ExprKind::Literal { .. }
| ExprKind::NamedConst { .. }
| ExprKind::NonHirLiteral { .. }
@@ -229,7 +264,7 @@ impl<'tcx, 'body> ParseCtxt<'tcx, 'body> {
fn parse_place_inner(&self, expr_id: ExprId) -> PResult<(Place<'tcx>, PlaceTy<'tcx>)> {
let (parent, proj) = parse_by_kind!(self, expr_id, expr, "place",
- @call("mir_field", args) => {
+ @call(mir_field, args) => {
let (parent, ty) = self.parse_place_inner(args[0])?;
let field = FieldIdx::from_u32(self.parse_integer_literal(args[1])? as u32);
let field_ty = ty.field_ty(self.tcx, field);
@@ -237,7 +272,7 @@ impl<'tcx, 'body> ParseCtxt<'tcx, 'body> {
let place = parent.project_deeper(&[proj], self.tcx);
return Ok((place, PlaceTy::from_ty(field_ty)));
},
- @call("mir_variant", args) => {
+ @call(mir_variant, args) => {
(args[0], PlaceElem::Downcast(
None,
VariantIdx::from_u32(self.parse_integer_literal(args[1])? as u32)
@@ -245,7 +280,7 @@ impl<'tcx, 'body> ParseCtxt<'tcx, 'body> {
},
ExprKind::Deref { arg } => {
parse_by_kind!(self, *arg, _, "does not matter",
- @call("mir_make_place", args) => return self.parse_place_inner(args[0]),
+ @call(mir_make_place, args) => return self.parse_place_inner(args[0]),
_ => (*arg, PlaceElem::Deref),
)
},
diff --git a/compiler/rustc_mir_build/src/build/expr/as_constant.rs b/compiler/rustc_mir_build/src/build/expr/as_constant.rs
index 4ed49e787..3bfffa735 100644
--- a/compiler/rustc_mir_build/src/build/expr/as_constant.rs
+++ b/compiler/rustc_mir_build/src/build/expr/as_constant.rs
@@ -114,7 +114,7 @@ fn lit_to_mir_constant<'tcx>(
let width = tcx
.layout_of(param_ty)
.map_err(|_| {
- LitToConstError::Reported(tcx.sess.delay_span_bug(
+ LitToConstError::Reported(tcx.sess.span_delayed_bug(
DUMMY_SP,
format!("couldn't compute width of literal: {:?}", lit_input.lit),
))
@@ -158,7 +158,7 @@ fn lit_to_mir_constant<'tcx>(
}
(ast::LitKind::Float(n, _), ty::Float(fty)) => parse_float_into_constval(*n, *fty, neg)
.ok_or_else(|| {
- LitToConstError::Reported(tcx.sess.delay_span_bug(
+ LitToConstError::Reported(tcx.sess.span_delayed_bug(
DUMMY_SP,
format!("couldn't parse float literal: {:?}", lit_input.lit),
))
@@ -167,7 +167,7 @@ fn lit_to_mir_constant<'tcx>(
(ast::LitKind::Char(c), ty::Char) => ConstValue::Scalar(Scalar::from_char(*c)),
(ast::LitKind::Err, _) => {
return Err(LitToConstError::Reported(
- tcx.sess.delay_span_bug(DUMMY_SP, "encountered LitKind::Err during mir build"),
+ tcx.sess.span_delayed_bug(DUMMY_SP, "encountered LitKind::Err during mir build"),
));
}
_ => return Err(LitToConstError::TypeError),
diff --git a/compiler/rustc_mir_build/src/build/expr/as_place.rs b/compiler/rustc_mir_build/src/build/expr/as_place.rs
index 7e9191a37..43e834890 100644
--- a/compiler/rustc_mir_build/src/build/expr/as_place.rs
+++ b/compiler/rustc_mir_build/src/build/expr/as_place.rs
@@ -175,7 +175,7 @@ fn to_upvars_resolved_place_builder<'tcx>(
projection: &[PlaceElem<'tcx>],
) -> Option<PlaceBuilder<'tcx>> {
let Some((capture_index, capture)) =
- find_capture_matching_projections(&cx.upvars, var_hir_id, &projection)
+ find_capture_matching_projections(&cx.upvars, var_hir_id, projection)
else {
let closure_span = cx.tcx.def_span(closure_def_id);
if !enable_precise_capture(closure_span) {
@@ -683,7 +683,7 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
Ty::new_imm_ref(tcx, tcx.lifetimes.re_erased, fake_borrow_deref_ty);
let fake_borrow_temp =
self.local_decls.push(LocalDecl::new(fake_borrow_ty, expr_span));
- let projection = tcx.mk_place_elems(&base_place.projection);
+ let projection = tcx.mk_place_elems(base_place.projection);
self.cfg.push_assign(
block,
source_info,
diff --git a/compiler/rustc_mir_build/src/build/expr/as_rvalue.rs b/compiler/rustc_mir_build/src/build/expr/as_rvalue.rs
index eece8684e..a5f6bb12e 100644
--- a/compiler/rustc_mir_build/src/build/expr/as_rvalue.rs
+++ b/compiler/rustc_mir_build/src/build/expr/as_rvalue.rs
@@ -10,8 +10,6 @@ use crate::build::{BlockAnd, BlockAndExtension, Builder, NeedsTemporary};
use rustc_hir::lang_items::LangItem;
use rustc_middle::middle::region;
use rustc_middle::mir::interpret::Scalar;
-use rustc_middle::mir::AssertKind;
-use rustc_middle::mir::Place;
use rustc_middle::mir::*;
use rustc_middle::thir::*;
use rustc_middle::ty::cast::{mir_cast_kind, CastTy};
@@ -600,10 +598,12 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
BinOp::Shl | BinOp::Shr if self.check_overflow && ty.is_integral() => {
// For an unsigned RHS, the shift is in-range for `rhs < bits`.
// For a signed RHS, `IntToInt` cast to the equivalent unsigned
- // type and do that same comparison. Because the type is the
- // same size, there's no negative shift amount that ends up
- // overlapping with valid ones, thus it catches negatives too.
+ // type and do that same comparison.
+ // A negative value will be *at least* 128 after the cast (that's i8::MIN),
+ // and 128 is an overflowing shift amount for all our currently existing types,
+ // so this cast can never make us miss an overflow.
let (lhs_size, _) = ty.int_size_and_signed(self.tcx);
+ assert!(lhs_size.bits() <= 128);
let rhs_ty = rhs.ty(&self.local_decls, self.tcx);
let (rhs_size, _) = rhs_ty.int_size_and_signed(self.tcx);
@@ -625,7 +625,6 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
// This can't overflow because the largest shiftable types are 128-bit,
// which fits in `u8`, the smallest possible `unsigned_ty`.
- // (And `from_uint` will `bug!` if that's ever no longer true.)
let lhs_bits = Operand::const_from_scalar(
self.tcx,
unsigned_ty,
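
The rewritten comment in the hunk above argues that an `IntToInt` cast of a signed shift amount to its unsigned counterpart can never hide a negative value: the smallest negative input maps to at least 128, which already overflows every supported shift width (the new `assert!(lhs_size.bits() <= 128)` pins that down). A standalone sketch of that reasoning in plain Rust, using `i8`/`u8` for illustration rather than the compiler's own types:

    fn shift_in_range(rhs: i8, bits: u8) -> bool {
        // Same trick as the MIR-building code: reinterpret the signed shift
        // amount as unsigned and do a single `rhs < bits` comparison.
        (rhs as u8) < bits
    }

    fn main() {
        assert!(shift_in_range(7, 8));        // in range for an 8-bit value
        assert!(!shift_in_range(8, 8));       // too large
        assert!(!shift_in_range(-1, 8));      // -1 as u8 == 255, rejected
        assert!(!shift_in_range(i8::MIN, 8)); // -128 as u8 == 128, rejected
    }
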
diff --git a/compiler/rustc_mir_build/src/build/matches/mod.rs b/compiler/rustc_mir_build/src/build/matches/mod.rs
index 304870274..487b1f44b 100644
--- a/compiler/rustc_mir_build/src/build/matches/mod.rs
+++ b/compiler/rustc_mir_build/src/build/matches/mod.rs
@@ -90,6 +90,11 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
let local_scope = this.local_scope();
let (success_block, failure_block) =
this.in_if_then_scope(local_scope, expr_span, |this| {
+ // Help out coverage instrumentation by injecting a dummy statement with
+ // the original condition's span (including `!`). This fixes #115468.
+ if this.tcx.sess.instrument_coverage() {
+ this.cfg.push_coverage_span_marker(block, this.source_info(expr_span));
+ }
this.then_else_break(
block,
&this.thir[arg],
@@ -212,7 +217,7 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
let scrutinee_place =
unpack!(block = self.lower_scrutinee(block, scrutinee, scrutinee_span,));
- let mut arm_candidates = self.create_match_candidates(&scrutinee_place, &arms);
+ let mut arm_candidates = self.create_match_candidates(&scrutinee_place, arms);
let match_has_guard = arm_candidates.iter().any(|(_, candidate)| candidate.has_guard);
let mut candidates =
@@ -424,7 +429,7 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
this.source_scope = source_scope;
}
- this.expr_into_dest(destination, arm_block, &&this.thir[arm.body])
+ this.expr_into_dest(destination, arm_block, &this.thir[arm.body])
})
})
.collect();
@@ -505,7 +510,7 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
let binding_end = self.bind_and_guard_matched_candidate(
leaf_candidate,
parent_bindings,
- &fake_borrow_temps,
+ fake_borrow_temps,
scrutinee_span,
arm_match_scope,
schedule_drops,
@@ -613,7 +618,7 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
_ => {
let place_builder =
unpack!(block = self.lower_scrutinee(block, initializer, initializer.span));
- self.place_into_pattern(block, &irrefutable_pat, place_builder, true)
+ self.place_into_pattern(block, irrefutable_pat, place_builder, true)
}
}
}
@@ -625,7 +630,7 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
initializer: PlaceBuilder<'tcx>,
set_match_place: bool,
) -> BlockAnd<()> {
- let mut candidate = Candidate::new(initializer.clone(), &irrefutable_pat, false, self);
+ let mut candidate = Candidate::new(initializer.clone(), irrefutable_pat, false, self);
let fake_borrow_temps = self.lower_match_tree(
block,
irrefutable_pat.span,
@@ -700,7 +705,7 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
opt_match_place: Option<(Option<&Place<'tcx>>, Span)>,
) -> Option<SourceScope> {
self.visit_primary_bindings(
- &pattern,
+ pattern,
UserTypeProjections::none(),
&mut |this, mutability, name, mode, var, span, ty, user_ty| {
if visibility_scope.is_none() {
@@ -827,6 +832,7 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
PatKind::Constant { .. }
| PatKind::Range { .. }
| PatKind::Wild
+ | PatKind::Never
| PatKind::Error(_) => {}
PatKind::Deref { ref subpattern } => {
@@ -1693,59 +1699,51 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
debug!("tested_candidates: {}", total_candidate_count - candidates.len());
debug!("untested_candidates: {}", candidates.len());
- // HACK(matthewjasper) This is a closure so that we can let the test
- // create its blocks before the rest of the match. This currently
- // improves the speed of llvm when optimizing long string literal
- // matches
- let make_target_blocks = move |this: &mut Self| -> Vec<BasicBlock> {
- // The block that we should branch to if none of the
- // `target_candidates` match. This is either the block where we
- // start matching the untested candidates if there are any,
- // otherwise it's the `otherwise_block`.
- let remainder_start = &mut None;
- let remainder_start =
- if candidates.is_empty() { &mut *otherwise_block } else { remainder_start };
-
- // For each outcome of test, process the candidates that still
- // apply. Collect a list of blocks where control flow will
- // branch if one of the `target_candidate` sets is not
- // exhaustive.
- let target_blocks: Vec<_> = target_candidates
- .into_iter()
- .map(|mut candidates| {
- if !candidates.is_empty() {
- let candidate_start = this.cfg.start_new_block();
- this.match_candidates(
- span,
- scrutinee_span,
- candidate_start,
- remainder_start,
- &mut *candidates,
- fake_borrows,
- );
- candidate_start
- } else {
- *remainder_start.get_or_insert_with(|| this.cfg.start_new_block())
- }
- })
- .collect();
-
- if !candidates.is_empty() {
- let remainder_start = remainder_start.unwrap_or_else(|| this.cfg.start_new_block());
- this.match_candidates(
- span,
- scrutinee_span,
- remainder_start,
- otherwise_block,
- candidates,
- fake_borrows,
- );
- };
+ // The block that we should branch to if none of the
+ // `target_candidates` match. This is either the block where we
+ // start matching the untested candidates if there are any,
+ // otherwise it's the `otherwise_block`.
+ let remainder_start = &mut None;
+ let remainder_start =
+ if candidates.is_empty() { &mut *otherwise_block } else { remainder_start };
+
+ // For each outcome of test, process the candidates that still
+ // apply. Collect a list of blocks where control flow will
+ // branch if one of the `target_candidate` sets is not
+ // exhaustive.
+ let target_blocks: Vec<_> = target_candidates
+ .into_iter()
+ .map(|mut candidates| {
+ if !candidates.is_empty() {
+ let candidate_start = self.cfg.start_new_block();
+ self.match_candidates(
+ span,
+ scrutinee_span,
+ candidate_start,
+ remainder_start,
+ &mut *candidates,
+ fake_borrows,
+ );
+ candidate_start
+ } else {
+ *remainder_start.get_or_insert_with(|| self.cfg.start_new_block())
+ }
+ })
+ .collect();
- target_blocks
- };
+ if !candidates.is_empty() {
+ let remainder_start = remainder_start.unwrap_or_else(|| self.cfg.start_new_block());
+ self.match_candidates(
+ span,
+ scrutinee_span,
+ remainder_start,
+ otherwise_block,
+ candidates,
+ fake_borrows,
+ );
+ }
- self.perform_test(span, scrutinee_span, block, &match_place, &test, make_target_blocks);
+ self.perform_test(span, scrutinee_span, block, &match_place, &test, target_blocks);
}
/// Determine the fake borrows that are needed from a set of places that
@@ -1843,7 +1841,7 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
let expr_span = expr.span;
let expr_place_builder = unpack!(block = self.lower_scrutinee(block, expr, expr_span));
let wildcard = Pat::wildcard_from_ty(pat.ty);
- let mut guard_candidate = Candidate::new(expr_place_builder.clone(), &pat, false, self);
+ let mut guard_candidate = Candidate::new(expr_place_builder.clone(), pat, false, self);
let mut otherwise_candidate =
Candidate::new(expr_place_builder.clone(), &wildcard, false, self);
let fake_borrow_temps = self.lower_match_tree(
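
The coverage span marker added near the top of this file exists so that `-C instrument-coverage` can attribute a region to the whole condition of an `if !...` expression, which otherwise leaves nothing in MIR once the negation is lowered away (issue #115468). A small runnable illustration of the shape of source code affected; nothing here is compiler-internal:

    fn describe(n: u32) -> &'static str {
        // Under `-C instrument-coverage`, the condition span below (including
        // the `!`) now gets a dummy coverage statement during MIR building.
        if !(n < 10) {
            return "big";
        }
        "small"
    }

    fn main() {
        assert_eq!(describe(3), "small");
        assert_eq!(describe(30), "big");
    }
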
diff --git a/compiler/rustc_mir_build/src/build/matches/simplify.rs b/compiler/rustc_mir_build/src/build/matches/simplify.rs
index 6a40c8d84..a7f6f4873 100644
--- a/compiler/rustc_mir_build/src/build/matches/simplify.rs
+++ b/compiler/rustc_mir_build/src/build/matches/simplify.rs
@@ -69,7 +69,7 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
{
existing_bindings.extend_from_slice(&new_bindings);
mem::swap(&mut candidate.bindings, &mut existing_bindings);
- candidate.subcandidates = self.create_or_subcandidates(candidate, &place, pats);
+ candidate.subcandidates = self.create_or_subcandidates(candidate, place, pats);
return true;
}
@@ -194,6 +194,12 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
Ok(())
}
+ PatKind::Never => {
+ // A never pattern acts like a load from the place.
+ // FIXME(never_patterns): load from the place
+ Ok(())
+ }
+
PatKind::Constant { .. } => {
// FIXME normalize patterns when possible
Err(match_pair)
diff --git a/compiler/rustc_mir_build/src/build/matches/test.rs b/compiler/rustc_mir_build/src/build/matches/test.rs
index bdd4f2011..53e5d70f9 100644
--- a/compiler/rustc_mir_build/src/build/matches/test.rs
+++ b/compiler/rustc_mir_build/src/build/matches/test.rs
@@ -75,6 +75,7 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
| PatKind::Array { .. }
| PatKind::Wild
| PatKind::Binding { .. }
+ | PatKind::Never
| PatKind::Leaf { .. }
| PatKind::Deref { .. }
| PatKind::Error(_) => self.error_simplifiable(match_pair),
@@ -107,6 +108,7 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
PatKind::Slice { .. }
| PatKind::Array { .. }
| PatKind::Wild
+ | PatKind::Never
| PatKind::Or { .. }
| PatKind::Binding { .. }
| PatKind::AscribeUserType { .. }
@@ -145,7 +147,7 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
}
}
- #[instrument(skip(self, make_target_blocks, place_builder), level = "debug")]
+ #[instrument(skip(self, target_blocks, place_builder), level = "debug")]
pub(super) fn perform_test(
&mut self,
match_start_span: Span,
@@ -153,7 +155,7 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
block: BasicBlock,
place_builder: &PlaceBuilder<'tcx>,
test: &Test<'tcx>,
- make_target_blocks: impl FnOnce(&mut Self) -> Vec<BasicBlock>,
+ target_blocks: Vec<BasicBlock>,
) {
let place = place_builder.to_place(self);
let place_ty = place.ty(&self.local_decls, self.tcx);
@@ -162,7 +164,6 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
let source_info = self.source_info(test.span);
match test.kind {
TestKind::Switch { adt_def, ref variants } => {
- let target_blocks = make_target_blocks(self);
// Variants is a BitVec of indexes into adt_def.variants.
let num_enum_variants = adt_def.variants().len();
debug_assert_eq!(target_blocks.len(), num_enum_variants + 1);
@@ -208,7 +209,6 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
}
TestKind::SwitchInt { switch_ty, ref options } => {
- let target_blocks = make_target_blocks(self);
let terminator = if *switch_ty.kind() == ty::Bool {
assert!(!options.is_empty() && options.len() <= 2);
let [first_bb, second_bb] = *target_blocks else {
@@ -238,6 +238,9 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
TestKind::Eq { value, ty } => {
let tcx = self.tcx;
+ let [success_block, fail_block] = *target_blocks else {
+ bug!("`TestKind::Eq` should have two target blocks")
+ };
if let ty::Adt(def, _) = ty.kind()
&& Some(def.did()) == tcx.lang_items().string()
{
@@ -278,38 +281,43 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
);
self.non_scalar_compare(
eq_block,
- make_target_blocks,
+ success_block,
+ fail_block,
source_info,
value,
ref_str,
ref_str_ty,
);
- return;
- }
- if !ty.is_scalar() {
+ } else if !ty.is_scalar() {
// Use `PartialEq::eq` instead of `BinOp::Eq`
// (the binop can only handle primitives)
self.non_scalar_compare(
block,
- make_target_blocks,
+ success_block,
+ fail_block,
source_info,
value,
place,
ty,
);
- } else if let [success, fail] = *make_target_blocks(self) {
+ } else {
assert_eq!(value.ty(), ty);
let expect = self.literal_operand(test.span, value);
let val = Operand::Copy(place);
- self.compare(block, success, fail, source_info, BinOp::Eq, expect, val);
- } else {
- bug!("`TestKind::Eq` should have two target blocks");
+ self.compare(
+ block,
+ success_block,
+ fail_block,
+ source_info,
+ BinOp::Eq,
+ expect,
+ val,
+ );
}
}
TestKind::Range(ref range) => {
let lower_bound_success = self.cfg.start_new_block();
- let target_blocks = make_target_blocks(self);
// Test `val` by computing `lo <= val && val <= hi`, using primitive comparisons.
// FIXME: skip useless comparison when the range is half-open.
@@ -339,8 +347,6 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
}
TestKind::Len { len, op } => {
- let target_blocks = make_target_blocks(self);
-
let usize_ty = self.tcx.types.usize;
let actual = self.temp(usize_ty, test.span);
@@ -404,7 +410,8 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
fn non_scalar_compare(
&mut self,
block: BasicBlock,
- make_target_blocks: impl FnOnce(&mut Self) -> Vec<BasicBlock>,
+ success_block: BasicBlock,
+ fail_block: BasicBlock,
source_info: SourceInfo,
value: Const<'tcx>,
mut val: Place<'tcx>,
@@ -494,7 +501,12 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
}
let eq_def_id = self.tcx.require_lang_item(LangItem::PartialEq, Some(source_info.span));
- let method = trait_method(self.tcx, eq_def_id, sym::eq, [ty, ty]);
+ let method = trait_method(
+ self.tcx,
+ eq_def_id,
+ sym::eq,
+ self.tcx.with_opt_host_effect_param(self.def_id, eq_def_id, [ty, ty]),
+ );
let bool_ty = self.tcx.types.bool;
let eq_result = self.temp(bool_ty, source_info.span);
@@ -524,9 +536,6 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
);
self.diverge_from(block);
- let [success_block, fail_block] = *make_target_blocks(self) else {
- bug!("`TestKind::Eq` should have two target blocks")
- };
// check the result
self.cfg.terminate(
eq_block,
@@ -736,7 +745,7 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
// These are all binary tests.
//
// FIXME(#29623) we can be more clever here
- let pattern_test = self.test(&match_pair);
+ let pattern_test = self.test(match_pair);
if pattern_test.kind == test.kind {
self.candidate_without_match_pair(match_pair_index, candidate);
Some(0)
diff --git a/compiler/rustc_mir_build/src/build/mod.rs b/compiler/rustc_mir_build/src/build/mod.rs
index 886d80545..dae83d4b4 100644
--- a/compiler/rustc_mir_build/src/build/mod.rs
+++ b/compiler/rustc_mir_build/src/build/mod.rs
@@ -451,7 +451,7 @@ fn construct_fn<'tcx>(
fn_sig: ty::FnSig<'tcx>,
) -> Body<'tcx> {
let span = tcx.def_span(fn_def);
- let fn_id = tcx.hir().local_def_id_to_hir_id(fn_def);
+ let fn_id = tcx.local_def_id_to_hir_id(fn_def);
let coroutine_kind = tcx.coroutine_kind(fn_def);
// The representation of thir for `-Zunpretty=thir-tree` relies on
@@ -569,10 +569,10 @@ fn construct_const<'a, 'tcx>(
expr: ExprId,
const_ty: Ty<'tcx>,
) -> Body<'tcx> {
- let hir_id = tcx.hir().local_def_id_to_hir_id(def);
+ let hir_id = tcx.local_def_id_to_hir_id(def);
// Figure out what primary body this item has.
- let (span, const_ty_span) = match tcx.hir().get(hir_id) {
+ let (span, const_ty_span) = match tcx.hir_node(hir_id) {
Node::Item(hir::Item {
kind: hir::ItemKind::Static(ty, _, _) | hir::ItemKind::Const(ty, _, _),
span,
@@ -622,7 +622,7 @@ fn construct_const<'a, 'tcx>(
/// with type errors, but normal MIR construction can't handle that in general.
fn construct_error(tcx: TyCtxt<'_>, def_id: LocalDefId, guar: ErrorGuaranteed) -> Body<'_> {
let span = tcx.def_span(def_id);
- let hir_id = tcx.hir().local_def_id_to_hir_id(def_id);
+ let hir_id = tcx.local_def_id_to_hir_id(def_id);
let coroutine_kind = tcx.coroutine_kind(def_id);
let (inputs, output, yield_ty) = match tcx.def_kind(def_id) {
@@ -638,9 +638,21 @@ fn construct_error(tcx: TyCtxt<'_>, def_id: LocalDefId, guar: ErrorGuaranteed) -
);
(sig.inputs().to_vec(), sig.output(), None)
}
+ DefKind::Closure if coroutine_kind.is_some() => {
+ let coroutine_ty = tcx.type_of(def_id).instantiate_identity();
+ let ty::Coroutine(_, args, _) = coroutine_ty.kind() else {
+ bug!("expected type of coroutine-like closure to be a coroutine")
+ };
+ let args = args.as_coroutine();
+ let yield_ty = args.yield_ty();
+ let return_ty = args.return_ty();
+ (vec![coroutine_ty, args.resume_ty()], return_ty, Some(yield_ty))
+ }
DefKind::Closure => {
let closure_ty = tcx.type_of(def_id).instantiate_identity();
- let ty::Closure(_, args) = closure_ty.kind() else { bug!() };
+ let ty::Closure(_, args) = closure_ty.kind() else {
+ bug!("expected type of closure to be a closure")
+ };
let args = args.as_closure();
let sig = tcx.liberate_late_bound_regions(def_id.to_def_id(), args.sig());
let self_ty = match args.kind() {
@@ -650,14 +662,6 @@ fn construct_error(tcx: TyCtxt<'_>, def_id: LocalDefId, guar: ErrorGuaranteed) -
};
([self_ty].into_iter().chain(sig.inputs().to_vec()).collect(), sig.output(), None)
}
- DefKind::Coroutine => {
- let coroutine_ty = tcx.type_of(def_id).instantiate_identity();
- let ty::Coroutine(_, args, _) = coroutine_ty.kind() else { bug!() };
- let args = args.as_coroutine();
- let yield_ty = args.yield_ty();
- let return_ty = args.return_ty();
- (vec![coroutine_ty, args.resume_ty()], return_ty, Some(yield_ty))
- }
dk => bug!("{:?} is not a body: {:?}", def_id, dk),
};
@@ -938,12 +942,12 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
scope = self.declare_bindings(
scope,
expr.span,
- &pat,
+ pat,
None,
Some((Some(&place), span)),
);
let place_builder = PlaceBuilder::from(local);
- unpack!(block = self.place_into_pattern(block, &pat, place_builder, false));
+ unpack!(block = self.place_into_pattern(block, pat, place_builder, false));
}
}
self.source_scope = original_source_scope;
@@ -954,7 +958,7 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
self.source_scope = source_scope;
}
- self.expr_into_dest(Place::return_place(), block, &expr)
+ self.expr_into_dest(Place::return_place(), block, expr)
}
fn set_correct_source_scope_for_arg(
diff --git a/compiler/rustc_mir_build/src/build/scope.rs b/compiler/rustc_mir_build/src/build/scope.rs
index b3d3863b5..25b79e6a5 100644
--- a/compiler/rustc_mir_build/src/build/scope.rs
+++ b/compiler/rustc_mir_build/src/build/scope.rs
@@ -90,7 +90,6 @@ use rustc_index::{IndexSlice, IndexVec};
use rustc_middle::middle::region;
use rustc_middle::mir::*;
use rustc_middle::thir::{Expr, LintLevel};
-use rustc_middle::ty::Ty;
use rustc_session::lint::Level;
use rustc_span::{Span, DUMMY_SP};
@@ -186,6 +185,7 @@ pub(crate) enum BreakableTarget {
}
rustc_index::newtype_index! {
+ #[orderable]
struct DropIdx {}
}
@@ -536,27 +536,6 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
(then_block, else_block)
}
- pub(crate) fn in_opt_scope<F, R>(
- &mut self,
- opt_scope: Option<(region::Scope, SourceInfo)>,
- f: F,
- ) -> BlockAnd<R>
- where
- F: FnOnce(&mut Builder<'a, 'tcx>) -> BlockAnd<R>,
- {
- debug!("in_opt_scope(opt_scope={:?})", opt_scope);
- if let Some(region_scope) = opt_scope {
- self.push_scope(region_scope);
- }
- let mut block;
- let rv = unpack!(block = f(self));
- if let Some(region_scope) = opt_scope {
- unpack!(block = self.pop_scope(region_scope, block));
- }
- debug!("in_scope: exiting opt_scope={:?} block={:?}", opt_scope, block);
- block.and(rv)
- }
-
/// Convenience wrapper that pushes a scope and then executes `f`
/// to build its contents, popping the scope afterwards.
#[instrument(skip(self, f), level = "debug")]
@@ -659,14 +638,15 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
(None, Some(_)) => {
panic!("`return`, `become` and `break` with value and must have a destination")
}
- (None, None) if self.tcx.sess.instrument_coverage() => {
- // Unlike `break` and `return`, which push an `Assign` statement to MIR, from which
- // a Coverage code region can be generated, `continue` needs no `Assign`; but
- // without one, the `InstrumentCoverage` MIR pass cannot generate a code region for
- // `continue`. Coverage will be missing unless we add a dummy `Assign` to MIR.
- self.add_dummy_assignment(span, block, source_info);
+ (None, None) => {
+ if self.tcx.sess.instrument_coverage() {
+ // Normally we wouldn't build any MIR in this case, but that makes it
+ // harder for coverage instrumentation to extract a relevant span for
+ // `continue` expressions. So here we inject a dummy statement with the
+ // desired span.
+ self.cfg.push_coverage_span_marker(block, source_info);
+ }
}
- (None, None) => {}
}
let region_scope = self.scopes.breakable_scopes[break_index].region_scope;
@@ -722,14 +702,6 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
self.cfg.terminate(block, source_info, TerminatorKind::UnwindResume);
}
- // Add a dummy `Assign` statement to the CFG, with the span for the source code's `continue`
- // statement.
- fn add_dummy_assignment(&mut self, span: Span, block: BasicBlock, source_info: SourceInfo) {
- let local_decl = LocalDecl::new(Ty::new_unit(self.tcx), span);
- let temp_place = Place::from(self.local_decls.push(local_decl));
- self.cfg.push_assign_unit(block, source_info, temp_place, self.tcx);
- }
-
fn leave_top_scope(&mut self, block: BasicBlock) -> BasicBlock {
// If we are emitting a `drop` statement, we need to have the cached
// diverge cleanup pads ready in case that drop panics.
@@ -995,7 +967,7 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
}
if scope.region_scope == region_scope {
- let region_scope_span = region_scope.span(self.tcx, &self.region_scope_tree);
+ let region_scope_span = region_scope.span(self.tcx, self.region_scope_tree);
// Attribute scope exit drops to scope's closing brace.
let scope_end = self.tcx.sess.source_map().end_point(region_scope_span);
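
scope.rs now reuses the same `push_coverage_span_marker` helper for `continue`, replacing the old dummy unit assignment (`add_dummy_assignment`). A minimal runnable example of the kind of `continue` that gets its own coverage span when compiled with `-C instrument-coverage`:

    fn sum_of_evens(limit: u32) -> u32 {
        let mut total = 0;
        for n in 0..limit {
            if n % 2 != 0 {
                // Previously this `continue` produced no MIR statement at all;
                // it is now marked by a dedicated coverage span statement.
                continue;
            }
            total += n;
        }
        total
    }

    fn main() {
        assert_eq!(sum_of_evens(10), 20); // 0 + 2 + 4 + 6 + 8
    }
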
diff --git a/compiler/rustc_mir_build/src/check_unsafety.rs b/compiler/rustc_mir_build/src/check_unsafety.rs
index fcb563607..62190848d 100644
--- a/compiler/rustc_mir_build/src/check_unsafety.rs
+++ b/compiler/rustc_mir_build/src/check_unsafety.rs
@@ -1,7 +1,10 @@
+use std::borrow::Cow;
+
use crate::build::ExprCategory;
use crate::errors::*;
-use rustc_middle::thir::visit::{self, Visitor};
+use rustc_middle::thir::visit::Visitor;
+use rustc_errors::DiagnosticArgValue;
use rustc_hir as hir;
use rustc_middle::mir::BorrowKind;
use rustc_middle::thir::*;
@@ -138,7 +141,7 @@ impl<'tcx> UnsafetyVisitor<'_, 'tcx> {
// Runs all other queries that depend on THIR.
self.tcx.ensure_with_value().mir_built(def);
let inner_thir = &inner_thir.steal();
- let hir_context = self.tcx.hir().local_def_id_to_hir_id(def);
+ let hir_context = self.tcx.local_def_id_to_hir_id(def);
let safety_context = mem::replace(&mut self.safety_context, SafetyContext::Safe);
let mut inner_visitor = UnsafetyVisitor {
thir: inner_thir,
@@ -200,7 +203,7 @@ impl<'a, 'tcx> Visitor<'a, 'tcx> for LayoutConstrainedPlaceVisitor<'a, 'tcx> {
impl<'a, 'tcx> Visitor<'a, 'tcx> for UnsafetyVisitor<'a, 'tcx> {
fn thir(&self) -> &'a Thir<'tcx> {
- &self.thir
+ self.thir
}
fn visit_block(&mut self, block: &Block) {
@@ -247,8 +250,9 @@ impl<'a, 'tcx> Visitor<'a, 'tcx> for UnsafetyVisitor<'a, 'tcx> {
self.requires_unsafe(pat.span, AccessToUnionField);
return; // we can return here since this already requires unsafe
}
- // wildcard doesn't take anything
+ // wildcard/never don't take anything
PatKind::Wild |
+ PatKind::Never |
// these just wrap other patterns
PatKind::Or { .. } |
PatKind::InlineConstant { .. } |
@@ -392,15 +396,29 @@ impl<'a, 'tcx> Visitor<'a, 'tcx> for UnsafetyVisitor<'a, 'tcx> {
// the call requires `unsafe`. Don't check this on wasm
// targets, though. For more information on wasm see the
// is_like_wasm check in hir_analysis/src/collect.rs
+ let callee_features = &self.tcx.codegen_fn_attrs(func_did).target_features;
if !self.tcx.sess.target.options.is_like_wasm
- && !self
- .tcx
- .codegen_fn_attrs(func_did)
- .target_features
+ && !callee_features
.iter()
.all(|feature| self.body_target_features.contains(feature))
{
- self.requires_unsafe(expr.span, CallToFunctionWith(func_did));
+ let missing: Vec<_> = callee_features
+ .iter()
+ .copied()
+ .filter(|feature| !self.body_target_features.contains(feature))
+ .collect();
+ let build_enabled = self
+ .tcx
+ .sess
+ .target_features
+ .iter()
+ .copied()
+ .filter(|feature| missing.contains(feature))
+ .collect();
+ self.requires_unsafe(
+ expr.span,
+ CallToFunctionWith { function: func_did, missing, build_enabled },
+ );
}
}
}
@@ -452,7 +470,7 @@ impl<'a, 'tcx> Visitor<'a, 'tcx> for UnsafetyVisitor<'a, 'tcx> {
if let Some((assigned_ty, assignment_span)) = self.assignment_info {
if assigned_ty.needs_drop(self.tcx, self.param_env) {
// This would be unsafe, but should be outright impossible since we reject such unions.
- self.tcx.sess.delay_span_bug(assignment_span, format!("union fields that need dropping should be impossible: {assigned_ty}"));
+ self.tcx.sess.span_delayed_bug(assignment_span, format!("union fields that need dropping should be impossible: {assigned_ty}"));
}
} else {
self.requires_unsafe(expr.span, AccessToUnionField);
@@ -526,7 +544,7 @@ struct UnusedUnsafeWarning {
enclosing_unsafe: Option<UnusedUnsafeEnclosing>,
}
-#[derive(Clone, Copy, PartialEq)]
+#[derive(Clone, PartialEq)]
enum UnsafeOpKind {
CallToUnsafeFunction(Option<DefId>),
UseOfInlineAssembly,
@@ -537,7 +555,15 @@ enum UnsafeOpKind {
AccessToUnionField,
MutationOfLayoutConstrainedField,
BorrowOfLayoutConstrainedField,
- CallToFunctionWith(DefId),
+ CallToFunctionWith {
+ function: DefId,
+ /// Target features enabled in callee's `#[target_feature]` but missing in
+ /// caller's `#[target_feature]`.
+ missing: Vec<Symbol>,
+ /// Target features in `missing` that are enabled at compile time
+ /// (e.g., with `-C target-feature`).
+ build_enabled: Vec<Symbol>,
+ },
}
use UnsafeOpKind::*;
@@ -552,7 +578,7 @@ impl UnsafeOpKind {
) {
let parent_id = tcx.hir().get_parent_item(hir_id);
let parent_owner = tcx.hir().owner(parent_id);
- let should_suggest = parent_owner.fn_sig().map_or(false, |sig| sig.header.is_unsafe());
+ let should_suggest = parent_owner.fn_sig().is_some_and(|sig| sig.header.is_unsafe());
let unsafe_not_inherited_note = if should_suggest {
suggest_unsafe_block.then(|| {
let body_span = tcx.hir().body(parent_owner.body_id().unwrap()).value.span;
@@ -658,13 +684,22 @@ impl UnsafeOpKind {
unsafe_not_inherited_note,
},
),
- CallToFunctionWith(did) => tcx.emit_spanned_lint(
+ CallToFunctionWith { function, missing, build_enabled } => tcx.emit_spanned_lint(
UNSAFE_OP_IN_UNSAFE_FN,
hir_id,
span,
UnsafeOpInUnsafeFnCallToFunctionWithRequiresUnsafe {
span,
- function: &with_no_trimmed_paths!(tcx.def_path_str(*did)),
+ function: &with_no_trimmed_paths!(tcx.def_path_str(*function)),
+ missing_target_features: DiagnosticArgValue::StrListSepByAnd(
+ missing.iter().map(|feature| Cow::from(feature.as_str())).collect(),
+ ),
+ missing_target_features_count: missing.len(),
+ note: if build_enabled.is_empty() { None } else { Some(()) },
+ build_target_features: DiagnosticArgValue::StrListSepByAnd(
+ build_enabled.iter().map(|feature| Cow::from(feature.as_str())).collect(),
+ ),
+ build_target_features_count: build_enabled.len(),
unsafe_not_inherited_note,
},
),
@@ -821,18 +856,38 @@ impl UnsafeOpKind {
unsafe_not_inherited_note,
});
}
- CallToFunctionWith(did) if unsafe_op_in_unsafe_fn_allowed => {
+ CallToFunctionWith { function, missing, build_enabled }
+ if unsafe_op_in_unsafe_fn_allowed =>
+ {
tcx.sess.emit_err(CallToFunctionWithRequiresUnsafeUnsafeOpInUnsafeFnAllowed {
span,
+ missing_target_features: DiagnosticArgValue::StrListSepByAnd(
+ missing.iter().map(|feature| Cow::from(feature.as_str())).collect(),
+ ),
+ missing_target_features_count: missing.len(),
+ note: if build_enabled.is_empty() { None } else { Some(()) },
+ build_target_features: DiagnosticArgValue::StrListSepByAnd(
+ build_enabled.iter().map(|feature| Cow::from(feature.as_str())).collect(),
+ ),
+ build_target_features_count: build_enabled.len(),
unsafe_not_inherited_note,
- function: &tcx.def_path_str(*did),
+ function: &tcx.def_path_str(*function),
});
}
- CallToFunctionWith(did) => {
+ CallToFunctionWith { function, missing, build_enabled } => {
tcx.sess.emit_err(CallToFunctionWithRequiresUnsafe {
span,
+ missing_target_features: DiagnosticArgValue::StrListSepByAnd(
+ missing.iter().map(|feature| Cow::from(feature.as_str())).collect(),
+ ),
+ missing_target_features_count: missing.len(),
+ note: if build_enabled.is_empty() { None } else { Some(()) },
+ build_target_features: DiagnosticArgValue::StrListSepByAnd(
+ build_enabled.iter().map(|feature| Cow::from(feature.as_str())).collect(),
+ ),
+ build_target_features_count: build_enabled.len(),
unsafe_not_inherited_note,
- function: &tcx.def_path_str(*did),
+ function: &tcx.def_path_str(*function),
});
}
}
@@ -859,7 +914,7 @@ pub fn thir_check_unsafety(tcx: TyCtxt<'_>, def: LocalDefId) {
return;
}
- let hir_id = tcx.hir().local_def_id_to_hir_id(def);
+ let hir_id = tcx.local_def_id_to_hir_id(def);
let safety_context = tcx.hir().fn_sig_by_hir_id(hir_id).map_or(SafetyContext::Safe, |fn_sig| {
if fn_sig.header.unsafety == hir::Unsafety::Unsafe {
SafetyContext::UnsafeFn
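
The check_unsafety.rs changes above replace the bare `CallToFunctionWith(DefId)` with a struct variant carrying the caller's `missing` target features and the `build_enabled` ones, which then populate the feature-list arguments of the E0133 diagnostics. As a reminder of the language rule those diagnostics describe, here is a small self-contained sketch in plain Rust (not rustc internals; the `dot` function and the x86_64 target are assumptions for illustration):

```rust
// Sketch of the rule behind the reworked diagnostic, assuming an x86_64 target.
// `dot` is compiled with AVX2 enabled, so a caller that does not itself enable
// `avx2` in `#[target_feature]` must wrap the call in `unsafe` after checking
// that the CPU supports the feature.

#[target_feature(enable = "avx2")]
unsafe fn dot(a: &[f32], b: &[f32]) -> f32 {
    a.iter().zip(b).map(|(x, y)| x * y).sum()
}

fn main() {
    let (a, b) = (vec![1.0f32; 8], vec![2.0f32; 8]);
    if is_x86_feature_detected!("avx2") {
        // SAFETY: we just verified that AVX2 is available at runtime.
        let r = unsafe { dot(&a, &b) };
        println!("{r}");
    }
}
```

Calling `dot` outside an `unsafe` block is what triggers the error, and the `missing` field added above is what lets the message name `avx2` explicitly.
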
diff --git a/compiler/rustc_mir_build/src/errors.rs b/compiler/rustc_mir_build/src/errors.rs
index 418f9bb9d..c66687330 100644
--- a/compiler/rustc_mir_build/src/errors.rs
+++ b/compiler/rustc_mir_build/src/errors.rs
@@ -1,14 +1,12 @@
-use crate::{
- fluent_generated as fluent,
- thir::pattern::{deconstruct_pat::WitnessPat, MatchCheckCtxt},
-};
+use crate::fluent_generated as fluent;
+use rustc_errors::DiagnosticArgValue;
use rustc_errors::{
- error_code, AddToDiagnostic, Applicability, Diagnostic, DiagnosticBuilder, ErrorGuaranteed,
- Handler, IntoDiagnostic, MultiSpan, SubdiagnosticMessage,
+ error_code, AddToDiagnostic, Applicability, DiagCtxt, Diagnostic, DiagnosticBuilder,
+ ErrorGuaranteed, IntoDiagnostic, MultiSpan, SubdiagnosticMessage,
};
use rustc_macros::{Diagnostic, LintDiagnostic, Subdiagnostic};
-use rustc_middle::thir::Pat;
use rustc_middle::ty::{self, Ty};
+use rustc_pattern_analysis::{errors::Uncovered, rustc::RustcMatchCheckCtxt};
use rustc_span::symbol::Symbol;
use rustc_span::Span;
@@ -124,11 +122,17 @@ pub struct UnsafeOpInUnsafeFnBorrowOfLayoutConstrainedFieldRequiresUnsafe {
#[derive(LintDiagnostic)]
#[diag(mir_build_unsafe_op_in_unsafe_fn_call_to_fn_with_requires_unsafe)]
-#[note]
+#[help]
pub struct UnsafeOpInUnsafeFnCallToFunctionWithRequiresUnsafe<'a> {
#[label]
pub span: Span,
pub function: &'a str,
+ pub missing_target_features: DiagnosticArgValue<'a>,
+ pub missing_target_features_count: usize,
+ #[note]
+ pub note: Option<()>,
+ pub build_target_features: DiagnosticArgValue<'a>,
+ pub build_target_features_count: usize,
#[subdiagnostic]
pub unsafe_not_inherited_note: Option<UnsafeNotInheritedLintNote>,
}
@@ -369,24 +373,36 @@ pub struct BorrowOfLayoutConstrainedFieldRequiresUnsafeUnsafeOpInUnsafeFnAllowed
#[derive(Diagnostic)]
#[diag(mir_build_call_to_fn_with_requires_unsafe, code = "E0133")]
-#[note]
+#[help]
pub struct CallToFunctionWithRequiresUnsafe<'a> {
#[primary_span]
#[label]
pub span: Span,
pub function: &'a str,
+ pub missing_target_features: DiagnosticArgValue<'a>,
+ pub missing_target_features_count: usize,
+ #[note]
+ pub note: Option<()>,
+ pub build_target_features: DiagnosticArgValue<'a>,
+ pub build_target_features_count: usize,
#[subdiagnostic]
pub unsafe_not_inherited_note: Option<UnsafeNotInheritedNote>,
}
#[derive(Diagnostic)]
#[diag(mir_build_call_to_fn_with_requires_unsafe_unsafe_op_in_unsafe_fn_allowed, code = "E0133")]
-#[note]
+#[help]
pub struct CallToFunctionWithRequiresUnsafeUnsafeOpInUnsafeFnAllowed<'a> {
#[primary_span]
#[label]
pub span: Span,
pub function: &'a str,
+ pub missing_target_features: DiagnosticArgValue<'a>,
+ pub missing_target_features_count: usize,
+ #[note]
+ pub note: Option<()>,
+ pub build_target_features: DiagnosticArgValue<'a>,
+ pub build_target_features_count: usize,
#[subdiagnostic]
pub unsafe_not_inherited_note: Option<UnsafeNotInheritedNote>,
}
@@ -438,15 +454,15 @@ pub enum UnusedUnsafeEnclosing {
}
pub(crate) struct NonExhaustivePatternsTypeNotEmpty<'p, 'tcx, 'm> {
- pub cx: &'m MatchCheckCtxt<'p, 'tcx>,
+ pub cx: &'m RustcMatchCheckCtxt<'p, 'tcx>,
pub expr_span: Span,
pub span: Span,
pub ty: Ty<'tcx>,
}
impl<'a> IntoDiagnostic<'a> for NonExhaustivePatternsTypeNotEmpty<'_, '_, '_> {
- fn into_diagnostic(self, handler: &'a Handler) -> DiagnosticBuilder<'_, ErrorGuaranteed> {
- let mut diag = handler.struct_span_err_with_code(
+ fn into_diagnostic(self, dcx: &'a DiagCtxt) -> DiagnosticBuilder<'_, ErrorGuaranteed> {
+ let mut diag = dcx.struct_span_err_with_code(
self.span,
fluent::mir_build_non_exhaustive_patterns_type_not_empty,
error_code!(E0004),
@@ -793,94 +809,6 @@ pub struct NonPartialEqMatch<'tcx> {
pub non_peq_ty: Ty<'tcx>,
}
-#[derive(LintDiagnostic)]
-#[diag(mir_build_overlapping_range_endpoints)]
-#[note]
-pub struct OverlappingRangeEndpoints<'tcx> {
- #[label(mir_build_range)]
- pub range: Span,
- #[subdiagnostic]
- pub overlap: Vec<Overlap<'tcx>>,
-}
-
-pub struct Overlap<'tcx> {
- pub span: Span,
- pub range: Pat<'tcx>,
-}
-
-impl<'tcx> AddToDiagnostic for Overlap<'tcx> {
- fn add_to_diagnostic_with<F>(self, diag: &mut Diagnostic, _: F)
- where
- F: Fn(&mut Diagnostic, SubdiagnosticMessage) -> SubdiagnosticMessage,
- {
- let Overlap { span, range } = self;
-
- // FIXME(mejrs) unfortunately `#[derive(LintDiagnostic)]`
- // does not support `#[subdiagnostic(eager)]`...
- let message = format!("this range overlaps on `{range}`...");
- diag.span_label(span, message);
- }
-}
-
-#[derive(LintDiagnostic)]
-#[diag(mir_build_non_exhaustive_omitted_pattern)]
-#[help]
-#[note]
-pub(crate) struct NonExhaustiveOmittedPattern<'tcx> {
- pub scrut_ty: Ty<'tcx>,
- #[subdiagnostic]
- pub uncovered: Uncovered<'tcx>,
-}
-
-#[derive(LintDiagnostic)]
-#[diag(mir_build_non_exhaustive_omitted_pattern_lint_on_arm)]
-#[help]
-pub(crate) struct NonExhaustiveOmittedPatternLintOnArm {
- #[label]
- pub lint_span: Span,
- #[suggestion(code = "#[{lint_level}({lint_name})]\n", applicability = "maybe-incorrect")]
- pub suggest_lint_on_match: Option<Span>,
- pub lint_level: &'static str,
- pub lint_name: &'static str,
-}
-
-#[derive(Subdiagnostic)]
-#[label(mir_build_uncovered)]
-pub(crate) struct Uncovered<'tcx> {
- #[primary_span]
- span: Span,
- count: usize,
- witness_1: Pat<'tcx>,
- witness_2: Pat<'tcx>,
- witness_3: Pat<'tcx>,
- remainder: usize,
-}
-
-impl<'tcx> Uncovered<'tcx> {
- pub fn new<'p>(
- span: Span,
- cx: &MatchCheckCtxt<'p, 'tcx>,
- witnesses: Vec<WitnessPat<'tcx>>,
- ) -> Self {
- let witness_1 = witnesses.get(0).unwrap().to_diagnostic_pat(cx);
- Self {
- span,
- count: witnesses.len(),
- // Substitute dummy values if witnesses is smaller than 3. These will never be read.
- witness_2: witnesses
- .get(1)
- .map(|w| w.to_diagnostic_pat(cx))
- .unwrap_or_else(|| witness_1.clone()),
- witness_3: witnesses
- .get(2)
- .map(|w| w.to_diagnostic_pat(cx))
- .unwrap_or_else(|| witness_1.clone()),
- witness_1,
- remainder: witnesses.len().saturating_sub(3),
- }
- }
-}
-
#[derive(Diagnostic)]
#[diag(mir_build_pattern_not_covered, code = "E0005")]
pub(crate) struct PatternNotCovered<'s, 'tcx> {
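
The new `missing_target_features` and `build_target_features` fields above are passed as `DiagnosticArgValue::StrListSepByAnd` together with a count, i.e. the message receives the feature names as a list to be rendered as an and-separated enumeration, with the count driving pluralization. A rough standalone sketch of that kind of rendering (`sep_by_and` is a made-up helper, not the rustc_errors implementation):

```rust
// Rough illustration of rendering a list "separated by and", as the
// `StrListSepByAnd` argument name suggests; the exact formatting produced by
// rustc_errors/Fluent may differ.
fn sep_by_and(items: &[&str]) -> String {
    match items {
        [] => String::new(),
        [only] => (*only).to_string(),
        [init @ .., last] => format!("{} and {}", init.join(", "), last),
    }
}

fn main() {
    assert_eq!(sep_by_and(&["avx2"]), "avx2");
    assert_eq!(sep_by_and(&["avx", "avx2", "fma"]), "avx, avx2 and fma");
}
```
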
diff --git a/compiler/rustc_mir_build/src/lib.rs b/compiler/rustc_mir_build/src/lib.rs
index 745c3046d..a776e917d 100644
--- a/compiler/rustc_mir_build/src/lib.rs
+++ b/compiler/rustc_mir_build/src/lib.rs
@@ -1,7 +1,6 @@
//! Construction of MIR from HIR.
//!
//! This crate also contains the match exhaustiveness and usefulness checking.
-#![allow(rustc::potential_query_instability)]
#![feature(assert_matches)]
#![feature(associated_type_bounds)]
#![feature(box_patterns)]
@@ -24,10 +23,7 @@ mod thir;
use rustc_middle::query::Providers;
-use rustc_errors::{DiagnosticMessage, SubdiagnosticMessage};
-use rustc_fluent_macro::fluent_messages;
-
-fluent_messages! { "../messages.ftl" }
+rustc_fluent_macro::fluent_messages! { "../messages.ftl" }
pub fn provide(providers: &mut Providers) {
providers.check_match = thir::pattern::check_match;
diff --git a/compiler/rustc_mir_build/src/lints.rs b/compiler/rustc_mir_build/src/lints.rs
index acf4d6bc2..167b65328 100644
--- a/compiler/rustc_mir_build/src/lints.rs
+++ b/compiler/rustc_mir_build/src/lints.rs
@@ -52,7 +52,7 @@ fn check_recursion<'tcx>(
vis.reachable_recursive_calls.sort();
let sp = tcx.def_span(def_id);
- let hir_id = tcx.hir().local_def_id_to_hir_id(def_id);
+ let hir_id = tcx.local_def_id_to_hir_id(def_id);
tcx.emit_spanned_lint(
UNCONDITIONAL_RECURSION,
hir_id,
diff --git a/compiler/rustc_mir_build/src/thir/constant.rs b/compiler/rustc_mir_build/src/thir/constant.rs
index fbb74650f..240aa10ac 100644
--- a/compiler/rustc_mir_build/src/thir/constant.rs
+++ b/compiler/rustc_mir_build/src/thir/constant.rs
@@ -16,7 +16,7 @@ pub(crate) fn lit_to_const<'tcx>(
let width = tcx
.layout_of(param_ty)
.map_err(|_| {
- LitToConstError::Reported(tcx.sess.delay_span_bug(
+ LitToConstError::Reported(tcx.sess.span_delayed_bug(
DUMMY_SP,
format!("couldn't compute width of literal: {:?}", lit_input.lit),
))
@@ -62,7 +62,7 @@ pub(crate) fn lit_to_const<'tcx>(
(ast::LitKind::Float(n, _), ty::Float(fty)) => {
let bits = parse_float_into_scalar(*n, *fty, neg)
.ok_or_else(|| {
- LitToConstError::Reported(tcx.sess.delay_span_bug(
+ LitToConstError::Reported(tcx.sess.span_delayed_bug(
DUMMY_SP,
format!("couldn't parse float literal: {:?}", lit_input.lit),
))
@@ -73,7 +73,7 @@ pub(crate) fn lit_to_const<'tcx>(
(ast::LitKind::Char(c), ty::Char) => ty::ValTree::from_scalar_int((*c).into()),
(ast::LitKind::Err, _) => {
return Err(LitToConstError::Reported(
- tcx.sess.delay_span_bug(DUMMY_SP, "encountered LitKind::Err during mir build"),
+ tcx.sess.span_delayed_bug(DUMMY_SP, "encountered LitKind::Err during mir build"),
));
}
_ => return Err(LitToConstError::TypeError),
diff --git a/compiler/rustc_mir_build/src/thir/cx/block.rs b/compiler/rustc_mir_build/src/thir/cx/block.rs
index a46ad6423..1e93e126b 100644
--- a/compiler/rustc_mir_build/src/thir/cx/block.rs
+++ b/compiler/rustc_mir_build/src/thir/cx/block.rs
@@ -13,15 +13,12 @@ impl<'tcx> Cx<'tcx> {
// We have to eagerly lower the "spine" of the statements
// in order to get the lexical scoping correctly.
let stmts = self.mirror_stmts(block.hir_id.local_id, block.stmts);
- let opt_destruction_scope =
- self.region_scope_tree.opt_destruction_scope(block.hir_id.local_id);
let block = Block {
targeted_by_break: block.targeted_by_break,
region_scope: region::Scope {
id: block.hir_id.local_id,
data: region::ScopeData::Node,
},
- opt_destruction_scope,
span: block.span,
stmts,
expr: block.expr.map(|expr| self.mirror_expr(expr)),
@@ -49,9 +46,8 @@ impl<'tcx> Cx<'tcx> {
.enumerate()
.filter_map(|(index, stmt)| {
let hir_id = stmt.hir_id;
- let opt_dxn_ext = self.region_scope_tree.opt_destruction_scope(hir_id.local_id);
match stmt.kind {
- hir::StmtKind::Expr(ref expr) | hir::StmtKind::Semi(ref expr) => {
+ hir::StmtKind::Expr(expr) | hir::StmtKind::Semi(expr) => {
let stmt = Stmt {
kind: StmtKind::Expr {
scope: region::Scope {
@@ -60,7 +56,6 @@ impl<'tcx> Cx<'tcx> {
},
expr: self.mirror_expr(expr),
},
- opt_destruction_scope: opt_dxn_ext,
};
Some(self.thir.stmts.push(stmt))
}
@@ -68,7 +63,7 @@ impl<'tcx> Cx<'tcx> {
// ignore for purposes of the MIR
None
}
- hir::StmtKind::Local(ref local) => {
+ hir::StmtKind::Local(local) => {
let remainder_scope = region::Scope {
id: block_id,
data: region::ScopeData::Remainder(region::FirstStatementIndex::new(
@@ -122,7 +117,6 @@ impl<'tcx> Cx<'tcx> {
lint_level: LintLevel::Explicit(local.hir_id),
span,
},
- opt_destruction_scope: opt_dxn_ext,
};
Some(self.thir.stmts.push(stmt))
}
diff --git a/compiler/rustc_mir_build/src/thir/cx/expr.rs b/compiler/rustc_mir_build/src/thir/cx/expr.rs
index dfd39b512..ab8846382 100644
--- a/compiler/rustc_mir_build/src/thir/cx/expr.rs
+++ b/compiler/rustc_mir_build/src/thir/cx/expr.rs
@@ -54,7 +54,7 @@ impl<'tcx> Cx<'tcx> {
trace!(?expr.ty, "after adjustments");
- // Next, wrap this up in the expr's scope.
+ // Finally, wrap this up in the expr's scope.
expr = Expr {
temp_lifetime: expr.temp_lifetime,
ty: expr.ty,
@@ -66,22 +66,6 @@ impl<'tcx> Cx<'tcx> {
},
};
- // Finally, create a destruction scope, if any.
- if let Some(region_scope) =
- self.region_scope_tree.opt_destruction_scope(hir_expr.hir_id.local_id)
- {
- expr = Expr {
- temp_lifetime: expr.temp_lifetime,
- ty: expr.ty,
- span: hir_expr.span,
- kind: ExprKind::Scope {
- region_scope,
- value: self.thir.exprs.push(expr),
- lint_level: LintLevel::Inherited,
- },
- };
- }
-
// OK, all done!
self.thir.exprs.push(expr)
}
@@ -192,10 +176,10 @@ impl<'tcx> Cx<'tcx> {
cast: PointerCoercion::ArrayToPointer,
}
} else if let hir::ExprKind::Path(ref qpath) = source.kind
- && let res = self.typeck_results().qpath_res(qpath, source.hir_id)
- && let ty = self.typeck_results().node_type(source.hir_id)
- && let ty::Adt(adt_def, args) = ty.kind()
- && let Res::Def(DefKind::Ctor(CtorOf::Variant, CtorKind::Const), variant_ctor_id) = res
+ && let res = self.typeck_results().qpath_res(qpath, source.hir_id)
+ && let ty = self.typeck_results().node_type(source.hir_id)
+ && let ty::Adt(adt_def, args) = ty.kind()
+ && let Res::Def(DefKind::Ctor(CtorOf::Variant, CtorKind::Const), variant_ctor_id) = res
{
// Check whether this is casting an enum variant discriminant.
// To prevent cycles, we refer to the discriminant initializer,
@@ -261,7 +245,7 @@ impl<'tcx> Cx<'tcx> {
let kind = match expr.kind {
// Here comes the interesting stuff:
- hir::ExprKind::MethodCall(segment, receiver, ref args, fn_span) => {
+ hir::ExprKind::MethodCall(segment, receiver, args, fn_span) => {
// Rewrite a.b(c) into UFCS form like Trait::b(a, c)
let expr = self.method_callee(expr, segment.ident.span, None);
info!("Using method span: {:?}", expr.span);
@@ -278,7 +262,7 @@ impl<'tcx> Cx<'tcx> {
}
}
- hir::ExprKind::Call(ref fun, ref args) => {
+ hir::ExprKind::Call(fun, ref args) => {
if self.typeck_results().is_method_call(expr) {
// The callee is something implementing Fn, FnMut, or FnOnce.
// Find the actual method implementation being called and
@@ -346,7 +330,7 @@ impl<'tcx> Cx<'tcx> {
&& let Some(adt_def) = expr_ty.ty_adt_def()
{
match qpath {
- hir::QPath::Resolved(_, ref path) => match path.res {
+ hir::QPath::Resolved(_, path) => match path.res {
Res::Def(DefKind::Ctor(_, CtorKind::Fn), ctor_id) => {
Some((adt_def, adt_def.variant_index_with_ctor_id(ctor_id)))
}
@@ -407,21 +391,21 @@ impl<'tcx> Cx<'tcx> {
}
}
- hir::ExprKind::AddrOf(hir::BorrowKind::Ref, mutbl, ref arg) => {
+ hir::ExprKind::AddrOf(hir::BorrowKind::Ref, mutbl, arg) => {
ExprKind::Borrow { borrow_kind: mutbl.to_borrow_kind(), arg: self.mirror_expr(arg) }
}
- hir::ExprKind::AddrOf(hir::BorrowKind::Raw, mutability, ref arg) => {
+ hir::ExprKind::AddrOf(hir::BorrowKind::Raw, mutability, arg) => {
ExprKind::AddressOf { mutability, arg: self.mirror_expr(arg) }
}
- hir::ExprKind::Block(ref blk, _) => ExprKind::Block { block: self.mirror_block(blk) },
+ hir::ExprKind::Block(blk, _) => ExprKind::Block { block: self.mirror_block(blk) },
- hir::ExprKind::Assign(ref lhs, ref rhs, _) => {
+ hir::ExprKind::Assign(lhs, rhs, _) => {
ExprKind::Assign { lhs: self.mirror_expr(lhs), rhs: self.mirror_expr(rhs) }
}
- hir::ExprKind::AssignOp(op, ref lhs, ref rhs) => {
+ hir::ExprKind::AssignOp(op, lhs, rhs) => {
if self.typeck_results().is_method_call(expr) {
let lhs = self.mirror_expr(lhs);
let rhs = self.mirror_expr(rhs);
@@ -435,9 +419,9 @@ impl<'tcx> Cx<'tcx> {
}
}
- hir::ExprKind::Lit(ref lit) => ExprKind::Literal { lit, neg: false },
+ hir::ExprKind::Lit(lit) => ExprKind::Literal { lit, neg: false },
- hir::ExprKind::Binary(op, ref lhs, ref rhs) => {
+ hir::ExprKind::Binary(op, lhs, rhs) => {
if self.typeck_results().is_method_call(expr) {
let lhs = self.mirror_expr(lhs);
let rhs = self.mirror_expr(rhs);
@@ -466,7 +450,7 @@ impl<'tcx> Cx<'tcx> {
}
}
- hir::ExprKind::Index(ref lhs, ref index, brackets_span) => {
+ hir::ExprKind::Index(lhs, index, brackets_span) => {
if self.typeck_results().is_method_call(expr) {
let lhs = self.mirror_expr(lhs);
let index = self.mirror_expr(index);
@@ -482,7 +466,7 @@ impl<'tcx> Cx<'tcx> {
}
}
- hir::ExprKind::Unary(hir::UnOp::Deref, ref arg) => {
+ hir::ExprKind::Unary(hir::UnOp::Deref, arg) => {
if self.typeck_results().is_method_call(expr) {
let arg = self.mirror_expr(arg);
self.overloaded_place(expr, expr_ty, None, Box::new([arg]), expr.span)
@@ -491,7 +475,7 @@ impl<'tcx> Cx<'tcx> {
}
}
- hir::ExprKind::Unary(hir::UnOp::Not, ref arg) => {
+ hir::ExprKind::Unary(hir::UnOp::Not, arg) => {
if self.typeck_results().is_method_call(expr) {
let arg = self.mirror_expr(arg);
self.overloaded_operator(expr, Box::new([arg]))
@@ -500,18 +484,18 @@ impl<'tcx> Cx<'tcx> {
}
}
- hir::ExprKind::Unary(hir::UnOp::Neg, ref arg) => {
+ hir::ExprKind::Unary(hir::UnOp::Neg, arg) => {
if self.typeck_results().is_method_call(expr) {
let arg = self.mirror_expr(arg);
self.overloaded_operator(expr, Box::new([arg]))
- } else if let hir::ExprKind::Lit(ref lit) = arg.kind {
+ } else if let hir::ExprKind::Lit(lit) = arg.kind {
ExprKind::Literal { lit, neg: true }
} else {
ExprKind::Unary { op: UnOp::Neg, arg: self.mirror_expr(arg) }
}
}
- hir::ExprKind::Struct(ref qpath, ref fields, ref base) => match expr_ty.kind() {
+ hir::ExprKind::Struct(qpath, fields, ref base) => match expr_ty.kind() {
ty::Adt(adt, args) => match adt.adt_kind() {
AdtKind::Struct | AdtKind::Union => {
let user_provided_types = self.typeck_results().user_provided_types();
@@ -614,13 +598,13 @@ impl<'tcx> Cx<'tcx> {
self.convert_path_expr(expr, res)
}
- hir::ExprKind::InlineAsm(ref asm) => ExprKind::InlineAsm(Box::new(InlineAsmExpr {
+ hir::ExprKind::InlineAsm(asm) => ExprKind::InlineAsm(Box::new(InlineAsmExpr {
template: asm.template,
operands: asm
.operands
.iter()
.map(|(op, _op_sp)| match *op {
- hir::InlineAsmOperand::In { reg, ref expr } => {
+ hir::InlineAsmOperand::In { reg, expr } => {
InlineAsmOperand::In { reg, expr: self.mirror_expr(expr) }
}
hir::InlineAsmOperand::Out { reg, late, ref expr } => {
@@ -630,30 +614,35 @@ impl<'tcx> Cx<'tcx> {
expr: expr.map(|expr| self.mirror_expr(expr)),
}
}
- hir::InlineAsmOperand::InOut { reg, late, ref expr } => {
+ hir::InlineAsmOperand::InOut { reg, late, expr } => {
InlineAsmOperand::InOut { reg, late, expr: self.mirror_expr(expr) }
}
- hir::InlineAsmOperand::SplitInOut {
- reg,
- late,
- ref in_expr,
- ref out_expr,
- } => InlineAsmOperand::SplitInOut {
- reg,
- late,
- in_expr: self.mirror_expr(in_expr),
- out_expr: out_expr.map(|expr| self.mirror_expr(expr)),
- },
+ hir::InlineAsmOperand::SplitInOut { reg, late, in_expr, ref out_expr } => {
+ InlineAsmOperand::SplitInOut {
+ reg,
+ late,
+ in_expr: self.mirror_expr(in_expr),
+ out_expr: out_expr.map(|expr| self.mirror_expr(expr)),
+ }
+ }
hir::InlineAsmOperand::Const { ref anon_const } => {
- let value =
- mir::Const::from_anon_const(tcx, anon_const.def_id, self.param_env);
+ let value = mir::Const::identity_unevaluated(
+ tcx,
+ anon_const.def_id.to_def_id(),
+ )
+ .instantiate_identity()
+ .normalize(tcx, self.param_env);
let span = tcx.def_span(anon_const.def_id);
InlineAsmOperand::Const { value, span }
}
hir::InlineAsmOperand::SymFn { ref anon_const } => {
- let value =
- mir::Const::from_anon_const(tcx, anon_const.def_id, self.param_env);
+ let value = mir::Const::identity_unevaluated(
+ tcx,
+ anon_const.def_id.to_def_id(),
+ )
+ .instantiate_identity()
+ .normalize(tcx, self.param_env);
let span = tcx.def_span(anon_const.def_id);
InlineAsmOperand::SymFn { value, span }
@@ -686,7 +675,7 @@ impl<'tcx> Cx<'tcx> {
ExprKind::ConstBlock { did, args }
}
// Now comes the rote stuff:
- hir::ExprKind::Repeat(ref v, _) => {
+ hir::ExprKind::Repeat(v, _) => {
let ty = self.typeck_results().expr_ty(expr);
let ty::Array(_, count) = ty.kind() else {
span_bug!(expr.span, "unexpected repeat expr ty: {:?}", ty);
@@ -722,12 +711,12 @@ impl<'tcx> Cx<'tcx> {
then: self.mirror_expr(then),
else_opt: else_opt.map(|el| self.mirror_expr(el)),
},
- hir::ExprKind::Match(ref discr, ref arms, _) => ExprKind::Match {
+ hir::ExprKind::Match(discr, arms, _) => ExprKind::Match {
scrutinee: self.mirror_expr(discr),
scrutinee_hir_id: discr.hir_id,
arms: arms.iter().map(|a| self.convert_arm(a)).collect(),
},
- hir::ExprKind::Loop(ref body, ..) => {
+ hir::ExprKind::Loop(body, ..) => {
let block_ty = self.typeck_results().node_type(body.hir_id);
let temp_lifetime = self
.rvalue_scopes
@@ -741,12 +730,12 @@ impl<'tcx> Cx<'tcx> {
});
ExprKind::Loop { body }
}
- hir::ExprKind::Field(ref source, ..) => ExprKind::Field {
+ hir::ExprKind::Field(source, ..) => ExprKind::Field {
lhs: self.mirror_expr(source),
variant_index: FIRST_VARIANT,
name: self.typeck_results.field_index(expr.hir_id),
},
- hir::ExprKind::Cast(ref source, ref cast_ty) => {
+ hir::ExprKind::Cast(source, cast_ty) => {
// Check for a user-given type annotation on this `cast`
let user_provided_types = self.typeck_results.user_provided_types();
let user_ty = user_provided_types.get(cast_ty.hir_id);
@@ -756,7 +745,7 @@ impl<'tcx> Cx<'tcx> {
expr, cast_ty.hir_id, user_ty,
);
- let cast = self.mirror_expr_cast(*source, temp_lifetime, expr.span);
+ let cast = self.mirror_expr_cast(source, temp_lifetime, expr.span);
if let Some(user_ty) = user_ty {
// NOTE: Creating a new Expr and wrapping a Cast inside of it may be
@@ -777,7 +766,7 @@ impl<'tcx> Cx<'tcx> {
cast
}
}
- hir::ExprKind::Type(ref source, ref ty) => {
+ hir::ExprKind::Type(source, ty) => {
let user_provided_types = self.typeck_results.user_provided_types();
let user_ty = user_provided_types.get(ty.hir_id).copied().map(Box::new);
debug!("make_mirror_unadjusted: (type) user_ty={:?}", user_ty);
@@ -788,16 +777,12 @@ impl<'tcx> Cx<'tcx> {
ExprKind::ValueTypeAscription { source: mirrored, user_ty }
}
}
- hir::ExprKind::DropTemps(ref source) => {
- ExprKind::Use { source: self.mirror_expr(source) }
- }
- hir::ExprKind::Array(ref fields) => {
- ExprKind::Array { fields: self.mirror_exprs(fields) }
- }
- hir::ExprKind::Tup(ref fields) => ExprKind::Tuple { fields: self.mirror_exprs(fields) },
+ hir::ExprKind::DropTemps(source) => ExprKind::Use { source: self.mirror_expr(source) },
+ hir::ExprKind::Array(fields) => ExprKind::Array { fields: self.mirror_exprs(fields) },
+ hir::ExprKind::Tup(fields) => ExprKind::Tuple { fields: self.mirror_exprs(fields) },
- hir::ExprKind::Yield(ref v, _) => ExprKind::Yield { value: self.mirror_expr(v) },
- hir::ExprKind::Err(_) => unreachable!(),
+ hir::ExprKind::Yield(v, _) => ExprKind::Yield { value: self.mirror_expr(v) },
+ hir::ExprKind::Err(_) => unreachable!("cannot lower a `hir::ExprKind::Err` to THIR"),
};
Expr { temp_lifetime, ty: expr_ty, span: expr.span, kind }
@@ -870,10 +855,10 @@ impl<'tcx> Cx<'tcx> {
fn convert_arm(&mut self, arm: &'tcx hir::Arm<'tcx>) -> ArmId {
let arm = Arm {
- pattern: self.pattern_from_hir(&arm.pat),
+ pattern: self.pattern_from_hir(arm.pat),
guard: arm.guard.as_ref().map(|g| match g {
- hir::Guard::If(ref e) => Guard::If(self.mirror_expr(e)),
- hir::Guard::IfLet(ref l) => {
+ hir::Guard::If(e) => Guard::If(self.mirror_expr(e)),
+ hir::Guard::IfLet(l) => {
Guard::IfLet(self.pattern_from_hir(l.pat), self.mirror_expr(l.init))
}
}),
@@ -898,7 +883,7 @@ impl<'tcx> Cx<'tcx> {
}
Res::Def(DefKind::ConstParam, def_id) => {
- let hir_id = self.tcx.hir().local_def_id_to_hir_id(def_id.expect_local());
+ let hir_id = self.tcx.local_def_id_to_hir_id(def_id.expect_local());
let generics = self.tcx.generics_of(hir_id.owner);
let index = generics.param_def_id_to_index[&def_id];
let name = self.tcx.hir().name(hir_id);
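
Most of the expr.rs hunks above only drop `ref` from patterns: the matched HIR nodes are already behind references, so match ergonomics bind their fields by reference without it. A minimal standalone illustration of that rule (the `Expr` enum here is invented and unrelated to THIR's `ExprKind`):

```rust
// With a reference scrutinee, default binding modes make `ref` redundant.
enum Expr {
    Lit(i32),
    Neg(Box<Expr>),
}

fn describe(e: &Expr) -> String {
    match e {
        // Before match ergonomics one would write `Expr::Lit(ref n)`;
        // here `n` is already bound as `&i32`.
        Expr::Lit(n) => format!("literal {n}"),
        Expr::Neg(inner) => format!("negation of ({})", describe(inner)),
    }
}

fn main() {
    println!("{}", describe(&Expr::Neg(Box::new(Expr::Lit(3)))));
}
```
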
diff --git a/compiler/rustc_mir_build/src/thir/cx/mod.rs b/compiler/rustc_mir_build/src/thir/cx/mod.rs
index b6adb383f..fa3dd2afa 100644
--- a/compiler/rustc_mir_build/src/thir/cx/mod.rs
+++ b/compiler/rustc_mir_build/src/thir/cx/mod.rs
@@ -27,17 +27,17 @@ pub(crate) fn thir_body(
if let Some(reported) = cx.typeck_results.tainted_by_errors {
return Err(reported);
}
- let expr = cx.mirror_expr(&body.value);
+ let expr = cx.mirror_expr(body.value);
- let owner_id = hir.local_def_id_to_hir_id(owner_def);
- if let Some(ref fn_decl) = hir.fn_decl_by_hir_id(owner_id) {
+ let owner_id = tcx.local_def_id_to_hir_id(owner_def);
+ if let Some(fn_decl) = hir.fn_decl_by_hir_id(owner_id) {
let closure_env_param = cx.closure_env_param(owner_def, owner_id);
let explicit_params = cx.explicit_params(owner_id, fn_decl, body);
cx.thir.params = closure_env_param.into_iter().chain(explicit_params).collect();
// The resume argument may be missing, in that case we need to provide it here.
// It will always be `()` in this case.
- if tcx.def_kind(owner_def) == DefKind::Coroutine && body.params.is_empty() {
+ if tcx.is_coroutine(owner_def.to_def_id()) && body.params.is_empty() {
cx.thir.params.push(Param {
ty: Ty::new_unit(tcx),
pat: None,
@@ -72,7 +72,7 @@ impl<'tcx> Cx<'tcx> {
fn new(tcx: TyCtxt<'tcx>, def: LocalDefId) -> Cx<'tcx> {
let typeck_results = tcx.typeck(def);
let hir = tcx.hir();
- let hir_id = hir.local_def_id_to_hir_id(def);
+ let hir_id = tcx.local_def_id_to_hir_id(def);
let body_type = if hir.body_owner_kind(def).is_fn_or_closure() {
// fetch the fully liberated fn signature (that is, all bound
@@ -110,7 +110,7 @@ impl<'tcx> Cx<'tcx> {
#[instrument(level = "debug", skip(self))]
fn pattern_from_hir(&mut self, p: &hir::Pat<'_>) -> Box<Pat<'tcx>> {
- let p = match self.tcx.hir().get(p.hir_id) {
+ let p = match self.tcx.hir_node(p.hir_id) {
Node::Pat(p) => p,
node => bug!("pattern became {:?}", node),
};
@@ -119,6 +119,17 @@ impl<'tcx> Cx<'tcx> {
fn closure_env_param(&self, owner_def: LocalDefId, owner_id: HirId) -> Option<Param<'tcx>> {
match self.tcx.def_kind(owner_def) {
+ DefKind::Closure if self.tcx.is_coroutine(owner_def.to_def_id()) => {
+ let coroutine_ty = self.typeck_results.node_type(owner_id);
+ let coroutine_param = Param {
+ ty: coroutine_ty,
+ pat: None,
+ ty_span: None,
+ self_kind: None,
+ hir_id: None,
+ };
+ Some(coroutine_param)
+ }
DefKind::Closure => {
let closure_ty = self.typeck_results.node_type(owner_id);
@@ -132,10 +143,10 @@ impl<'tcx> Cx<'tcx> {
var: ty::BoundVar::from_usize(bound_vars.len() - 1),
kind: ty::BrEnv,
};
- let env_region = ty::Region::new_late_bound(self.tcx, ty::INNERMOST, br);
+ let env_region = ty::Region::new_bound(self.tcx, ty::INNERMOST, br);
let closure_env_ty =
self.tcx.closure_env_ty(closure_def_id, closure_args, env_region).unwrap();
- let liberated_closure_env_ty = self.tcx.erase_late_bound_regions(
+ let liberated_closure_env_ty = self.tcx.instantiate_bound_regions_with_erased(
ty::Binder::bind_with_vars(closure_env_ty, bound_vars),
);
let env_param = Param {
@@ -148,17 +159,6 @@ impl<'tcx> Cx<'tcx> {
Some(env_param)
}
- DefKind::Coroutine => {
- let coroutine_ty = self.typeck_results.node_type(owner_id);
- let coroutine_param = Param {
- ty: coroutine_ty,
- pat: None,
- ty_span: None,
- self_kind: None,
- hir_id: None,
- };
- Some(coroutine_param)
- }
_ => None,
}
}
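
In cx/mod.rs the coroutine case of `closure_env_param` moves ahead of the general closure case, which still builds the environment parameter as the closure type behind a late-bound environment region (`BrEnv`) that is then erased. As a loose analogy only, and not the THIR construction itself: calling a closure hands it its captured environment as an implicit first argument, by shared reference, unique reference, or by value depending on the trait it implements.

```rust
// Loose analogy for the closure "env param": the environment is threaded into
// every call, with the borrow flavour determined by Fn / FnMut / FnOnce.
fn call_fn<F: Fn() -> i32>(f: &F) -> i32 {
    f()
}
fn call_fn_mut<F: FnMut() -> i32>(f: &mut F) -> i32 {
    f()
}
fn call_fn_once<F: FnOnce() -> i32>(f: F) -> i32 {
    f()
}

fn main() {
    let x = 1;
    let mut count = 0;
    let s = String::from("env");
    println!("{}", call_fn(&|| x + 1));
    println!("{}", call_fn_mut(&mut || { count += 1; count }));
    println!("{}", call_fn_once(move || s.len() as i32));
}
```
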
diff --git a/compiler/rustc_mir_build/src/thir/pattern/check_match.rs b/compiler/rustc_mir_build/src/thir/pattern/check_match.rs
index 8c3d09c19..c435f4023 100644
--- a/compiler/rustc_mir_build/src/thir/pattern/check_match.rs
+++ b/compiler/rustc_mir_build/src/thir/pattern/check_match.rs
@@ -1,20 +1,22 @@
-use super::deconstruct_pat::{Constructor, DeconstructedPat, WitnessPat};
-use super::usefulness::{
- compute_match_usefulness, MatchArm, MatchCheckCtxt, Reachability, UsefulnessReport,
+use rustc_pattern_analysis::errors::Uncovered;
+use rustc_pattern_analysis::rustc::{
+ Constructor, DeconstructedPat, RustcMatchCheckCtxt as MatchCheckCtxt, Usefulness,
+ UsefulnessReport, WitnessPat,
};
+use rustc_pattern_analysis::{analyze_match, MatchArm};
use crate::errors::*;
-use rustc_arena::TypedArena;
+use rustc_arena::{DroplessArena, TypedArena};
use rustc_ast::Mutability;
-use rustc_data_structures::fx::FxHashSet;
+use rustc_data_structures::fx::FxIndexSet;
use rustc_data_structures::stack::ensure_sufficient_stack;
use rustc_errors::{struct_span_err, Applicability, DiagnosticBuilder, ErrorGuaranteed, MultiSpan};
use rustc_hir as hir;
use rustc_hir::def::*;
use rustc_hir::def_id::LocalDefId;
use rustc_hir::HirId;
-use rustc_middle::thir::visit::{self, Visitor};
+use rustc_middle::thir::visit::Visitor;
use rustc_middle::thir::*;
use rustc_middle::ty::print::with_no_trimmed_paths;
use rustc_middle::ty::{self, AdtDef, Ty, TyCtxt};
@@ -29,20 +31,22 @@ pub(crate) fn check_match(tcx: TyCtxt<'_>, def_id: LocalDefId) -> Result<(), Err
let (thir, expr) = tcx.thir_body(def_id)?;
let thir = thir.borrow();
let pattern_arena = TypedArena::default();
+ let dropless_arena = DroplessArena::default();
let mut visitor = MatchVisitor {
tcx,
thir: &*thir,
param_env: tcx.param_env(def_id),
- lint_level: tcx.hir().local_def_id_to_hir_id(def_id),
+ lint_level: tcx.local_def_id_to_hir_id(def_id),
let_source: LetSource::None,
pattern_arena: &pattern_arena,
+ dropless_arena: &dropless_arena,
error: Ok(()),
};
visitor.visit_expr(&thir[expr]);
for param in thir.params.iter() {
if let Some(box ref pattern) = param.pat {
- visitor.check_binding_is_irrefutable(pattern, "function argument", None);
+ visitor.check_binding_is_irrefutable(pattern, "function argument", None, None);
}
}
visitor.error
@@ -80,6 +84,7 @@ struct MatchVisitor<'thir, 'p, 'tcx> {
lint_level: HirId,
let_source: LetSource,
pattern_arena: &'p TypedArena<DeconstructedPat<'p, 'tcx>>,
+ dropless_arena: &'p DroplessArena,
    /// Tracks if we encountered an error while checking this body. The first function to
    /// report it stores it here. Some functions return `Result` to allow callers to short-circuit
/// on error, but callers don't need to store it here again.
@@ -254,10 +259,11 @@ impl<'thir, 'p, 'tcx> MatchVisitor<'thir, 'p, 'tcx> {
self.with_lint_level(lint_level, |this| this.visit_land_rhs(&this.thir[value]))
}
ExprKind::Let { box ref pat, expr } => {
+ let expr = &self.thir()[expr];
self.with_let_source(LetSource::None, |this| {
- this.visit_expr(&this.thir()[expr]);
+ this.visit_expr(expr);
});
- Ok(Some((ex.span, self.is_let_irrefutable(pat)?)))
+ Ok(Some((ex.span, self.is_let_irrefutable(pat, Some(expr))?)))
}
_ => {
self.with_let_source(LetSource::None, |this| {
@@ -279,38 +285,123 @@ impl<'thir, 'p, 'tcx> MatchVisitor<'thir, 'p, 'tcx> {
} else {
// Check the pattern for some things unrelated to exhaustiveness.
let refutable = if cx.refutable { Refutable } else { Irrefutable };
- pat.walk_always(|pat| check_borrow_conflicts_in_at_patterns(self, pat));
- pat.walk_always(|pat| check_for_bindings_named_same_as_variants(self, pat, refutable));
- Ok(cx.pattern_arena.alloc(DeconstructedPat::from_pat(cx, pat)))
+ pat.walk_always(|pat| {
+ check_borrow_conflicts_in_at_patterns(self, pat);
+ check_for_bindings_named_same_as_variants(self, pat, refutable);
+ });
+ Ok(cx.pattern_arena.alloc(cx.lower_pat(pat)))
+ }
+ }
+
+ /// Inspects the match scrutinee expression to determine whether the place it evaluates to may
+ /// hold invalid data.
+ fn is_known_valid_scrutinee(&self, scrutinee: &Expr<'tcx>) -> bool {
+ use ExprKind::*;
+ match &scrutinee.kind {
+ // Both pointers and references can validly point to a place with invalid data.
+ Deref { .. } => false,
+            // Inherit validity of the parent place, unless the parent is a union.
+ Field { lhs, .. } => {
+ let lhs = &self.thir()[*lhs];
+ match lhs.ty.kind() {
+ ty::Adt(def, _) if def.is_union() => false,
+ _ => self.is_known_valid_scrutinee(lhs),
+ }
+ }
+ // Essentially a field access.
+ Index { lhs, .. } => {
+ let lhs = &self.thir()[*lhs];
+ self.is_known_valid_scrutinee(lhs)
+ }
+
+ // No-op.
+ Scope { value, .. } => self.is_known_valid_scrutinee(&self.thir()[*value]),
+
+ // Casts don't cause a load.
+ NeverToAny { source }
+ | Cast { source }
+ | Use { source }
+ | PointerCoercion { source, .. }
+ | PlaceTypeAscription { source, .. }
+ | ValueTypeAscription { source, .. } => {
+ self.is_known_valid_scrutinee(&self.thir()[*source])
+ }
+
+ // These diverge.
+ Become { .. } | Break { .. } | Continue { .. } | Return { .. } => true,
+
+ // These are statements that evaluate to `()`.
+ Assign { .. } | AssignOp { .. } | InlineAsm { .. } | Let { .. } => true,
+
+ // These evaluate to a value.
+ AddressOf { .. }
+ | Adt { .. }
+ | Array { .. }
+ | Binary { .. }
+ | Block { .. }
+ | Borrow { .. }
+ | Box { .. }
+ | Call { .. }
+ | Closure { .. }
+ | ConstBlock { .. }
+ | ConstParam { .. }
+ | If { .. }
+ | Literal { .. }
+ | LogicalOp { .. }
+ | Loop { .. }
+ | Match { .. }
+ | NamedConst { .. }
+ | NonHirLiteral { .. }
+ | OffsetOf { .. }
+ | Repeat { .. }
+ | StaticRef { .. }
+ | ThreadLocalRef { .. }
+ | Tuple { .. }
+ | Unary { .. }
+ | UpvarRef { .. }
+ | VarRef { .. }
+ | ZstLiteral { .. }
+ | Yield { .. } => true,
}
}
fn new_cx(
&self,
refutability: RefutableFlag,
- match_span: Option<Span>,
+ whole_match_span: Option<Span>,
+ scrutinee: Option<&Expr<'tcx>>,
+ scrut_span: Span,
) -> MatchCheckCtxt<'p, 'tcx> {
let refutable = match refutability {
Irrefutable => false,
Refutable => true,
};
+ // If we don't have a scrutinee we're either a function parameter or a `let x;`. Both cases
+ // require validity.
+ let known_valid_scrutinee =
+ scrutinee.map(|scrut| self.is_known_valid_scrutinee(scrut)).unwrap_or(true);
MatchCheckCtxt {
tcx: self.tcx,
param_env: self.param_env,
module: self.tcx.parent_module(self.lint_level).to_def_id(),
- pattern_arena: &self.pattern_arena,
- match_span,
+ pattern_arena: self.pattern_arena,
+ dropless_arena: self.dropless_arena,
+ match_lint_level: self.lint_level,
+ whole_match_span,
+ scrut_span,
refutable,
+ known_valid_scrutinee,
}
}
#[instrument(level = "trace", skip(self))]
fn check_let(&mut self, pat: &Pat<'tcx>, scrutinee: Option<ExprId>, span: Span) {
assert!(self.let_source != LetSource::None);
+ let scrut = scrutinee.map(|id| &self.thir[id]);
if let LetSource::PlainLet = self.let_source {
- self.check_binding_is_irrefutable(pat, "local binding", Some(span))
+ self.check_binding_is_irrefutable(pat, "local binding", scrut, Some(span))
} else {
- let Ok(refutability) = self.is_let_irrefutable(pat) else { return };
+ let Ok(refutability) = self.is_let_irrefutable(pat, scrut) else { return };
if matches!(refutability, Irrefutable) {
report_irrefutable_let_patterns(
self.tcx,
@@ -330,14 +421,16 @@ impl<'thir, 'p, 'tcx> MatchVisitor<'thir, 'p, 'tcx> {
source: hir::MatchSource,
expr_span: Span,
) {
- let cx = self.new_cx(Refutable, Some(expr_span));
+ let scrut = &self.thir[scrut];
+ let cx = self.new_cx(Refutable, Some(expr_span), Some(scrut), scrut.span);
let mut tarms = Vec::with_capacity(arms.len());
for &arm in arms {
let arm = &self.thir.arms[arm];
let got_error = self.with_lint_level(arm.lint_level, |this| {
let Ok(pat) = this.lower_pattern(&cx, &arm.pattern) else { return true };
- let arm = MatchArm { pat, hir_id: this.lint_level, has_guard: arm.guard.is_some() };
+ let arm =
+ MatchArm { pat, arm_data: this.lint_level, has_guard: arm.guard.is_some() };
tarms.push(arm);
false
});
@@ -346,9 +439,8 @@ impl<'thir, 'p, 'tcx> MatchVisitor<'thir, 'p, 'tcx> {
}
}
- let scrut = &self.thir[scrut];
let scrut_ty = scrut.ty;
- let report = compute_match_usefulness(&cx, &tarms, self.lint_level, scrut_ty, scrut.span);
+ let report = analyze_match(&cx, &tarms, scrut_ty);
match source {
// Don't report arm reachability of desugared `match $iter.into_iter() { iter => .. }`
@@ -372,7 +464,12 @@ impl<'thir, 'p, 'tcx> MatchVisitor<'thir, 'p, 'tcx> {
debug_assert_eq!(pat.span.desugaring_kind(), Some(DesugaringKind::ForLoop));
let PatKind::Variant { ref subpatterns, .. } = pat.kind else { bug!() };
let [pat_field] = &subpatterns[..] else { bug!() };
- self.check_binding_is_irrefutable(&pat_field.pattern, "`for` loop binding", None);
+ self.check_binding_is_irrefutable(
+ &pat_field.pattern,
+ "`for` loop binding",
+ None,
+ None,
+ );
} else {
self.error = Err(report_non_exhaustive_match(
&cx, self.thir, scrut_ty, scrut.span, witnesses, arms, expr_span,
@@ -452,16 +549,21 @@ impl<'thir, 'p, 'tcx> MatchVisitor<'thir, 'p, 'tcx> {
&mut self,
pat: &Pat<'tcx>,
refutability: RefutableFlag,
+ scrut: Option<&Expr<'tcx>>,
) -> Result<(MatchCheckCtxt<'p, 'tcx>, UsefulnessReport<'p, 'tcx>), ErrorGuaranteed> {
- let cx = self.new_cx(refutability, None);
+ let cx = self.new_cx(refutability, None, scrut, pat.span);
let pat = self.lower_pattern(&cx, pat)?;
- let arms = [MatchArm { pat, hir_id: self.lint_level, has_guard: false }];
- let report = compute_match_usefulness(&cx, &arms, self.lint_level, pat.ty(), pat.span());
+ let arms = [MatchArm { pat, arm_data: self.lint_level, has_guard: false }];
+ let report = analyze_match(&cx, &arms, pat.ty());
Ok((cx, report))
}
- fn is_let_irrefutable(&mut self, pat: &Pat<'tcx>) -> Result<RefutableFlag, ErrorGuaranteed> {
- let (cx, report) = self.analyze_binding(pat, Refutable)?;
+ fn is_let_irrefutable(
+ &mut self,
+ pat: &Pat<'tcx>,
+ scrut: Option<&Expr<'tcx>>,
+ ) -> Result<RefutableFlag, ErrorGuaranteed> {
+ let (cx, report) = self.analyze_binding(pat, Refutable, scrut)?;
// Report if the pattern is unreachable, which can only occur when the type is uninhabited.
// This also reports unreachable sub-patterns.
report_arm_reachability(&cx, &report);
@@ -471,10 +573,16 @@ impl<'thir, 'p, 'tcx> MatchVisitor<'thir, 'p, 'tcx> {
}
#[instrument(level = "trace", skip(self))]
- fn check_binding_is_irrefutable(&mut self, pat: &Pat<'tcx>, origin: &str, sp: Option<Span>) {
+ fn check_binding_is_irrefutable(
+ &mut self,
+ pat: &Pat<'tcx>,
+ origin: &str,
+ scrut: Option<&Expr<'tcx>>,
+ sp: Option<Span>,
+ ) {
let pattern_ty = pat.ty;
- let Ok((cx, report)) = self.analyze_binding(pat, Irrefutable) else { return };
+ let Ok((cx, report)) = self.analyze_binding(pat, Irrefutable, scrut) else { return };
let witnesses = report.non_exhaustiveness_witnesses;
if witnesses.is_empty() {
// The pattern is irrefutable.
@@ -744,34 +852,34 @@ fn report_arm_reachability<'p, 'tcx>(
);
};
- use Reachability::*;
let mut catchall = None;
for (arm, is_useful) in report.arm_usefulness.iter() {
match is_useful {
- Unreachable => report_unreachable_pattern(arm.pat.span(), arm.hir_id, catchall),
- Reachable(unreachables) if unreachables.is_empty() => {}
- // The arm is reachable, but contains unreachable subpatterns (from or-patterns).
- Reachable(unreachables) => {
- let mut unreachables = unreachables.clone();
+ Usefulness::Redundant => {
+ report_unreachable_pattern(*arm.pat.data(), arm.arm_data, catchall)
+ }
+ Usefulness::Useful(redundant_subpats) if redundant_subpats.is_empty() => {}
+ // The arm is reachable, but contains redundant subpatterns (from or-patterns).
+ Usefulness::Useful(redundant_subpats) => {
+ let mut redundant_subpats = redundant_subpats.clone();
// Emit lints in the order in which they occur in the file.
- unreachables.sort_unstable();
- for span in unreachables {
- report_unreachable_pattern(span, arm.hir_id, None);
+ redundant_subpats.sort_unstable_by_key(|pat| pat.data());
+ for pat in redundant_subpats {
+ report_unreachable_pattern(*pat.data(), arm.arm_data, None);
}
}
}
if !arm.has_guard && catchall.is_none() && pat_is_catchall(arm.pat) {
- catchall = Some(arm.pat.span());
+ catchall = Some(*arm.pat.data());
}
}
}
/// Checks for common cases of "catchall" patterns that may not be intended as such.
fn pat_is_catchall(pat: &DeconstructedPat<'_, '_>) -> bool {
- use Constructor::*;
match pat.ctor() {
- Wildcard => true,
- Single => pat.iter_fields().all(|pat| pat_is_catchall(pat)),
+ Constructor::Wildcard => true,
+ Constructor::Struct | Constructor::Ref => pat.iter_fields().all(|pat| pat_is_catchall(pat)),
_ => false,
}
}
@@ -782,7 +890,7 @@ fn report_non_exhaustive_match<'p, 'tcx>(
thir: &Thir<'tcx>,
scrut_ty: Ty<'tcx>,
sp: Span,
- witnesses: Vec<WitnessPat<'tcx>>,
+ witnesses: Vec<WitnessPat<'p, 'tcx>>,
arms: &[ArmId],
expr_span: Span,
) -> ErrorGuaranteed {
@@ -823,7 +931,7 @@ fn report_non_exhaustive_match<'p, 'tcx>(
pattern = if witnesses.len() < 4 {
witnesses
.iter()
- .map(|witness| witness.to_diagnostic_pat(cx).to_string())
+ .map(|witness| cx.hoist_witness_pat(witness).to_string())
.collect::<Vec<String>>()
.join(" | ")
} else {
@@ -845,9 +953,9 @@ fn report_non_exhaustive_match<'p, 'tcx>(
err.note(format!("the matched value is of type `{}`", scrut_ty));
if !is_empty_match {
- let mut non_exhaustive_tys = FxHashSet::default();
+ let mut non_exhaustive_tys = FxIndexSet::default();
// Look at the first witness.
- collect_non_exhaustive_tys(cx.tcx, &witnesses[0], &mut non_exhaustive_tys);
+ collect_non_exhaustive_tys(cx, &witnesses[0], &mut non_exhaustive_tys);
for ty in non_exhaustive_tys {
if ty.is_ptr_sized_integral() {
@@ -862,12 +970,6 @@ fn report_non_exhaustive_match<'p, 'tcx>(
exhaustively",
));
}
- if cx.tcx.sess.is_nightly_build() {
- err.help(format!(
- "add `#![feature(precise_pointer_size_matching)]` to the crate attributes to \
- enable precise `{ty}` matching",
- ));
- }
} else if ty == cx.tcx.types.str_ {
err.note("`&str` cannot be matched exhaustively, so a wildcard `_` is necessary");
} else if cx.is_foreign_non_exhaustive_enum(ty) {
@@ -985,16 +1087,16 @@ fn report_non_exhaustive_match<'p, 'tcx>(
fn joined_uncovered_patterns<'p, 'tcx>(
cx: &MatchCheckCtxt<'p, 'tcx>,
- witnesses: &[WitnessPat<'tcx>],
+ witnesses: &[WitnessPat<'p, 'tcx>],
) -> String {
const LIMIT: usize = 3;
- let pat_to_str = |pat: &WitnessPat<'tcx>| pat.to_diagnostic_pat(cx).to_string();
+ let pat_to_str = |pat: &WitnessPat<'p, 'tcx>| cx.hoist_witness_pat(pat).to_string();
match witnesses {
[] => bug!(),
- [witness] => format!("`{}`", witness.to_diagnostic_pat(cx)),
+ [witness] => format!("`{}`", cx.hoist_witness_pat(witness)),
[head @ .., tail] if head.len() < LIMIT => {
let head: Vec<_> = head.iter().map(pat_to_str).collect();
- format!("`{}` and `{}`", head.join("`, `"), tail.to_diagnostic_pat(cx))
+ format!("`{}` and `{}`", head.join("`, `"), cx.hoist_witness_pat(tail))
}
_ => {
let (head, tail) = witnesses.split_at(LIMIT);
@@ -1005,27 +1107,27 @@ fn joined_uncovered_patterns<'p, 'tcx>(
}
fn collect_non_exhaustive_tys<'tcx>(
- tcx: TyCtxt<'tcx>,
- pat: &WitnessPat<'tcx>,
- non_exhaustive_tys: &mut FxHashSet<Ty<'tcx>>,
+ cx: &MatchCheckCtxt<'_, 'tcx>,
+ pat: &WitnessPat<'_, 'tcx>,
+ non_exhaustive_tys: &mut FxIndexSet<Ty<'tcx>>,
) {
if matches!(pat.ctor(), Constructor::NonExhaustive) {
non_exhaustive_tys.insert(pat.ty());
}
if let Constructor::IntRange(range) = pat.ctor() {
- if range.is_beyond_boundaries(pat.ty(), tcx) {
+ if cx.is_range_beyond_boundaries(range, pat.ty()) {
// The range denotes the values before `isize::MIN` or the values after `usize::MAX`/`isize::MAX`.
non_exhaustive_tys.insert(pat.ty());
}
}
pat.iter_fields()
- .for_each(|field_pat| collect_non_exhaustive_tys(tcx, field_pat, non_exhaustive_tys))
+ .for_each(|field_pat| collect_non_exhaustive_tys(cx, field_pat, non_exhaustive_tys))
}
fn report_adt_defined_here<'tcx>(
tcx: TyCtxt<'tcx>,
ty: Ty<'tcx>,
- witnesses: &[WitnessPat<'tcx>],
+ witnesses: &[WitnessPat<'_, 'tcx>],
point_at_non_local_ty: bool,
) -> Option<AdtDefinedHere<'tcx>> {
let ty = ty.peel_refs();
@@ -1047,15 +1149,14 @@ fn report_adt_defined_here<'tcx>(
Some(AdtDefinedHere { adt_def_span, ty, variants })
}
-fn maybe_point_at_variant<'a, 'tcx: 'a>(
+fn maybe_point_at_variant<'a, 'p: 'a, 'tcx: 'p>(
tcx: TyCtxt<'tcx>,
def: AdtDef<'tcx>,
- patterns: impl Iterator<Item = &'a WitnessPat<'tcx>>,
+ patterns: impl Iterator<Item = &'a WitnessPat<'p, 'tcx>>,
) -> Vec<Span> {
- use Constructor::*;
let mut covered = vec![];
for pattern in patterns {
- if let Variant(variant_index) = pattern.ctor() {
+ if let Constructor::Variant(variant_index) = pattern.ctor() {
if let ty::Adt(this_def, _) = pattern.ty().kind()
&& this_def.did() != def.did()
{
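
The check_match.rs changes above switch arm-reachability reporting to `Usefulness::Redundant` / `Usefulness::Useful` from rustc_pattern_analysis, and `pat_is_catchall` now keys off the `Wildcard`, `Struct` and `Ref` constructors instead of `Wildcard`/`Single`. At the user level this is the analysis behind the `unreachable_patterns` lint; a tiny self-contained example of an arm it flags (the `classify` function is invented for illustration):

```rust
// The `(_, _)` arm is a catch-all, so the arm after it is reported as
// unreachable (redundant) by the usefulness analysis.
fn classify(pair: (bool, bool)) -> &'static str {
    match pair {
        (true, _) => "first is true",
        (_, _) => "anything else",
        (false, true) => "never reached", // warning: unreachable pattern
    }
}

fn main() {
    println!("{}", classify((false, true)));
}
```
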
diff --git a/compiler/rustc_mir_build/src/thir/pattern/const_to_pat.rs b/compiler/rustc_mir_build/src/thir/pattern/const_to_pat.rs
index 48a590f5d..a7f237721 100644
--- a/compiler/rustc_mir_build/src/thir/pattern/const_to_pat.rs
+++ b/compiler/rustc_mir_build/src/thir/pattern/const_to_pat.rs
@@ -200,7 +200,9 @@ impl<'tcx> ConstToPat<'tcx> {
// We errored. Signal that in the pattern, so that follow up errors can be silenced.
let kind = PatKind::Error(e);
return Box::new(Pat { span: self.span, ty: cv.ty(), kind });
- } else if let ty::Adt(..) = cv.ty().kind() && matches!(cv, mir::Const::Val(..)) {
+ } else if let ty::Adt(..) = cv.ty().kind()
+ && matches!(cv, mir::Const::Val(..))
+ {
// This branch is only entered when the current `cv` is `mir::Const::Val`.
// This is because `mir::Const::ty` has already been handled by `Self::recur`
// and the invalid types may be ignored.
@@ -256,18 +258,26 @@ impl<'tcx> ConstToPat<'tcx> {
#[instrument(level = "trace", skip(self), ret)]
fn type_has_partial_eq_impl(&self, ty: Ty<'tcx>) -> bool {
+ let tcx = self.tcx();
// double-check there even *is* a semantic `PartialEq` to dispatch to.
//
// (If there isn't, then we can safely issue a hard
// error, because that's never worked, due to compiler
// using `PartialEq::eq` in this scenario in the past.)
- let partial_eq_trait_id =
- self.tcx().require_lang_item(hir::LangItem::PartialEq, Some(self.span));
+ let partial_eq_trait_id = tcx.require_lang_item(hir::LangItem::PartialEq, Some(self.span));
let partial_eq_obligation = Obligation::new(
- self.tcx(),
+ tcx,
ObligationCause::dummy(),
self.param_env,
- ty::TraitRef::new(self.tcx(), partial_eq_trait_id, [ty, ty]),
+ ty::TraitRef::new(
+ tcx,
+ partial_eq_trait_id,
+ tcx.with_opt_host_effect_param(
+ tcx.hir().enclosing_body_owner(self.id),
+ partial_eq_trait_id,
+ [ty, ty],
+ ),
+ ),
);
// This *could* accept a type that isn't actually `PartialEq`, because region bounds get
@@ -482,8 +492,9 @@ impl<'tcx> ConstToPat<'tcx> {
PatKind::Constant { value: mir::Const::Ty(ty::Const::new_value(tcx, cv, ty)) }
}
ty::FnPtr(..) => {
- // Valtree construction would never succeed for these, so this is unreachable.
- unreachable!()
+ unreachable!(
+ "Valtree construction would never succeed for FnPtr, so this is unreachable."
+ )
}
_ => {
let err = InvalidPattern { span, non_sm_ty: ty };
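
The const_to_pat.rs hunk above only rebuilds the `PartialEq` obligation with the host-effect parameter; the user-facing rule it checks is unchanged: a constant can be used as a pattern only if its type's equality is the structural, derived one. A small sketch of that rule (the `Version` type is an assumption for illustration):

```rust
// Constants in patterns require a derived (structural) `PartialEq` on their
// type; replacing the derive with a hand-written impl would reject `V1` here.
#[derive(PartialEq, Eq)]
struct Version {
    major: u32,
    minor: u32,
}

const V1: Version = Version { major: 1, minor: 0 };

fn is_v1(v: &Version) -> bool {
    matches!(*v, V1)
}

fn main() {
    assert!(is_v1(&Version { major: 1, minor: 0 }));
}
```
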
diff --git a/compiler/rustc_mir_build/src/thir/pattern/deconstruct_pat.rs b/compiler/rustc_mir_build/src/thir/pattern/deconstruct_pat.rs
deleted file mode 100644
index 0c7c2c6f9..000000000
--- a/compiler/rustc_mir_build/src/thir/pattern/deconstruct_pat.rs
+++ /dev/null
@@ -1,1912 +0,0 @@
-//! [`super::usefulness`] explains most of what is happening in this file. As explained there,
-//! values and patterns are made from constructors applied to fields. This file defines a
-//! `Constructor` enum, a `Fields` struct, and various operations to manipulate them and convert
-//! them from/to patterns.
-//!
-//! There's one idea that is not detailed in [`super::usefulness`] because the details are not
-//! needed there: _constructor splitting_.
-//!
-//! # Constructor splitting
-//!
-//! The idea is as follows: given a constructor `c` and a matrix, we want to specialize in turn
-//! with all the value constructors that are covered by `c`, and compute usefulness for each.
-//! Instead of listing all those constructors (which is intractable), we group those value
-//! constructors together as much as possible. Example:
-//!
-//! ```compile_fail,E0004
-//! match (0, false) {
-//! (0 ..=100, true) => {} // `p_1`
-//! (50..=150, false) => {} // `p_2`
-//! (0 ..=200, _) => {} // `q`
-//! }
-//! ```
-//!
-//! The naive approach would try all numbers in the range `0..=200`. But we can be a lot more
-//! clever: `0` and `1` for example will match the exact same rows, and return equivalent
-//! witnesses. In fact all of `0..50` would. We can thus restrict our exploration to 4
-//! constructors: `0..50`, `50..=100`, `101..=150` and `151..=200`. That is enough and infinitely
-//! more tractable.
-//!
-//! We capture this idea in a function `split(p_1 ... p_n, c)` which returns a list of constructors
-//! `c'` covered by `c`. Given such a `c'`, we require that all value ctors `c''` covered by `c'`
-//! return an equivalent set of witnesses after specializing and computing usefulness.
-//! In the example above, witnesses for specializing by `c''` covered by `0..50` will only differ
-//! in their first element.
-//!
-//! We usually also ask that the `c'` together cover all of the original `c`. However, we allow
-//! skipping some constructors as long as it doesn't change whether the resulting list of witnesses
-//! is empty or not. We use this in the wildcard `_` case.
-//!
-//! Splitting is implemented in the [`Constructor::split`] function. We don't do splitting for
-//! or-patterns; instead we just try the alternatives one-by-one. For details on splitting
-//! wildcards, see [`Constructor::split`]; for integer ranges, see
-//! [`IntRange::split`]; for slices, see [`Slice::split`].
-
-use std::cell::Cell;
-use std::cmp::{self, max, min, Ordering};
-use std::fmt;
-use std::iter::once;
-
-use smallvec::{smallvec, SmallVec};
-
-use rustc_apfloat::ieee::{DoubleS, IeeeFloat, SingleS};
-use rustc_data_structures::captures::Captures;
-use rustc_data_structures::fx::FxHashSet;
-use rustc_hir::RangeEnd;
-use rustc_index::Idx;
-use rustc_middle::middle::stability::EvalResult;
-use rustc_middle::mir;
-use rustc_middle::mir::interpret::Scalar;
-use rustc_middle::thir::{FieldPat, Pat, PatKind, PatRange, PatRangeBoundary};
-use rustc_middle::ty::layout::IntegerExt;
-use rustc_middle::ty::{self, Ty, TyCtxt, VariantDef};
-use rustc_span::{Span, DUMMY_SP};
-use rustc_target::abi::{FieldIdx, Integer, VariantIdx, FIRST_VARIANT};
-
-use self::Constructor::*;
-use self::MaybeInfiniteInt::*;
-use self::SliceKind::*;
-
-use super::usefulness::{MatchCheckCtxt, PatCtxt};
-
-/// Recursively expand this pattern into its subpatterns. Only useful for or-patterns.
-fn expand_or_pat<'p, 'tcx>(pat: &'p Pat<'tcx>) -> Vec<&'p Pat<'tcx>> {
- fn expand<'p, 'tcx>(pat: &'p Pat<'tcx>, vec: &mut Vec<&'p Pat<'tcx>>) {
- if let PatKind::Or { pats } = &pat.kind {
- for pat in pats.iter() {
- expand(&pat, vec);
- }
- } else {
- vec.push(pat)
- }
- }
-
- let mut pats = Vec::new();
- expand(pat, &mut pats);
- pats
-}
-
-/// Whether we have seen a constructor in the column or not.
-#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord)]
-enum Presence {
- Unseen,
- Seen,
-}
-
-/// A possibly infinite integer. Values are encoded such that the ordering on `u128` matches the
-/// natural order on the original type. For example, `-128i8` is encoded as `0` and `127i8` as
-/// `255`. See `signed_bias` for details.
-#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord)]
-pub(crate) enum MaybeInfiniteInt {
- NegInfinity,
- /// Encoded value. DO NOT CONSTRUCT BY HAND; use `new_finite`.
- Finite(u128),
- /// The integer after `u128::MAX`. We need it to represent `x..=u128::MAX` as an exclusive range.
- JustAfterMax,
- PosInfinity,
-}
-
-impl MaybeInfiniteInt {
- // The return value of `signed_bias` should be XORed with a value to encode/decode it.
- fn signed_bias(tcx: TyCtxt<'_>, ty: Ty<'_>) -> u128 {
- match *ty.kind() {
- ty::Int(ity) => {
- let bits = Integer::from_int_ty(&tcx, ity).size().bits() as u128;
- 1u128 << (bits - 1)
- }
- _ => 0,
- }
- }
-
- fn new_finite(tcx: TyCtxt<'_>, ty: Ty<'_>, bits: u128) -> Self {
- let bias = Self::signed_bias(tcx, ty);
- // Perform a shift if the underlying types are signed, which makes the interval arithmetic
- // type-independent.
- let x = bits ^ bias;
- Finite(x)
- }
- fn from_pat_range_bdy<'tcx>(
- bdy: PatRangeBoundary<'tcx>,
- ty: Ty<'tcx>,
- tcx: TyCtxt<'tcx>,
- param_env: ty::ParamEnv<'tcx>,
- ) -> Self {
- match bdy {
- PatRangeBoundary::NegInfinity => NegInfinity,
- PatRangeBoundary::Finite(value) => {
- let bits = value.eval_bits(tcx, param_env);
- Self::new_finite(tcx, ty, bits)
- }
- PatRangeBoundary::PosInfinity => PosInfinity,
- }
- }
-
- /// Used only for diagnostics.
- /// Note: it is possible to get `isize/usize::MAX+1` here, as explained in the doc for
- /// [`IntRange::split`]. This cannot be represented as a `Const`, so we represent it with
- /// `PosInfinity`.
- fn to_diagnostic_pat_range_bdy<'tcx>(
- self,
- ty: Ty<'tcx>,
- tcx: TyCtxt<'tcx>,
- ) -> PatRangeBoundary<'tcx> {
- match self {
- NegInfinity => PatRangeBoundary::NegInfinity,
- Finite(x) => {
- let bias = Self::signed_bias(tcx, ty);
- let bits = x ^ bias;
- let size = ty.primitive_size(tcx);
- match Scalar::try_from_uint(bits, size) {
- Some(scalar) => {
- let value = mir::Const::from_scalar(tcx, scalar, ty);
- PatRangeBoundary::Finite(value)
- }
- // The value doesn't fit. Since `x >= 0` and 0 always encodes the minimum value
- // for a type, the problem isn't that the value is too small. So it must be too
- // large.
- None => PatRangeBoundary::PosInfinity,
- }
- }
- JustAfterMax | PosInfinity => PatRangeBoundary::PosInfinity,
- }
- }
-
- /// Note: this will not turn a finite value into an infinite one or vice-versa.
- pub(crate) fn minus_one(self) -> Self {
- match self {
- Finite(n) => match n.checked_sub(1) {
- Some(m) => Finite(m),
- None => bug!(),
- },
- JustAfterMax => Finite(u128::MAX),
- x => x,
- }
- }
- /// Note: this will not turn a finite value into an infinite one or vice-versa.
- pub(crate) fn plus_one(self) -> Self {
- match self {
- Finite(n) => match n.checked_add(1) {
- Some(m) => Finite(m),
- None => JustAfterMax,
- },
- JustAfterMax => bug!(),
- x => x,
- }
- }
-}
-
-/// An exclusive interval, used for precise integer exhaustiveness checking. `IntRange`s always
-/// store a contiguous range.
-///
-/// `IntRange` is never used to encode an empty range or a "range" that wraps around the (offset)
-/// space: i.e., `range.lo < range.hi`.
-#[derive(Clone, Copy, PartialEq, Eq)]
-pub(crate) struct IntRange {
- pub(crate) lo: MaybeInfiniteInt, // Must not be `PosInfinity`.
- pub(crate) hi: MaybeInfiniteInt, // Must not be `NegInfinity`.
-}
-
-impl IntRange {
- #[inline]
- pub(super) fn is_integral(ty: Ty<'_>) -> bool {
- matches!(ty.kind(), ty::Char | ty::Int(_) | ty::Uint(_))
- }
-
- /// Best effort; will not know that e.g. `255u8..` is a singleton.
- pub(super) fn is_singleton(&self) -> bool {
- // Since `lo` and `hi` can't be the same `Infinity` and `plus_one` never changes from finite
-        // to infinite, this correctly only detects ranges that contain exactly one `Finite(x)`.
- self.lo.plus_one() == self.hi
- }
-
- #[inline]
- fn from_bits<'tcx>(tcx: TyCtxt<'tcx>, ty: Ty<'tcx>, bits: u128) -> IntRange {
- let x = MaybeInfiniteInt::new_finite(tcx, ty, bits);
- IntRange { lo: x, hi: x.plus_one() }
- }
-
- #[inline]
- fn from_range(lo: MaybeInfiniteInt, mut hi: MaybeInfiniteInt, end: RangeEnd) -> IntRange {
- if end == RangeEnd::Included {
- hi = hi.plus_one();
- }
- if lo >= hi {
- // This should have been caught earlier by E0030.
- bug!("malformed range pattern: {lo:?}..{hi:?}");
- }
- IntRange { lo, hi }
- }
-
- fn is_subrange(&self, other: &Self) -> bool {
- other.lo <= self.lo && self.hi <= other.hi
- }
-
- fn intersection(&self, other: &Self) -> Option<Self> {
- if self.lo < other.hi && other.lo < self.hi {
- Some(IntRange { lo: max(self.lo, other.lo), hi: min(self.hi, other.hi) })
- } else {
- None
- }
- }
-
- /// Partition a range of integers into disjoint subranges. This does constructor splitting for
- /// integer ranges as explained at the top of the file.
- ///
- /// This returns an output that covers `self`. The output is split so that the only
- /// intersections between an output range and a column range are inclusions. No output range
- /// straddles the boundary of one of the inputs.
- ///
- /// Additionally, we track for each output range whether it is covered by one of the column ranges or not.
- ///
- /// The following input:
- /// ```text
- /// (--------------------------) // `self`
- /// (------) (----------) (-)
- /// (------) (--------)
- /// ```
- /// is first intersected with `self`:
- /// ```text
- /// (--------------------------) // `self`
- /// (----) (----------) (-)
- /// (------) (--------)
- /// ```
- /// and then iterated over as follows:
- /// ```text
- /// (-(--)-(-)-(------)-)--(-)-
- /// ```
- /// where each sequence of dashes is an output range, and dashes outside parentheses are marked
- /// as `Presence::Missing`.
- ///
- /// ## `isize`/`usize`
- ///
- /// Whereas a wildcard of type `i32` stands for the range `i32::MIN..=i32::MAX`, a `usize`
-    /// wildcard stands for `0..PosInfinity` and an `isize` wildcard stands for
- /// `NegInfinity..PosInfinity`. In other words, as far as `IntRange` is concerned, there are
- /// values before `isize::MIN` and after `usize::MAX`/`isize::MAX`.
-    /// This is to prevent e.g. `0..(u32::MAX as usize)` from being exhaustive on one architecture and
- /// not others. See discussions around the `precise_pointer_size_matching` feature for more
- /// details.
- ///
- /// These infinities affect splitting subtly: it is possible to get `NegInfinity..0` and
- /// `usize::MAX+1..PosInfinity` in the output. Diagnostics must be careful to handle these
- /// fictitious ranges sensibly.
- fn split(
- &self,
- column_ranges: impl Iterator<Item = IntRange>,
- ) -> impl Iterator<Item = (Presence, IntRange)> {
- // The boundaries of ranges in `column_ranges` intersected with `self`.
- // We do parenthesis matching for input ranges. A boundary counts as +1 if it starts
- // a range and -1 if it ends it. When the count is > 0 between two boundaries, we
- // are within an input range.
- let mut boundaries: Vec<(MaybeInfiniteInt, isize)> = column_ranges
- .filter_map(|r| self.intersection(&r))
- .flat_map(|r| [(r.lo, 1), (r.hi, -1)])
- .collect();
- // We sort by boundary, and for each boundary we sort the "closing parentheses" first. The
- // order of +1/-1 for a same boundary value is actually irrelevant, because we only look at
- // the accumulated count between distinct boundary values.
- boundaries.sort_unstable();
-
- // Accumulate parenthesis counts.
- let mut paren_counter = 0isize;
- // Gather pairs of adjacent boundaries.
- let mut prev_bdy = self.lo;
- boundaries
- .into_iter()
- // End with the end of the range. The count is ignored.
- .chain(once((self.hi, 0)))
- // List pairs of adjacent boundaries and the count between them.
- .map(move |(bdy, delta)| {
- // `delta` affects the count as we cross `bdy`, so the relevant count between
- // `prev_bdy` and `bdy` is untouched by `delta`.
- let ret = (prev_bdy, paren_counter, bdy);
- prev_bdy = bdy;
- paren_counter += delta;
- ret
- })
- // Skip empty ranges.
- .filter(|&(prev_bdy, _, bdy)| prev_bdy != bdy)
- // Convert back to ranges.
- .map(move |(prev_bdy, paren_count, bdy)| {
- use Presence::*;
- let presence = if paren_count > 0 { Seen } else { Unseen };
- let range = IntRange { lo: prev_bdy, hi: bdy };
- (presence, range)
- })
- }
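A minimal, self-contained sketch of the boundary-counting split described in the doc comment above, using plain half-open `u128` ranges instead of `MaybeInfiniteInt`. The names `split_range` and the local `Presence` enum are illustrative, not compiler API.

```rust
#[derive(Debug, PartialEq)]
enum Presence { Seen, Unseen }

// Partition `lo..hi` into disjoint subranges, each either fully covered by some
// column range or disjoint from all of them, mirroring `IntRange::split` above.
fn split_range(lo: u128, hi: u128, column: &[(u128, u128)]) -> Vec<(Presence, (u128, u128))> {
    // Boundaries of the column ranges, clipped to `lo..hi`: +1 opens a range, -1 closes one.
    let mut boundaries: Vec<(u128, i64)> = column
        .iter()
        .filter(|&&(l, h)| l < hi && lo < h) // keep only ranges intersecting `lo..hi`
        .flat_map(|&(l, h)| [(l.max(lo), 1), (h.min(hi), -1)])
        .collect();
    boundaries.sort_unstable();

    let mut out = Vec::new();
    let mut count = 0i64; // how many column ranges are currently open
    let mut prev = lo;
    for (bdy, delta) in boundaries.into_iter().chain(std::iter::once((hi, 0))) {
        if prev != bdy {
            let presence = if count > 0 { Presence::Seen } else { Presence::Unseen };
            out.push((presence, (prev, bdy)));
        }
        prev = bdy;
        count += delta;
    }
    out
}

fn main() {
    // Splitting `0..10` against the column ranges `2..4` and `3..7` yields
    // `0..2` (unseen), `2..3`, `3..4`, `4..7` (seen), `7..10` (unseen).
    for (presence, range) in split_range(0, 10, &[(2, 4), (3, 7)]) {
        println!("{presence:?}: {range:?}");
    }
}
```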
-
- /// Whether the range denotes the fictitious values before `isize::MIN` or after
- /// `usize::MAX`/`isize::MAX` (see doc of [`IntRange::split`] for why these exist).
- pub(crate) fn is_beyond_boundaries<'tcx>(&self, ty: Ty<'tcx>, tcx: TyCtxt<'tcx>) -> bool {
- ty.is_ptr_sized_integral() && !tcx.features().precise_pointer_size_matching && {
- // The two invalid ranges are `NegInfinity..isize::MIN` (represented as
- // `NegInfinity..0`), and `{u,i}size::MAX+1..PosInfinity`. `to_diagnostic_pat_range_bdy`
- // converts `MAX+1` to `PosInfinity`, and we couldn't have `PosInfinity` in `self.lo`
- // otherwise.
- let lo = self.lo.to_diagnostic_pat_range_bdy(ty, tcx);
- matches!(lo, PatRangeBoundary::PosInfinity)
- || matches!(self.hi, MaybeInfiniteInt::Finite(0))
- }
- }
- /// Only used for displaying the range.
- pub(super) fn to_diagnostic_pat<'tcx>(&self, ty: Ty<'tcx>, tcx: TyCtxt<'tcx>) -> Pat<'tcx> {
- let kind = if matches!((self.lo, self.hi), (NegInfinity, PosInfinity)) {
- PatKind::Wild
- } else if self.is_singleton() {
- let lo = self.lo.to_diagnostic_pat_range_bdy(ty, tcx);
- let value = lo.as_finite().unwrap();
- PatKind::Constant { value }
- } else {
- // We convert to an inclusive range for diagnostics.
- let mut end = RangeEnd::Included;
- let mut lo = self.lo.to_diagnostic_pat_range_bdy(ty, tcx);
- if matches!(lo, PatRangeBoundary::PosInfinity) {
- // The only reason to get `PosInfinity` here is the special case where
- // `to_diagnostic_pat_range_bdy` found `{u,i}size::MAX+1`. So the range denotes the
- // fictitious values after `{u,i}size::MAX` (see [`IntRange::split`] for why we do
- // this). We show this to the user as `usize::MAX..` which is slightly incorrect but
- // probably clear enough.
- let c = ty.numeric_max_val(tcx).unwrap();
- let value = mir::Const::from_ty_const(c, tcx);
- lo = PatRangeBoundary::Finite(value);
- }
- let hi = if matches!(self.hi, MaybeInfiniteInt::Finite(0)) {
- // The range encodes `..ty::MIN`, so we can't convert it to an inclusive range.
- end = RangeEnd::Excluded;
- self.hi
- } else {
- self.hi.minus_one()
- };
- let hi = hi.to_diagnostic_pat_range_bdy(ty, tcx);
- PatKind::Range(Box::new(PatRange { lo, hi, end, ty }))
- };
-
- Pat { ty, span: DUMMY_SP, kind }
- }
-}
-
-/// Note: this will render signed ranges incorrectly. To render properly, convert to a pattern
-/// first.
-impl fmt::Debug for IntRange {
- fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
- if let Finite(lo) = self.lo {
- write!(f, "{lo}")?;
- }
- write!(f, "{}", RangeEnd::Excluded)?;
- if let Finite(hi) = self.hi {
- write!(f, "{hi}")?;
- }
- Ok(())
- }
-}
-
-#[derive(Copy, Clone, Debug, PartialEq, Eq)]
-enum SliceKind {
- /// Patterns of length `n` (`[x, y]`).
- FixedLen(usize),
- /// Patterns using the `..` notation (`[x, .., y]`).
- /// Captures any array constructor of `length >= i + j`.
- /// In the case where `array_len` is `Some(_)`,
- /// this indicates that we only care about the first `i` and the last `j` values of the array,
- /// and everything in between is a wildcard `_`.
- VarLen(usize, usize),
-}
-
-impl SliceKind {
- fn arity(self) -> usize {
- match self {
- FixedLen(length) => length,
- VarLen(prefix, suffix) => prefix + suffix,
- }
- }
-
- /// Whether this pattern includes patterns of length `other_len`.
- fn covers_length(self, other_len: usize) -> bool {
- match self {
- FixedLen(len) => len == other_len,
- VarLen(prefix, suffix) => prefix + suffix <= other_len,
- }
- }
-}
-
-/// A constructor for array and slice patterns.
-#[derive(Copy, Clone, Debug, PartialEq, Eq)]
-pub(super) struct Slice {
- /// `None` if the matched value is a slice, `Some(n)` if it is an array of size `n`.
- array_len: Option<usize>,
- /// The kind of pattern it is: fixed-length `[x, y]` or variable length `[x, .., y]`.
- kind: SliceKind,
-}
-
-impl Slice {
- fn new(array_len: Option<usize>, kind: SliceKind) -> Self {
- let kind = match (array_len, kind) {
- // If the middle `..` is empty, we effectively have a fixed-length pattern.
- (Some(len), VarLen(prefix, suffix)) if prefix + suffix >= len => FixedLen(len),
- _ => kind,
- };
- Slice { array_len, kind }
- }
-
- fn arity(self) -> usize {
- self.kind.arity()
- }
-
- /// See `Constructor::is_covered_by`
- fn is_covered_by(self, other: Self) -> bool {
- other.kind.covers_length(self.arity())
- }
-
- /// This computes constructor splitting for variable-length slices, as explained at the top of
- /// the file.
- ///
- /// A slice pattern `[x, .., y]` behaves like the infinite or-pattern `[x, y] | [x, _, y] | [x,
- /// _, _, y] | etc`. The corresponding value constructors are fixed-length array constructors of
- /// corresponding lengths. We obviously can't list this infinitude of constructors.
- /// Thankfully, it turns out that for each finite set of slice patterns, all sufficiently large
- /// array lengths are equivalent.
- ///
- /// Let's look at an example, where we are trying to split the last pattern:
- /// ```
- /// # fn foo(x: &[bool]) {
- /// match x {
- /// [true, true, ..] => {}
- /// [.., false, false] => {}
- /// [..] => {}
- /// }
- /// # }
- /// ```
- /// Here are the results of specialization for the first few lengths:
- /// ```
- /// # fn foo(x: &[bool]) { match x {
- /// // length 0
- /// [] => {}
- /// // length 1
- /// [_] => {}
- /// // length 2
- /// [true, true] => {}
- /// [false, false] => {}
- /// [_, _] => {}
- /// // length 3
- /// [true, true, _ ] => {}
- /// [_, false, false] => {}
- /// [_, _, _ ] => {}
- /// // length 4
- /// [true, true, _, _ ] => {}
- /// [_, _, false, false] => {}
- /// [_, _, _, _ ] => {}
- /// // length 5
- /// [true, true, _, _, _ ] => {}
- /// [_, _, _, false, false] => {}
- /// [_, _, _, _, _ ] => {}
- /// # _ => {}
- /// # }}
- /// ```
- ///
- /// We see that above length 4, we are simply inserting columns full of wildcards in the middle.
- /// This means that specialization and witness computation with slices of length `l >= 4` will
- /// give equivalent results regardless of `l`. This applies to any set of slice patterns: there
- /// will be a length `L` above which all lengths behave the same. This is exactly what we need
- /// for constructor splitting.
- ///
- /// A variable-length slice pattern covers all lengths from its arity up to infinity. As we just
- /// saw, we can split this in two: lengths below `L` are treated individually with a
- /// fixed-length slice each; lengths above `L` are grouped into a single variable-length slice
- /// constructor.
- ///
- /// For each variable-length slice pattern `p` with a prefix of length `plâ‚š` and suffix of
- /// length `slâ‚š`, only the first `plâ‚š` and the last `slâ‚š` elements are examined. Therefore, as
- /// long as `L` is positive (to avoid concerns about empty types), all elements after the
- /// maximum prefix length and before the maximum suffix length are not examined by any
- /// variable-length pattern, and therefore can be ignored. This gives us a way to compute `L`.
- ///
-    /// Additionally, if fixed-length patterns exist, we must pick an `L` strictly larger than all of them,
- /// so we can pick `L = max(max(FIXED_LEN)+1, max(PREFIX_LEN) + max(SUFFIX_LEN))`.
- /// `max_slice` below will be made to have this arity `L`.
- ///
- /// If `self` is fixed-length, it is returned as-is.
- ///
- /// Additionally, we track for each output slice whether it is covered by one of the column slices or not.
- fn split(
- self,
- column_slices: impl Iterator<Item = Slice>,
- ) -> impl Iterator<Item = (Presence, Slice)> {
- // Range of lengths below `L`.
- let smaller_lengths;
- let arity = self.arity();
- let mut max_slice = self.kind;
- // Tracks the smallest variable-length slice we've seen. Any slice arity above it is
- // therefore `Presence::Seen` in the column.
- let mut min_var_len = usize::MAX;
- // Tracks the fixed-length slices we've seen, to mark them as `Presence::Seen`.
- let mut seen_fixed_lens = FxHashSet::default();
- match &mut max_slice {
- VarLen(max_prefix_len, max_suffix_len) => {
- // We grow `max_slice` to be larger than all slices encountered, as described above.
- // For diagnostics, we keep the prefix and suffix lengths separate, but grow them so that
- // `L = max_prefix_len + max_suffix_len`.
- let mut max_fixed_len = 0;
- for slice in column_slices {
- match slice.kind {
- FixedLen(len) => {
- max_fixed_len = cmp::max(max_fixed_len, len);
- if arity <= len {
- seen_fixed_lens.insert(len);
- }
- }
- VarLen(prefix, suffix) => {
- *max_prefix_len = cmp::max(*max_prefix_len, prefix);
- *max_suffix_len = cmp::max(*max_suffix_len, suffix);
- min_var_len = cmp::min(min_var_len, prefix + suffix);
- }
- }
- }
- // We want `L = max(L, max_fixed_len + 1)`, modulo the fact that we keep prefix and
- // suffix separate.
- if max_fixed_len + 1 >= *max_prefix_len + *max_suffix_len {
- // The subtraction can't overflow thanks to the above check.
- // The new `max_prefix_len` is larger than its previous value.
- *max_prefix_len = max_fixed_len + 1 - *max_suffix_len;
- }
-
- // We cap the arity of `max_slice` at the array size.
- match self.array_len {
- Some(len) if max_slice.arity() >= len => max_slice = FixedLen(len),
- _ => {}
- }
-
- smaller_lengths = match self.array_len {
- // The only admissible fixed-length slice is one of the array size. Whether `max_slice`
- // is fixed-length or variable-length, it will be the only relevant slice to output
- // here.
- Some(_) => 0..0, // empty range
- // We need to cover all arities in the range `(arity..infinity)`. We split that
- // range into two: lengths smaller than `max_slice.arity()` are treated
-                    // independently as fixed-length slices, and lengths above are captured by
- // `max_slice`.
- None => self.arity()..max_slice.arity(),
- };
- }
- FixedLen(_) => {
- // No need to split here. We only track presence.
- for slice in column_slices {
- match slice.kind {
- FixedLen(len) => {
- if len == arity {
- seen_fixed_lens.insert(len);
- }
- }
- VarLen(prefix, suffix) => {
- min_var_len = cmp::min(min_var_len, prefix + suffix);
- }
- }
- }
- smaller_lengths = 0..0;
- }
- };
-
- smaller_lengths.map(FixedLen).chain(once(max_slice)).map(move |kind| {
- let arity = kind.arity();
- let seen = if min_var_len <= arity || seen_fixed_lens.contains(&arity) {
- Presence::Seen
- } else {
- Presence::Unseen
- };
- (seen, Slice::new(self.array_len, kind))
- })
- }
-}
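A minimal sketch of how the split length `L` described in the doc comment on `Slice::split` could be computed for a column of slice patterns. The local `SliceKind` mirrors the enum above, and `split_length` is a hypothetical helper, not compiler code.

```rust
#[derive(Clone, Copy)]
enum SliceKind {
    FixedLen(usize),
    VarLen(usize, usize), // prefix length, suffix length
}

// Compute `L = max(max(FIXED_LEN) + 1, max(PREFIX_LEN) + max(SUFFIX_LEN))`:
// all slice lengths >= L behave the same for this column of patterns.
fn split_length(column: &[SliceKind]) -> usize {
    let mut max_fixed = 0;
    let mut max_prefix = 0;
    let mut max_suffix = 0;
    for &kind in column {
        match kind {
            SliceKind::FixedLen(len) => max_fixed = max_fixed.max(len),
            SliceKind::VarLen(p, s) => {
                max_prefix = max_prefix.max(p);
                max_suffix = max_suffix.max(s);
            }
        }
    }
    (max_fixed + 1).max(max_prefix + max_suffix)
}

fn main() {
    // `[true, true, ..]`, `[.., false, false]` and `[..]` from the example above.
    use SliceKind::*;
    let l = split_length(&[VarLen(2, 0), VarLen(0, 2), VarLen(0, 0)]);
    assert_eq!(l, 4); // lengths >= 4 all give equivalent results
    println!("L = {l}");
}
```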
-
-/// A value can be decomposed into a constructor applied to some fields. This struct represents
-/// the constructor. See also `Fields`.
-///
-/// `pat_constructor` retrieves the constructor corresponding to a pattern.
-/// `specialize_constructor` returns the list of fields corresponding to a pattern, given a
-/// constructor. `Constructor::apply` reconstructs the pattern from a pair of `Constructor` and
-/// `Fields`.
-#[derive(Clone, Debug, PartialEq)]
-pub(super) enum Constructor<'tcx> {
- /// The constructor for patterns that have a single constructor, like tuples, struct patterns
- /// and fixed-length arrays.
- Single,
- /// Enum variants.
- Variant(VariantIdx),
- /// Booleans
- Bool(bool),
- /// Ranges of integer literal values (`2`, `2..=5` or `2..5`).
- IntRange(IntRange),
- /// Ranges of floating-point literal values (`2.0..=5.2`).
- F32Range(IeeeFloat<SingleS>, IeeeFloat<SingleS>, RangeEnd),
- F64Range(IeeeFloat<DoubleS>, IeeeFloat<DoubleS>, RangeEnd),
- /// String literals. Strings are not quite the same as `&[u8]` so we treat them separately.
- Str(mir::Const<'tcx>),
- /// Array and slice patterns.
- Slice(Slice),
- /// Constants that must not be matched structurally. They are treated as black
- /// boxes for the purposes of exhaustiveness: we must not inspect them, and they
- /// don't count towards making a match exhaustive.
- Opaque,
- /// Or-pattern.
- Or,
- /// Wildcard pattern.
- Wildcard,
- /// Fake extra constructor for enums that aren't allowed to be matched exhaustively. Also used
- /// for those types for which we cannot list constructors explicitly, like `f64` and `str`.
- NonExhaustive,
- /// Fake extra constructor for variants that should not be mentioned in diagnostics.
- /// We use this for variants behind an unstable gate as well as
- /// `#[doc(hidden)]` ones.
- Hidden,
- /// Fake extra constructor for constructors that are not seen in the matrix, as explained in the
- /// code for [`Constructor::split`].
- Missing,
-}
-
-impl<'tcx> Constructor<'tcx> {
- pub(super) fn is_non_exhaustive(&self) -> bool {
- matches!(self, NonExhaustive)
- }
-
- pub(super) fn as_variant(&self) -> Option<VariantIdx> {
- match self {
- Variant(i) => Some(*i),
- _ => None,
- }
- }
- fn as_bool(&self) -> Option<bool> {
- match self {
- Bool(b) => Some(*b),
- _ => None,
- }
- }
- pub(super) fn as_int_range(&self) -> Option<&IntRange> {
- match self {
- IntRange(range) => Some(range),
- _ => None,
- }
- }
- fn as_slice(&self) -> Option<Slice> {
- match self {
- Slice(slice) => Some(*slice),
- _ => None,
- }
- }
-
- fn variant_index_for_adt(&self, adt: ty::AdtDef<'tcx>) -> VariantIdx {
- match *self {
- Variant(idx) => idx,
- Single => {
- assert!(!adt.is_enum());
- FIRST_VARIANT
- }
- _ => bug!("bad constructor {:?} for adt {:?}", self, adt),
- }
- }
-
- /// The number of fields for this constructor. This must be kept in sync with
- /// `Fields::wildcards`.
- pub(super) fn arity(&self, pcx: &PatCtxt<'_, '_, 'tcx>) -> usize {
- match self {
- Single | Variant(_) => match pcx.ty.kind() {
- ty::Tuple(fs) => fs.len(),
- ty::Ref(..) => 1,
- ty::Adt(adt, ..) => {
- if adt.is_box() {
- // The only legal patterns of type `Box` (outside `std`) are `_` and box
- // patterns. If we're here we can assume this is a box pattern.
- 1
- } else {
- let variant = &adt.variant(self.variant_index_for_adt(*adt));
- Fields::list_variant_nonhidden_fields(pcx.cx, pcx.ty, variant).count()
- }
- }
- _ => bug!("Unexpected type for `Single` constructor: {:?}", pcx.ty),
- },
- Slice(slice) => slice.arity(),
- Bool(..)
- | IntRange(..)
- | F32Range(..)
- | F64Range(..)
- | Str(..)
- | Opaque
- | NonExhaustive
- | Hidden
- | Missing { .. }
- | Wildcard => 0,
- Or => bug!("The `Or` constructor doesn't have a fixed arity"),
- }
- }
-
- /// Some constructors (namely `Wildcard`, `IntRange` and `Slice`) actually stand for a set of
-    /// actual constructors (like variants, integers or fixed-length slices). When specializing for
-    /// these constructors, we want to be specializing for the actual underlying constructors.
-    /// Naively, we would simply return the list of constructors they correspond to. Instead, we are
-    /// more clever: if there are constructors that we know will behave the same w.r.t. the current
- /// matrix, we keep them grouped. For example, all slices of a sufficiently large length will
- /// either be all useful or all non-useful with a given matrix.
- ///
- /// See the branches for details on how the splitting is done.
- ///
-    /// This function may discard some irrelevant constructors if this preserves behavior. E.g. for
- /// the `_` case, we ignore the constructors already present in the column, unless all of them
- /// are.
- pub(super) fn split<'a>(
- &self,
- pcx: &PatCtxt<'_, '_, 'tcx>,
- ctors: impl Iterator<Item = &'a Constructor<'tcx>> + Clone,
- ) -> SmallVec<[Self; 1]>
- where
- 'tcx: 'a,
- {
- match self {
- Wildcard => {
- let split_set = ConstructorSet::for_ty(pcx.cx, pcx.ty).split(pcx, ctors);
- if !split_set.missing.is_empty() {
- // We are splitting a wildcard in order to compute its usefulness. Some constructors are
- // not present in the column. The first thing we note is that specializing with any of
- // the missing constructors would select exactly the rows with wildcards. Moreover, they
- // would all return equivalent results. We can therefore group them all into a
- // fictitious `Missing` constructor.
- //
- // As an important optimization, this function will skip all the present constructors.
- // This is correct because specializing with any of the present constructors would
- // select a strict superset of the wildcard rows, and thus would only find witnesses
- // already found with the `Missing` constructor.
- // This does mean that diagnostics are incomplete: in
- // ```
- // match x {
- // Some(true) => {}
- // }
- // ```
- // we report `None` as missing but not `Some(false)`.
- //
- // When all the constructors are missing we can equivalently return the `Wildcard`
- // constructor on its own. The difference between `Wildcard` and `Missing` will then
- // only be in diagnostics.
-
- // If some constructors are missing, we typically want to report those constructors,
- // e.g.:
- // ```
- // enum Direction { N, S, E, W }
- // let Direction::N = ...;
- // ```
- // we can report 3 witnesses: `S`, `E`, and `W`.
- //
- // However, if the user didn't actually specify a constructor
- // in this arm, e.g., in
- // ```
- // let x: (Direction, Direction, bool) = ...;
- // let (_, _, false) = x;
- // ```
- // we don't want to show all 16 possible witnesses `(<direction-1>, <direction-2>,
- // true)` - we are satisfied with `(_, _, true)`. So if all constructors are missing we
- // prefer to report just a wildcard `_`.
- //
- // The exception is: if we are at the top-level, for example in an empty match, we
- // usually prefer to report the full list of constructors.
- let all_missing = split_set.present.is_empty();
- let report_when_all_missing =
- pcx.is_top_level && !IntRange::is_integral(pcx.ty);
- let ctor =
- if all_missing && !report_when_all_missing { Wildcard } else { Missing };
- smallvec![ctor]
- } else {
- split_set.present
- }
- }
-            // Ranges are split unless trivial: singleton ranges are fast-tracked to the catch-all arm below.
- IntRange(this_range) if !this_range.is_singleton() => {
- let column_ranges = ctors.filter_map(|ctor| ctor.as_int_range()).cloned();
- this_range.split(column_ranges).map(|(_, range)| IntRange(range)).collect()
- }
- Slice(this_slice @ Slice { kind: VarLen(..), .. }) => {
- let column_slices = ctors.filter_map(|c| c.as_slice());
- this_slice.split(column_slices).map(|(_, slice)| Slice(slice)).collect()
- }
- // Any other constructor can be used unchanged.
- _ => smallvec![self.clone()],
- }
- }
-
- /// Returns whether `self` is covered by `other`, i.e. whether `self` is a subset of `other`.
- /// For the simple cases, this is simply checking for equality. For the "grouped" constructors,
- /// this checks for inclusion.
- // We inline because this has a single call site in `Matrix::specialize_constructor`.
- #[inline]
- pub(super) fn is_covered_by<'p>(&self, pcx: &PatCtxt<'_, 'p, 'tcx>, other: &Self) -> bool {
- // This must be kept in sync with `is_covered_by_any`.
- match (self, other) {
- // Wildcards cover anything
- (_, Wildcard) => true,
- // Only a wildcard pattern can match these special constructors.
- (Wildcard | Missing { .. } | NonExhaustive | Hidden, _) => false,
-
- (Single, Single) => true,
- (Variant(self_id), Variant(other_id)) => self_id == other_id,
- (Bool(self_b), Bool(other_b)) => self_b == other_b,
-
- (IntRange(self_range), IntRange(other_range)) => self_range.is_subrange(other_range),
- (F32Range(self_from, self_to, self_end), F32Range(other_from, other_to, other_end)) => {
- self_from.ge(other_from)
- && match self_to.partial_cmp(other_to) {
- Some(Ordering::Less) => true,
- Some(Ordering::Equal) => other_end == self_end,
- _ => false,
- }
- }
- (F64Range(self_from, self_to, self_end), F64Range(other_from, other_to, other_end)) => {
- self_from.ge(other_from)
- && match self_to.partial_cmp(other_to) {
- Some(Ordering::Less) => true,
- Some(Ordering::Equal) => other_end == self_end,
- _ => false,
- }
- }
- (Str(self_val), Str(other_val)) => {
- // FIXME Once valtrees are available we can directly use the bytes
- // in the `Str` variant of the valtree for the comparison here.
- self_val == other_val
- }
- (Slice(self_slice), Slice(other_slice)) => self_slice.is_covered_by(*other_slice),
-
- // We are trying to inspect an opaque constant. Thus we skip the row.
- (Opaque, _) | (_, Opaque) => false,
-
- _ => span_bug!(
- pcx.span,
- "trying to compare incompatible constructors {:?} and {:?}",
- self,
- other
- ),
- }
- }
-}
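A standalone restatement of the float-range arm of `is_covered_by` above, on plain `f32` bounds. The local `RangeEnd` enum and `float_range_is_covered_by` are simplified stand-ins for the compiler's types.

```rust
use std::cmp::Ordering;

#[derive(Clone, Copy, PartialEq)]
enum RangeEnd { Included, Excluded }

// `self` (from..to with an end kind) is covered by `other` iff it is a subrange
// of it, mirroring the `F32Range`/`F64Range` arms of `is_covered_by`.
fn float_range_is_covered_by(
    (self_from, self_to, self_end): (f32, f32, RangeEnd),
    (other_from, other_to, other_end): (f32, f32, RangeEnd),
) -> bool {
    self_from >= other_from
        && match self_to.partial_cmp(&other_to) {
            Some(Ordering::Less) => true,
            Some(Ordering::Equal) => other_end == self_end,
            _ => false,
        }
}

fn main() {
    use RangeEnd::*;
    // `1.0..=2.0` is covered by `0.0..=3.0`, but not by `0.0..2.0`.
    assert!(float_range_is_covered_by((1.0, 2.0, Included), (0.0, 3.0, Included)));
    assert!(!float_range_is_covered_by((1.0, 2.0, Included), (0.0, 2.0, Excluded)));
    println!("float range inclusion checks passed");
}
```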
-
-/// Describes the set of all constructors for a type.
-#[derive(Debug)]
-pub(super) enum ConstructorSet {
- /// The type has a single constructor, e.g. `&T` or a struct.
- Single,
- /// This type has the following list of constructors.
- /// Some variants are hidden, which means they won't be mentioned in diagnostics unless the user
- /// mentioned them first. We use this for variants behind an unstable gate as well as
- /// `#[doc(hidden)]` ones.
- Variants {
- visible_variants: Vec<VariantIdx>,
- hidden_variants: Vec<VariantIdx>,
- non_exhaustive: bool,
- },
- /// Booleans.
- Bool,
- /// The type is spanned by integer values. The range or ranges give the set of allowed values.
- /// The second range is only useful for `char`.
- Integers { range_1: IntRange, range_2: Option<IntRange> },
- /// The type is matched by slices. The usize is the compile-time length of the array, if known.
- Slice(Option<usize>),
- /// The type is matched by slices whose elements are uninhabited.
- SliceOfEmpty,
- /// The constructors cannot be listed, and the type cannot be matched exhaustively. E.g. `str`,
- /// floats.
- Unlistable,
- /// The type has no inhabitants.
- Uninhabited,
-}
-
-/// Describes the result of analyzing the constructors in a column of a match.
-///
-/// `present` is morally the set of constructors present in the column, and `missing` is the set of
-/// constructors that exist in the type but are not present in the column.
-///
-/// More formally, they respect the following constraints:
-/// - the union of `present` and `missing` covers the whole type
-/// - `present` and `missing` are disjoint
-/// - neither contains wildcards
-/// - each constructor in `present` is covered by some non-wildcard constructor in the column
-/// - together, the constructors in `present` cover all the non-wildcard constructors in the column
-/// - non-wildcards in the column do not cover anything in `missing`
-/// - constructors in `present` and `missing` are split for the column; in other words, they are
-/// either fully included in or disjoint from each constructor in the column. This avoids
-/// non-trivial intersections like between `0..10` and `5..15`.
-#[derive(Debug)]
-pub(super) struct SplitConstructorSet<'tcx> {
- pub(super) present: SmallVec<[Constructor<'tcx>; 1]>,
- pub(super) missing: Vec<Constructor<'tcx>>,
-}
-
-impl ConstructorSet {
- #[instrument(level = "debug", skip(cx), ret)]
- pub(super) fn for_ty<'p, 'tcx>(cx: &MatchCheckCtxt<'p, 'tcx>, ty: Ty<'tcx>) -> Self {
- let make_range = |start, end| {
- IntRange::from_range(
- MaybeInfiniteInt::new_finite(cx.tcx, ty, start),
- MaybeInfiniteInt::new_finite(cx.tcx, ty, end),
- RangeEnd::Included,
- )
- };
- // This determines the set of all possible constructors for the type `ty`. For numbers,
- // arrays and slices we use ranges and variable-length slices when appropriate.
- //
- // If the `exhaustive_patterns` feature is enabled, we make sure to omit constructors that
- // are statically impossible. E.g., for `Option<!>`, we do not include `Some(_)` in the
- // returned list of constructors.
- // Invariant: this is `Uninhabited` if and only if the type is uninhabited (as determined by
- // `cx.is_uninhabited()`).
- match ty.kind() {
- ty::Bool => Self::Bool,
- ty::Char => {
- // The valid Unicode Scalar Value ranges.
- Self::Integers {
- range_1: make_range('\u{0000}' as u128, '\u{D7FF}' as u128),
- range_2: Some(make_range('\u{E000}' as u128, '\u{10FFFF}' as u128)),
- }
- }
- &ty::Int(ity) => {
- let range = if ty.is_ptr_sized_integral()
- && !cx.tcx.features().precise_pointer_size_matching
- {
- // The min/max values of `isize` are not allowed to be observed unless the
- // `precise_pointer_size_matching` feature is enabled.
- IntRange { lo: NegInfinity, hi: PosInfinity }
- } else {
- let bits = Integer::from_int_ty(&cx.tcx, ity).size().bits() as u128;
- let min = 1u128 << (bits - 1);
- let max = min - 1;
- make_range(min, max)
- };
- Self::Integers { range_1: range, range_2: None }
- }
- &ty::Uint(uty) => {
- let range = if ty.is_ptr_sized_integral()
- && !cx.tcx.features().precise_pointer_size_matching
- {
- // The max value of `usize` is not allowed to be observed unless the
- // `precise_pointer_size_matching` feature is enabled.
- let lo = MaybeInfiniteInt::new_finite(cx.tcx, ty, 0);
- IntRange { lo, hi: PosInfinity }
- } else {
- let size = Integer::from_uint_ty(&cx.tcx, uty).size();
- let max = size.truncate(u128::MAX);
- make_range(0, max)
- };
- Self::Integers { range_1: range, range_2: None }
- }
- ty::Array(sub_ty, len) if len.try_eval_target_usize(cx.tcx, cx.param_env).is_some() => {
- let len = len.eval_target_usize(cx.tcx, cx.param_env) as usize;
- if len != 0 && cx.is_uninhabited(*sub_ty) {
- Self::Uninhabited
- } else {
- Self::Slice(Some(len))
- }
- }
- // Treat arrays of a constant but unknown length like slices.
- ty::Array(sub_ty, _) | ty::Slice(sub_ty) => {
- if cx.is_uninhabited(*sub_ty) {
- Self::SliceOfEmpty
- } else {
- Self::Slice(None)
- }
- }
- ty::Adt(def, args) if def.is_enum() => {
- // If the enum is declared as `#[non_exhaustive]`, we treat it as if it had an
- // additional "unknown" constructor.
- // There is no point in enumerating all possible variants, because the user can't
- // actually match against them all themselves. So we always return only the fictitious
- // constructor.
- // E.g., in an example like:
- //
- // ```
- // let err: io::ErrorKind = ...;
- // match err {
- // io::ErrorKind::NotFound => {},
- // }
- // ```
- //
- // we don't want to show every possible IO error, but instead have only `_` as the
- // witness.
- let is_declared_nonexhaustive = cx.is_foreign_non_exhaustive_enum(ty);
-
- if def.variants().is_empty() && !is_declared_nonexhaustive {
- Self::Uninhabited
- } else {
- let is_exhaustive_pat_feature = cx.tcx.features().exhaustive_patterns;
- let (hidden_variants, visible_variants) = def
- .variants()
- .iter_enumerated()
- .filter(|(_, v)| {
- // If `exhaustive_patterns` is enabled, we exclude variants known to be
- // uninhabited.
- !is_exhaustive_pat_feature
- || v.inhabited_predicate(cx.tcx, *def)
- .instantiate(cx.tcx, args)
- .apply(cx.tcx, cx.param_env, cx.module)
- })
- .map(|(idx, _)| idx)
- .partition(|idx| {
- let variant_def_id = def.variant(*idx).def_id;
- // Filter variants that depend on a disabled unstable feature.
- let is_unstable = matches!(
- cx.tcx.eval_stability(variant_def_id, None, DUMMY_SP, None),
- EvalResult::Deny { .. }
- );
- // Filter foreign `#[doc(hidden)]` variants.
- let is_doc_hidden =
- cx.tcx.is_doc_hidden(variant_def_id) && !variant_def_id.is_local();
- is_unstable || is_doc_hidden
- });
-
- Self::Variants {
- visible_variants,
- hidden_variants,
- non_exhaustive: is_declared_nonexhaustive,
- }
- }
- }
- ty::Never => Self::Uninhabited,
- _ if cx.is_uninhabited(ty) => Self::Uninhabited,
- ty::Adt(..) | ty::Tuple(..) | ty::Ref(..) => Self::Single,
- // This type is one for which we cannot list constructors, like `str` or `f64`.
- _ => Self::Unlistable,
- }
- }
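A small check of the bit-pattern arithmetic used for signed integers in `for_ty` above: `1 << (bits - 1)` is the unsigned encoding of `iN::MIN`, and subtracting one wraps around to the encoding of `iN::MAX`. Shown here for `i8`; illustrative only.

```rust
fn main() {
    let bits = 8u32;
    let min = 1u128 << (bits - 1);
    let max = min - 1;
    // Two's complement: i8::MIN is 0x80 and i8::MAX is 0x7F when viewed as bits.
    assert_eq!(min, i8::MIN as u8 as u128);
    assert_eq!(max, i8::MAX as u8 as u128);
    println!("i8::MIN bits = {min:#x}, i8::MAX bits = {max:#x}");
}
```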
-
-    /// This is the core logical operation of exhaustiveness checking. This analyzes a column of
- /// constructors to 1/ determine which constructors of the type (if any) are missing; 2/ split
- /// constructors to handle non-trivial intersections e.g. on ranges or slices.
- #[instrument(level = "debug", skip(self, pcx, ctors), ret)]
- pub(super) fn split<'a, 'tcx>(
- &self,
- pcx: &PatCtxt<'_, '_, 'tcx>,
- ctors: impl Iterator<Item = &'a Constructor<'tcx>> + Clone,
- ) -> SplitConstructorSet<'tcx>
- where
- 'tcx: 'a,
- {
- let mut present: SmallVec<[_; 1]> = SmallVec::new();
- let mut missing = Vec::new();
- // Constructors in `ctors`, except wildcards.
- let mut seen = ctors.filter(|c| !(matches!(c, Opaque | Wildcard)));
- match self {
- ConstructorSet::Single => {
- if seen.next().is_none() {
- missing.push(Single);
- } else {
- present.push(Single);
- }
- }
- ConstructorSet::Variants { visible_variants, hidden_variants, non_exhaustive } => {
- let seen_set: FxHashSet<_> = seen.map(|c| c.as_variant().unwrap()).collect();
- let mut skipped_a_hidden_variant = false;
-
- for variant in visible_variants {
- let ctor = Variant(*variant);
- if seen_set.contains(&variant) {
- present.push(ctor);
- } else {
- missing.push(ctor);
- }
- }
-
- for variant in hidden_variants {
- let ctor = Variant(*variant);
- if seen_set.contains(&variant) {
- present.push(ctor);
- } else {
- skipped_a_hidden_variant = true;
- }
- }
- if skipped_a_hidden_variant {
- missing.push(Hidden);
- }
-
- if *non_exhaustive {
- missing.push(NonExhaustive);
- }
- }
- ConstructorSet::Bool => {
- let mut seen_false = false;
- let mut seen_true = false;
- for b in seen.map(|ctor| ctor.as_bool().unwrap()) {
- if b {
- seen_true = true;
- } else {
- seen_false = true;
- }
- }
- if seen_false {
- present.push(Bool(false));
- } else {
- missing.push(Bool(false));
- }
- if seen_true {
- present.push(Bool(true));
- } else {
- missing.push(Bool(true));
- }
- }
- ConstructorSet::Integers { range_1, range_2 } => {
- let seen_ranges: Vec<_> =
- seen.map(|ctor| ctor.as_int_range().unwrap().clone()).collect();
- for (seen, splitted_range) in range_1.split(seen_ranges.iter().cloned()) {
- match seen {
- Presence::Unseen => missing.push(IntRange(splitted_range)),
- Presence::Seen => present.push(IntRange(splitted_range)),
- }
- }
- if let Some(range_2) = range_2 {
- for (seen, splitted_range) in range_2.split(seen_ranges.into_iter()) {
- match seen {
- Presence::Unseen => missing.push(IntRange(splitted_range)),
- Presence::Seen => present.push(IntRange(splitted_range)),
- }
- }
- }
- }
- &ConstructorSet::Slice(array_len) => {
- let seen_slices = seen.map(|c| c.as_slice().unwrap());
- let base_slice = Slice::new(array_len, VarLen(0, 0));
- for (seen, splitted_slice) in base_slice.split(seen_slices) {
- let ctor = Slice(splitted_slice);
- match seen {
- Presence::Unseen => missing.push(ctor),
- Presence::Seen => present.push(ctor),
- }
- }
- }
- ConstructorSet::SliceOfEmpty => {
- // This one is tricky because even though there's only one possible value of this
-                // type (namely `[]`), slice patterns of all lengths are allowed; they're just
- // unreachable if length != 0.
- // We still gather the seen constructors in `present`, but the only slice that can
- // go in `missing` is `[]`.
- let seen_slices = seen.map(|c| c.as_slice().unwrap());
- let base_slice = Slice::new(None, VarLen(0, 0));
- for (seen, splitted_slice) in base_slice.split(seen_slices) {
- let ctor = Slice(splitted_slice);
- match seen {
- Presence::Seen => present.push(ctor),
- Presence::Unseen if splitted_slice.arity() == 0 => {
- missing.push(Slice(Slice::new(None, FixedLen(0))))
- }
- Presence::Unseen => {}
- }
- }
- }
- ConstructorSet::Unlistable => {
- // Since we can't list constructors, we take the ones in the column. This might list
- // some constructors several times but there's not much we can do.
- present.extend(seen.cloned());
- missing.push(NonExhaustive);
- }
- // If `exhaustive_patterns` is disabled and our scrutinee is an empty type, we cannot
- // expose its emptiness. The exception is if the pattern is at the top level, because we
- // want empty matches to be considered exhaustive.
- ConstructorSet::Uninhabited
- if !pcx.cx.tcx.features().exhaustive_patterns && !pcx.is_top_level =>
- {
- missing.push(NonExhaustive);
- }
- ConstructorSet::Uninhabited => {}
- }
-
- SplitConstructorSet { present, missing }
- }
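A toy model of the `ConstructorSet::Bool` arm of `split` above, showing how a column is partitioned into `present` and `missing` while wildcards are ignored. The `Ctor` type and `split_bool_column` are simplified stand-ins, not the compiler's.

```rust
#[derive(Clone, Copy, PartialEq, Debug)]
enum Ctor { Bool(bool), Wildcard }

// Split a column of `bool` constructors into (present, missing), skipping wildcards,
// as the `ConstructorSet::Bool` arm does.
fn split_bool_column(column: &[Ctor]) -> (Vec<Ctor>, Vec<Ctor>) {
    let mut present = Vec::new();
    let mut missing = Vec::new();
    for value in [false, true] {
        if column.iter().any(|&c| c == Ctor::Bool(value)) {
            present.push(Ctor::Bool(value));
        } else {
            missing.push(Ctor::Bool(value));
        }
    }
    (present, missing)
}

fn main() {
    // A match with arms `true => ...` and `_ => ...`: only `true` is present,
    // so `false` ends up in `missing` and would be reported as the witness.
    let (present, missing) = split_bool_column(&[Ctor::Bool(true), Ctor::Wildcard]);
    assert_eq!(present, vec![Ctor::Bool(true)]);
    assert_eq!(missing, vec![Ctor::Bool(false)]);
    println!("present: {present:?}, missing: {missing:?}");
}
```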
-
- /// Compute the set of constructors missing from this column.
- /// This is only used for reporting to the user.
- pub(super) fn compute_missing<'a, 'tcx>(
- &self,
- pcx: &PatCtxt<'_, '_, 'tcx>,
- ctors: impl Iterator<Item = &'a Constructor<'tcx>> + Clone,
- ) -> Vec<Constructor<'tcx>>
- where
- 'tcx: 'a,
- {
- self.split(pcx, ctors).missing
- }
-}
-
-/// A value can be decomposed into a constructor applied to some fields. This struct represents
-/// those fields, generalized to allow patterns in each field. See also `Constructor`.
-///
-/// This is constructed for a constructor using [`Fields::wildcards()`]. The idea is that
-/// [`Fields::wildcards()`] constructs a list of fields where all entries are wildcards, and then
-/// given a pattern we fill some of the fields with its subpatterns.
-/// In the following example `Fields::wildcards` returns `[_, _, _, _]`. Then in
-/// `extract_pattern_arguments` we fill some of the entries, and the result is
-/// `[Some(0), _, _, _]`.
-/// ```compile_fail,E0004
-/// # fn foo() -> [Option<u8>; 4] { [None; 4] }
-/// let x: [Option<u8>; 4] = foo();
-/// match x {
-/// [Some(0), ..] => {}
-/// }
-/// ```
-///
-/// Note that the number of fields of a constructor may not match the fields declared in the
-/// original struct/variant. This happens if a private or `non_exhaustive` field is uninhabited,
-/// because the code mustn't observe that it is uninhabited. In that case that field is not
-/// included in `fields`. For that reason, when you have a `FieldIdx` you must use
-/// `index_with_declared_idx`.
-#[derive(Debug, Clone, Copy)]
-pub(super) struct Fields<'p, 'tcx> {
- fields: &'p [DeconstructedPat<'p, 'tcx>],
-}
-
-impl<'p, 'tcx> Fields<'p, 'tcx> {
- fn empty() -> Self {
- Fields { fields: &[] }
- }
-
- fn singleton(cx: &MatchCheckCtxt<'p, 'tcx>, field: DeconstructedPat<'p, 'tcx>) -> Self {
- let field: &_ = cx.pattern_arena.alloc(field);
- Fields { fields: std::slice::from_ref(field) }
- }
-
- pub(super) fn from_iter(
- cx: &MatchCheckCtxt<'p, 'tcx>,
- fields: impl IntoIterator<Item = DeconstructedPat<'p, 'tcx>>,
- ) -> Self {
- let fields: &[_] = cx.pattern_arena.alloc_from_iter(fields);
- Fields { fields }
- }
-
- fn wildcards_from_tys(
- cx: &MatchCheckCtxt<'p, 'tcx>,
- tys: impl IntoIterator<Item = Ty<'tcx>>,
- span: Span,
- ) -> Self {
- Fields::from_iter(cx, tys.into_iter().map(|ty| DeconstructedPat::wildcard(ty, span)))
- }
-
-    // In the case of either a `#[non_exhaustive]` field list or a non-public field, we hide
- // uninhabited fields in order not to reveal the uninhabitedness of the whole variant.
- // This lists the fields we keep along with their types.
- fn list_variant_nonhidden_fields<'a>(
- cx: &'a MatchCheckCtxt<'p, 'tcx>,
- ty: Ty<'tcx>,
- variant: &'a VariantDef,
- ) -> impl Iterator<Item = (FieldIdx, Ty<'tcx>)> + Captures<'a> + Captures<'p> {
- let ty::Adt(adt, args) = ty.kind() else { bug!() };
- // Whether we must not match the fields of this variant exhaustively.
- let is_non_exhaustive = variant.is_field_list_non_exhaustive() && !adt.did().is_local();
-
- variant.fields.iter().enumerate().filter_map(move |(i, field)| {
- let ty = field.ty(cx.tcx, args);
- // `field.ty()` doesn't normalize after substituting.
- let ty = cx.tcx.normalize_erasing_regions(cx.param_env, ty);
- let is_visible = adt.is_enum() || field.vis.is_accessible_from(cx.module, cx.tcx);
- let is_uninhabited = cx.is_uninhabited(ty);
-
- if is_uninhabited && (!is_visible || is_non_exhaustive) {
- None
- } else {
- Some((FieldIdx::new(i), ty))
- }
- })
- }
-
- /// Creates a new list of wildcard fields for a given constructor. The result must have a
- /// length of `constructor.arity()`.
- #[instrument(level = "trace")]
- pub(super) fn wildcards(pcx: &PatCtxt<'_, 'p, 'tcx>, constructor: &Constructor<'tcx>) -> Self {
- let ret = match constructor {
- Single | Variant(_) => match pcx.ty.kind() {
- ty::Tuple(fs) => Fields::wildcards_from_tys(pcx.cx, fs.iter(), pcx.span),
- ty::Ref(_, rty, _) => Fields::wildcards_from_tys(pcx.cx, once(*rty), pcx.span),
- ty::Adt(adt, args) => {
- if adt.is_box() {
- // The only legal patterns of type `Box` (outside `std`) are `_` and box
- // patterns. If we're here we can assume this is a box pattern.
- Fields::wildcards_from_tys(pcx.cx, once(args.type_at(0)), pcx.span)
- } else {
- let variant = &adt.variant(constructor.variant_index_for_adt(*adt));
- let tys = Fields::list_variant_nonhidden_fields(pcx.cx, pcx.ty, variant)
- .map(|(_, ty)| ty);
- Fields::wildcards_from_tys(pcx.cx, tys, pcx.span)
- }
- }
- _ => bug!("Unexpected type for `Single` constructor: {:?}", pcx),
- },
- Slice(slice) => match *pcx.ty.kind() {
- ty::Slice(ty) | ty::Array(ty, _) => {
- let arity = slice.arity();
- Fields::wildcards_from_tys(pcx.cx, (0..arity).map(|_| ty), pcx.span)
- }
- _ => bug!("bad slice pattern {:?} {:?}", constructor, pcx),
- },
- Bool(..)
- | IntRange(..)
- | F32Range(..)
- | F64Range(..)
- | Str(..)
- | Opaque
- | NonExhaustive
- | Hidden
- | Missing { .. }
- | Wildcard => Fields::empty(),
- Or => {
- bug!("called `Fields::wildcards` on an `Or` ctor")
- }
- };
- debug!(?ret);
- ret
- }
-
- /// Returns the list of patterns.
- pub(super) fn iter_patterns<'a>(
- &'a self,
- ) -> impl Iterator<Item = &'p DeconstructedPat<'p, 'tcx>> + Captures<'a> {
- self.fields.iter()
- }
-}
-
-/// Values and patterns can be represented as a constructor applied to some fields. This represents
-/// a pattern in this form.
-/// This also uses interior mutability to keep track of whether the pattern has been found reachable
-/// during analysis. For this reason it cannot be cloned.
-/// A `DeconstructedPat` will almost always come from user input; the only exceptions are some
-/// `Wildcard`s introduced during specialization.
-pub(crate) struct DeconstructedPat<'p, 'tcx> {
- ctor: Constructor<'tcx>,
- fields: Fields<'p, 'tcx>,
- ty: Ty<'tcx>,
- span: Span,
- reachable: Cell<bool>,
-}
-
-impl<'p, 'tcx> DeconstructedPat<'p, 'tcx> {
- pub(super) fn wildcard(ty: Ty<'tcx>, span: Span) -> Self {
- Self::new(Wildcard, Fields::empty(), ty, span)
- }
-
- pub(super) fn new(
- ctor: Constructor<'tcx>,
- fields: Fields<'p, 'tcx>,
- ty: Ty<'tcx>,
- span: Span,
- ) -> Self {
- DeconstructedPat { ctor, fields, ty, span, reachable: Cell::new(false) }
- }
-
- pub(crate) fn from_pat(cx: &MatchCheckCtxt<'p, 'tcx>, pat: &Pat<'tcx>) -> Self {
- let mkpat = |pat| DeconstructedPat::from_pat(cx, pat);
- let ctor;
- let fields;
- match &pat.kind {
- PatKind::AscribeUserType { subpattern, .. }
- | PatKind::InlineConstant { subpattern, .. } => return mkpat(subpattern),
- PatKind::Binding { subpattern: Some(subpat), .. } => return mkpat(subpat),
- PatKind::Binding { subpattern: None, .. } | PatKind::Wild => {
- ctor = Wildcard;
- fields = Fields::empty();
- }
- PatKind::Deref { subpattern } => {
- ctor = Single;
- fields = Fields::singleton(cx, mkpat(subpattern));
- }
- PatKind::Leaf { subpatterns } | PatKind::Variant { subpatterns, .. } => {
- match pat.ty.kind() {
- ty::Tuple(fs) => {
- ctor = Single;
- let mut wilds: SmallVec<[_; 2]> =
- fs.iter().map(|ty| DeconstructedPat::wildcard(ty, pat.span)).collect();
- for pat in subpatterns {
- wilds[pat.field.index()] = mkpat(&pat.pattern);
- }
- fields = Fields::from_iter(cx, wilds);
- }
- ty::Adt(adt, args) if adt.is_box() => {
- // The only legal patterns of type `Box` (outside `std`) are `_` and box
- // patterns. If we're here we can assume this is a box pattern.
- // FIXME(Nadrieril): A `Box` can in theory be matched either with `Box(_,
- // _)` or a box pattern. As a hack to avoid an ICE with the former, we
-                        // ignore fields other than the first one. This will trigger an error later
- // anyway.
- // See https://github.com/rust-lang/rust/issues/82772 ,
- // explanation: https://github.com/rust-lang/rust/pull/82789#issuecomment-796921977
- // The problem is that we can't know from the type whether we'll match
- // normally or through box-patterns. We'll have to figure out a proper
- // solution when we introduce generalized deref patterns. Also need to
- // prevent mixing of those two options.
- let pattern = subpatterns.into_iter().find(|pat| pat.field.index() == 0);
- let pat = if let Some(pat) = pattern {
- mkpat(&pat.pattern)
- } else {
- DeconstructedPat::wildcard(args.type_at(0), pat.span)
- };
- ctor = Single;
- fields = Fields::singleton(cx, pat);
- }
- ty::Adt(adt, _) => {
- ctor = match pat.kind {
- PatKind::Leaf { .. } => Single,
- PatKind::Variant { variant_index, .. } => Variant(variant_index),
- _ => bug!(),
- };
- let variant = &adt.variant(ctor.variant_index_for_adt(*adt));
- // For each field in the variant, we store the relevant index into `self.fields` if any.
- let mut field_id_to_id: Vec<Option<usize>> =
- (0..variant.fields.len()).map(|_| None).collect();
- let tys = Fields::list_variant_nonhidden_fields(cx, pat.ty, variant)
- .enumerate()
- .map(|(i, (field, ty))| {
- field_id_to_id[field.index()] = Some(i);
- ty
- });
- let mut wilds: SmallVec<[_; 2]> =
- tys.map(|ty| DeconstructedPat::wildcard(ty, pat.span)).collect();
- for pat in subpatterns {
- if let Some(i) = field_id_to_id[pat.field.index()] {
- wilds[i] = mkpat(&pat.pattern);
- }
- }
- fields = Fields::from_iter(cx, wilds);
- }
- _ => bug!("pattern has unexpected type: pat: {:?}, ty: {:?}", pat, pat.ty),
- }
- }
- PatKind::Constant { value } => {
- match pat.ty.kind() {
- ty::Bool => {
- ctor = match value.try_eval_bool(cx.tcx, cx.param_env) {
- Some(b) => Bool(b),
- None => Opaque,
- };
- fields = Fields::empty();
- }
- ty::Char | ty::Int(_) | ty::Uint(_) => {
- ctor = match value.try_eval_bits(cx.tcx, cx.param_env) {
- Some(bits) => IntRange(IntRange::from_bits(cx.tcx, pat.ty, bits)),
- None => Opaque,
- };
- fields = Fields::empty();
- }
- ty::Float(ty::FloatTy::F32) => {
- ctor = match value.try_eval_bits(cx.tcx, cx.param_env) {
- Some(bits) => {
- use rustc_apfloat::Float;
- let value = rustc_apfloat::ieee::Single::from_bits(bits);
- F32Range(value, value, RangeEnd::Included)
- }
- None => Opaque,
- };
- fields = Fields::empty();
- }
- ty::Float(ty::FloatTy::F64) => {
- ctor = match value.try_eval_bits(cx.tcx, cx.param_env) {
- Some(bits) => {
- use rustc_apfloat::Float;
- let value = rustc_apfloat::ieee::Double::from_bits(bits);
- F64Range(value, value, RangeEnd::Included)
- }
- None => Opaque,
- };
- fields = Fields::empty();
- }
- ty::Ref(_, t, _) if t.is_str() => {
- // We want a `&str` constant to behave like a `Deref` pattern, to be compatible
- // with other `Deref` patterns. This could have been done in `const_to_pat`,
- // but that causes issues with the rest of the matching code.
- // So here, the constructor for a `"foo"` pattern is `&` (represented by
- // `Single`), and has one field. That field has constructor `Str(value)` and no
- // fields.
- // Note: `t` is `str`, not `&str`.
- let subpattern =
- DeconstructedPat::new(Str(*value), Fields::empty(), *t, pat.span);
- ctor = Single;
- fields = Fields::singleton(cx, subpattern)
- }
- // All constants that can be structurally matched have already been expanded
- // into the corresponding `Pat`s by `const_to_pat`. Constants that remain are
- // opaque.
- _ => {
- ctor = Opaque;
- fields = Fields::empty();
- }
- }
- }
- PatKind::Range(box PatRange { lo, hi, end, .. }) => {
- let ty = pat.ty;
- ctor = match ty.kind() {
- ty::Char | ty::Int(_) | ty::Uint(_) => {
- let lo =
- MaybeInfiniteInt::from_pat_range_bdy(*lo, ty, cx.tcx, cx.param_env);
- let hi =
- MaybeInfiniteInt::from_pat_range_bdy(*hi, ty, cx.tcx, cx.param_env);
- IntRange(IntRange::from_range(lo, hi, *end))
- }
- ty::Float(fty) => {
- use rustc_apfloat::Float;
- let lo = lo.as_finite().map(|c| c.eval_bits(cx.tcx, cx.param_env));
- let hi = hi.as_finite().map(|c| c.eval_bits(cx.tcx, cx.param_env));
- match fty {
- ty::FloatTy::F32 => {
- use rustc_apfloat::ieee::Single;
- let lo = lo.map(Single::from_bits).unwrap_or(-Single::INFINITY);
- let hi = hi.map(Single::from_bits).unwrap_or(Single::INFINITY);
- F32Range(lo, hi, *end)
- }
- ty::FloatTy::F64 => {
- use rustc_apfloat::ieee::Double;
- let lo = lo.map(Double::from_bits).unwrap_or(-Double::INFINITY);
- let hi = hi.map(Double::from_bits).unwrap_or(Double::INFINITY);
- F64Range(lo, hi, *end)
- }
- }
- }
- _ => bug!("invalid type for range pattern: {}", ty),
- };
- fields = Fields::empty();
- }
- PatKind::Array { prefix, slice, suffix } | PatKind::Slice { prefix, slice, suffix } => {
- let array_len = match pat.ty.kind() {
- ty::Array(_, length) => {
- Some(length.eval_target_usize(cx.tcx, cx.param_env) as usize)
- }
- ty::Slice(_) => None,
- _ => span_bug!(pat.span, "bad ty {:?} for slice pattern", pat.ty),
- };
- let kind = if slice.is_some() {
- VarLen(prefix.len(), suffix.len())
- } else {
- FixedLen(prefix.len() + suffix.len())
- };
- ctor = Slice(Slice::new(array_len, kind));
- fields =
- Fields::from_iter(cx, prefix.iter().chain(suffix.iter()).map(|p| mkpat(&*p)));
- }
- PatKind::Or { .. } => {
- ctor = Or;
- let pats = expand_or_pat(pat);
- fields = Fields::from_iter(cx, pats.into_iter().map(mkpat));
- }
- PatKind::Error(_) => {
- ctor = Opaque;
- fields = Fields::empty();
- }
- }
- DeconstructedPat::new(ctor, fields, pat.ty, pat.span)
- }
-
- pub(super) fn is_or_pat(&self) -> bool {
- matches!(self.ctor, Or)
- }
- pub(super) fn flatten_or_pat(&'p self) -> SmallVec<[&'p Self; 1]> {
- if self.is_or_pat() {
- self.iter_fields().flat_map(|p| p.flatten_or_pat()).collect()
- } else {
- smallvec![self]
- }
- }
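A minimal standalone version of the recursion in `flatten_or_pat` above, on a toy pattern type; `ToyPat` and `flatten` are illustrative names, not compiler code.

```rust
// Toy pattern: either a leaf (rendered as a string) or an or-pattern of sub-patterns.
enum ToyPat {
    Leaf(&'static str),
    Or(Vec<ToyPat>),
}

// Recursively expand nested or-patterns into a flat list of non-or alternatives,
// as `flatten_or_pat` does.
fn flatten(pat: &ToyPat) -> Vec<&ToyPat> {
    match pat {
        ToyPat::Or(subs) => subs.iter().flat_map(flatten).collect(),
        leaf => vec![leaf],
    }
}

fn main() {
    use ToyPat::*;
    // `(A | (B | C))` flattens to the three alternatives A, B, C.
    let pat = Or(vec![Leaf("A"), Or(vec![Leaf("B"), Leaf("C")])]);
    let names: Vec<_> = flatten(&pat)
        .iter()
        .map(|p| match p {
            Leaf(name) => *name,
            Or(_) => unreachable!("flatten never returns or-patterns"),
        })
        .collect();
    assert_eq!(names, ["A", "B", "C"]);
    println!("{names:?}");
}
```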
-
- pub(super) fn ctor(&self) -> &Constructor<'tcx> {
- &self.ctor
- }
- pub(super) fn ty(&self) -> Ty<'tcx> {
- self.ty
- }
- pub(super) fn span(&self) -> Span {
- self.span
- }
-
- pub(super) fn iter_fields<'a>(
- &'a self,
- ) -> impl Iterator<Item = &'p DeconstructedPat<'p, 'tcx>> + Captures<'a> {
- self.fields.iter_patterns()
- }
-
- /// Specialize this pattern with a constructor.
- /// `other_ctor` can be different from `self.ctor`, but must be covered by it.
- pub(super) fn specialize<'a>(
- &'a self,
- pcx: &PatCtxt<'_, 'p, 'tcx>,
- other_ctor: &Constructor<'tcx>,
- ) -> SmallVec<[&'p DeconstructedPat<'p, 'tcx>; 2]> {
- match (&self.ctor, other_ctor) {
- (Wildcard, _) => {
- // We return a wildcard for each field of `other_ctor`.
- Fields::wildcards(pcx, other_ctor).iter_patterns().collect()
- }
- (Slice(self_slice), Slice(other_slice))
- if self_slice.arity() != other_slice.arity() =>
- {
- // The only tricky case: two slices of different arity. Since `self_slice` covers
- // `other_slice`, `self_slice` must be `VarLen`, i.e. of the form
- // `[prefix, .., suffix]`. Moreover `other_slice` is guaranteed to have a larger
- // arity. So we fill the middle part with enough wildcards to reach the length of
- // the new, larger slice.
- match self_slice.kind {
- FixedLen(_) => bug!("{:?} doesn't cover {:?}", self_slice, other_slice),
- VarLen(prefix, suffix) => {
- let (ty::Slice(inner_ty) | ty::Array(inner_ty, _)) = *self.ty.kind() else {
- bug!("bad slice pattern {:?} {:?}", self.ctor, self.ty);
- };
- let prefix = &self.fields.fields[..prefix];
- let suffix = &self.fields.fields[self_slice.arity() - suffix..];
- let wildcard: &_ = pcx
- .cx
- .pattern_arena
- .alloc(DeconstructedPat::wildcard(inner_ty, pcx.span));
- let extra_wildcards = other_slice.arity() - self_slice.arity();
- let extra_wildcards = (0..extra_wildcards).map(|_| wildcard);
- prefix.iter().chain(extra_wildcards).chain(suffix).collect()
- }
- }
- }
- _ => self.fields.iter_patterns().collect(),
- }
- }
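A small illustration of the slice case of `specialize` above: a `[p0, .., p1]` pattern (prefix 1, suffix 1) specialized with a fixed-length-4 slice constructor has its middle padded with wildcards. The string-based `specialize_var_len_slice` below is a hypothetical sketch, not the compiler's code.

```rust
// Pad a variable-length slice pattern's fields with wildcards so that it matches
// a larger fixed arity, as `specialize` does for the `(Slice, Slice)` case above.
fn specialize_var_len_slice(
    fields: &[&str],     // subpatterns of `[prefix, .., suffix]`, rendered as strings
    prefix: usize,
    suffix: usize,
    target_arity: usize, // arity of the constructor we specialize with
) -> Vec<String> {
    debug_assert!(target_arity >= prefix + suffix);
    let extra = target_arity - (prefix + suffix);
    fields[..prefix]
        .iter()
        .map(|s| s.to_string())
        .chain(std::iter::repeat("_".to_string()).take(extra))
        .chain(fields[fields.len() - suffix..].iter().map(|s| s.to_string()))
        .collect()
}

fn main() {
    // `[x, .., y]` specialized with a length-4 slice constructor becomes `[x, _, _, y]`.
    let specialized = specialize_var_len_slice(&["x", "y"], 1, 1, 4);
    assert_eq!(specialized, ["x", "_", "_", "y"]);
    println!("{specialized:?}");
}
```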
-
-    /// We keep track, for each pattern, of whether it was ever reachable during the analysis. This
-    /// is used with `unreachable_spans` to report unreachable subpatterns arising from or-patterns.
- pub(super) fn set_reachable(&self) {
- self.reachable.set(true)
- }
- pub(super) fn is_reachable(&self) -> bool {
- self.reachable.get()
- }
-
- /// Report the spans of subpatterns that were not reachable, if any.
- pub(super) fn unreachable_spans(&self) -> Vec<Span> {
- let mut spans = Vec::new();
- self.collect_unreachable_spans(&mut spans);
- spans
- }
-
- fn collect_unreachable_spans(&self, spans: &mut Vec<Span>) {
- // We don't look at subpatterns if we already reported the whole pattern as unreachable.
- if !self.is_reachable() {
- spans.push(self.span);
- } else {
- for p in self.iter_fields() {
- p.collect_unreachable_spans(spans);
- }
- }
- }
-}
-
-/// This is mostly copied from the `Pat` impl. This is best effort and not good enough for a
-/// `Display` impl.
-impl<'p, 'tcx> fmt::Debug for DeconstructedPat<'p, 'tcx> {
- fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
- // Printing lists is a chore.
- let mut first = true;
- let mut start_or_continue = |s| {
- if first {
- first = false;
- ""
- } else {
- s
- }
- };
- let mut start_or_comma = || start_or_continue(", ");
-
- match &self.ctor {
- Single | Variant(_) => match self.ty.kind() {
- ty::Adt(def, _) if def.is_box() => {
- // Without `box_patterns`, the only legal pattern of type `Box` is `_` (outside
- // of `std`). So this branch is only reachable when the feature is enabled and
- // the pattern is a box pattern.
- let subpattern = self.iter_fields().next().unwrap();
- write!(f, "box {subpattern:?}")
- }
- ty::Adt(..) | ty::Tuple(..) => {
- let variant = match self.ty.kind() {
- ty::Adt(adt, _) => Some(adt.variant(self.ctor.variant_index_for_adt(*adt))),
- ty::Tuple(_) => None,
- _ => unreachable!(),
- };
-
- if let Some(variant) = variant {
- write!(f, "{}", variant.name)?;
- }
-
- // Without `cx`, we can't know which field corresponds to which, so we can't
- // get the names of the fields. Instead we just display everything as a tuple
- // struct, which should be good enough.
- write!(f, "(")?;
- for p in self.iter_fields() {
- write!(f, "{}", start_or_comma())?;
- write!(f, "{p:?}")?;
- }
- write!(f, ")")
- }
- // Note: given the expansion of `&str` patterns done in `expand_pattern`, we should
-                // be careful to detect strings here. However, a string literal pattern will never
- // be reported as a non-exhaustiveness witness, so we can ignore this issue.
- ty::Ref(_, _, mutbl) => {
- let subpattern = self.iter_fields().next().unwrap();
- write!(f, "&{}{:?}", mutbl.prefix_str(), subpattern)
- }
- _ => write!(f, "_"),
- },
- Slice(slice) => {
- let mut subpatterns = self.fields.iter_patterns();
- write!(f, "[")?;
- match slice.kind {
- FixedLen(_) => {
- for p in subpatterns {
- write!(f, "{}{:?}", start_or_comma(), p)?;
- }
- }
- VarLen(prefix_len, _) => {
- for p in subpatterns.by_ref().take(prefix_len) {
- write!(f, "{}{:?}", start_or_comma(), p)?;
- }
- write!(f, "{}", start_or_comma())?;
- write!(f, "..")?;
- for p in subpatterns {
- write!(f, "{}{:?}", start_or_comma(), p)?;
- }
- }
- }
- write!(f, "]")
- }
- Bool(b) => write!(f, "{b}"),
- // Best-effort, will render signed ranges incorrectly
- IntRange(range) => write!(f, "{range:?}"),
- F32Range(lo, hi, end) => write!(f, "{lo}{end}{hi}"),
- F64Range(lo, hi, end) => write!(f, "{lo}{end}{hi}"),
- Str(value) => write!(f, "{value}"),
- Opaque => write!(f, "<constant pattern>"),
- Or => {
- for pat in self.iter_fields() {
- write!(f, "{}{:?}", start_or_continue(" | "), pat)?;
- }
- Ok(())
- }
- Wildcard | Missing { .. } | NonExhaustive | Hidden => write!(f, "_ : {:?}", self.ty),
- }
- }
-}
-
-/// Same idea as `DeconstructedPat`, except this is a fictitious pattern built up for diagnostic
-/// purposes. As such it doesn't use interning and can be cloned.
-#[derive(Debug, Clone)]
-pub(crate) struct WitnessPat<'tcx> {
- ctor: Constructor<'tcx>,
- pub(crate) fields: Vec<WitnessPat<'tcx>>,
- ty: Ty<'tcx>,
-}
-
-impl<'tcx> WitnessPat<'tcx> {
- pub(super) fn new(ctor: Constructor<'tcx>, fields: Vec<Self>, ty: Ty<'tcx>) -> Self {
- Self { ctor, fields, ty }
- }
- pub(super) fn wildcard(ty: Ty<'tcx>) -> Self {
- Self::new(Wildcard, Vec::new(), ty)
- }
-
- /// Construct a pattern that matches everything that starts with this constructor.
- /// For example, if `ctor` is a `Constructor::Variant` for `Option::Some`, we get the pattern
- /// `Some(_)`.
- pub(super) fn wild_from_ctor(pcx: &PatCtxt<'_, '_, 'tcx>, ctor: Constructor<'tcx>) -> Self {
- // Reuse `Fields::wildcards` to get the types.
- let fields = Fields::wildcards(pcx, &ctor)
- .iter_patterns()
- .map(|deco_pat| Self::wildcard(deco_pat.ty()))
- .collect();
- Self::new(ctor, fields, pcx.ty)
- }
-
- pub(super) fn ctor(&self) -> &Constructor<'tcx> {
- &self.ctor
- }
- pub(super) fn ty(&self) -> Ty<'tcx> {
- self.ty
- }
-
- /// Convert back to a `thir::Pat` for diagnostic purposes. This panics for patterns that don't
- /// appear in diagnostics, like float ranges.
- pub(crate) fn to_diagnostic_pat(&self, cx: &MatchCheckCtxt<'_, 'tcx>) -> Pat<'tcx> {
- let is_wildcard = |pat: &Pat<'_>| matches!(pat.kind, PatKind::Wild);
- let mut subpatterns = self.iter_fields().map(|p| Box::new(p.to_diagnostic_pat(cx)));
- let kind = match &self.ctor {
- Bool(b) => PatKind::Constant { value: mir::Const::from_bool(cx.tcx, *b) },
- IntRange(range) => return range.to_diagnostic_pat(self.ty, cx.tcx),
- Single | Variant(_) => match self.ty.kind() {
- ty::Tuple(..) => PatKind::Leaf {
- subpatterns: subpatterns
- .enumerate()
- .map(|(i, pattern)| FieldPat { field: FieldIdx::new(i), pattern })
- .collect(),
- },
- ty::Adt(adt_def, _) if adt_def.is_box() => {
- // Without `box_patterns`, the only legal pattern of type `Box` is `_` (outside
- // of `std`). So this branch is only reachable when the feature is enabled and
- // the pattern is a box pattern.
- PatKind::Deref { subpattern: subpatterns.next().unwrap() }
- }
- ty::Adt(adt_def, args) => {
- let variant_index = self.ctor.variant_index_for_adt(*adt_def);
- let variant = &adt_def.variant(variant_index);
- let subpatterns = Fields::list_variant_nonhidden_fields(cx, self.ty, variant)
- .zip(subpatterns)
- .map(|((field, _ty), pattern)| FieldPat { field, pattern })
- .collect();
-
- if adt_def.is_enum() {
- PatKind::Variant { adt_def: *adt_def, args, variant_index, subpatterns }
- } else {
- PatKind::Leaf { subpatterns }
- }
- }
- // Note: given the expansion of `&str` patterns done in `expand_pattern`, we should
-                // be careful to reconstruct the correct constant pattern here. However, a string
- // literal pattern will never be reported as a non-exhaustiveness witness, so we
- // ignore this issue.
- ty::Ref(..) => PatKind::Deref { subpattern: subpatterns.next().unwrap() },
- _ => bug!("unexpected ctor for type {:?} {:?}", self.ctor, self.ty),
- },
- Slice(slice) => {
- match slice.kind {
- FixedLen(_) => PatKind::Slice {
- prefix: subpatterns.collect(),
- slice: None,
- suffix: Box::new([]),
- },
- VarLen(prefix, _) => {
- let mut subpatterns = subpatterns.peekable();
- let mut prefix: Vec<_> = subpatterns.by_ref().take(prefix).collect();
- if slice.array_len.is_some() {
- // Improves diagnostics a bit: if the type is a known-size array, instead
- // of reporting `[x, _, .., _, y]`, we prefer to report `[x, .., y]`.
- // This is incorrect if the size is not known, since `[_, ..]` captures
- // arrays of lengths `>= 1` whereas `[..]` captures any length.
- while !prefix.is_empty() && is_wildcard(prefix.last().unwrap()) {
- prefix.pop();
- }
- while subpatterns.peek().is_some()
- && is_wildcard(subpatterns.peek().unwrap())
- {
- subpatterns.next();
- }
- }
- let suffix: Box<[_]> = subpatterns.collect();
- let wild = Pat::wildcard_from_ty(self.ty);
- PatKind::Slice {
- prefix: prefix.into_boxed_slice(),
- slice: Some(Box::new(wild)),
- suffix,
- }
- }
- }
- }
- &Str(value) => PatKind::Constant { value },
- Wildcard | NonExhaustive | Hidden => PatKind::Wild,
- Missing { .. } => bug!(
- "trying to convert a `Missing` constructor into a `Pat`; this is probably a bug,
- `Missing` should have been processed in `apply_constructors`"
- ),
- F32Range(..) | F64Range(..) | Opaque | Or => {
- bug!("can't convert to pattern: {:?}", self)
- }
- };
-
- Pat { ty: self.ty, span: DUMMY_SP, kind }
- }
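A simplified model of the wildcard trimming done for known-size arrays in `to_diagnostic_pat` above, so that `[x, _, .., _, y]` is reported as `[x, .., y]`. The helper below is hypothetical and operates on rendered subpatterns.

```rust
// Drop trailing wildcards from the prefix and leading wildcards from the suffix.
// Only valid when the array length is known; for unknown lengths `[_, ..]` and
// `[..]` describe different sets of lengths, as noted in the comment above.
fn trim_known_size_slice_witness(prefix: &mut Vec<&str>, suffix: &mut Vec<&str>) {
    while prefix.last() == Some(&"_") {
        prefix.pop();
    }
    while suffix.first() == Some(&"_") {
        suffix.remove(0);
    }
}

fn main() {
    let mut prefix = vec!["x", "_"];
    let mut suffix = vec!["_", "y"];
    trim_known_size_slice_witness(&mut prefix, &mut suffix);
    assert_eq!(prefix, ["x"]);
    assert_eq!(suffix, ["y"]);
    println!("[{}, .., {}]", prefix.join(", "), suffix.join(", "));
}
```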
-
- pub(super) fn iter_fields<'a>(&'a self) -> impl Iterator<Item = &'a WitnessPat<'tcx>> {
- self.fields.iter()
- }
-}
diff --git a/compiler/rustc_mir_build/src/thir/pattern/mod.rs b/compiler/rustc_mir_build/src/thir/pattern/mod.rs
index 0811ab6a0..c7762f8ce 100644
--- a/compiler/rustc_mir_build/src/thir/pattern/mod.rs
+++ b/compiler/rustc_mir_build/src/thir/pattern/mod.rs
@@ -2,11 +2,8 @@
mod check_match;
mod const_to_pat;
-pub(crate) mod deconstruct_pat;
-mod usefulness;
pub(crate) use self::check_match::check_match;
-pub(crate) use self::usefulness::MatchCheckCtxt;
use crate::errors::*;
use crate::thir::util::UserAnnotatedTyHelpers;
@@ -18,17 +15,14 @@ use rustc_hir::pat_util::EnumerateAndAdjustIterator;
use rustc_hir::RangeEnd;
use rustc_index::Idx;
use rustc_middle::mir::interpret::{ErrorHandled, GlobalId, LitToConstError, LitToConstInput};
-use rustc_middle::mir::{self, BorrowKind, Const, Mutability, UserTypeProjection};
+use rustc_middle::mir::{self, BorrowKind, Const, Mutability};
use rustc_middle::thir::{
Ascription, BindingMode, FieldPat, LocalVarId, Pat, PatKind, PatRange, PatRangeBoundary,
};
use rustc_middle::ty::layout::IntegerExt;
-use rustc_middle::ty::{
- self, AdtDef, CanonicalUserTypeAnnotation, GenericArg, GenericArgsRef, Region, Ty, TyCtxt,
- TypeVisitableExt, UserType,
-};
+use rustc_middle::ty::{self, CanonicalUserTypeAnnotation, Ty, TyCtxt, TypeVisitableExt};
use rustc_span::def_id::LocalDefId;
-use rustc_span::{ErrorGuaranteed, Span, Symbol};
+use rustc_span::{ErrorGuaranteed, Span};
use rustc_target::abi::{FieldIdx, Integer};
use std::cmp::Ordering;
@@ -111,7 +105,7 @@ impl<'a, 'tcx> PatCtxt<'a, 'tcx> {
let msg = format!(
"found bad range pattern endpoint `{expr:?}` outside of error recovery"
);
- return Err(self.tcx.sess.delay_span_bug(expr.span, msg));
+ return Err(self.tcx.sess.span_delayed_bug(expr.span, msg));
};
Ok((Some(PatRangeBoundary::Finite(value)), ascr, inline_const))
}
@@ -180,8 +174,8 @@ impl<'a, 'tcx> PatCtxt<'a, 'tcx> {
span: Span,
) -> Result<PatKind<'tcx>, ErrorGuaranteed> {
if lo_expr.is_none() && hi_expr.is_none() {
- let msg = format!("found twice-open range pattern (`..`) outside of error recovery");
- return Err(self.tcx.sess.delay_span_bug(span, msg));
+ let msg = "found twice-open range pattern (`..`) outside of error recovery";
+ return Err(self.tcx.sess.span_delayed_bug(span, msg));
}
let (lo, lo_ascr, lo_inline) = self.lower_pattern_range_endpoint(lo_expr)?;
@@ -254,6 +248,8 @@ impl<'a, 'tcx> PatCtxt<'a, 'tcx> {
let kind = match pat.kind {
hir::PatKind::Wild => PatKind::Wild,
+ hir::PatKind::Never => PatKind::Never,
+
hir::PatKind::Lit(value) => self.lower_lit(value),
hir::PatKind::Range(ref lo_expr, ref hi_expr, end) => {
@@ -266,16 +262,16 @@ impl<'a, 'tcx> PatCtxt<'a, 'tcx> {
return self.lower_path(qpath, pat.hir_id, pat.span);
}
- hir::PatKind::Ref(ref subpattern, _) | hir::PatKind::Box(ref subpattern) => {
+ hir::PatKind::Ref(subpattern, _) | hir::PatKind::Box(subpattern) => {
PatKind::Deref { subpattern: self.lower_pattern(subpattern) }
}
- hir::PatKind::Slice(ref prefix, ref slice, ref suffix) => {
+ hir::PatKind::Slice(prefix, ref slice, suffix) => {
self.slice_or_array_pattern(pat.span, ty, prefix, slice, suffix)
}
- hir::PatKind::Tuple(ref pats, ddpos) => {
- let ty::Tuple(ref tys) = ty.kind() else {
+ hir::PatKind::Tuple(pats, ddpos) => {
+ let ty::Tuple(tys) = ty.kind() else {
span_bug!(pat.span, "unexpected type for tuple pattern: {:?}", ty);
};
let subpatterns = self.lower_tuple_subpats(pats, tys.len(), ddpos);
@@ -325,7 +321,7 @@ impl<'a, 'tcx> PatCtxt<'a, 'tcx> {
}
}
- hir::PatKind::TupleStruct(ref qpath, ref pats, ddpos) => {
+ hir::PatKind::TupleStruct(ref qpath, pats, ddpos) => {
let res = self.typeck_results.qpath_res(qpath, pat.hir_id);
let ty::Adt(adt_def, _) = ty.kind() else {
span_bug!(pat.span, "tuple struct pattern not applied to an ADT {:?}", ty);
@@ -335,20 +331,20 @@ impl<'a, 'tcx> PatCtxt<'a, 'tcx> {
self.lower_variant_or_leaf(res, pat.hir_id, pat.span, ty, subpatterns)
}
- hir::PatKind::Struct(ref qpath, ref fields, _) => {
+ hir::PatKind::Struct(ref qpath, fields, _) => {
let res = self.typeck_results.qpath_res(qpath, pat.hir_id);
let subpatterns = fields
.iter()
.map(|field| FieldPat {
field: self.typeck_results.field_index(field.hir_id),
- pattern: self.lower_pattern(&field.pat),
+ pattern: self.lower_pattern(field.pat),
})
.collect();
self.lower_variant_or_leaf(res, pat.hir_id, pat.span, ty, subpatterns)
}
- hir::PatKind::Or(ref pats) => PatKind::Or { pats: self.lower_patterns(pats) },
+ hir::PatKind::Or(pats) => PatKind::Or { pats: self.lower_patterns(pats) },
};
Box::new(Pat { span, ty, kind })
@@ -701,146 +697,3 @@ impl<'tcx> UserAnnotatedTyHelpers<'tcx> for PatCtxt<'_, 'tcx> {
self.typeck_results
}
}
-
-trait PatternFoldable<'tcx>: Sized {
- fn fold_with<F: PatternFolder<'tcx>>(&self, folder: &mut F) -> Self {
- self.super_fold_with(folder)
- }
-
- fn super_fold_with<F: PatternFolder<'tcx>>(&self, folder: &mut F) -> Self;
-}
-
-trait PatternFolder<'tcx>: Sized {
- fn fold_pattern(&mut self, pattern: &Pat<'tcx>) -> Pat<'tcx> {
- pattern.super_fold_with(self)
- }
-
- fn fold_pattern_kind(&mut self, kind: &PatKind<'tcx>) -> PatKind<'tcx> {
- kind.super_fold_with(self)
- }
-}
-
-impl<'tcx, T: PatternFoldable<'tcx>> PatternFoldable<'tcx> for Box<T> {
- fn super_fold_with<F: PatternFolder<'tcx>>(&self, folder: &mut F) -> Self {
- let content: T = (**self).fold_with(folder);
- Box::new(content)
- }
-}
-
-impl<'tcx, T: PatternFoldable<'tcx>> PatternFoldable<'tcx> for Vec<T> {
- fn super_fold_with<F: PatternFolder<'tcx>>(&self, folder: &mut F) -> Self {
- self.iter().map(|t| t.fold_with(folder)).collect()
- }
-}
-
-impl<'tcx, T: PatternFoldable<'tcx>> PatternFoldable<'tcx> for Box<[T]> {
- fn super_fold_with<F: PatternFolder<'tcx>>(&self, folder: &mut F) -> Self {
- self.iter().map(|t| t.fold_with(folder)).collect()
- }
-}
-
-impl<'tcx, T: PatternFoldable<'tcx>> PatternFoldable<'tcx> for Option<T> {
- fn super_fold_with<F: PatternFolder<'tcx>>(&self, folder: &mut F) -> Self {
- self.as_ref().map(|t| t.fold_with(folder))
- }
-}
-
-macro_rules! ClonePatternFoldableImpls {
- (<$lt_tcx:tt> $($ty:ty),+) => {
- $(
- impl<$lt_tcx> PatternFoldable<$lt_tcx> for $ty {
- fn super_fold_with<F: PatternFolder<$lt_tcx>>(&self, _: &mut F) -> Self {
- Clone::clone(self)
- }
- }
- )+
- }
-}
-
-ClonePatternFoldableImpls! { <'tcx>
- Span, FieldIdx, Mutability, Symbol, LocalVarId, usize,
- Region<'tcx>, Ty<'tcx>, BindingMode, AdtDef<'tcx>,
- GenericArgsRef<'tcx>, &'tcx GenericArg<'tcx>, UserType<'tcx>,
- UserTypeProjection, CanonicalUserTypeAnnotation<'tcx>
-}
-
-impl<'tcx> PatternFoldable<'tcx> for FieldPat<'tcx> {
- fn super_fold_with<F: PatternFolder<'tcx>>(&self, folder: &mut F) -> Self {
- FieldPat { field: self.field.fold_with(folder), pattern: self.pattern.fold_with(folder) }
- }
-}
-
-impl<'tcx> PatternFoldable<'tcx> for Pat<'tcx> {
- fn fold_with<F: PatternFolder<'tcx>>(&self, folder: &mut F) -> Self {
- folder.fold_pattern(self)
- }
-
- fn super_fold_with<F: PatternFolder<'tcx>>(&self, folder: &mut F) -> Self {
- Pat {
- ty: self.ty.fold_with(folder),
- span: self.span.fold_with(folder),
- kind: self.kind.fold_with(folder),
- }
- }
-}
-
-impl<'tcx> PatternFoldable<'tcx> for PatKind<'tcx> {
- fn fold_with<F: PatternFolder<'tcx>>(&self, folder: &mut F) -> Self {
- folder.fold_pattern_kind(self)
- }
-
- fn super_fold_with<F: PatternFolder<'tcx>>(&self, folder: &mut F) -> Self {
- match *self {
- PatKind::Wild => PatKind::Wild,
- PatKind::Error(e) => PatKind::Error(e),
- PatKind::AscribeUserType {
- ref subpattern,
- ascription: Ascription { ref annotation, variance },
- } => PatKind::AscribeUserType {
- subpattern: subpattern.fold_with(folder),
- ascription: Ascription { annotation: annotation.fold_with(folder), variance },
- },
- PatKind::Binding { mutability, name, mode, var, ty, ref subpattern, is_primary } => {
- PatKind::Binding {
- mutability: mutability.fold_with(folder),
- name: name.fold_with(folder),
- mode: mode.fold_with(folder),
- var: var.fold_with(folder),
- ty: ty.fold_with(folder),
- subpattern: subpattern.fold_with(folder),
- is_primary,
- }
- }
- PatKind::Variant { adt_def, args, variant_index, ref subpatterns } => {
- PatKind::Variant {
- adt_def: adt_def.fold_with(folder),
- args: args.fold_with(folder),
- variant_index,
- subpatterns: subpatterns.fold_with(folder),
- }
- }
- PatKind::Leaf { ref subpatterns } => {
- PatKind::Leaf { subpatterns: subpatterns.fold_with(folder) }
- }
- PatKind::Deref { ref subpattern } => {
- PatKind::Deref { subpattern: subpattern.fold_with(folder) }
- }
- PatKind::Constant { value } => PatKind::Constant { value },
- PatKind::InlineConstant { def, subpattern: ref pattern } => {
- PatKind::InlineConstant { def, subpattern: pattern.fold_with(folder) }
- }
- PatKind::Range(ref range) => PatKind::Range(range.clone()),
- PatKind::Slice { ref prefix, ref slice, ref suffix } => PatKind::Slice {
- prefix: prefix.fold_with(folder),
- slice: slice.fold_with(folder),
- suffix: suffix.fold_with(folder),
- },
- PatKind::Array { ref prefix, ref slice, ref suffix } => PatKind::Array {
- prefix: prefix.fold_with(folder),
- slice: slice.fold_with(folder),
- suffix: suffix.fold_with(folder),
- },
- PatKind::Or { ref pats } => PatKind::Or { pats: pats.fold_with(folder) },
- }
- }
-}
diff --git a/compiler/rustc_mir_build/src/thir/pattern/usefulness.rs b/compiler/rustc_mir_build/src/thir/pattern/usefulness.rs
deleted file mode 100644
index da7b6587a..000000000
--- a/compiler/rustc_mir_build/src/thir/pattern/usefulness.rs
+++ /dev/null
@@ -1,1205 +0,0 @@
-//! Note: tests specific to this file can be found in:
-//!
-//! - `ui/pattern/usefulness`
-//! - `ui/or-patterns`
-//! - `ui/consts/const_in_pattern`
-//! - `ui/rfc-2008-non-exhaustive`
-//! - `ui/half-open-range-patterns`
-//! - probably many others
-//!
-//! I (Nadrieril) prefer to put new tests in `ui/pattern/usefulness` unless there's a specific
-//! reason not to, for example if they depend on a particular feature like `or_patterns`.
-//!
-//! -----
-//!
-//! This file includes the logic for exhaustiveness and reachability checking for pattern-matching.
-//! Specifically, given a list of patterns for a type, we can tell whether:
-//! (a) each pattern is reachable (reachability)
-//! (b) the patterns cover every possible value for the type (exhaustiveness)
-//!
-//! The algorithm implemented here is a modified version of the one described in [this
-//! paper](http://moscova.inria.fr/~maranget/papers/warn/index.html). We have however generalized
-//! it to accommodate the variety of patterns that Rust supports. We thus explain our version here,
-//! without being as rigorous.
-//!
-//!
-//! # Summary
-//!
-//! The core of the algorithm is the notion of "usefulness". A pattern `q` is said to be *useful*
-//! relative to another pattern `p` of the same type if there is a value that is matched by `q` and
-//! not matched by `p`. This generalizes to many `p`s: `q` is useful w.r.t. a list of patterns
-//! `p_1 .. p_n` if there is a value that is matched by `q` and by none of the `p_i`. We write
-//! `usefulness(p_1 .. p_n, q)` for a function that returns a list of such values. The aim of this
-//! file is to compute it efficiently.
-//!
-//! This is enough to compute reachability: a pattern in a `match` expression is reachable iff it
-//! is useful w.r.t. the patterns above it:
-//! ```rust
-//! # fn foo(x: Option<i32>) {
-//! match x {
-//! Some(_) => {},
-//! None => {}, // reachable: `None` is matched by this but not the branch above
-//! Some(0) => {}, // unreachable: all the values this matches are already matched by
-//! // `Some(_)` above
-//! }
-//! # }
-//! ```
-//!
-//! This is also enough to compute exhaustiveness: a match is exhaustive iff the wildcard `_`
-//! pattern is _not_ useful w.r.t. the patterns in the match. The values returned by `usefulness`
-//! are used to tell the user which values are missing.
-//! ```compile_fail,E0004
-//! # fn foo(x: Option<i32>) {
-//! match x {
-//! Some(0) => {},
-//! None => {},
-//! // not exhaustive: `_` is useful because it matches `Some(1)`
-//! }
-//! # }
-//! ```
-//!
-//! The entrypoint of this file is the [`compute_match_usefulness`] function, which computes
-//! reachability for each match branch and exhaustiveness for the whole match.
-//!
-//!
-//! # Constructors and fields
-//!
-//! Note: we will often abbreviate "constructor" as "ctor".
-//!
-//! The idea that powers everything that is done in this file is the following: a (matchable)
-//! value is made from a constructor applied to a number of subvalues. Examples of constructors are
-//! `Some`, `None`, `(,)` (the 2-tuple constructor), `Foo {..}` (the constructor for a struct
-//! `Foo`), and `2` (the constructor for the number `2`). This is natural when we think of
-//! pattern-matching, and this is the basis for what follows.
-//!
-//! Some of the ctors listed above might feel weird: `None` and `2` don't take any arguments.
-//! That's ok: those are ctors that take a list of 0 arguments; they are the simplest case of
-//! ctors. We treat `2` as a ctor because `u64` and other number types behave exactly like a huge
-//! `enum`, with one variant for each number. This allows us to see any matchable value as made up
-//! from a tree of ctors, each having a set number of children. For example: `Foo { bar: None,
-//! baz: Ok(0) }` is made from 4 different ctors, namely `Foo{..}`, `None`, `Ok` and `0`.
-//!
-//! This idea can be extended to patterns: they are also made from constructors applied to fields.
-//! A pattern for a given type is allowed to use all the ctors for values of that type (which we
-//! call "value constructors"), but there are also pattern-only ctors. The most important one is
-//! the wildcard (`_`), and the others are integer ranges (`0..=10`), variable-length slices (`[x,
-//! ..]`), and or-patterns (`Ok(0) | Err(_)`). Examples of valid patterns are `42`, `Some(_)`, `Foo
-//! { bar: Some(0) | None, baz: _ }`. Note that a binder in a pattern (e.g. `Some(x)`) matches the
-//! same values as a wildcard (e.g. `Some(_)`), so we treat both as wildcards.
-//!
-//! From this deconstruction we can compute whether a given value matches a given pattern; we
-//! simply look at ctors one at a time. Given a pattern `p` and a value `v`, we want to compute
-//! `matches!(v, p)`. It's mostly straightforward: we compare the head ctors and when they match
-//! we compare their fields recursively. A few representative examples:
-//!
-//! - `matches!(v, _) := true`
-//! - `matches!((v0, v1), (p0, p1)) := matches!(v0, p0) && matches!(v1, p1)`
-//! - `matches!(Foo { bar: v0, baz: v1 }, Foo { bar: p0, baz: p1 }) := matches!(v0, p0) && matches!(v1, p1)`
-//! - `matches!(Ok(v0), Ok(p0)) := matches!(v0, p0)`
-//! - `matches!(Ok(v0), Err(p0)) := false` (incompatible variants)
-//! - `matches!(v, 1..=100) := matches!(v, 1) || ... || matches!(v, 100)`
-//! - `matches!([v0], [p0, .., p1]) := false` (incompatible lengths)
-//! - `matches!([v0, v1, v2], [p0, .., p1]) := matches!(v0, p0) && matches!(v2, p1)`
-//! - `matches!(v, p0 | p1) := matches!(v, p0) || matches!(v, p1)`
-//!
-//! Constructors, fields and relevant operations are defined in the [`super::deconstruct_pat`] module.
-//!
-//! Note: this constructors/fields distinction may not straightforwardly apply to every Rust type.
-//! For example a value of type `Rc<u64>` can't be deconstructed that way, and `&str` has an
-//! infinitude of constructors. There are also subtleties with visibility of fields and
-//! uninhabitedness and various other things. The constructors idea can be extended to handle most
-//! of these subtleties though; caveats are documented where relevant throughout the code.
-//!
-//! Whether constructors cover each other is computed by [`Constructor::is_covered_by`].
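
To make the constructor/field view above concrete, here is a toy model implementing the `matches!` rules listed in this section. Everything in it, including the `Value` and `Pattern` types, is invented for the sketch and is far simpler than the real `DeconstructedPat`:

```rust
// Illustrative-only model: a value is a constructor applied to fields.
struct Value {
    ctor: &'static str,
    fields: Vec<Value>,
}

enum Pattern {
    Wild,
    Ctor(&'static str, Vec<Pattern>),
    Or(Box<Pattern>, Box<Pattern>),
}

fn matches(v: &Value, p: &Pattern) -> bool {
    match p {
        // A wildcard (or a binder) matches anything.
        Pattern::Wild => true,
        // An or-pattern matches if either branch does.
        Pattern::Or(a, b) => matches(v, a) || matches(v, b),
        // Same head constructor: compare the fields pairwise.
        Pattern::Ctor(name, subpats) => {
            v.ctor == *name
                && v.fields.len() == subpats.len()
                && v.fields.iter().zip(subpats).all(|(f, sp)| matches(f, sp))
        }
    }
}

fn main() {
    // `Some(0)` matches `Some(_)` and `Some(0) | None`, but not `None`.
    let some_zero = Value { ctor: "Some", fields: vec![Value { ctor: "0", fields: vec![] }] };
    assert!(matches(&some_zero, &Pattern::Ctor("Some", vec![Pattern::Wild])));
    assert!(!matches(&some_zero, &Pattern::Ctor("None", vec![])));
    let or_pat = Pattern::Or(
        Box::new(Pattern::Ctor("Some", vec![Pattern::Ctor("0", vec![])])),
        Box::new(Pattern::Ctor("None", vec![])),
    );
    assert!(matches(&some_zero, &or_pat));
}
```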
-//!
-//!
-//! # Specialization
-//!
-//! Recall that we wish to compute `usefulness(p_1 .. p_n, q)`: given a list of patterns `p_1 ..
-//! p_n` and a pattern `q`, all of the same type, we want to find a list of values (called
-//! "witnesses") that are matched by `q` and by none of the `p_i`. We obviously don't just
-//! enumerate all possible values. From the discussion above we see that we can proceed
-//! ctor-by-ctor: for each value ctor of the given type, we ask "is there a value that starts with
-//! this constructor and matches `q` and none of the `p_i`?". As we saw above, there's a lot we can
-//! say from knowing only the first constructor of our candidate value.
-//!
-//! Let's take the following example:
-//! ```compile_fail,E0004
-//! # enum Enum { Variant1(()), Variant2(Option<bool>, u32)}
-//! # fn foo(x: Enum) {
-//! match x {
-//! Enum::Variant1(_) => {} // `p1`
-//! Enum::Variant2(None, 0) => {} // `p2`
-//! Enum::Variant2(Some(_), 0) => {} // `q`
-//! }
-//! # }
-//! ```
-//!
-//! We can easily see that if our candidate value `v` starts with `Variant1` it will not match `q`.
-//! If `v = Variant2(v0, v1)` however, whether or not it matches `p2` and `q` will depend on `v0`
-//! and `v1`. In fact, such a `v` will be a witness of usefulness of `q` exactly when the tuple
-//! `(v0, v1)` is a witness of usefulness of `q'` in the following reduced match:
-//!
-//! ```compile_fail,E0004
-//! # fn foo(x: (Option<bool>, u32)) {
-//! match x {
-//! (None, 0) => {} // `p2'`
-//! (Some(_), 0) => {} // `q'`
-//! }
-//! # }
-//! ```
-//!
-//! This motivates a new step in computing usefulness, that we call _specialization_.
-//! Specialization consists of filtering a list of patterns for those that match a constructor, and
-//! then looking into the constructor's fields. This enables usefulness to be computed recursively.
-//!
-//! Instead of acting on a single pattern in each row, we will consider a list of patterns for each
-//! row, and we call such a list a _pattern-stack_. The idea is that we will specialize the
-//! leftmost pattern, which amounts to popping the constructor and pushing its fields, which feels
-//! like a stack. We write a pattern-stack simply as `[p_1 ... p_n]`.
-//! Here's a sequence of specializations of a list of pattern-stacks, to illustrate what's
-//! happening:
-//! ```ignore (illustrative)
-//! [Enum::Variant1(_)]
-//! [Enum::Variant2(None, 0)]
-//! [Enum::Variant2(Some(_), 0)]
-//! //==>> specialize with `Variant2`
-//! [None, 0]
-//! [Some(_), 0]
-//! //==>> specialize with `Some`
-//! [_, 0]
-//! //==>> specialize with `true` (say the type was `bool`)
-//! [0]
-//! //==>> specialize with `0`
-//! []
-//! ```
-//!
-//! The function `specialize(c, p)` takes a value constructor `c` and a pattern `p`, and returns 0
-//! or more pattern-stacks. If `c` does not match the head constructor of `p`, it returns nothing;
-//! otherwise it returns the fields of the constructor. This only returns more than one
-//! pattern-stack if `p` has a pattern-only constructor.
-//!
-//! - Specializing for the wrong constructor returns nothing
-//!
-//! `specialize(None, Some(p0)) := []`
-//!
-//! - Specializing for the correct constructor returns a single row with the fields
-//!
-//! `specialize(Variant1, Variant1(p0, p1, p2)) := [[p0, p1, p2]]`
-//!
-//! `specialize(Foo{..}, Foo { bar: p0, baz: p1 }) := [[p0, p1]]`
-//!
-//! - For or-patterns, we specialize each branch and concatenate the results
-//!
-//! `specialize(c, p0 | p1) := specialize(c, p0) ++ specialize(c, p1)`
-//!
-//! - We treat the other pattern constructors as if they were a large or-pattern of all the
-//! possibilities:
-//!
-//! `specialize(c, _) := specialize(c, Variant1(_) | Variant2(_, _) | ...)`
-//!
-//! `specialize(c, 1..=100) := specialize(c, 1 | ... | 100)`
-//!
-//! `specialize(c, [p0, .., p1]) := specialize(c, [p0, p1] | [p0, _, p1] | [p0, _, _, p1] | ...)`
-//!
-//! - If `c` is a pattern-only constructor, `specialize` is defined on a case-by-case basis. See
-//! the discussion about constructor splitting in [`super::deconstruct_pat`].
-//!
-//!
-//! We then extend this function to work with pattern-stacks as input, by acting on the first
-//! column and keeping the other columns untouched.
-//!
-//! Specialization for the whole matrix is done in [`Matrix::specialize_constructor`]. Note that
-//! or-patterns in the first column are expanded before being stored in the matrix. Specialization
-//! for a single patstack is done from a combination of [`Constructor::is_covered_by`] and
-//! [`PatStack::pop_head_constructor`]. The internals of how it's done mostly live in the
-//! [`super::deconstruct_pat::Fields`] struct.
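
A correspondingly simplified sketch of the `specialize(c, p)` rules listed above, under the same caveats: constructor names and arities are passed in explicitly, and integer ranges, slices, and the other pattern-only constructors are not modeled:

```rust
// Toy sketch of `specialize(c, p)`; all names are invented for the example.
#[derive(Clone)]
enum Pattern {
    Wild,
    Ctor(&'static str, Vec<Pattern>),
    Or(Vec<Pattern>),
}

/// Returns zero or more rows of subpatterns, as in the rules above.
fn specialize(ctor: &str, arity: usize, p: &Pattern) -> Vec<Vec<Pattern>> {
    match p {
        // A wildcard matches any constructor: one row made of `arity` wildcards.
        Pattern::Wild => vec![vec![Pattern::Wild; arity]],
        // Or-patterns: specialize each alternative and concatenate the rows.
        Pattern::Or(alts) => alts.iter().flat_map(|alt| specialize(ctor, arity, alt)).collect(),
        // Matching head constructor: a single row containing its fields.
        Pattern::Ctor(name, fields) if *name == ctor => vec![fields.clone()],
        // Wrong constructor: no rows.
        Pattern::Ctor(..) => vec![],
    }
}

fn main() {
    // specialize(Some, Some(_)) == [[_]] and specialize(Some, None) == [].
    let some_wild = Pattern::Ctor("Some", vec![Pattern::Wild]);
    let none = Pattern::Ctor("None", vec![]);
    assert_eq!(specialize("Some", 1, &some_wild).len(), 1);
    assert!(specialize("Some", 1, &none).is_empty());
    // For `Some(_) | None`, the two alternatives' rows are concatenated.
    let or_pat = Pattern::Or(vec![some_wild, none]);
    assert_eq!(specialize("Some", 1, &or_pat).len(), 1);
}
```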
-//!
-//!
-//! # Computing usefulness
-//!
-//! We now have all we need to compute usefulness. The inputs to usefulness are a list of
-//! pattern-stacks `p_1 ... p_n` (one per row), and a new pattern-stack `q`. The paper and this
-//! file calls the list of patstacks a _matrix_. They must all have the same number of columns and
-//! the patterns in a given column must all have the same type. `usefulness` returns a (possibly
-//! empty) list of witnesses of usefulness. These witnesses will also be pattern-stacks.
-//!
-//! - base case: `n_columns == 0`.
-//! Since a pattern-stack functions like a tuple of patterns, an empty one functions like the
-//! unit type. Thus `q` is useful iff there are no rows above it, i.e. if `n == 0`.
-//!
-//! - inductive case: `n_columns > 0`.
-//! We need a way to list the constructors we want to try. We will be more clever in the next
-//! section but for now assume we list all value constructors for the type of the first column.
-//!
-//! - for each such ctor `c`:
-//!
-//! - for each `q'` returned by `specialize(c, q)`:
-//!
-//! - we compute `usefulness(specialize(c, p_1) ... specialize(c, p_n), q')`
-//!
-//! - for each witness found, we revert specialization by pushing the constructor `c` on top.
-//!
-//! - We return the concatenation of all the witnesses found, if any.
-//!
-//! Example:
-//! ```ignore (illustrative)
-//! [Some(true)] // p_1
-//! [None] // p_2
-//! [Some(_)] // q
-//! //==>> try `None`: `specialize(None, q)` returns nothing
-//! //==>> try `Some`: `specialize(Some, q)` returns a single row
-//! [true] // p_1'
-//! [_] // q'
-//! //==>> try `true`: `specialize(true, q')` returns a single row
-//! [] // p_1''
-//! [] // q''
-//! //==>> base case; `n != 0` so `q''` is not useful.
-//! //==>> go back up a step
-//! [true] // p_1'
-//! [_] // q'
-//! //==>> try `false`: `specialize(false, q')` returns a single row
-//! [] // q''
-//! //==>> base case; `n == 0` so `q''` is useful. We return the single witness `[]`
-//! witnesses:
-//! []
-//! //==>> undo the specialization with `false`
-//! witnesses:
-//! [false]
-//! //==>> undo the specialization with `Some`
-//! witnesses:
-//! [Some(false)]
-//! //==>> we have tried all the constructors. The output is the single witness `[Some(false)]`.
-//! ```
-//!
-//! This computation is done in [`is_useful`]. In practice we don't care about the list of
-//! witnesses when computing reachability; we only need to know whether any exist. We do keep the
-//! witnesses when computing exhaustiveness to report them to the user.
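
For a runnable feel of this recursion, here is a usefulness checker restricted to stacks of boolean patterns, a toy domain invented for the sketch: every constructor has arity 0, so specialization just filters rows and drops the head, and or-patterns and witness collection are left out:

```rust
// Minimal usefulness checker over stacks of boolean patterns only.
#[derive(Clone, Copy, PartialEq)]
enum BoolPat {
    Wild,
    Lit(bool),
}

fn covers(p: BoolPat, c: bool) -> bool {
    matches!(p, BoolPat::Wild) || p == BoolPat::Lit(c)
}

/// Is `q` useful w.r.t. the rows of `matrix`? All rows have the same length as `q`.
fn is_useful(matrix: &[Vec<BoolPat>], q: &[BoolPat]) -> bool {
    // Base case: zero columns. `q` is useful iff the matrix has no rows.
    if q.is_empty() {
        return matrix.is_empty();
    }
    // Inductive case: try each constructor covered by the head of `q`.
    [true, false].iter().copied().any(|c| {
        if !covers(q[0], c) {
            return false;
        }
        // Specialize: keep rows whose head covers `c`, dropping the head column.
        let spec_matrix: Vec<Vec<BoolPat>> = matrix
            .iter()
            .filter(|row| covers(row[0], c))
            .map(|row| row[1..].to_vec())
            .collect();
        is_useful(&spec_matrix, &q[1..])
    })
}

fn main() {
    use BoolPat::*;
    // For `match (a, b) { (true, _) => .., (false, true) => .. }`:
    let matrix = vec![vec![Lit(true), Wild], vec![Lit(false), Lit(true)]];
    // a wildcard row is still useful (the match misses `(false, false)`)...
    assert!(is_useful(&matrix, &[Wild, Wild]));
    // ...while an extra `(false, true)` arm would be unreachable.
    assert!(!is_useful(&matrix, &[Lit(false), Lit(true)]));
}
```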
-//!
-//!
-//! # Making usefulness tractable: constructor splitting
-//!
-//! We're missing one last detail: which constructors do we list? Naively listing all value
-//! constructors cannot work for types like `u64` or `&str`, so we need to be more clever. The
-//! first obvious insight is that we only want to list constructors that are covered by the head
-//! constructor of `q`. If it's a value constructor, we only try that one. If it's a pattern-only
-//! constructor, we use the final clever idea for this algorithm: _constructor splitting_, where we
-//! group together constructors that behave the same.
-//!
-//! The details are not necessary to understand this file, so we explain them in
-//! [`super::deconstruct_pat`]. Splitting is done by the [`Constructor::split`] function.
-//!
-//! # Constants in patterns
-//!
-//! There are two kinds of constants in patterns:
-//!
-//! * literals (`1`, `true`, `"foo"`)
-//! * named or inline consts (`FOO`, `const { 5 + 6 }`)
-//!
-//! The latter are converted into other patterns with literals at the leaves. For example
-//! `const_to_pat(const { [1, 2, 3] })` becomes an `Array(vec![Const(1), Const(2), Const(3)])`
-//! pattern. This gets problematic when comparing the constant via `==` would behave differently
-//! from matching on the constant converted to a pattern. Situations like that can occur, when
-//! the user implements `PartialEq` manually, and thus could make `==` behave arbitrarily different.
-//! In order to honor the `==` implementation, constants of types that implement `PartialEq` manually
-//! stay as a full constant and become an `Opaque` pattern. These `Opaque` patterns do not participate
-//! in exhaustiveness, specialization or overlap checking.
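
A small user-level illustration of the distinction (the manual-`PartialEq`/`Opaque` case is compiler-internal and not shown here): a named constant with derived, structural equality behaves in a match just like the literal pattern it expands to.

```rust
// `EMPTY` has derived (structural) equality, so as a pattern it acts like
// the literal `Flags(0)`; the second arm is still required for exhaustiveness.
#[derive(PartialEq, Eq)]
struct Flags(u8);

const EMPTY: Flags = Flags(0);

fn describe(f: Flags) -> &'static str {
    match f {
        EMPTY => "empty",
        Flags(_) => "non-empty",
    }
}

fn main() {
    assert_eq!(describe(Flags(0)), "empty");
    assert_eq!(describe(Flags(3)), "non-empty");
}
```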
-
-use self::ArmType::*;
-use self::Usefulness::*;
-use super::deconstruct_pat::{
- Constructor, ConstructorSet, DeconstructedPat, IntRange, MaybeInfiniteInt, SplitConstructorSet,
- WitnessPat,
-};
-use crate::errors::{
- NonExhaustiveOmittedPattern, NonExhaustiveOmittedPatternLintOnArm, Overlap,
- OverlappingRangeEndpoints, Uncovered,
-};
-
-use rustc_data_structures::captures::Captures;
-
-use rustc_arena::TypedArena;
-use rustc_data_structures::stack::ensure_sufficient_stack;
-use rustc_hir::def_id::DefId;
-use rustc_hir::HirId;
-use rustc_middle::ty::{self, Ty, TyCtxt};
-use rustc_session::lint;
-use rustc_session::lint::builtin::NON_EXHAUSTIVE_OMITTED_PATTERNS;
-use rustc_span::{Span, DUMMY_SP};
-
-use smallvec::{smallvec, SmallVec};
-use std::fmt;
-
-pub(crate) struct MatchCheckCtxt<'p, 'tcx> {
- pub(crate) tcx: TyCtxt<'tcx>,
- /// The module in which the match occurs. This is necessary for
- /// checking inhabited-ness of types because whether a type is (visibly)
- /// inhabited can depend on whether it was defined in the current module or
- /// not. E.g., `struct Foo { _private: ! }` cannot be seen to be empty
- /// outside its module and should not be matchable with an empty match statement.
- pub(crate) module: DefId,
- pub(crate) param_env: ty::ParamEnv<'tcx>,
- pub(crate) pattern_arena: &'p TypedArena<DeconstructedPat<'p, 'tcx>>,
- /// The span of the whole match, if applicable.
- pub(crate) match_span: Option<Span>,
- /// Only produce `NON_EXHAUSTIVE_OMITTED_PATTERNS` lint on refutable patterns.
- pub(crate) refutable: bool,
-}
-
-impl<'a, 'tcx> MatchCheckCtxt<'a, 'tcx> {
- pub(super) fn is_uninhabited(&self, ty: Ty<'tcx>) -> bool {
- if self.tcx.features().exhaustive_patterns {
- !ty.is_inhabited_from(self.tcx, self.module, self.param_env)
- } else {
- false
- }
- }
-
- /// Returns whether the given type is an enum from another crate declared `#[non_exhaustive]`.
- pub(super) fn is_foreign_non_exhaustive_enum(&self, ty: Ty<'tcx>) -> bool {
- match ty.kind() {
- ty::Adt(def, ..) => {
- def.is_enum() && def.is_variant_list_non_exhaustive() && !def.did().is_local()
- }
- _ => false,
- }
- }
-}
-
-#[derive(Copy, Clone)]
-pub(super) struct PatCtxt<'a, 'p, 'tcx> {
- pub(super) cx: &'a MatchCheckCtxt<'p, 'tcx>,
- /// Type of the current column under investigation.
- pub(super) ty: Ty<'tcx>,
- /// Span of the current pattern under investigation.
- pub(super) span: Span,
- /// Whether the current pattern is the whole pattern as found in a match arm, or if it's a
- /// subpattern.
- pub(super) is_top_level: bool,
-}
-
-impl<'a, 'p, 'tcx> fmt::Debug for PatCtxt<'a, 'p, 'tcx> {
- fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
- f.debug_struct("PatCtxt").field("ty", &self.ty).finish()
- }
-}
-
-/// A row of a matrix. Rows of len 1 are very common, which is why `SmallVec<[_; 2]>`
-/// works well.
-#[derive(Clone)]
-pub(crate) struct PatStack<'p, 'tcx> {
- pub(crate) pats: SmallVec<[&'p DeconstructedPat<'p, 'tcx>; 2]>,
-}
-
-impl<'p, 'tcx> PatStack<'p, 'tcx> {
- fn from_pattern(pat: &'p DeconstructedPat<'p, 'tcx>) -> Self {
- Self::from_vec(smallvec![pat])
- }
-
- fn from_vec(vec: SmallVec<[&'p DeconstructedPat<'p, 'tcx>; 2]>) -> Self {
- PatStack { pats: vec }
- }
-
- fn is_empty(&self) -> bool {
- self.pats.is_empty()
- }
-
- fn len(&self) -> usize {
- self.pats.len()
- }
-
- fn head(&self) -> &'p DeconstructedPat<'p, 'tcx> {
- self.pats[0]
- }
-
- fn iter(&self) -> impl Iterator<Item = &DeconstructedPat<'p, 'tcx>> {
- self.pats.iter().copied()
- }
-
- // Recursively expand the first pattern into its subpatterns. Only useful if the pattern is an
- // or-pattern. Panics if `self` is empty.
- fn expand_or_pat<'a>(&'a self) -> impl Iterator<Item = PatStack<'p, 'tcx>> + Captures<'a> {
- self.head().iter_fields().map(move |pat| {
- let mut new_patstack = PatStack::from_pattern(pat);
- new_patstack.pats.extend_from_slice(&self.pats[1..]);
- new_patstack
- })
- }
-
- // Recursively expand all patterns into their subpatterns and push each `PatStack` to matrix.
- fn expand_and_extend<'a>(&'a self, matrix: &mut Matrix<'p, 'tcx>) {
- if !self.is_empty() && self.head().is_or_pat() {
- for pat in self.head().iter_fields() {
- let mut new_patstack = PatStack::from_pattern(pat);
- new_patstack.pats.extend_from_slice(&self.pats[1..]);
- if !new_patstack.is_empty() && new_patstack.head().is_or_pat() {
- new_patstack.expand_and_extend(matrix);
- } else if !new_patstack.is_empty() {
- matrix.push(new_patstack);
- }
- }
- }
- }
-
- /// This computes `S(self.head().ctor(), self)`. See top of the file for explanations.
- ///
- /// Structure patterns with a partial wild pattern (Foo { a: 42, .. }) have their missing
- /// fields filled with wild patterns.
- ///
- /// This is roughly the inverse of `Constructor::apply`.
- fn pop_head_constructor(
- &self,
- pcx: &PatCtxt<'_, 'p, 'tcx>,
- ctor: &Constructor<'tcx>,
- ) -> PatStack<'p, 'tcx> {
- // We pop the head pattern and push the new fields extracted from the arguments of
- // `self.head()`.
- let mut new_fields: SmallVec<[_; 2]> = self.head().specialize(pcx, ctor);
- new_fields.extend_from_slice(&self.pats[1..]);
- PatStack::from_vec(new_fields)
- }
-}
-
-/// Pretty-printing for matrix row.
-impl<'p, 'tcx> fmt::Debug for PatStack<'p, 'tcx> {
- fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
- write!(f, "+")?;
- for pat in self.iter() {
- write!(f, " {pat:?} +")?;
- }
- Ok(())
- }
-}
-
-/// A 2D matrix.
-#[derive(Clone)]
-pub(super) struct Matrix<'p, 'tcx> {
- pub patterns: Vec<PatStack<'p, 'tcx>>,
-}
-
-impl<'p, 'tcx> Matrix<'p, 'tcx> {
- fn empty() -> Self {
- Matrix { patterns: vec![] }
- }
-
- /// Pushes a new row to the matrix. If the row starts with an or-pattern, this recursively
- /// expands it.
- fn push(&mut self, row: PatStack<'p, 'tcx>) {
- if !row.is_empty() && row.head().is_or_pat() {
- row.expand_and_extend(self);
- } else {
- self.patterns.push(row);
- }
- }
-
- /// Iterate over the first component of each row
- fn heads<'a>(
- &'a self,
- ) -> impl Iterator<Item = &'p DeconstructedPat<'p, 'tcx>> + Clone + Captures<'a> {
- self.patterns.iter().map(|r| r.head())
- }
-
- /// This computes `S(constructor, self)`. See top of the file for explanations.
- fn specialize_constructor(
- &self,
- pcx: &PatCtxt<'_, 'p, 'tcx>,
- ctor: &Constructor<'tcx>,
- ) -> Matrix<'p, 'tcx> {
- let mut matrix = Matrix::empty();
- for row in &self.patterns {
- if ctor.is_covered_by(pcx, row.head().ctor()) {
- let new_row = row.pop_head_constructor(pcx, ctor);
- matrix.push(new_row);
- }
- }
- matrix
- }
-}
-
-/// Pretty-printer for matrices of patterns, example:
-///
-/// ```text
-/// + _ + [] +
-/// + true + [First] +
-/// + true + [Second(true)] +
-/// + false + [_] +
-/// + _ + [_, _, tail @ ..] +
-/// ```
-impl<'p, 'tcx> fmt::Debug for Matrix<'p, 'tcx> {
- fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
- write!(f, "\n")?;
-
- let Matrix { patterns: m, .. } = self;
- let pretty_printed_matrix: Vec<Vec<String>> =
- m.iter().map(|row| row.iter().map(|pat| format!("{pat:?}")).collect()).collect();
-
- let column_count = m.iter().map(|row| row.len()).next().unwrap_or(0);
- assert!(m.iter().all(|row| row.len() == column_count));
- let column_widths: Vec<usize> = (0..column_count)
- .map(|col| pretty_printed_matrix.iter().map(|row| row[col].len()).max().unwrap_or(0))
- .collect();
-
- for row in pretty_printed_matrix {
- write!(f, "+")?;
- for (column, pat_str) in row.into_iter().enumerate() {
- write!(f, " ")?;
- write!(f, "{:1$}", pat_str, column_widths[column])?;
- write!(f, " +")?;
- }
- write!(f, "\n")?;
- }
- Ok(())
- }
-}
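
The cell padding above uses a positional width argument; a tiny standalone illustration of `{:1$}`:

```rust
fn main() {
    // Argument 1 supplies the minimum field width; strings are left-aligned.
    let cell = format!("{:1$}", "Some(_)", 12);
    assert_eq!(cell.len(), 12); // padded with spaces on the right
    println!("+ {cell} +"); // the cell occupies exactly 12 columns
}
```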
-
-/// This carries the results of computing usefulness, as described at the top of the file. When
-/// checking usefulness of a match branch, we use the `NoWitnesses` variant, which also keeps track
-/// of potentially unreachable sub-patterns (in the presence of or-patterns). When checking
-/// exhaustiveness of a whole match, we use the `WithWitnesses` variant, which carries a list of
-/// witnesses of non-exhaustiveness when there are any.
-/// Which variant to use is dictated by `ArmType`.
-#[derive(Debug, Clone)]
-enum Usefulness<'tcx> {
- /// If we don't care about witnesses, simply remember if the pattern was useful.
- NoWitnesses { useful: bool },
- /// Carries a list of witnesses of non-exhaustiveness. If empty, indicates that the whole
- /// pattern is unreachable.
- WithWitnesses(Vec<WitnessStack<'tcx>>),
-}
-
-impl<'tcx> Usefulness<'tcx> {
- fn new_useful(preference: ArmType) -> Self {
- match preference {
- // A single (empty) witness of reachability.
- FakeExtraWildcard => WithWitnesses(vec![WitnessStack(vec![])]),
- RealArm => NoWitnesses { useful: true },
- }
- }
-
- fn new_not_useful(preference: ArmType) -> Self {
- match preference {
- FakeExtraWildcard => WithWitnesses(vec![]),
- RealArm => NoWitnesses { useful: false },
- }
- }
-
- fn is_useful(&self) -> bool {
- match self {
- Usefulness::NoWitnesses { useful } => *useful,
- Usefulness::WithWitnesses(witnesses) => !witnesses.is_empty(),
- }
- }
-
- /// Combine usefulnesses from two branches. This is an associative operation.
- fn extend(&mut self, other: Self) {
- match (&mut *self, other) {
- (WithWitnesses(_), WithWitnesses(o)) if o.is_empty() => {}
- (WithWitnesses(s), WithWitnesses(o)) if s.is_empty() => *self = WithWitnesses(o),
- (WithWitnesses(s), WithWitnesses(o)) => s.extend(o),
- (NoWitnesses { useful: s_useful }, NoWitnesses { useful: o_useful }) => {
- *s_useful = *s_useful || o_useful
- }
- _ => unreachable!(),
- }
- }
-
- /// After calculating usefulness after a specialization, call this to reconstruct a usefulness
- /// that makes sense for the matrix pre-specialization. This new usefulness can then be merged
- /// with the results of specializing with the other constructors.
- fn apply_constructor(
- self,
- pcx: &PatCtxt<'_, '_, 'tcx>,
- matrix: &Matrix<'_, 'tcx>, // used to compute missing ctors
- ctor: &Constructor<'tcx>,
- ) -> Self {
- match self {
- NoWitnesses { .. } => self,
- WithWitnesses(ref witnesses) if witnesses.is_empty() => self,
- WithWitnesses(witnesses) => {
- let new_witnesses = if let Constructor::Missing { .. } = ctor {
- let mut missing = ConstructorSet::for_ty(pcx.cx, pcx.ty)
- .compute_missing(pcx, matrix.heads().map(DeconstructedPat::ctor));
- if missing.iter().any(|c| c.is_non_exhaustive()) {
- // We only report `_` here; listing other constructors would be redundant.
- missing = vec![Constructor::NonExhaustive];
- }
-
- // We got the special `Missing` constructor, so each of the missing constructors
- // gives a new pattern that is not caught by the match.
- // We construct for each missing constructor a version of this constructor with
- // wildcards for fields, i.e. that matches everything that can be built with it.
- // For example, if `ctor` is a `Constructor::Variant` for `Option::Some`, we get
- // the pattern `Some(_)`.
- let new_patterns: Vec<WitnessPat<'_>> = missing
- .into_iter()
- .map(|missing_ctor| WitnessPat::wild_from_ctor(pcx, missing_ctor.clone()))
- .collect();
-
- witnesses
- .into_iter()
- .flat_map(|witness| {
- new_patterns.iter().map(move |pat| {
- let mut stack = witness.clone();
- stack.0.push(pat.clone());
- stack
- })
- })
- .collect()
- } else {
- witnesses
- .into_iter()
- .map(|witness| witness.apply_constructor(pcx, &ctor))
- .collect()
- };
- WithWitnesses(new_witnesses)
- }
- }
- }
-}
-
-#[derive(Copy, Clone, Debug)]
-enum ArmType {
- FakeExtraWildcard,
- RealArm,
-}
-
-/// A witness-tuple of non-exhaustiveness for error reporting, represented as a list of patterns (in
-/// reverse order of construction) with wildcards inside to represent elements that can take any
-/// inhabitant of the type as a value.
-///
-/// This mirrors `PatStack`: they function similarly, except `PatStack` contains user patterns we
-/// are inspecting, and `WitnessStack` contains witnesses we are constructing.
-/// FIXME(Nadrieril): use the same order of patterns for both
-///
-/// A `WitnessStack` should have the same types and length as the `PatStacks` we are inspecting
-/// (except we store the patterns in reverse order). Because Rust `match` is always against a single
-/// pattern, at the end the stack will have length 1. In the middle of the algorithm, it can contain
-/// multiple patterns.
-///
-/// For example, if we are constructing a witness for the match against
-///
-/// ```compile_fail,E0004
-/// struct Pair(Option<(u32, u32)>, bool);
-/// # fn foo(p: Pair) {
-/// match p {
-/// Pair(None, _) => {}
-/// Pair(_, false) => {}
-/// }
-/// # }
-/// ```
-///
-/// We'll perform the following steps (among others):
-/// - Start with a matrix representing the match
-/// `PatStack(vec![Pair(None, _)])`
-/// `PatStack(vec![Pair(_, false)])`
-/// - Specialize with `Pair`
-/// `PatStack(vec![None, _])`
-/// `PatStack(vec![_, false])`
-/// - Specialize with `Some`
-/// `PatStack(vec![_, false])`
-/// - Specialize with `_`
-/// `PatStack(vec![false])`
-/// - Specialize with `true`
-/// // no patstacks left
-/// - This is a non-exhaustive match: we have the empty witness stack as a witness.
-/// `WitnessStack(vec![])`
-/// - Apply `true`
-/// `WitnessStack(vec![true])`
-/// - Apply `_`
-/// `WitnessStack(vec![true, _])`
-/// - Apply `Some`
-/// `WitnessStack(vec![true, Some(_)])`
-/// - Apply `Pair`
-/// `WitnessStack(vec![Pair(Some(_), true)])`
-///
-/// The final `Pair(Some(_), true)` is then the resulting witness.
-#[derive(Debug, Clone)]
-pub(crate) struct WitnessStack<'tcx>(Vec<WitnessPat<'tcx>>);
-
-impl<'tcx> WitnessStack<'tcx> {
- /// Asserts that the witness contains a single pattern, and returns it.
- fn single_pattern(self) -> WitnessPat<'tcx> {
- assert_eq!(self.0.len(), 1);
- self.0.into_iter().next().unwrap()
- }
-
- /// Constructs a partial witness for a pattern given a list of
- /// patterns expanded by the specialization step.
- ///
- /// When a pattern P is discovered to be useful, this function is used bottom-up
- /// to reconstruct a complete witness, e.g., a pattern P' that covers a subset
- /// of values, V, where each value in that set is not covered by any previously
- /// used patterns and is covered by the pattern P'. Examples:
- ///
- /// left_ty: tuple of 3 elements
- /// pats: [10, 20, _] => (10, 20, _)
- ///
- /// left_ty: struct X { a: (bool, &'static str), b: usize}
- /// pats: [(false, "foo"), 42] => X { a: (false, "foo"), b: 42 }
- fn apply_constructor(mut self, pcx: &PatCtxt<'_, '_, 'tcx>, ctor: &Constructor<'tcx>) -> Self {
- let pat = {
- let len = self.0.len();
- let arity = ctor.arity(pcx);
- let fields = self.0.drain((len - arity)..).rev().collect();
- WitnessPat::new(ctor.clone(), fields, pcx.ty)
- };
-
- self.0.push(pat);
-
- self
- }
-}
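
The drain-and-reverse step in `apply_constructor` can be shown with a toy stack of strings standing in for `WitnessPat`s (names invented for the sketch); this reproduces the `Pair(Some(_), true)` reconstruction from the `WitnessStack` example above:

```rust
// Pop the last `arity` witness fields (stored in reverse) and push the
// reconstructed pattern in their place.
fn apply_constructor(stack: &mut Vec<String>, ctor: &str, arity: usize) {
    let len = stack.len();
    let fields: Vec<String> = stack.drain((len - arity)..).rev().collect();
    stack.push(format!("{ctor}({})", fields.join(", ")));
}

fn main() {
    // Rebuilding `Pair(Some(_), true)` from the witness stack `[true, Some(_)]`.
    let mut stack = vec!["true".to_string(), "Some(_)".to_string()];
    apply_constructor(&mut stack, "Pair", 2);
    assert_eq!(stack, vec!["Pair(Some(_), true)".to_string()]);
}
```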
-
-/// Algorithm from <http://moscova.inria.fr/~maranget/papers/warn/index.html>.
-/// The algorithm from the paper has been modified to correctly handle empty
-/// types. The changes are:
-/// (0) We don't exit early if the pattern matrix has zero rows. We just
-/// continue to recurse over columns.
-/// (1) all_constructors will only return constructors that are statically
-/// possible. E.g., it will only return `Ok` for `Result<T, !>`.
-///
-/// This finds whether a (row) vector `v` of patterns is 'useful' in relation
-/// to a set of such vectors `m` - this is defined as there being a set of
-/// inputs that will match `v` but not any of the sets in `m`.
-///
-/// All the patterns at each column of the `matrix ++ v` matrix must have the same type.
-///
-/// This is used both for reachability checking (if a pattern isn't useful in
-/// relation to preceding patterns, it is not reachable) and exhaustiveness
-/// checking (if a wildcard pattern is useful in relation to a matrix, the
-/// matrix isn't exhaustive).
-///
-/// `is_under_guard` is used to inform if the pattern has a guard. If it
-/// has one it must not be inserted into the matrix. This shouldn't be
-/// relied on for soundness.
-#[instrument(level = "debug", skip(cx, matrix, lint_root), ret)]
-fn is_useful<'p, 'tcx>(
- cx: &MatchCheckCtxt<'p, 'tcx>,
- matrix: &Matrix<'p, 'tcx>,
- v: &PatStack<'p, 'tcx>,
- witness_preference: ArmType,
- lint_root: HirId,
- is_under_guard: bool,
- is_top_level: bool,
-) -> Usefulness<'tcx> {
- debug!(?matrix, ?v);
- let Matrix { patterns: rows, .. } = matrix;
-
- // The base case. We are pattern-matching on () and the return value is
- // based on whether our matrix has a row or not.
- // NOTE: This could potentially be optimized by checking rows.is_empty()
- // first and then, if v is non-empty, the return value is based on whether
- // the type of the tuple we're checking is inhabited or not.
- if v.is_empty() {
- let ret = if rows.is_empty() {
- Usefulness::new_useful(witness_preference)
- } else {
- Usefulness::new_not_useful(witness_preference)
- };
- debug!(?ret);
- return ret;
- }
-
- debug_assert!(rows.iter().all(|r| r.len() == v.len()));
-
- // If the first pattern is an or-pattern, expand it.
- let mut ret = Usefulness::new_not_useful(witness_preference);
- if v.head().is_or_pat() {
- debug!("expanding or-pattern");
- // We try each or-pattern branch in turn.
- let mut matrix = matrix.clone();
- for v in v.expand_or_pat() {
- debug!(?v);
- let usefulness = ensure_sufficient_stack(|| {
- is_useful(cx, &matrix, &v, witness_preference, lint_root, is_under_guard, false)
- });
- debug!(?usefulness);
- ret.extend(usefulness);
- // If pattern has a guard don't add it to the matrix.
- if !is_under_guard {
- // We push the already-seen patterns into the matrix in order to detect redundant
- // branches like `Some(_) | Some(0)`.
- matrix.push(v);
- }
- }
- } else {
- let mut ty = v.head().ty();
-
- // Opaque types can't get destructured/split, but the patterns can
- // actually hint at hidden types, so we use the patterns' types instead.
- if let ty::Alias(ty::Opaque, ..) = ty.kind() {
- if let Some(row) = rows.first() {
- ty = row.head().ty();
- }
- }
- debug!("v.head: {:?}, v.span: {:?}", v.head(), v.head().span());
- let pcx = &PatCtxt { cx, ty, span: v.head().span(), is_top_level };
-
- let v_ctor = v.head().ctor();
- debug!(?v_ctor);
- // We split the head constructor of `v`.
- let split_ctors = v_ctor.split(pcx, matrix.heads().map(DeconstructedPat::ctor));
- // For each constructor, we compute whether there's a value that starts with it that would
- // witness the usefulness of `v`.
- let start_matrix = &matrix;
- for ctor in split_ctors {
- debug!("specialize({:?})", ctor);
- // We cache the result of `Fields::wildcards` because it is used a lot.
- let spec_matrix = start_matrix.specialize_constructor(pcx, &ctor);
- let v = v.pop_head_constructor(pcx, &ctor);
- let usefulness = ensure_sufficient_stack(|| {
- is_useful(
- cx,
- &spec_matrix,
- &v,
- witness_preference,
- lint_root,
- is_under_guard,
- false,
- )
- });
- let usefulness = usefulness.apply_constructor(pcx, start_matrix, &ctor);
- ret.extend(usefulness);
- }
- }
-
- if ret.is_useful() {
- v.head().set_reachable();
- }
-
- ret
-}
-
-/// A column of patterns in the matrix, where a column is the intuitive notion of "subpatterns that
-/// inspect the same subvalue".
-/// This is used to traverse patterns column-by-column for lints. Despite similarities with
-/// `is_useful`, this is a different traversal. Notably this is linear in the depth of patterns,
-/// whereas `is_useful` is worst-case exponential (exhaustiveness is NP-complete).
-#[derive(Debug)]
-struct PatternColumn<'p, 'tcx> {
- patterns: Vec<&'p DeconstructedPat<'p, 'tcx>>,
-}
-
-impl<'p, 'tcx> PatternColumn<'p, 'tcx> {
- fn new(patterns: Vec<&'p DeconstructedPat<'p, 'tcx>>) -> Self {
- Self { patterns }
- }
-
- fn is_empty(&self) -> bool {
- self.patterns.is_empty()
- }
- fn head_ty(&self) -> Option<Ty<'tcx>> {
- if self.patterns.len() == 0 {
- return None;
- }
- // If the type is opaque and it is revealed anywhere in the column, we take the revealed
- // version. Otherwise we could encounter constructors for the revealed type and crash.
- let is_opaque = |ty: Ty<'tcx>| matches!(ty.kind(), ty::Alias(ty::Opaque, ..));
- let first_ty = self.patterns[0].ty();
- if is_opaque(first_ty) {
- for pat in &self.patterns {
- let ty = pat.ty();
- if !is_opaque(ty) {
- return Some(ty);
- }
- }
- }
- Some(first_ty)
- }
-
- fn analyze_ctors(&self, pcx: &PatCtxt<'_, 'p, 'tcx>) -> SplitConstructorSet<'tcx> {
- let column_ctors = self.patterns.iter().map(|p| p.ctor());
- ConstructorSet::for_ty(pcx.cx, pcx.ty).split(pcx, column_ctors)
- }
- fn iter<'a>(&'a self) -> impl Iterator<Item = &'p DeconstructedPat<'p, 'tcx>> + Captures<'a> {
- self.patterns.iter().copied()
- }
-
- /// Does specialization: given a constructor, this takes the patterns from the column that match
- /// the constructor, and outputs their fields.
- /// This returns one column per field of the constructor. They normally all have the same length
- /// (the number of patterns in `self` that matched `ctor`), except that we expand or-patterns
- /// which may change the lengths.
- fn specialize(&self, pcx: &PatCtxt<'_, 'p, 'tcx>, ctor: &Constructor<'tcx>) -> Vec<Self> {
- let arity = ctor.arity(pcx);
- if arity == 0 {
- return Vec::new();
- }
-
- // We specialize the column by `ctor`. This gives us `arity`-many columns of patterns. These
- // columns may have different lengths in the presence of or-patterns (this is why we can't
- // reuse `Matrix`).
- let mut specialized_columns: Vec<_> =
- (0..arity).map(|_| Self { patterns: Vec::new() }).collect();
- let relevant_patterns =
- self.patterns.iter().filter(|pat| ctor.is_covered_by(pcx, pat.ctor()));
- for pat in relevant_patterns {
- let specialized = pat.specialize(pcx, &ctor);
- for (subpat, column) in specialized.iter().zip(&mut specialized_columns) {
- if subpat.is_or_pat() {
- column.patterns.extend(subpat.flatten_or_pat())
- } else {
- column.patterns.push(subpat)
- }
- }
- }
-
- assert!(
- !specialized_columns[0].is_empty(),
- "ctor {ctor:?} was listed as present but isn't;
- there is an inconsistency between `Constructor::is_covered_by` and `ConstructorSet::split`"
- );
- specialized_columns
- }
-}
-
-/// Traverse the patterns to collect any variants of a non_exhaustive enum that fail to be mentioned
-/// in a given column.
-#[instrument(level = "debug", skip(cx), ret)]
-fn collect_nonexhaustive_missing_variants<'p, 'tcx>(
- cx: &MatchCheckCtxt<'p, 'tcx>,
- column: &PatternColumn<'p, 'tcx>,
-) -> Vec<WitnessPat<'tcx>> {
- let Some(ty) = column.head_ty() else {
- return Vec::new();
- };
- let pcx = &PatCtxt { cx, ty, span: DUMMY_SP, is_top_level: false };
-
- let set = column.analyze_ctors(pcx);
- if set.present.is_empty() {
- // We can't consistently handle the case where no constructors are present (since this would
- // require digging deep through any type in case there's a non_exhaustive enum somewhere),
- // so for consistency we refuse to handle the top-level case, where we could handle it.
- return vec![];
- }
-
- let mut witnesses = Vec::new();
- if cx.is_foreign_non_exhaustive_enum(ty) {
- witnesses.extend(
- set.missing
- .into_iter()
- // This will list missing visible variants.
- .filter(|c| !matches!(c, Constructor::Hidden | Constructor::NonExhaustive))
- .map(|missing_ctor| WitnessPat::wild_from_ctor(pcx, missing_ctor)),
- )
- }
-
- // Recurse into the fields.
- for ctor in set.present {
- let specialized_columns = column.specialize(pcx, &ctor);
- let wild_pat = WitnessPat::wild_from_ctor(pcx, ctor);
- for (i, col_i) in specialized_columns.iter().enumerate() {
- // Compute witnesses for each column.
- let wits_for_col_i = collect_nonexhaustive_missing_variants(cx, col_i);
- // For each witness, we build a new pattern in the shape of `ctor(_, _, wit, _, _)`,
- // adding enough wildcards to match `arity`.
- for wit in wits_for_col_i {
- let mut pat = wild_pat.clone();
- pat.fields[i] = wit;
- witnesses.push(pat);
- }
- }
- }
- witnesses
-}
-
-/// Traverse the patterns to warn the user about ranges that overlap on their endpoints.
-#[instrument(level = "debug", skip(cx, lint_root))]
-fn lint_overlapping_range_endpoints<'p, 'tcx>(
- cx: &MatchCheckCtxt<'p, 'tcx>,
- column: &PatternColumn<'p, 'tcx>,
- lint_root: HirId,
-) {
- let Some(ty) = column.head_ty() else {
- return;
- };
- let pcx = &PatCtxt { cx, ty, span: DUMMY_SP, is_top_level: false };
-
- let set = column.analyze_ctors(pcx);
-
- if IntRange::is_integral(ty) {
- let emit_lint = |overlap: &IntRange, this_span: Span, overlapped_spans: &[Span]| {
- let overlap_as_pat = overlap.to_diagnostic_pat(ty, cx.tcx);
- let overlaps: Vec<_> = overlapped_spans
- .iter()
- .copied()
- .map(|span| Overlap { range: overlap_as_pat.clone(), span })
- .collect();
- cx.tcx.emit_spanned_lint(
- lint::builtin::OVERLAPPING_RANGE_ENDPOINTS,
- lint_root,
- this_span,
- OverlappingRangeEndpoints { overlap: overlaps, range: this_span },
- );
- };
-
- // If two ranges overlapped, the split set will contain their intersection as a singleton.
- let split_int_ranges = set.present.iter().filter_map(|c| c.as_int_range());
- for overlap_range in split_int_ranges.clone() {
- if overlap_range.is_singleton() {
- let overlap: MaybeInfiniteInt = overlap_range.lo;
- // Ranges that look like `lo..=overlap`.
- let mut prefixes: SmallVec<[_; 1]> = Default::default();
- // Ranges that look like `overlap..=hi`.
- let mut suffixes: SmallVec<[_; 1]> = Default::default();
- // Iterate on patterns that contained `overlap`.
- for pat in column.iter() {
- let this_span = pat.span();
- let Constructor::IntRange(this_range) = pat.ctor() else { continue };
- if this_range.is_singleton() {
- // Don't lint when one of the ranges is a singleton.
- continue;
- }
- if this_range.lo == overlap {
- // `this_range` looks like `overlap..=this_range.hi`; it overlaps with any
- // ranges that look like `lo..=overlap`.
- if !prefixes.is_empty() {
- emit_lint(overlap_range, this_span, &prefixes);
- }
- suffixes.push(this_span)
- } else if this_range.hi == overlap.plus_one() {
- // `this_range` looks like `this_range.lo..=overlap`; it overlaps with any
- // ranges that look like `overlap..=hi`.
- if !suffixes.is_empty() {
- emit_lint(overlap_range, this_span, &suffixes);
- }
- prefixes.push(this_span)
- }
- }
- }
- }
- } else {
- // Recurse into the fields.
- for ctor in set.present {
- for col in column.specialize(pcx, &ctor) {
- lint_overlapping_range_endpoints(cx, &col, lint_root);
- }
- }
- }
-}
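
At the user level, this lint fires on matches like the following, where both range arms include `10` (assuming current `overlapping_range_endpoints` behavior):

```rust
// Both arms include 10, so rustc warns about overlapping range endpoints;
// at runtime the first matching arm wins either way.
fn classify(x: u32) -> &'static str {
    match x {
        0..=10 => "low",
        10..=20 => "mid",
        _ => "high",
    }
}

fn main() {
    assert_eq!(classify(10), "low");
    assert_eq!(classify(15), "mid");
    assert_eq!(classify(30), "high");
}
```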
-
-/// The arm of a match expression.
-#[derive(Clone, Copy, Debug)]
-pub(crate) struct MatchArm<'p, 'tcx> {
- /// The pattern must have been lowered through `check_match::MatchVisitor::lower_pattern`.
- pub(crate) pat: &'p DeconstructedPat<'p, 'tcx>,
- pub(crate) hir_id: HirId,
- pub(crate) has_guard: bool,
-}
-
-/// Indicates whether or not a given arm is reachable.
-#[derive(Clone, Debug)]
-pub(crate) enum Reachability {
- /// The arm is reachable. This additionally carries a set of or-pattern branches that have been
- /// found to be unreachable despite the overall arm being reachable. Used only in the presence
- /// of or-patterns, otherwise it stays empty.
- Reachable(Vec<Span>),
- /// The arm is unreachable.
- Unreachable,
-}
-
-/// The output of checking a match for exhaustiveness and arm reachability.
-pub(crate) struct UsefulnessReport<'p, 'tcx> {
- /// For each arm of the input, whether that arm is reachable after the arms above it.
- pub(crate) arm_usefulness: Vec<(MatchArm<'p, 'tcx>, Reachability)>,
- /// If the match is exhaustive, this is empty. If not, this contains witnesses for the lack of
- /// exhaustiveness.
- pub(crate) non_exhaustiveness_witnesses: Vec<WitnessPat<'tcx>>,
-}
-
-/// The entrypoint for the usefulness algorithm. Computes whether a match is exhaustive and which
-/// of its arms are reachable.
-///
-/// Note: the input patterns must have been lowered through
-/// `check_match::MatchVisitor::lower_pattern`.
-#[instrument(skip(cx, arms), level = "debug")]
-pub(crate) fn compute_match_usefulness<'p, 'tcx>(
- cx: &MatchCheckCtxt<'p, 'tcx>,
- arms: &[MatchArm<'p, 'tcx>],
- lint_root: HirId,
- scrut_ty: Ty<'tcx>,
- scrut_span: Span,
-) -> UsefulnessReport<'p, 'tcx> {
- let mut matrix = Matrix::empty();
- let arm_usefulness: Vec<_> = arms
- .iter()
- .copied()
- .map(|arm| {
- debug!(?arm);
- let v = PatStack::from_pattern(arm.pat);
- is_useful(cx, &matrix, &v, RealArm, arm.hir_id, arm.has_guard, true);
- if !arm.has_guard {
- matrix.push(v);
- }
- let reachability = if arm.pat.is_reachable() {
- Reachability::Reachable(arm.pat.unreachable_spans())
- } else {
- Reachability::Unreachable
- };
- (arm, reachability)
- })
- .collect();
-
- let wild_pattern = cx.pattern_arena.alloc(DeconstructedPat::wildcard(scrut_ty, DUMMY_SP));
- let v = PatStack::from_pattern(wild_pattern);
- let usefulness = is_useful(cx, &matrix, &v, FakeExtraWildcard, lint_root, false, true);
- let non_exhaustiveness_witnesses: Vec<_> = match usefulness {
- WithWitnesses(pats) => pats.into_iter().map(|w| w.single_pattern()).collect(),
- NoWitnesses { .. } => bug!(),
- };
-
- let pat_column = arms.iter().flat_map(|arm| arm.pat.flatten_or_pat()).collect::<Vec<_>>();
- let pat_column = PatternColumn::new(pat_column);
- lint_overlapping_range_endpoints(cx, &pat_column, lint_root);
-
- // Run the non_exhaustive_omitted_patterns lint. Only run on refutable patterns to avoid hitting
- // `if let`s. Only run if the match is exhaustive otherwise the error is redundant.
- if cx.refutable && non_exhaustiveness_witnesses.is_empty() {
- if !matches!(
- cx.tcx.lint_level_at_node(NON_EXHAUSTIVE_OMITTED_PATTERNS, lint_root).0,
- rustc_session::lint::Level::Allow
- ) {
- let witnesses = collect_nonexhaustive_missing_variants(cx, &pat_column);
-
- if !witnesses.is_empty() {
- // Report that a match of a `non_exhaustive` enum marked with `non_exhaustive_omitted_patterns`
- // is not exhaustive enough.
- //
- // NB: The partner lint for structs lives in `compiler/rustc_hir_analysis/src/check/pat.rs`.
- cx.tcx.emit_spanned_lint(
- NON_EXHAUSTIVE_OMITTED_PATTERNS,
- lint_root,
- scrut_span,
- NonExhaustiveOmittedPattern {
- scrut_ty,
- uncovered: Uncovered::new(scrut_span, cx, witnesses),
- },
- );
- }
- } else {
- // We used to allow putting the `#[allow(non_exhaustive_omitted_patterns)]` on a match
- // arm. This no longer makes sense so we warn users, to avoid silently breaking their
- // usage of the lint.
- for arm in arms {
- let (lint_level, lint_level_source) =
- cx.tcx.lint_level_at_node(NON_EXHAUSTIVE_OMITTED_PATTERNS, arm.hir_id);
- if !matches!(lint_level, rustc_session::lint::Level::Allow) {
- let decorator = NonExhaustiveOmittedPatternLintOnArm {
- lint_span: lint_level_source.span(),
- suggest_lint_on_match: cx.match_span.map(|span| span.shrink_to_lo()),
- lint_level: lint_level.as_str(),
- lint_name: "non_exhaustive_omitted_patterns",
- };
-
- use rustc_errors::DecorateLint;
- let mut err = cx.tcx.sess.struct_span_warn(arm.pat.span(), "");
- err.set_primary_message(decorator.msg());
- decorator.decorate_lint(&mut err);
- err.emit();
- }
- }
- }
- }
-
- UsefulnessReport { arm_usefulness, non_exhaustiveness_witnesses }
-}
diff --git a/compiler/rustc_mir_build/src/thir/print.rs b/compiler/rustc_mir_build/src/thir/print.rs
index c3b2309b7..28be31399 100644
--- a/compiler/rustc_mir_build/src/thir/print.rs
+++ b/compiler/rustc_mir_build/src/thir/print.rs
@@ -31,8 +31,8 @@ const INDENT: &str = " ";
macro_rules! print_indented {
($writer:ident, $s:expr, $indent_lvl:expr) => {
- let indent = (0..$indent_lvl).map(|_| INDENT).collect::<Vec<_>>().concat();
- writeln!($writer, "{}{}", indent, $s).expect("unable to write to ThirPrinter");
+ $writer.indent($indent_lvl);
+ writeln!($writer, "{}", $s).expect("unable to write to ThirPrinter");
};
}
@@ -48,6 +48,12 @@ impl<'a, 'tcx> ThirPrinter<'a, 'tcx> {
Self { thir, fmt: String::new() }
}
+ fn indent(&mut self, level: usize) {
+ for _ in 0..level {
+ self.fmt.push_str(INDENT);
+ }
+ }
+
fn print(&mut self) {
print_indented!(self, "params: [", 0);
for param in self.thir.params.iter() {
@@ -85,23 +91,11 @@ impl<'a, 'tcx> ThirPrinter<'a, 'tcx> {
}
fn print_block(&mut self, block_id: BlockId, depth_lvl: usize) {
- let Block {
- targeted_by_break,
- opt_destruction_scope,
- span,
- region_scope,
- stmts,
- expr,
- safety_mode,
- } = &self.thir.blocks[block_id];
+ let Block { targeted_by_break, span, region_scope, stmts, expr, safety_mode } =
+ &self.thir.blocks[block_id];
print_indented!(self, "Block {", depth_lvl);
print_indented!(self, format!("targeted_by_break: {}", targeted_by_break), depth_lvl + 1);
- print_indented!(
- self,
- format!("opt_destruction_scope: {:?}", opt_destruction_scope),
- depth_lvl + 1
- );
print_indented!(self, format!("span: {:?}", span), depth_lvl + 1);
print_indented!(self, format!("region_scope: {:?}", region_scope), depth_lvl + 1);
print_indented!(self, format!("safety_mode: {:?}", safety_mode), depth_lvl + 1);
@@ -127,14 +121,9 @@ impl<'a, 'tcx> ThirPrinter<'a, 'tcx> {
}
fn print_stmt(&mut self, stmt_id: StmtId, depth_lvl: usize) {
- let Stmt { kind, opt_destruction_scope } = &self.thir.stmts[stmt_id];
+ let Stmt { kind } = &self.thir.stmts[stmt_id];
print_indented!(self, "Stmt {", depth_lvl);
- print_indented!(
- self,
- format!("opt_destruction_scope: {:?}", opt_destruction_scope),
- depth_lvl + 1
- );
match kind {
StmtKind::Expr { scope, expr } => {
@@ -636,6 +625,9 @@ impl<'a, 'tcx> ThirPrinter<'a, 'tcx> {
PatKind::Wild => {
print_indented!(self, "Wild", depth_lvl + 1);
}
+ PatKind::Never => {
+ print_indented!(self, "Never", depth_lvl + 1);
+ }
PatKind::AscribeUserType { ascription, subpattern } => {
print_indented!(self, "AscribeUserType: {", depth_lvl + 1);
print_indented!(self, format!("ascription: {:?}", ascription), depth_lvl + 2);