Diffstat (limited to 'compiler')
-rw-r--r--compiler/rustc_abi/Cargo.toml24
-rw-r--r--compiler/rustc_abi/src/layout.rs945
-rw-r--r--compiler/rustc_abi/src/lib.rs1502
-rw-r--r--compiler/rustc_arena/src/tests.rs16
-rw-r--r--compiler/rustc_ast/Cargo.toml2
-rw-r--r--compiler/rustc_ast/src/ast.rs419
-rw-r--r--compiler/rustc_ast/src/attr/mod.rs313
-rw-r--r--compiler/rustc_ast/src/lib.rs1
-rw-r--r--compiler/rustc_ast/src/mut_visit.rs91
-rw-r--r--compiler/rustc_ast/src/token.rs88
-rw-r--r--compiler/rustc_ast/src/tokenstream.rs28
-rw-r--r--compiler/rustc_ast/src/util/case.rs6
-rw-r--r--compiler/rustc_ast/src/util/classify.rs5
-rw-r--r--compiler/rustc_ast/src/util/comments.rs4
-rw-r--r--compiler/rustc_ast/src/util/literal.rs88
-rw-r--r--compiler/rustc_ast/src/util/parser.rs28
-rw-r--r--compiler/rustc_ast/src/util/unicode.rs2
-rw-r--r--compiler/rustc_ast/src/visit.rs293
-rw-r--r--compiler/rustc_ast_lowering/Cargo.toml2
-rw-r--r--compiler/rustc_ast_lowering/src/asm.rs43
-rw-r--r--compiler/rustc_ast_lowering/src/block.rs10
-rw-r--r--compiler/rustc_ast_lowering/src/errors.rs3
-rw-r--r--compiler/rustc_ast_lowering/src/expr.rs439
-rw-r--r--compiler/rustc_ast_lowering/src/index.rs10
-rw-r--r--compiler/rustc_ast_lowering/src/item.rs343
-rw-r--r--compiler/rustc_ast_lowering/src/lib.rs365
-rw-r--r--compiler/rustc_ast_lowering/src/pat.rs59
-rw-r--r--compiler/rustc_ast_lowering/src/path.rs58
-rw-r--r--compiler/rustc_ast_passes/src/ast_validation.rs102
-rw-r--r--compiler/rustc_ast_passes/src/feature_gate.rs20
-rw-r--r--compiler/rustc_ast_pretty/src/helpers.rs4
-rw-r--r--compiler/rustc_ast_pretty/src/pprust/state.rs274
-rw-r--r--compiler/rustc_ast_pretty/src/pprust/state/expr.rs168
-rw-r--r--compiler/rustc_ast_pretty/src/pprust/state/item.rs128
-rw-r--r--compiler/rustc_attr/src/builtin.rs144
-rw-r--r--compiler/rustc_attr/src/session_diagnostics.rs4
-rw-r--r--compiler/rustc_baked_icu_data/Cargo.toml15
-rw-r--r--compiler/rustc_baked_icu_data/src/data/any.rs42
-rw-r--r--compiler/rustc_baked_icu_data/src/data/fallback/likelysubtags_v1.rs733
-rw-r--r--compiler/rustc_baked_icu_data/src/data/fallback/mod.rs4
-rw-r--r--compiler/rustc_baked_icu_data/src/data/fallback/parents_v1.rs207
-rw-r--r--compiler/rustc_baked_icu_data/src/data/fallback/supplement/co_v1.rs41
-rw-r--r--compiler/rustc_baked_icu_data/src/data/fallback/supplement/mod.rs2
-rw-r--r--compiler/rustc_baked_icu_data/src/data/list/and_v1.rs1161
-rw-r--r--compiler/rustc_baked_icu_data/src/data/list/mod.rs2
-rw-r--r--compiler/rustc_baked_icu_data/src/data/mod.rs90
-rw-r--r--compiler/rustc_baked_icu_data/src/lib.rs46
-rw-r--r--compiler/rustc_borrowck/src/borrow_set.rs10
-rw-r--r--compiler/rustc_borrowck/src/borrowck_errors.rs13
-rw-r--r--compiler/rustc_borrowck/src/constraint_generation.rs2
-rw-r--r--compiler/rustc_borrowck/src/constraints/graph.rs1
-rw-r--r--compiler/rustc_borrowck/src/constraints/mod.rs6
-rw-r--r--compiler/rustc_borrowck/src/consumers.rs2
-rw-r--r--compiler/rustc_borrowck/src/dataflow.rs12
-rw-r--r--compiler/rustc_borrowck/src/def_use.rs2
-rw-r--r--compiler/rustc_borrowck/src/diagnostics/bound_region_errors.rs59
-rw-r--r--compiler/rustc_borrowck/src/diagnostics/conflict_errors.rs355
-rw-r--r--compiler/rustc_borrowck/src/diagnostics/explain_borrow.rs23
-rw-r--r--compiler/rustc_borrowck/src/diagnostics/find_all_local_uses.rs3
-rw-r--r--compiler/rustc_borrowck/src/diagnostics/find_use.rs3
-rw-r--r--compiler/rustc_borrowck/src/diagnostics/mod.rs123
-rw-r--r--compiler/rustc_borrowck/src/diagnostics/mutability_errors.rs43
-rw-r--r--compiler/rustc_borrowck/src/diagnostics/region_errors.rs24
-rw-r--r--compiler/rustc_borrowck/src/diagnostics/region_name.rs36
-rw-r--r--compiler/rustc_borrowck/src/diagnostics/var_name.rs3
-rw-r--r--compiler/rustc_borrowck/src/facts.rs2
-rw-r--r--compiler/rustc_borrowck/src/invalidation.rs67
-rw-r--r--compiler/rustc_borrowck/src/lib.rs115
-rw-r--r--compiler/rustc_borrowck/src/location.rs2
-rw-r--r--compiler/rustc_borrowck/src/member_constraints.rs4
-rw-r--r--compiler/rustc_borrowck/src/nll.rs19
-rw-r--r--compiler/rustc_borrowck/src/path_utils.rs2
-rw-r--r--compiler/rustc_borrowck/src/place_ext.rs2
-rw-r--r--compiler/rustc_borrowck/src/places_conflict.rs16
-rw-r--r--compiler/rustc_borrowck/src/prefixes.rs2
-rw-r--r--compiler/rustc_borrowck/src/region_infer/dump_mir.rs5
-rw-r--r--compiler/rustc_borrowck/src/region_infer/graphviz.rs2
-rw-r--r--compiler/rustc_borrowck/src/region_infer/mod.rs175
-rw-r--r--compiler/rustc_borrowck/src/region_infer/opaque_types.rs119
-rw-r--r--compiler/rustc_borrowck/src/region_infer/reverse_sccs.rs2
-rw-r--r--compiler/rustc_borrowck/src/region_infer/values.rs2
-rw-r--r--compiler/rustc_borrowck/src/renumber.rs2
-rw-r--r--compiler/rustc_borrowck/src/session_diagnostics.rs94
-rw-r--r--compiler/rustc_borrowck/src/type_check/canonical.rs13
-rw-r--r--compiler/rustc_borrowck/src/type_check/constraint_conversion.rs82
-rw-r--r--compiler/rustc_borrowck/src/type_check/free_region_relations.rs5
-rw-r--r--compiler/rustc_borrowck/src/type_check/input_output.rs7
-rw-r--r--compiler/rustc_borrowck/src/type_check/liveness/polonius.rs4
-rw-r--r--compiler/rustc_borrowck/src/type_check/mod.rs200
-rw-r--r--compiler/rustc_borrowck/src/type_check/relate_tys.rs17
-rw-r--r--compiler/rustc_borrowck/src/universal_regions.rs133
-rw-r--r--compiler/rustc_borrowck/src/used_muts.rs2
-rw-r--r--compiler/rustc_builtin_macros/Cargo.toml2
-rw-r--r--compiler/rustc_builtin_macros/src/alloc_error_handler.rs93
-rw-r--r--compiler/rustc_builtin_macros/src/asm.rs8
-rw-r--r--compiler/rustc_builtin_macros/src/assert.rs10
-rw-r--r--compiler/rustc_builtin_macros/src/assert/context.rs71
-rw-r--r--compiler/rustc_builtin_macros/src/cfg_accessible.rs7
-rw-r--r--compiler/rustc_builtin_macros/src/concat.rs28
-rw-r--r--compiler/rustc_builtin_macros/src/concat_bytes.rs108
-rw-r--r--compiler/rustc_builtin_macros/src/derive.rs59
-rw-r--r--compiler/rustc_builtin_macros/src/deriving/bounds.rs4
-rw-r--r--compiler/rustc_builtin_macros/src/deriving/clone.rs22
-rw-r--r--compiler/rustc_builtin_macros/src/deriving/cmp/eq.rs15
-rw-r--r--compiler/rustc_builtin_macros/src/deriving/cmp/ord.rs6
-rw-r--r--compiler/rustc_builtin_macros/src/deriving/cmp/partial_eq.rs6
-rw-r--r--compiler/rustc_builtin_macros/src/deriving/cmp/partial_ord.rs6
-rw-r--r--compiler/rustc_builtin_macros/src/deriving/debug.rs3
-rw-r--r--compiler/rustc_builtin_macros/src/deriving/decodable.rs23
-rw-r--r--compiler/rustc_builtin_macros/src/deriving/default.rs21
-rw-r--r--compiler/rustc_builtin_macros/src/deriving/encodable.rs11
-rw-r--r--compiler/rustc_builtin_macros/src/deriving/generic/mod.rs225
-rw-r--r--compiler/rustc_builtin_macros/src/deriving/generic/ty.rs9
-rw-r--r--compiler/rustc_builtin_macros/src/deriving/hash.rs3
-rw-r--r--compiler/rustc_builtin_macros/src/deriving/mod.rs43
-rw-r--r--compiler/rustc_builtin_macros/src/edition_panic.rs36
-rw-r--r--compiler/rustc_builtin_macros/src/env.rs2
-rw-r--r--compiler/rustc_builtin_macros/src/format_foreign.rs34
-rw-r--r--compiler/rustc_builtin_macros/src/format_foreign/printf/tests.rs2
-rw-r--r--compiler/rustc_builtin_macros/src/format_foreign/shell/tests.rs2
-rw-r--r--compiler/rustc_builtin_macros/src/global_allocator.rs36
-rw-r--r--compiler/rustc_builtin_macros/src/lib.rs7
-rw-r--r--compiler/rustc_builtin_macros/src/proc_macro_harness.rs14
-rw-r--r--compiler/rustc_builtin_macros/src/source_util.rs7
-rw-r--r--compiler/rustc_builtin_macros/src/standard_library_imports.rs4
-rw-r--r--compiler/rustc_builtin_macros/src/test.rs142
-rw-r--r--compiler/rustc_builtin_macros/src/test_harness.rs23
-rw-r--r--compiler/rustc_builtin_macros/src/type_ascribe.rs35
-rw-r--r--compiler/rustc_builtin_macros/src/util.rs11
-rw-r--r--compiler/rustc_codegen_cranelift/Cargo.lock6
-rw-r--r--compiler/rustc_codegen_cranelift/Cargo.toml1
-rw-r--r--compiler/rustc_codegen_cranelift/build_system/utils.rs2
-rw-r--r--compiler/rustc_codegen_cranelift/src/abi/mod.rs25
-rw-r--r--compiler/rustc_codegen_cranelift/src/allocator.rs9
-rw-r--r--compiler/rustc_codegen_cranelift/src/archive.rs224
-rw-r--r--compiler/rustc_codegen_cranelift/src/config.rs2
-rw-r--r--compiler/rustc_codegen_cranelift/src/constant.rs77
-rw-r--r--compiler/rustc_codegen_cranelift/src/debuginfo/mod.rs2
-rw-r--r--compiler/rustc_codegen_cranelift/src/main_shim.rs8
-rw-r--r--compiler/rustc_codegen_cranelift/src/value_and_place.rs10
-rw-r--r--compiler/rustc_codegen_gcc/Cargo.lock14
-rw-r--r--compiler/rustc_codegen_gcc/Cargo.toml4
-rw-r--r--compiler/rustc_codegen_gcc/example/alloc_system.rs8
-rw-r--r--compiler/rustc_codegen_gcc/src/allocator.rs11
-rw-r--r--compiler/rustc_codegen_gcc/src/archive.rs177
-rw-r--r--compiler/rustc_codegen_gcc/src/builder.rs9
-rw-r--r--compiler/rustc_codegen_gcc/src/consts.rs31
-rw-r--r--compiler/rustc_codegen_gcc/src/context.rs9
-rw-r--r--compiler/rustc_codegen_gcc/src/debuginfo.rs57
-rw-r--r--compiler/rustc_codegen_gcc/src/declare.rs4
-rw-r--r--compiler/rustc_codegen_gcc/src/errors.rs21
-rw-r--r--compiler/rustc_codegen_gcc/src/lib.rs12
-rw-r--r--compiler/rustc_codegen_gcc/src/type_.rs44
-rw-r--r--compiler/rustc_codegen_llvm/Cargo.toml2
-rw-r--r--compiler/rustc_codegen_llvm/src/abi.rs40
-rw-r--r--compiler/rustc_codegen_llvm/src/allocator.rs5
-rw-r--r--compiler/rustc_codegen_llvm/src/asm.rs76
-rw-r--r--compiler/rustc_codegen_llvm/src/attributes.rs37
-rw-r--r--compiler/rustc_codegen_llvm/src/back/archive.rs164
-rw-r--r--compiler/rustc_codegen_llvm/src/back/lto.rs9
-rw-r--r--compiler/rustc_codegen_llvm/src/back/write.rs16
-rw-r--r--compiler/rustc_codegen_llvm/src/builder.rs23
-rw-r--r--compiler/rustc_codegen_llvm/src/callee.rs15
-rw-r--r--compiler/rustc_codegen_llvm/src/consts.rs62
-rw-r--r--compiler/rustc_codegen_llvm/src/context.rs22
-rw-r--r--compiler/rustc_codegen_llvm/src/coverageinfo/mapgen.rs5
-rw-r--r--compiler/rustc_codegen_llvm/src/coverageinfo/mod.rs2
-rw-r--r--compiler/rustc_codegen_llvm/src/debuginfo/metadata.rs2
-rw-r--r--compiler/rustc_codegen_llvm/src/debuginfo/metadata/enums/cpp_like.rs2
-rw-r--r--compiler/rustc_codegen_llvm/src/debuginfo/metadata/enums/mod.rs18
-rw-r--r--compiler/rustc_codegen_llvm/src/debuginfo/mod.rs12
-rw-r--r--compiler/rustc_codegen_llvm/src/debuginfo/utils.rs2
-rw-r--r--compiler/rustc_codegen_llvm/src/declare.rs22
-rw-r--r--compiler/rustc_codegen_llvm/src/errors.rs120
-rw-r--r--compiler/rustc_codegen_llvm/src/intrinsic.rs17
-rw-r--r--compiler/rustc_codegen_llvm/src/lib.rs10
-rw-r--r--compiler/rustc_codegen_llvm/src/llvm/ffi.rs30
-rw-r--r--compiler/rustc_codegen_llvm/src/llvm/mod.rs7
-rw-r--r--compiler/rustc_codegen_llvm/src/llvm_util.rs89
-rw-r--r--compiler/rustc_codegen_llvm/src/mono_item.rs7
-rw-r--r--compiler/rustc_codegen_llvm/src/type_.rs10
-rw-r--r--compiler/rustc_codegen_llvm/src/type_of.rs2
-rw-r--r--compiler/rustc_codegen_ssa/Cargo.toml1
-rw-r--r--compiler/rustc_codegen_ssa/src/back/archive.rs265
-rw-r--r--compiler/rustc_codegen_ssa/src/back/link.rs882
-rw-r--r--compiler/rustc_codegen_ssa/src/back/linker.rs12
-rw-r--r--compiler/rustc_codegen_ssa/src/back/metadata.rs82
-rw-r--r--compiler/rustc_codegen_ssa/src/back/symbol_export.rs10
-rw-r--r--compiler/rustc_codegen_ssa/src/back/write.rs42
-rw-r--r--compiler/rustc_codegen_ssa/src/base.rs41
-rw-r--r--compiler/rustc_codegen_ssa/src/debuginfo/type_names.rs36
-rw-r--r--compiler/rustc_codegen_ssa/src/errors.rs194
-rw-r--r--compiler/rustc_codegen_ssa/src/glue.rs2
-rw-r--r--compiler/rustc_codegen_ssa/src/lib.rs15
-rw-r--r--compiler/rustc_codegen_ssa/src/mir/block.rs551
-rw-r--r--compiler/rustc_codegen_ssa/src/mir/constant.rs9
-rw-r--r--compiler/rustc_codegen_ssa/src/mir/debuginfo.rs71
-rw-r--r--compiler/rustc_codegen_ssa/src/mir/mod.rs28
-rw-r--r--compiler/rustc_codegen_ssa/src/mir/operand.rs4
-rw-r--r--compiler/rustc_codegen_ssa/src/mir/place.rs211
-rw-r--r--compiler/rustc_codegen_ssa/src/mir/rvalue.rs173
-rw-r--r--compiler/rustc_codegen_ssa/src/mir/statement.rs47
-rw-r--r--compiler/rustc_codegen_ssa/src/mono_item.rs12
-rw-r--r--compiler/rustc_codegen_ssa/src/target_features.rs5
-rw-r--r--compiler/rustc_codegen_ssa/src/traits/backend.rs2
-rw-r--r--compiler/rustc_codegen_ssa/src/traits/builder.rs4
-rw-r--r--compiler/rustc_codegen_ssa/src/traits/debuginfo.rs5
-rw-r--r--compiler/rustc_codegen_ssa/src/traits/type_.rs1
-rw-r--r--compiler/rustc_const_eval/Cargo.toml1
-rw-r--r--compiler/rustc_const_eval/src/const_eval/error.rs14
-rw-r--r--compiler/rustc_const_eval/src/const_eval/eval_queries.rs39
-rw-r--r--compiler/rustc_const_eval/src/const_eval/machine.rs188
-rw-r--r--compiler/rustc_const_eval/src/const_eval/mod.rs4
-rw-r--r--compiler/rustc_const_eval/src/interpret/eval_context.rs108
-rw-r--r--compiler/rustc_const_eval/src/interpret/intern.rs4
-rw-r--r--compiler/rustc_const_eval/src/interpret/intrinsics.rs26
-rw-r--r--compiler/rustc_const_eval/src/interpret/intrinsics/caller_location.rs4
-rw-r--r--compiler/rustc_const_eval/src/interpret/machine.rs20
-rw-r--r--compiler/rustc_const_eval/src/interpret/memory.rs48
-rw-r--r--compiler/rustc_const_eval/src/interpret/operand.rs109
-rw-r--r--compiler/rustc_const_eval/src/interpret/place.rs56
-rw-r--r--compiler/rustc_const_eval/src/interpret/projection.rs12
-rw-r--r--compiler/rustc_const_eval/src/interpret/step.rs54
-rw-r--r--compiler/rustc_const_eval/src/interpret/traits.rs2
-rw-r--r--compiler/rustc_const_eval/src/interpret/validity.rs24
-rw-r--r--compiler/rustc_const_eval/src/interpret/visitor.rs2
-rw-r--r--compiler/rustc_const_eval/src/transform/check_consts/check.rs81
-rw-r--r--compiler/rustc_const_eval/src/transform/check_consts/mod.rs18
-rw-r--r--compiler/rustc_const_eval/src/transform/check_consts/ops.rs20
-rw-r--r--compiler/rustc_const_eval/src/transform/check_consts/qualifs.rs24
-rw-r--r--compiler/rustc_const_eval/src/transform/promote_consts.rs13
-rw-r--r--compiler/rustc_const_eval/src/transform/validate.rs76
-rw-r--r--compiler/rustc_const_eval/src/util/aggregate.rs3
-rw-r--r--compiler/rustc_const_eval/src/util/call_kind.rs22
-rw-r--r--compiler/rustc_const_eval/src/util/compare_types.rs63
-rw-r--r--compiler/rustc_const_eval/src/util/mod.rs4
-rw-r--r--compiler/rustc_const_eval/src/util/type_name.rs (renamed from compiler/rustc_const_eval/src/interpret/intrinsics/type_name.rs)31
-rw-r--r--compiler/rustc_data_structures/Cargo.toml4
-rw-r--r--compiler/rustc_data_structures/src/fingerprint.rs2
-rw-r--r--compiler/rustc_data_structures/src/functor.rs36
-rw-r--r--compiler/rustc_data_structures/src/intern.rs88
-rw-r--r--compiler/rustc_data_structures/src/memmap.rs10
-rw-r--r--compiler/rustc_data_structures/src/owning_ref/mod.rs24
-rw-r--r--compiler/rustc_data_structures/src/profiling.rs6
-rw-r--r--compiler/rustc_data_structures/src/sorted_map.rs8
-rw-r--r--compiler/rustc_data_structures/src/sorted_map/index_map.rs9
-rw-r--r--compiler/rustc_data_structures/src/stable_hasher.rs105
-rw-r--r--compiler/rustc_data_structures/src/sync.rs13
-rw-r--r--compiler/rustc_driver/src/lib.rs86
-rw-r--r--compiler/rustc_error_codes/src/error_codes.rs1
-rw-r--r--compiler/rustc_error_codes/src/error_codes/E0207.md75
-rw-r--r--compiler/rustc_error_codes/src/error_codes/E0322.md3
-rw-r--r--compiler/rustc_error_codes/src/error_codes/E0382.md2
-rw-r--r--compiler/rustc_error_codes/src/error_codes/E0706.md2
-rw-r--r--compiler/rustc_error_codes/src/error_codes/E0760.md4
-rw-r--r--compiler/rustc_error_codes/src/error_codes/E0791.md41
-rw-r--r--compiler/rustc_error_messages/Cargo.toml8
-rw-r--r--compiler/rustc_error_messages/locales/en-US/borrowck.ftl68
-rw-r--r--compiler/rustc_error_messages/locales/en-US/codegen_gcc.ftl6
-rw-r--r--compiler/rustc_error_messages/locales/en-US/codegen_llvm.ftl49
-rw-r--r--compiler/rustc_error_messages/locales/en-US/codegen_ssa.ftl73
-rw-r--r--compiler/rustc_error_messages/locales/en-US/hir_analysis.ftl42
-rw-r--r--compiler/rustc_error_messages/locales/en-US/hir_typeck.ftl48
-rw-r--r--compiler/rustc_error_messages/locales/en-US/infer.ftl5
-rw-r--r--compiler/rustc_error_messages/locales/en-US/lint.ftl3
-rw-r--r--compiler/rustc_error_messages/locales/en-US/metadata.ftl18
-rw-r--r--compiler/rustc_error_messages/locales/en-US/middle.ftl7
-rw-r--r--compiler/rustc_error_messages/locales/en-US/monomorphize.ftl3
-rw-r--r--compiler/rustc_error_messages/locales/en-US/parse.ftl364
-rw-r--r--compiler/rustc_error_messages/locales/en-US/parser.ftl371
-rw-r--r--compiler/rustc_error_messages/locales/en-US/passes.ftl54
-rw-r--r--compiler/rustc_error_messages/locales/en-US/resolve.ftl211
-rw-r--r--compiler/rustc_error_messages/locales/en-US/session.ftl28
-rw-r--r--compiler/rustc_error_messages/src/lib.rs97
-rw-r--r--compiler/rustc_errors/Cargo.toml4
-rw-r--r--compiler/rustc_errors/src/annotate_snippet_emitter_writer.rs7
-rw-r--r--compiler/rustc_errors/src/diagnostic.rs82
-rw-r--r--compiler/rustc_errors/src/diagnostic_builder.rs16
-rw-r--r--compiler/rustc_errors/src/diagnostic_impls.rs34
-rw-r--r--compiler/rustc_errors/src/emitter.rs196
-rw-r--r--compiler/rustc_errors/src/json.rs10
-rw-r--r--compiler/rustc_errors/src/json/tests.rs1
-rw-r--r--compiler/rustc_errors/src/lib.rs71
-rw-r--r--compiler/rustc_errors/src/translation.rs44
-rw-r--r--compiler/rustc_expand/Cargo.toml15
-rw-r--r--compiler/rustc_expand/src/base.rs36
-rw-r--r--compiler/rustc_expand/src/build.rs50
-rw-r--r--compiler/rustc_expand/src/config.rs2
-rw-r--r--compiler/rustc_expand/src/expand.rs42
-rw-r--r--compiler/rustc_expand/src/mbe.rs3
-rw-r--r--compiler/rustc_expand/src/mbe/diagnostics.rs257
-rw-r--r--compiler/rustc_expand/src/mbe/macro_parser.rs99
-rw-r--r--compiler/rustc_expand/src/mbe/macro_rules.rs407
-rw-r--r--compiler/rustc_expand/src/parse/tests.rs2
-rw-r--r--compiler/rustc_expand/src/placeholders.rs14
-rw-r--r--compiler/rustc_expand/src/proc_macro.rs1
-rw-r--r--compiler/rustc_expand/src/proc_macro_server.rs21
-rw-r--r--compiler/rustc_expand/src/tests.rs109
-rw-r--r--compiler/rustc_feature/src/accepted.rs4
-rw-r--r--compiler/rustc_feature/src/active.rs21
-rw-r--r--compiler/rustc_feature/src/builtin_attrs.rs20
-rw-r--r--compiler/rustc_fs_util/src/lib.rs2
-rw-r--r--compiler/rustc_graphviz/src/lib.rs12
-rw-r--r--compiler/rustc_hir/src/arena.rs1
-rw-r--r--compiler/rustc_hir/src/def.rs22
-rw-r--r--compiler/rustc_hir/src/definitions.rs8
-rw-r--r--compiler/rustc_hir/src/hir.rs223
-rw-r--r--compiler/rustc_hir/src/hir_id.rs22
-rw-r--r--compiler/rustc_hir/src/intravisit.rs742
-rw-r--r--compiler/rustc_hir/src/lang_items.rs230
-rw-r--r--compiler/rustc_hir/src/lib.rs3
-rw-r--r--compiler/rustc_hir/src/pat_util.rs5
-rw-r--r--compiler/rustc_hir/src/tests.rs38
-rw-r--r--compiler/rustc_hir/src/weak_lang_items.rs59
-rw-r--r--compiler/rustc_hir_analysis/src/astconv/errors.rs7
-rw-r--r--compiler/rustc_hir_analysis/src/astconv/generics.rs5
-rw-r--r--compiler/rustc_hir_analysis/src/astconv/mod.rs240
-rw-r--r--compiler/rustc_hir_analysis/src/bounds.rs11
-rw-r--r--compiler/rustc_hir_analysis/src/check/check.rs263
-rw-r--r--compiler/rustc_hir_analysis/src/check/compare_method.rs576
-rw-r--r--compiler/rustc_hir_analysis/src/check/dropck.rs39
-rw-r--r--compiler/rustc_hir_analysis/src/check/intrinsic.rs17
-rw-r--r--compiler/rustc_hir_analysis/src/check/mod.rs6
-rw-r--r--compiler/rustc_hir_analysis/src/check/region.rs47
-rw-r--r--compiler/rustc_hir_analysis/src/check/wfcheck.rs220
-rw-r--r--compiler/rustc_hir_analysis/src/check_unused.rs19
-rw-r--r--compiler/rustc_hir_analysis/src/coherence/builtin.rs17
-rw-r--r--compiler/rustc_hir_analysis/src/coherence/mod.rs66
-rw-r--r--compiler/rustc_hir_analysis/src/coherence/orphan.rs59
-rw-r--r--compiler/rustc_hir_analysis/src/collect.rs250
-rw-r--r--compiler/rustc_hir_analysis/src/collect/generics_of.rs13
-rw-r--r--compiler/rustc_hir_analysis/src/collect/item_bounds.rs8
-rw-r--r--compiler/rustc_hir_analysis/src/collect/lifetimes.rs577
-rw-r--r--compiler/rustc_hir_analysis/src/collect/predicates_of.rs191
-rw-r--r--compiler/rustc_hir_analysis/src/collect/type_of.rs18
-rw-r--r--compiler/rustc_hir_analysis/src/constrained_generic_params.rs3
-rw-r--r--compiler/rustc_hir_analysis/src/errors.rs20
-rw-r--r--compiler/rustc_hir_analysis/src/hir_wf_check.rs6
-rw-r--r--compiler/rustc_hir_analysis/src/impl_wf_check/min_specialization.rs149
-rw-r--r--compiler/rustc_hir_analysis/src/lib.rs52
-rw-r--r--compiler/rustc_hir_analysis/src/outlives/explicit.rs43
-rw-r--r--compiler/rustc_hir_analysis/src/outlives/mod.rs24
-rw-r--r--compiler/rustc_hir_analysis/src/structured_errors/wrong_number_of_generic_args.rs71
-rw-r--r--compiler/rustc_hir_analysis/src/variance/constraints.rs4
-rw-r--r--compiler/rustc_hir_analysis/src/variance/mod.rs131
-rw-r--r--compiler/rustc_hir_analysis/src/variance/terms.rs18
-rw-r--r--compiler/rustc_hir_pretty/src/lib.rs25
-rw-r--r--compiler/rustc_hir_typeck/src/_match.rs160
-rw-r--r--compiler/rustc_hir_typeck/src/autoderef.rs19
-rw-r--r--compiler/rustc_hir_typeck/src/callee.rs70
-rw-r--r--compiler/rustc_hir_typeck/src/cast.rs52
-rw-r--r--compiler/rustc_hir_typeck/src/check.rs139
-rw-r--r--compiler/rustc_hir_typeck/src/closure.rs250
-rw-r--r--compiler/rustc_hir_typeck/src/coercion.rs174
-rw-r--r--compiler/rustc_hir_typeck/src/demand.rs129
-rw-r--r--compiler/rustc_hir_typeck/src/errors.rs80
-rw-r--r--compiler/rustc_hir_typeck/src/expectation.rs6
-rw-r--r--compiler/rustc_hir_typeck/src/expr.rs148
-rw-r--r--compiler/rustc_hir_typeck/src/expr_use_visitor.rs43
-rw-r--r--compiler/rustc_hir_typeck/src/fallback.rs28
-rw-r--r--compiler/rustc_hir_typeck/src/fn_ctxt/_impl.rs249
-rw-r--r--compiler/rustc_hir_typeck/src/fn_ctxt/checks.rs86
-rw-r--r--compiler/rustc_hir_typeck/src/fn_ctxt/mod.rs66
-rw-r--r--compiler/rustc_hir_typeck/src/fn_ctxt/suggestions.rs149
-rw-r--r--compiler/rustc_hir_typeck/src/generator_interior/drop_ranges/cfg_build.rs63
-rw-r--r--compiler/rustc_hir_typeck/src/generator_interior/drop_ranges/mod.rs4
-rw-r--r--compiler/rustc_hir_typeck/src/generator_interior/mod.rs129
-rw-r--r--compiler/rustc_hir_typeck/src/inherited.rs82
-rw-r--r--compiler/rustc_hir_typeck/src/intrinsicck.rs7
-rw-r--r--compiler/rustc_hir_typeck/src/lib.rs100
-rw-r--r--compiler/rustc_hir_typeck/src/mem_categorization.rs2
-rw-r--r--compiler/rustc_hir_typeck/src/method/confirm.rs26
-rw-r--r--compiler/rustc_hir_typeck/src/method/mod.rs169
-rw-r--r--compiler/rustc_hir_typeck/src/method/prelude2021.rs28
-rw-r--r--compiler/rustc_hir_typeck/src/method/probe.rs210
-rw-r--r--compiler/rustc_hir_typeck/src/method/suggest.rs555
-rw-r--r--compiler/rustc_hir_typeck/src/op.rs37
-rw-r--r--compiler/rustc_hir_typeck/src/pat.rs106
-rw-r--r--compiler/rustc_hir_typeck/src/place_op.rs7
-rw-r--r--compiler/rustc_hir_typeck/src/upvar.rs24
-rw-r--r--compiler/rustc_hir_typeck/src/writeback.rs52
-rw-r--r--compiler/rustc_incremental/src/persist/dirty_clean.rs6
-rw-r--r--compiler/rustc_incremental/src/persist/fs.rs7
-rw-r--r--compiler/rustc_incremental/src/persist/save.rs4
-rw-r--r--compiler/rustc_index/Cargo.toml8
-rw-r--r--compiler/rustc_index/src/lib.rs22
-rw-r--r--compiler/rustc_index/src/vec.rs7
-rw-r--r--compiler/rustc_infer/src/errors/mod.rs51
-rw-r--r--compiler/rustc_infer/src/errors/note_and_explain.rs7
-rw-r--r--compiler/rustc_infer/src/infer/at.rs5
-rw-r--r--compiler/rustc_infer/src/infer/canonical/canonicalizer.rs12
-rw-r--r--compiler/rustc_infer/src/infer/canonical/mod.rs18
-rw-r--r--compiler/rustc_infer/src/infer/canonical/query_response.rs25
-rw-r--r--compiler/rustc_infer/src/infer/combine.rs55
-rw-r--r--compiler/rustc_infer/src/infer/equate.rs8
-rw-r--r--compiler/rustc_infer/src/infer/error_reporting/mod.rs933
-rw-r--r--compiler/rustc_infer/src/infer/error_reporting/need_type_info.rs9
-rw-r--r--compiler/rustc_infer/src/infer/error_reporting/nice_region_error/mismatched_static_lifetime.rs4
-rw-r--r--compiler/rustc_infer/src/infer/error_reporting/nice_region_error/mod.rs5
-rw-r--r--compiler/rustc_infer/src/infer/error_reporting/nice_region_error/named_anon_conflict.rs2
-rw-r--r--compiler/rustc_infer/src/infer/error_reporting/nice_region_error/placeholder_error.rs5
-rw-r--r--compiler/rustc_infer/src/infer/error_reporting/nice_region_error/placeholder_relation.rs79
-rw-r--r--compiler/rustc_infer/src/infer/error_reporting/nice_region_error/static_impl_trait.rs38
-rw-r--r--compiler/rustc_infer/src/infer/error_reporting/nice_region_error/util.rs2
-rw-r--r--compiler/rustc_infer/src/infer/error_reporting/note.rs2
-rw-r--r--compiler/rustc_infer/src/infer/error_reporting/note_region.rs427
-rw-r--r--compiler/rustc_infer/src/infer/error_reporting/suggest.rs672
-rw-r--r--compiler/rustc_infer/src/infer/free_regions.rs8
-rw-r--r--compiler/rustc_infer/src/infer/freshen.rs3
-rw-r--r--compiler/rustc_infer/src/infer/glb.rs9
-rw-r--r--compiler/rustc_infer/src/infer/higher_ranked/mod.rs9
-rw-r--r--compiler/rustc_infer/src/infer/lexical_region_resolve/mod.rs3
-rw-r--r--compiler/rustc_infer/src/infer/lub.rs9
-rw-r--r--compiler/rustc_infer/src/infer/mod.rs210
-rw-r--r--compiler/rustc_infer/src/infer/nll_relate/mod.rs49
-rw-r--r--compiler/rustc_infer/src/infer/note.rs203
-rw-r--r--compiler/rustc_infer/src/infer/opaque_types.rs102
-rw-r--r--compiler/rustc_infer/src/infer/opaque_types/table.rs8
-rw-r--r--compiler/rustc_infer/src/infer/outlives/mod.rs14
-rw-r--r--compiler/rustc_infer/src/infer/outlives/obligations.rs3
-rw-r--r--compiler/rustc_infer/src/infer/outlives/test_type_match.rs16
-rw-r--r--compiler/rustc_infer/src/infer/projection.rs10
-rw-r--r--compiler/rustc_infer/src/infer/region_constraints/leak_check.rs7
-rw-r--r--compiler/rustc_infer/src/infer/region_constraints/mod.rs4
-rw-r--r--compiler/rustc_infer/src/infer/resolve.rs86
-rw-r--r--compiler/rustc_infer/src/infer/sub.rs21
-rw-r--r--compiler/rustc_infer/src/lib.rs1
-rw-r--r--compiler/rustc_infer/src/traits/engine.rs10
-rw-r--r--compiler/rustc_infer/src/traits/error_reporting/mod.rs4
-rw-r--r--compiler/rustc_infer/src/traits/mod.rs33
-rw-r--r--compiler/rustc_infer/src/traits/util.rs28
-rw-r--r--compiler/rustc_interface/Cargo.toml8
-rw-r--r--compiler/rustc_interface/src/interface.rs4
-rw-r--r--compiler/rustc_interface/src/lib.rs1
-rw-r--r--compiler/rustc_interface/src/passes.rs68
-rw-r--r--compiler/rustc_interface/src/proc_macro_decls.rs15
-rw-r--r--compiler/rustc_interface/src/queries.rs41
-rw-r--r--compiler/rustc_interface/src/tests.rs3
-rw-r--r--compiler/rustc_interface/src/util.rs126
-rw-r--r--compiler/rustc_lexer/Cargo.toml2
-rw-r--r--compiler/rustc_lexer/src/lib.rs27
-rw-r--r--compiler/rustc_lexer/src/unescape.rs130
-rw-r--r--compiler/rustc_lexer/src/unescape/tests.rs35
-rw-r--r--compiler/rustc_lint/src/builtin.rs288
-rw-r--r--compiler/rustc_lint/src/context.rs25
-rw-r--r--compiler/rustc_lint/src/deref_into_dyn_supertrait.rs92
-rw-r--r--compiler/rustc_lint/src/early.rs193
-rw-r--r--compiler/rustc_lint/src/errors.rs2
-rw-r--r--compiler/rustc_lint/src/for_loops_over_fallibles.rs11
-rw-r--r--compiler/rustc_lint/src/hidden_unicode_codepoints.rs15
-rw-r--r--compiler/rustc_lint/src/internal.rs12
-rw-r--r--compiler/rustc_lint/src/late.rs132
-rw-r--r--compiler/rustc_lint/src/let_underscore.rs5
-rw-r--r--compiler/rustc_lint/src/levels.rs7
-rw-r--r--compiler/rustc_lint/src/lib.rs278
-rw-r--r--compiler/rustc_lint/src/non_fmt_panic.rs23
-rw-r--r--compiler/rustc_lint/src/opaque_hidden_inferred_bound.rs33
-rw-r--r--compiler/rustc_lint/src/pass_by_value.rs4
-rw-r--r--compiler/rustc_lint/src/passes.rs27
-rw-r--r--compiler/rustc_lint/src/traits.rs3
-rw-r--r--compiler/rustc_lint/src/types.rs51
-rw-r--r--compiler/rustc_lint/src/unused.rs447
-rw-r--r--compiler/rustc_lint_defs/src/builtin.rs96
-rw-r--r--compiler/rustc_llvm/build.rs17
-rw-r--r--compiler/rustc_llvm/llvm-wrapper/LLVMWrapper.h1
-rw-r--r--compiler/rustc_llvm/llvm-wrapper/PassWrapper.cpp17
-rw-r--r--compiler/rustc_llvm/llvm-wrapper/RustWrapper.cpp71
-rw-r--r--compiler/rustc_llvm/llvm-wrapper/SymbolWrapper.cpp96
-rw-r--r--compiler/rustc_log/Cargo.toml1
-rw-r--r--compiler/rustc_log/src/lib.rs7
-rw-r--r--compiler/rustc_macros/src/diagnostics/diagnostic.rs72
-rw-r--r--compiler/rustc_macros/src/diagnostics/diagnostic_builder.rs14
-rw-r--r--compiler/rustc_macros/src/diagnostics/error.rs4
-rw-r--r--compiler/rustc_macros/src/diagnostics/mod.rs2
-rw-r--r--compiler/rustc_macros/src/diagnostics/subdiagnostic.rs4
-rw-r--r--compiler/rustc_macros/src/diagnostics/utils.rs142
-rw-r--r--compiler/rustc_macros/src/lib.rs3
-rw-r--r--compiler/rustc_macros/src/newtype.rs26
-rw-r--r--compiler/rustc_macros/src/query.rs26
-rw-r--r--compiler/rustc_metadata/src/creader.rs105
-rw-r--r--compiler/rustc_metadata/src/errors.rs28
-rw-r--r--compiler/rustc_metadata/src/fs.rs20
-rw-r--r--compiler/rustc_metadata/src/lib.rs2
-rw-r--r--compiler/rustc_metadata/src/locator.rs9
-rw-r--r--compiler/rustc_metadata/src/native_libs.rs13
-rw-r--r--compiler/rustc_metadata/src/rmeta/decoder.rs156
-rw-r--r--compiler/rustc_metadata/src/rmeta/decoder/cstore_impl.rs38
-rw-r--r--compiler/rustc_metadata/src/rmeta/encoder.rs123
-rw-r--r--compiler/rustc_metadata/src/rmeta/mod.rs18
-rw-r--r--compiler/rustc_metadata/src/rmeta/table.rs2
-rw-r--r--compiler/rustc_middle/Cargo.toml6
-rw-r--r--compiler/rustc_middle/src/arena.rs9
-rw-r--r--compiler/rustc_middle/src/error.rs17
-rw-r--r--compiler/rustc_middle/src/hir/map/mod.rs87
-rw-r--r--compiler/rustc_middle/src/hir/mod.rs9
-rw-r--r--compiler/rustc_middle/src/infer/canonical.rs14
-rw-r--r--compiler/rustc_middle/src/lib.rs3
-rw-r--r--compiler/rustc_middle/src/lint.rs2
-rw-r--r--compiler/rustc_middle/src/middle/codegen_fn_attrs.rs5
-rw-r--r--compiler/rustc_middle/src/middle/lang_items.rs10
-rw-r--r--compiler/rustc_middle/src/middle/privacy.rs209
-rw-r--r--compiler/rustc_middle/src/mir/generic_graphviz.rs7
-rw-r--r--compiler/rustc_middle/src/mir/interpret/allocation.rs849
-rw-r--r--compiler/rustc_middle/src/mir/interpret/allocation/init_mask.rs530
-rw-r--r--compiler/rustc_middle/src/mir/interpret/allocation/provenance_map.rs321
-rw-r--r--compiler/rustc_middle/src/mir/interpret/allocation/tests.rs19
-rw-r--r--compiler/rustc_middle/src/mir/interpret/error.rs36
-rw-r--r--compiler/rustc_middle/src/mir/interpret/mod.rs7
-rw-r--r--compiler/rustc_middle/src/mir/interpret/pointer.rs27
-rw-r--r--compiler/rustc_middle/src/mir/interpret/queries.rs4
-rw-r--r--compiler/rustc_middle/src/mir/interpret/value.rs20
-rw-r--r--compiler/rustc_middle/src/mir/mod.rs178
-rw-r--r--compiler/rustc_middle/src/mir/pretty.rs73
-rw-r--r--compiler/rustc_middle/src/mir/syntax.rs31
-rw-r--r--compiler/rustc_middle/src/mir/type_foldable.rs2
-rw-r--r--compiler/rustc_middle/src/mir/visit.rs20
-rw-r--r--compiler/rustc_middle/src/query/keys.rs (renamed from compiler/rustc_query_impl/src/keys.rs)23
-rw-r--r--compiler/rustc_middle/src/query/mod.rs86
-rw-r--r--compiler/rustc_middle/src/thir.rs3
-rw-r--r--compiler/rustc_middle/src/traits/mod.rs69
-rw-r--r--compiler/rustc_middle/src/traits/select.rs16
-rw-r--r--compiler/rustc_middle/src/traits/specialization_graph.rs19
-rw-r--r--compiler/rustc_middle/src/traits/structural_impls.rs24
-rw-r--r--compiler/rustc_middle/src/ty/_match.rs9
-rw-r--r--compiler/rustc_middle/src/ty/abstract_const.rs200
-rw-r--r--compiler/rustc_middle/src/ty/adjustment.rs19
-rw-r--r--compiler/rustc_middle/src/ty/adt.rs24
-rw-r--r--compiler/rustc_middle/src/ty/closure.rs49
-rw-r--r--compiler/rustc_middle/src/ty/codec.rs28
-rw-r--r--compiler/rustc_middle/src/ty/consts.rs66
-rw-r--r--compiler/rustc_middle/src/ty/consts/int.rs12
-rw-r--r--compiler/rustc_middle/src/ty/consts/kind.rs33
-rw-r--r--compiler/rustc_middle/src/ty/context.rs463
-rw-r--r--compiler/rustc_middle/src/ty/diagnostics.rs32
-rw-r--r--compiler/rustc_middle/src/ty/error.rs59
-rw-r--r--compiler/rustc_middle/src/ty/fast_reject.rs27
-rw-r--r--compiler/rustc_middle/src/ty/flags.rs37
-rw-r--r--compiler/rustc_middle/src/ty/fold.rs65
-rw-r--r--compiler/rustc_middle/src/ty/generics.rs25
-rw-r--r--compiler/rustc_middle/src/ty/impls_ty.rs13
-rw-r--r--compiler/rustc_middle/src/ty/inhabitedness/inhabited_predicate.rs7
-rw-r--r--compiler/rustc_middle/src/ty/inhabitedness/mod.rs116
-rw-r--r--compiler/rustc_middle/src/ty/instance.rs63
-rw-r--r--compiler/rustc_middle/src/ty/layout.rs41
-rw-r--r--compiler/rustc_middle/src/ty/mod.rs668
-rw-r--r--compiler/rustc_middle/src/ty/opaque_types.rs9
-rw-r--r--compiler/rustc_middle/src/ty/parameterized.rs8
-rw-r--r--compiler/rustc_middle/src/ty/print/mod.rs6
-rw-r--r--compiler/rustc_middle/src/ty/print/pretty.rs183
-rw-r--r--compiler/rustc_middle/src/ty/query.rs64
-rw-r--r--compiler/rustc_middle/src/ty/relate.rs101
-rw-r--r--compiler/rustc_middle/src/ty/structural_impls.rs30
-rw-r--r--compiler/rustc_middle/src/ty/sty.rs71
-rw-r--r--compiler/rustc_middle/src/ty/subst.rs125
-rw-r--r--compiler/rustc_middle/src/ty/trait_def.rs4
-rw-r--r--compiler/rustc_middle/src/ty/util.rs39
-rw-r--r--compiler/rustc_middle/src/ty/visit.rs18
-rw-r--r--compiler/rustc_middle/src/ty/vtable.rs2
-rw-r--r--compiler/rustc_middle/src/ty/walk.rs18
-rw-r--r--compiler/rustc_middle/src/values.rs23
-rw-r--r--compiler/rustc_mir_build/Cargo.toml1
-rw-r--r--compiler/rustc_mir_build/src/build/block.rs9
-rw-r--r--compiler/rustc_mir_build/src/build/custom/mod.rs155
-rw-r--r--compiler/rustc_mir_build/src/build/custom/parse.rs256
-rw-r--r--compiler/rustc_mir_build/src/build/custom/parse/instruction.rs105
-rw-r--r--compiler/rustc_mir_build/src/build/expr/as_constant.rs163
-rw-r--r--compiler/rustc_mir_build/src/build/expr/as_place.rs168
-rw-r--r--compiler/rustc_mir_build/src/build/expr/as_rvalue.rs58
-rw-r--r--compiler/rustc_mir_build/src/build/expr/category.rs28
-rw-r--r--compiler/rustc_mir_build/src/build/expr/into.rs21
-rw-r--r--compiler/rustc_mir_build/src/build/matches/mod.rs123
-rw-r--r--compiler/rustc_mir_build/src/build/matches/simplify.rs19
-rw-r--r--compiler/rustc_mir_build/src/build/matches/test.rs52
-rw-r--r--compiler/rustc_mir_build/src/build/matches/util.rs35
-rw-r--r--compiler/rustc_mir_build/src/build/misc.rs4
-rw-r--r--compiler/rustc_mir_build/src/build/mod.rs39
-rw-r--r--compiler/rustc_mir_build/src/build/scope.rs64
-rw-r--r--compiler/rustc_mir_build/src/lib.rs1
-rw-r--r--compiler/rustc_mir_build/src/thir/constant.rs19
-rw-r--r--compiler/rustc_mir_build/src/thir/cx/expr.rs65
-rw-r--r--compiler/rustc_mir_build/src/thir/cx/mod.rs11
-rw-r--r--compiler/rustc_mir_build/src/thir/pattern/check_match.rs40
-rw-r--r--compiler/rustc_mir_build/src/thir/pattern/const_to_pat.rs3
-rw-r--r--compiler/rustc_mir_build/src/thir/pattern/deconstruct_pat.rs30
-rw-r--r--compiler/rustc_mir_build/src/thir/pattern/mod.rs14
-rw-r--r--compiler/rustc_mir_build/src/thir/pattern/usefulness.rs2
-rw-r--r--compiler/rustc_mir_dataflow/src/elaborate_drops.rs7
-rw-r--r--compiler/rustc_mir_dataflow/src/framework/engine.rs9
-rw-r--r--compiler/rustc_mir_dataflow/src/framework/graphviz.rs5
-rw-r--r--compiler/rustc_mir_dataflow/src/framework/lattice.rs34
-rw-r--r--compiler/rustc_mir_dataflow/src/impls/init_locals.rs122
-rw-r--r--compiler/rustc_mir_dataflow/src/impls/mod.rs2
-rw-r--r--compiler/rustc_mir_dataflow/src/lib.rs1
-rw-r--r--compiler/rustc_mir_dataflow/src/value_analysis.rs927
-rw-r--r--compiler/rustc_mir_transform/Cargo.toml1
-rw-r--r--compiler/rustc_mir_transform/src/add_moves_for_packed_drops.rs61
-rw-r--r--compiler/rustc_mir_transform/src/add_retag.rs53
-rw-r--r--compiler/rustc_mir_transform/src/check_unsafety.rs55
-rw-r--r--compiler/rustc_mir_transform/src/const_prop.rs13
-rw-r--r--compiler/rustc_mir_transform/src/const_prop_lint.rs38
-rw-r--r--compiler/rustc_mir_transform/src/coverage/debug.rs4
-rw-r--r--compiler/rustc_mir_transform/src/dataflow_const_prop.rs530
-rw-r--r--compiler/rustc_mir_transform/src/dead_store_elimination.rs2
-rw-r--r--compiler/rustc_mir_transform/src/deduce_param_attrs.rs48
-rw-r--r--compiler/rustc_mir_transform/src/dest_prop.rs1299
-rw-r--r--compiler/rustc_mir_transform/src/dump_mir.rs11
-rw-r--r--compiler/rustc_mir_transform/src/elaborate_box_derefs.rs2
-rw-r--r--compiler/rustc_mir_transform/src/function_item_references.rs2
-rw-r--r--compiler/rustc_mir_transform/src/generator.rs115
-rw-r--r--compiler/rustc_mir_transform/src/inline.rs60
-rw-r--r--compiler/rustc_mir_transform/src/lib.rs18
-rw-r--r--compiler/rustc_mir_transform/src/pass_manager.rs106
-rw-r--r--compiler/rustc_mir_transform/src/remove_zsts.rs27
-rw-r--r--compiler/rustc_mir_transform/src/required_consts.rs2
-rw-r--r--compiler/rustc_mir_transform/src/shim.rs28
-rw-r--r--compiler/rustc_mir_transform/src/simplify.rs5
-rw-r--r--compiler/rustc_mir_transform/src/simplify_branches.rs6
-rw-r--r--compiler/rustc_mir_transform/src/sroa.rs348
-rw-r--r--compiler/rustc_mir_transform/src/uninhabited_enum_branching.rs2
-rw-r--r--compiler/rustc_monomorphize/src/collector.rs65
-rw-r--r--compiler/rustc_monomorphize/src/errors.rs7
-rw-r--r--compiler/rustc_monomorphize/src/lib.rs14
-rw-r--r--compiler/rustc_parse/Cargo.toml7
-rw-r--r--compiler/rustc_parse/src/errors.rs438
-rw-r--r--compiler/rustc_parse/src/lexer/mod.rs126
-rw-r--r--compiler/rustc_parse/src/lexer/unescape_error_reporting.rs42
-rw-r--r--compiler/rustc_parse/src/lexer/unicode_chars.rs4
-rw-r--r--compiler/rustc_parse/src/lib.rs10
-rw-r--r--compiler/rustc_parse/src/parser/attr.rs37
-rw-r--r--compiler/rustc_parse/src/parser/attr_wrapper.rs14
-rw-r--r--compiler/rustc_parse/src/parser/diagnostics.rs85
-rw-r--r--compiler/rustc_parse/src/parser/expr.rs435
-rw-r--r--compiler/rustc_parse/src/parser/item.rs171
-rw-r--r--compiler/rustc_parse/src/parser/mod.rs142
-rw-r--r--compiler/rustc_parse/src/parser/nonterminal.rs16
-rw-r--r--compiler/rustc_parse/src/parser/pat.rs18
-rw-r--r--compiler/rustc_parse/src/parser/path.rs17
-rw-r--r--compiler/rustc_parse/src/parser/stmt.rs86
-rw-r--r--compiler/rustc_parse/src/parser/ty.rs69
-rw-r--r--compiler/rustc_parse/src/validate_attr.rs68
-rw-r--r--compiler/rustc_parse_format/src/lib.rs141
-rw-r--r--compiler/rustc_passes/src/check_attr.rs96
-rw-r--r--compiler/rustc_passes/src/dead.rs200
-rw-r--r--compiler/rustc_passes/src/errors.rs79
-rw-r--r--compiler/rustc_passes/src/hir_id_validator.rs75
-rw-r--r--compiler/rustc_passes/src/hir_stats.rs7
-rw-r--r--compiler/rustc_passes/src/lang_items.rs34
-rw-r--r--compiler/rustc_passes/src/lib.rs2
-rw-r--r--compiler/rustc_passes/src/liveness.rs50
-rw-r--r--compiler/rustc_passes/src/loops.rs2
-rw-r--r--compiler/rustc_passes/src/reachable.rs8
-rw-r--r--compiler/rustc_passes/src/stability.rs34
-rw-r--r--compiler/rustc_passes/src/upvars.rs7
-rw-r--r--compiler/rustc_passes/src/weak_lang_items.rs22
-rw-r--r--compiler/rustc_privacy/src/lib.rs116
-rw-r--r--compiler/rustc_query_impl/Cargo.toml2
-rw-r--r--compiler/rustc_query_impl/src/lib.rs6
-rw-r--r--compiler/rustc_query_impl/src/on_disk_cache.rs6
-rw-r--r--compiler/rustc_query_impl/src/plumbing.rs46
-rw-r--r--compiler/rustc_query_impl/src/profiling_support.rs51
-rw-r--r--compiler/rustc_query_system/Cargo.toml2
-rw-r--r--compiler/rustc_query_system/src/cache.rs4
-rw-r--r--compiler/rustc_query_system/src/dep_graph/dep_node.rs36
-rw-r--r--compiler/rustc_query_system/src/dep_graph/graph.rs268
-rw-r--r--compiler/rustc_query_system/src/dep_graph/mod.rs3
-rw-r--r--compiler/rustc_query_system/src/ich/hcx.rs39
-rw-r--r--compiler/rustc_query_system/src/query/caches.rs207
-rw-r--r--compiler/rustc_query_system/src/query/config.rs65
-rw-r--r--compiler/rustc_query_system/src/query/job.rs6
-rw-r--r--compiler/rustc_query_system/src/query/mod.rs4
-rw-r--r--compiler/rustc_query_system/src/query/plumbing.rs280
-rw-r--r--compiler/rustc_query_system/src/values.rs8
-rw-r--r--compiler/rustc_resolve/Cargo.toml8
-rw-r--r--compiler/rustc_resolve/src/build_reduced_graph.rs125
-rw-r--r--compiler/rustc_resolve/src/check_unused.rs8
-rw-r--r--compiler/rustc_resolve/src/def_collector.rs29
-rw-r--r--compiler/rustc_resolve/src/diagnostics.rs649
-rw-r--r--compiler/rustc_resolve/src/effective_visibilities.rs227
-rw-r--r--compiler/rustc_resolve/src/errors.rs474
-rw-r--r--compiler/rustc_resolve/src/ident.rs8
-rw-r--r--compiler/rustc_resolve/src/imports.rs220
-rw-r--r--compiler/rustc_resolve/src/late.rs163
-rw-r--r--compiler/rustc_resolve/src/late/diagnostics.rs186
-rw-r--r--compiler/rustc_resolve/src/lib.rs72
-rw-r--r--compiler/rustc_resolve/src/macros.rs2
-rw-r--r--compiler/rustc_save_analysis/src/dump_visitor.rs24
-rw-r--r--compiler/rustc_save_analysis/src/lib.rs18
-rw-r--r--compiler/rustc_save_analysis/src/sig.rs10
-rw-r--r--compiler/rustc_serialize/Cargo.toml2
-rw-r--r--compiler/rustc_serialize/src/leb128.rs21
-rw-r--r--compiler/rustc_serialize/src/opaque.rs36
-rw-r--r--compiler/rustc_session/Cargo.toml8
-rw-r--r--compiler/rustc_session/src/cgu_reuse_tracker.rs2
-rw-r--r--compiler/rustc_session/src/config.rs110
-rw-r--r--compiler/rustc_session/src/errors.rs166
-rw-r--r--compiler/rustc_session/src/filesearch.rs136
-rw-r--r--compiler/rustc_session/src/options.rs9
-rw-r--r--compiler/rustc_session/src/output.rs29
-rw-r--r--compiler/rustc_session/src/parse.rs6
-rw-r--r--compiler/rustc_session/src/session.rs81
-rw-r--r--compiler/rustc_span/src/analyze_source_file.rs4
-rw-r--r--compiler/rustc_span/src/caching_source_map_view.rs2
-rw-r--r--compiler/rustc_span/src/def_id.rs12
-rw-r--r--compiler/rustc_span/src/hygiene.rs4
-rw-r--r--compiler/rustc_span/src/lib.rs20
-rw-r--r--compiler/rustc_span/src/source_map.rs91
-rw-r--r--compiler/rustc_span/src/source_map/tests.rs11
-rw-r--r--compiler/rustc_span/src/span_encoding.rs2
-rw-r--r--compiler/rustc_span/src/symbol.rs70
-rw-r--r--compiler/rustc_symbol_mangling/src/legacy.rs2
-rw-r--r--compiler/rustc_symbol_mangling/src/typeid/typeid_itanium_cxx_abi.rs10
-rw-r--r--compiler/rustc_symbol_mangling/src/v0.rs20
-rw-r--r--compiler/rustc_target/Cargo.toml1
-rw-r--r--compiler/rustc_target/src/abi/call/loongarch.rs342
-rw-r--r--compiler/rustc_target/src/abi/call/mod.rs32
-rw-r--r--compiler/rustc_target/src/abi/call/sparc64.rs18
-rw-r--r--compiler/rustc_target/src/abi/mod.rs1330
-rw-r--r--compiler/rustc_target/src/json.rs25
-rw-r--r--compiler/rustc_target/src/lib.rs5
-rw-r--r--compiler/rustc_target/src/spec/aarch64_apple_darwin.rs19
-rw-r--r--compiler/rustc_target/src/spec/aarch64_apple_ios.rs20
-rw-r--r--compiler/rustc_target/src/spec/aarch64_apple_ios_macabi.rs7
-rw-r--r--compiler/rustc_target/src/spec/aarch64_apple_ios_sim.rs22
-rw-r--r--compiler/rustc_target/src/spec/aarch64_apple_tvos.rs7
-rw-r--r--compiler/rustc_target/src/spec/aarch64_apple_watchos_sim.rs22
-rw-r--r--compiler/rustc_target/src/spec/aarch64_unknown_nto_qnx_710.rs28
-rw-r--r--compiler/rustc_target/src/spec/abi.rs22
-rw-r--r--compiler/rustc_target/src/spec/aix_base.rs32
-rw-r--r--compiler/rustc_target/src/spec/apple/tests.rs35
-rw-r--r--compiler/rustc_target/src/spec/apple_base.rs157
-rw-r--r--compiler/rustc_target/src/spec/apple_sdk_base.rs72
-rw-r--r--compiler/rustc_target/src/spec/arm64_32_apple_watchos.rs4
-rw-r--r--compiler/rustc_target/src/spec/armv7_apple_ios.rs15
-rw-r--r--compiler/rustc_target/src/spec/armv7k_apple_watchos.rs10
-rw-r--r--compiler/rustc_target/src/spec/armv7s_apple_ios.rs7
-rw-r--r--compiler/rustc_target/src/spec/i386_apple_ios.rs16
-rw-r--r--compiler/rustc_target/src/spec/i686_apple_darwin.rs23
-rw-r--r--compiler/rustc_target/src/spec/linux_kernel_base.rs18
-rw-r--r--compiler/rustc_target/src/spec/mipsel_sony_psx.rs37
-rw-r--r--compiler/rustc_target/src/spec/mod.rs92
-rw-r--r--compiler/rustc_target/src/spec/nto_qnx_base.rs19
-rw-r--r--compiler/rustc_target/src/spec/powerpc64_ibm_aix.rs23
-rw-r--r--compiler/rustc_target/src/spec/riscv64gc_unknown_freebsd.rs2
-rw-r--r--compiler/rustc_target/src/spec/riscv64gc_unknown_linux_gnu.rs2
-rw-r--r--compiler/rustc_target/src/spec/riscv64gc_unknown_linux_musl.rs2
-rw-r--r--compiler/rustc_target/src/spec/riscv64gc_unknown_none_elf.rs2
-rw-r--r--compiler/rustc_target/src/spec/riscv64gc_unknown_openbsd.rs2
-rw-r--r--compiler/rustc_target/src/spec/riscv64imac_unknown_none_elf.rs2
-rw-r--r--compiler/rustc_target/src/spec/tests/tests_impl.rs9
-rw-r--r--compiler/rustc_target/src/spec/wasm32_unknown_unknown.rs7
-rw-r--r--compiler/rustc_target/src/spec/wasm32_wasi.rs10
-rw-r--r--compiler/rustc_target/src/spec/wasm_base.rs4
-rw-r--r--compiler/rustc_target/src/spec/windows_gnullvm_base.rs8
-rw-r--r--compiler/rustc_target/src/spec/x86_64_apple_darwin.rs21
-rw-r--r--compiler/rustc_target/src/spec/x86_64_apple_ios.rs12
-rw-r--r--compiler/rustc_target/src/spec/x86_64_apple_ios_macabi.rs7
-rw-r--r--compiler/rustc_target/src/spec/x86_64_apple_tvos.rs8
-rw-r--r--compiler/rustc_target/src/spec/x86_64_apple_watchos_sim.rs14
-rw-r--r--compiler/rustc_target/src/spec/x86_64_pc_nto_qnx710.rs21
-rw-r--r--compiler/rustc_target/src/spec/x86_64_unknown_none_linuxkernel.rs28
-rw-r--r--compiler/rustc_trait_selection/src/autoderef.rs39
-rw-r--r--compiler/rustc_trait_selection/src/errors.rs11
-rw-r--r--compiler/rustc_trait_selection/src/infer.rs67
-rw-r--r--compiler/rustc_trait_selection/src/lib.rs2
-rw-r--r--compiler/rustc_trait_selection/src/traits/auto_trait.rs100
-rw-r--r--compiler/rustc_trait_selection/src/traits/chalk_fulfill.rs33
-rw-r--r--compiler/rustc_trait_selection/src/traits/coherence.rs68
-rw-r--r--compiler/rustc_trait_selection/src/traits/const_evaluatable.rs354
-rw-r--r--compiler/rustc_trait_selection/src/traits/engine.rs108
-rw-r--r--compiler/rustc_trait_selection/src/traits/error_reporting/ambiguity.rs52
-rw-r--r--compiler/rustc_trait_selection/src/traits/error_reporting/mod.rs498
-rw-r--r--compiler/rustc_trait_selection/src/traits/error_reporting/on_unimplemented.rs398
-rw-r--r--compiler/rustc_trait_selection/src/traits/error_reporting/suggestions.rs242
-rw-r--r--compiler/rustc_trait_selection/src/traits/fulfill.rs164
-rw-r--r--compiler/rustc_trait_selection/src/traits/misc.rs2
-rw-r--r--compiler/rustc_trait_selection/src/traits/mod.rs518
-rw-r--r--compiler/rustc_trait_selection/src/traits/object_safety.rs78
-rw-r--r--compiler/rustc_trait_selection/src/traits/on_unimplemented.rs392
-rw-r--r--compiler/rustc_trait_selection/src/traits/outlives_bounds.rs28
-rw-r--r--compiler/rustc_trait_selection/src/traits/project.rs577
-rw-r--r--compiler/rustc_trait_selection/src/traits/query/evaluate_obligation.rs8
-rw-r--r--compiler/rustc_trait_selection/src/traits/query/normalize.rs32
-rw-r--r--compiler/rustc_trait_selection/src/traits/query/type_op/prove_predicate.rs6
-rw-r--r--compiler/rustc_trait_selection/src/traits/relationships.rs28
-rw-r--r--compiler/rustc_trait_selection/src/traits/select/candidate_assembly.rs386
-rw-r--r--compiler/rustc_trait_selection/src/traits/select/confirmation.rs214
-rw-r--r--compiler/rustc_trait_selection/src/traits/select/mod.rs482
-rw-r--r--compiler/rustc_trait_selection/src/traits/specialize/mod.rs137
-rw-r--r--compiler/rustc_trait_selection/src/traits/specialize/specialization_graph.rs68
-rw-r--r--compiler/rustc_trait_selection/src/traits/structural_match.rs61
-rw-r--r--compiler/rustc_trait_selection/src/traits/util.rs53
-rw-r--r--compiler/rustc_trait_selection/src/traits/vtable.rs386
-rw-r--r--compiler/rustc_trait_selection/src/traits/wf.rs102
-rw-r--r--compiler/rustc_traits/Cargo.toml7
-rw-r--r--compiler/rustc_traits/src/chalk/db.rs43
-rw-r--r--compiler/rustc_traits/src/chalk/lowering.rs127
-rw-r--r--compiler/rustc_traits/src/codegen.rs (renamed from compiler/rustc_trait_selection/src/traits/codegen.rs)17
-rw-r--r--compiler/rustc_traits/src/dropck_outlives.rs209
-rw-r--r--compiler/rustc_traits/src/evaluate_obligation.rs2
-rw-r--r--compiler/rustc_traits/src/implied_outlives_bounds.rs49
-rw-r--r--compiler/rustc_traits/src/lib.rs3
-rw-r--r--compiler/rustc_traits/src/normalize_erasing_regions.rs16
-rw-r--r--compiler/rustc_traits/src/normalize_projection_ty.rs7
-rw-r--r--compiler/rustc_traits/src/type_op.rs229
-rw-r--r--compiler/rustc_transmute/src/layout/tree.rs7
-rw-r--r--compiler/rustc_transmute/src/lib.rs2
-rw-r--r--compiler/rustc_ty_utils/Cargo.toml2
-rw-r--r--compiler/rustc_ty_utils/src/consts.rs572
-rw-r--r--compiler/rustc_ty_utils/src/instance.rs10
-rw-r--r--compiler/rustc_ty_utils/src/layout.rs991
-rw-r--r--compiler/rustc_ty_utils/src/layout_sanity_check.rs536
-rw-r--r--compiler/rustc_ty_utils/src/lib.rs2
-rw-r--r--compiler/rustc_ty_utils/src/structural_match.rs44
-rw-r--r--compiler/rustc_ty_utils/src/ty.rs73
-rw-r--r--compiler/rustc_type_ir/src/lib.rs8
-rw-r--r--compiler/rustc_type_ir/src/sty.rs592
-rw-r--r--compiler/rustc_type_ir/src/ty_info.rs122
817 files changed, 37897 insertions, 26596 deletions
diff --git a/compiler/rustc_abi/Cargo.toml b/compiler/rustc_abi/Cargo.toml
new file mode 100644
index 000000000..48b199cb8
--- /dev/null
+++ b/compiler/rustc_abi/Cargo.toml
@@ -0,0 +1,24 @@
+[package]
+name = "rustc_abi"
+version = "0.0.0"
+edition = "2021"
+
+[dependencies]
+bitflags = "1.2.1"
+tracing = "0.1"
+rand = { version = "0.8.4", default-features = false, optional = true }
+rand_xoshiro = { version = "0.6.0", optional = true }
+rustc_data_structures = { path = "../rustc_data_structures", optional = true }
+rustc_index = { path = "../rustc_index", default-features = false }
+rustc_macros = { path = "../rustc_macros", optional = true }
+rustc_serialize = { path = "../rustc_serialize", optional = true }
+
+[features]
+default = ["nightly", "randomize"]
+randomize = ["rand", "rand_xoshiro"]
+nightly = [
+ "rustc_data_structures",
+ "rustc_index/nightly",
+ "rustc_macros",
+ "rustc_serialize",
+]
diff --git a/compiler/rustc_abi/src/layout.rs b/compiler/rustc_abi/src/layout.rs
new file mode 100644
index 000000000..9c2cf58ef
--- /dev/null
+++ b/compiler/rustc_abi/src/layout.rs
@@ -0,0 +1,945 @@
+use super::*;
+use std::{
+ borrow::Borrow,
+ cmp,
+ fmt::Debug,
+ iter,
+ ops::{Bound, Deref},
+};
+
+#[cfg(feature = "randomize")]
+use rand::{seq::SliceRandom, SeedableRng};
+#[cfg(feature = "randomize")]
+use rand_xoshiro::Xoshiro128StarStar;
+
+use tracing::debug;
+
+// Invert a bijective mapping, i.e. `invert(map)[y] = x` if `map[x] = y`.
+// This is used to go between `memory_index` (source field order to memory order)
+// and `inverse_memory_index` (memory order to source field order).
+// See also `FieldsShape::Arbitrary::memory_index` for more details.
+// FIXME(eddyb) build a better abstraction for permutations, if possible.
+fn invert_mapping(map: &[u32]) -> Vec<u32> {
+ let mut inverse = vec![0; map.len()];
+ for i in 0..map.len() {
+ inverse[map[i] as usize] = i as u32;
+ }
+ inverse
+}
+
+pub trait LayoutCalculator {
+ type TargetDataLayoutRef: Borrow<TargetDataLayout>;
+
+ fn delay_bug(&self, txt: &str);
+ fn current_data_layout(&self) -> Self::TargetDataLayoutRef;
+
+ fn scalar_pair<V: Idx>(&self, a: Scalar, b: Scalar) -> LayoutS<V> {
+ let dl = self.current_data_layout();
+ let dl = dl.borrow();
+ let b_align = b.align(dl);
+ let align = a.align(dl).max(b_align).max(dl.aggregate_align);
+ let b_offset = a.size(dl).align_to(b_align.abi);
+ let size = (b_offset + b.size(dl)).align_to(align.abi);
+
+ // HACK(nox): We iter on `b` and then `a` because `max_by_key`
+ // returns the last maximum.
+ let largest_niche = Niche::from_scalar(dl, b_offset, b)
+ .into_iter()
+ .chain(Niche::from_scalar(dl, Size::ZERO, a))
+ .max_by_key(|niche| niche.available(dl));
+
+ LayoutS {
+ variants: Variants::Single { index: V::new(0) },
+ fields: FieldsShape::Arbitrary {
+ offsets: vec![Size::ZERO, b_offset],
+ memory_index: vec![0, 1],
+ },
+ abi: Abi::ScalarPair(a, b),
+ largest_niche,
+ align,
+ size,
+ }
+ }
+
+ fn univariant<'a, V: Idx, F: Deref<Target = &'a LayoutS<V>> + Debug>(
+ &self,
+ dl: &TargetDataLayout,
+ fields: &[F],
+ repr: &ReprOptions,
+ kind: StructKind,
+ ) -> Option<LayoutS<V>> {
+ let pack = repr.pack;
+ let mut align = if pack.is_some() { dl.i8_align } else { dl.aggregate_align };
+ let mut inverse_memory_index: Vec<u32> = (0..fields.len() as u32).collect();
+ let optimize = !repr.inhibit_struct_field_reordering_opt();
+ if optimize {
+ let end =
+ if let StructKind::MaybeUnsized = kind { fields.len() - 1 } else { fields.len() };
+ let optimizing = &mut inverse_memory_index[..end];
+ let effective_field_align = |f: &F| {
+ if let Some(pack) = pack {
+ // return the packed alignment in bytes
+ f.align.abi.min(pack).bytes()
+ } else {
+ // returns log2(effective-align).
+ // This is ok since `pack` applies to all fields equally.
+ // The calculation assumes that size is an integer multiple of align, except for ZSTs.
+ //
+ // group [u8; 4] with align-4 or [u8; 6] with align-2 fields
+ f.align.abi.bytes().max(f.size.bytes()).trailing_zeros() as u64
+ }
+ };
+
+ // If `-Z randomize-layout` was enabled for the type definition we can shuffle
+ // the field ordering to try and catch some code making assumptions about layouts
+ // we don't guarantee
+ if repr.can_randomize_type_layout() && cfg!(feature = "randomize") {
+ #[cfg(feature = "randomize")]
+ {
+ // `ReprOptions.layout_seed` is a deterministic seed that we can use to
+ // randomize field ordering with
+ let mut rng = Xoshiro128StarStar::seed_from_u64(repr.field_shuffle_seed);
+
+ // Shuffle the ordering of the fields
+ optimizing.shuffle(&mut rng);
+ }
+ // Otherwise we just leave things alone and actually optimize the type's fields
+ } else {
+ match kind {
+ StructKind::AlwaysSized | StructKind::MaybeUnsized => {
+ optimizing.sort_by_key(|&x| {
+ // Place ZSTs first to avoid "interesting offsets",
+ // especially with only one or two non-ZST fields.
+ // Then place largest alignments first, largest niches within an alignment group last
+ let f = &fields[x as usize];
+ let niche_size = f.largest_niche.map_or(0, |n| n.available(dl));
+ (!f.is_zst(), cmp::Reverse(effective_field_align(f)), niche_size)
+ });
+ }
+
+ StructKind::Prefixed(..) => {
+ // Sort in ascending alignment so that the layout stays optimal
+ // regardless of the prefix.
+ // And put the largest niche in an alignment group at the end
+ // so it can be used as discriminant in jagged enums
+ optimizing.sort_by_key(|&x| {
+ let f = &fields[x as usize];
+ let niche_size = f.largest_niche.map_or(0, |n| n.available(dl));
+ (effective_field_align(f), niche_size)
+ });
+ }
+ }
+
+ // FIXME(Kixiron): We can always shuffle fields within a given alignment class
+ // regardless of the status of `-Z randomize-layout`
+ }
+ }
+ // inverse_memory_index holds field indices by increasing memory offset.
+ // That is, if field 5 has offset 0, the first element of inverse_memory_index is 5.
+ // We now write field offsets to the corresponding offset slot;
+ // field 5 with offset 0 puts 0 in offsets[5].
+ // At the bottom of this function, we invert `inverse_memory_index` to
+ // produce `memory_index` (see `invert_mapping`).
+ let mut sized = true;
+ let mut offsets = vec![Size::ZERO; fields.len()];
+ let mut offset = Size::ZERO;
+ let mut largest_niche = None;
+ let mut largest_niche_available = 0;
+ if let StructKind::Prefixed(prefix_size, prefix_align) = kind {
+ let prefix_align =
+ if let Some(pack) = pack { prefix_align.min(pack) } else { prefix_align };
+ align = align.max(AbiAndPrefAlign::new(prefix_align));
+ offset = prefix_size.align_to(prefix_align);
+ }
+ for &i in &inverse_memory_index {
+ let field = &fields[i as usize];
+ if !sized {
+ self.delay_bug(&format!(
+ "univariant: field #{} comes after unsized field",
+ offsets.len(),
+ ));
+ }
+
+ if field.is_unsized() {
+ sized = false;
+ }
+
+ // Invariant: offset < dl.obj_size_bound() <= 1<<61
+ let field_align = if let Some(pack) = pack {
+ field.align.min(AbiAndPrefAlign::new(pack))
+ } else {
+ field.align
+ };
+ offset = offset.align_to(field_align.abi);
+ align = align.max(field_align);
+
+ debug!("univariant offset: {:?} field: {:#?}", offset, field);
+ offsets[i as usize] = offset;
+
+ if let Some(mut niche) = field.largest_niche {
+ let available = niche.available(dl);
+ if available > largest_niche_available {
+ largest_niche_available = available;
+ niche.offset += offset;
+ largest_niche = Some(niche);
+ }
+ }
+
+ offset = offset.checked_add(field.size, dl)?;
+ }
+ if let Some(repr_align) = repr.align {
+ align = align.max(AbiAndPrefAlign::new(repr_align));
+ }
+ debug!("univariant min_size: {:?}", offset);
+ let min_size = offset;
+ // As stated above, inverse_memory_index holds field indices by increasing offset.
+ // This makes it an already-sorted view of the offsets vec.
+ // To invert it, consider:
+ // If field 5 has offset 0, offsets[0] is 5, and memory_index[5] should be 0.
+ // Field 5 would be the first element, so memory_index is i:
+ // Note: if we didn't optimize, it's already right.
+ let memory_index =
+ if optimize { invert_mapping(&inverse_memory_index) } else { inverse_memory_index };
+ let size = min_size.align_to(align.abi);
+ let mut abi = Abi::Aggregate { sized };
+ // Unpack newtype ABIs and find scalar pairs.
+ if sized && size.bytes() > 0 {
+ // All other fields must be ZSTs.
+ let mut non_zst_fields = fields.iter().enumerate().filter(|&(_, f)| !f.is_zst());
+
+ match (non_zst_fields.next(), non_zst_fields.next(), non_zst_fields.next()) {
+ // We have exactly one non-ZST field.
+ (Some((i, field)), None, None) => {
+ // Field fills the struct and it has a scalar or scalar pair ABI.
+ if offsets[i].bytes() == 0 && align.abi == field.align.abi && size == field.size
+ {
+ match field.abi {
+ // For plain scalars, or vectors of them, we can't unpack
+ // newtypes for `#[repr(C)]`, as that affects C ABIs.
+ Abi::Scalar(_) | Abi::Vector { .. } if optimize => {
+ abi = field.abi;
+ }
+ // But scalar pairs are Rust-specific and get
+ // treated as aggregates by C ABIs anyway.
+ Abi::ScalarPair(..) => {
+ abi = field.abi;
+ }
+ _ => {}
+ }
+ }
+ }
+
+ // Two non-ZST fields, and they're both scalars.
+ (Some((i, a)), Some((j, b)), None) => {
+ match (a.abi, b.abi) {
+ (Abi::Scalar(a), Abi::Scalar(b)) => {
+ // Order by the memory placement, not source order.
+ let ((i, a), (j, b)) = if offsets[i] < offsets[j] {
+ ((i, a), (j, b))
+ } else {
+ ((j, b), (i, a))
+ };
+ let pair = self.scalar_pair::<V>(a, b);
+ let pair_offsets = match pair.fields {
+ FieldsShape::Arbitrary { ref offsets, ref memory_index } => {
+ assert_eq!(memory_index, &[0, 1]);
+ offsets
+ }
+ _ => panic!(),
+ };
+ if offsets[i] == pair_offsets[0]
+ && offsets[j] == pair_offsets[1]
+ && align == pair.align
+ && size == pair.size
+ {
+ // We can use `ScalarPair` only when it matches our
+ // already computed layout (including `#[repr(C)]`).
+ abi = pair.abi;
+ }
+ }
+ _ => {}
+ }
+ }
+
+ _ => {}
+ }
+ }
+ if fields.iter().any(|f| f.abi.is_uninhabited()) {
+ abi = Abi::Uninhabited;
+ }
+ Some(LayoutS {
+ variants: Variants::Single { index: V::new(0) },
+ fields: FieldsShape::Arbitrary { offsets, memory_index },
+ abi,
+ largest_niche,
+ align,
+ size,
+ })
+ }
+
+ fn layout_of_never_type<V: Idx>(&self) -> LayoutS<V> {
+ let dl = self.current_data_layout();
+ let dl = dl.borrow();
+ LayoutS {
+ variants: Variants::Single { index: V::new(0) },
+ fields: FieldsShape::Primitive,
+ abi: Abi::Uninhabited,
+ largest_niche: None,
+ align: dl.i8_align,
+ size: Size::ZERO,
+ }
+ }
+
+ fn layout_of_struct_or_enum<'a, V: Idx, F: Deref<Target = &'a LayoutS<V>> + Debug>(
+ &self,
+ repr: &ReprOptions,
+ variants: &IndexVec<V, Vec<F>>,
+ is_enum: bool,
+ is_unsafe_cell: bool,
+ scalar_valid_range: (Bound<u128>, Bound<u128>),
+ discr_range_of_repr: impl Fn(i128, i128) -> (Integer, bool),
+ discriminants: impl Iterator<Item = (V, i128)>,
+ niche_optimize_enum: bool,
+ always_sized: bool,
+ ) -> Option<LayoutS<V>> {
+ let dl = self.current_data_layout();
+ let dl = dl.borrow();
+
+ let scalar_unit = |value: Primitive| {
+ let size = value.size(dl);
+ assert!(size.bits() <= 128);
+ Scalar::Initialized { value, valid_range: WrappingRange::full(size) }
+ };
+
+ // A variant is absent if it's uninhabited and only has ZST fields.
+ // Present uninhabited variants only require space for their fields,
+ // but *not* an encoding of the discriminant (e.g., a tag value).
+ // See issue #49298 for more details on the need to leave space
+ // for non-ZST uninhabited data (mostly partial initialization).
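+ // For illustration: a variant whose only field is `!` is absent (uninhabited and
+ // all-ZST), while a variant with fields `u32` and `!` is uninhabited but not
+ // absent, so its `u32` still gets space.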
+ let absent = |fields: &[F]| {
+ let uninhabited = fields.iter().any(|f| f.abi.is_uninhabited());
+ let is_zst = fields.iter().all(|f| f.is_zst());
+ uninhabited && is_zst
+ };
+ let (present_first, present_second) = {
+ let mut present_variants = variants
+ .iter_enumerated()
+ .filter_map(|(i, v)| if absent(v) { None } else { Some(i) });
+ (present_variants.next(), present_variants.next())
+ };
+ let present_first = match present_first {
+ Some(present_first) => present_first,
+ // Uninhabited because it has no variants, or only absent ones.
+ None if is_enum => {
+ return Some(self.layout_of_never_type());
+ }
+ // If it's a struct, still compute a layout so that we can still compute the
+ // field offsets.
+ None => V::new(0),
+ };
+
+ let is_struct = !is_enum ||
+ // Only one variant is present.
+ (present_second.is_none() &&
+ // Representation optimizations are allowed.
+ !repr.inhibit_enum_layout_opt());
+ if is_struct {
+ // Struct, or univariant enum equivalent to a struct.
+ // (Typechecking will reject discriminant-sizing attrs.)
+
+ let v = present_first;
+ let kind = if is_enum || variants[v].is_empty() {
+ StructKind::AlwaysSized
+ } else {
+ if !always_sized { StructKind::MaybeUnsized } else { StructKind::AlwaysSized }
+ };
+
+ let mut st = self.univariant(dl, &variants[v], repr, kind)?;
+ st.variants = Variants::Single { index: v };
+
+ if is_unsafe_cell {
+ let hide_niches = |scalar: &mut _| match scalar {
+ Scalar::Initialized { value, valid_range } => {
+ *valid_range = WrappingRange::full(value.size(dl))
+ }
+ // Already doesn't have any niches
+ Scalar::Union { .. } => {}
+ };
+ match &mut st.abi {
+ Abi::Uninhabited => {}
+ Abi::Scalar(scalar) => hide_niches(scalar),
+ Abi::ScalarPair(a, b) => {
+ hide_niches(a);
+ hide_niches(b);
+ }
+ Abi::Vector { element, count: _ } => hide_niches(element),
+ Abi::Aggregate { sized: _ } => {}
+ }
+ st.largest_niche = None;
+ return Some(st);
+ }
+
+ let (start, end) = scalar_valid_range;
+ match st.abi {
+ Abi::Scalar(ref mut scalar) | Abi::ScalarPair(ref mut scalar, _) => {
+ // Enlarging validity ranges would result in missed
+ // optimizations, *not* wrongly assuming the inner
+ // value is valid. e.g. unions already enlarge validity ranges,
+ // because the values may be uninitialized.
+ //
+ // Because of that we only check that the start and end
+ // of the range is representable with this scalar type.
+
+ let max_value = scalar.size(dl).unsigned_int_max();
+ if let Bound::Included(start) = start {
+ // FIXME(eddyb) this might be incorrect - it doesn't
+ // account for wrap-around (end < start) ranges.
+ assert!(start <= max_value, "{start} > {max_value}");
+ scalar.valid_range_mut().start = start;
+ }
+ if let Bound::Included(end) = end {
+ // FIXME(eddyb) this might be incorrect - it doesn't
+ // account for wrap-around (end < start) ranges.
+ assert!(end <= max_value, "{end} > {max_value}");
+ scalar.valid_range_mut().end = end;
+ }
+
+ // Update `largest_niche` if we have introduced a larger niche.
+ let niche = Niche::from_scalar(dl, Size::ZERO, *scalar);
+ if let Some(niche) = niche {
+ match st.largest_niche {
+ Some(largest_niche) => {
+ // Replace the existing niche even if they're equal,
+ // because this one is at a lower offset.
+ if largest_niche.available(dl) <= niche.available(dl) {
+ st.largest_niche = Some(niche);
+ }
+ }
+ None => st.largest_niche = Some(niche),
+ }
+ }
+ }
+ _ => assert!(
+ start == Bound::Unbounded && end == Bound::Unbounded,
+ "nonscalar layout for layout_scalar_valid_range type: {:#?}",
+ st,
+ ),
+ }
+
+ return Some(st);
+ }
+
+ // At this point, we have handled all unions and
+ // structs. (We have also handled univariant enums
+ // that allow representation optimization.)
+ assert!(is_enum);
+
+ // Until we've decided whether to use the tagged or
+ // niche filling LayoutS, we don't want to intern the
+ // variant layouts, so we can't store them in the
+ // overall LayoutS. Store the overall LayoutS
+ // and the variant LayoutSs here until then.
+ struct TmpLayout<V: Idx> {
+ layout: LayoutS<V>,
+ variants: IndexVec<V, LayoutS<V>>,
+ }
+
+ let calculate_niche_filling_layout = || -> Option<TmpLayout<V>> {
+ if niche_optimize_enum {
+ return None;
+ }
+
+ if variants.len() < 2 {
+ return None;
+ }
+
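+ // The classic instance of this optimization is `Option<&T>`: the all-zero bit
+ // pattern of the reference (a niche) encodes `None`, so no separate tag is stored.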
+ let mut align = dl.aggregate_align;
+ let mut variant_layouts = variants
+ .iter_enumerated()
+ .map(|(j, v)| {
+ let mut st = self.univariant(dl, v, repr, StructKind::AlwaysSized)?;
+ st.variants = Variants::Single { index: j };
+
+ align = align.max(st.align);
+
+ Some(st)
+ })
+ .collect::<Option<IndexVec<V, _>>>()?;
+
+ let largest_variant_index = variant_layouts
+ .iter_enumerated()
+ .max_by_key(|(_i, layout)| layout.size.bytes())
+ .map(|(i, _layout)| i)?;
+
+ let all_indices = (0..=variants.len() - 1).map(V::new);
+ let needs_disc = |index: V| index != largest_variant_index && !absent(&variants[index]);
+ let niche_variants = all_indices.clone().find(|v| needs_disc(*v)).unwrap().index()
+ ..=all_indices.rev().find(|v| needs_disc(*v)).unwrap().index();
+
+ let count = niche_variants.size_hint().1.unwrap() as u128;
+
+ // Find the field with the largest niche
+ let (field_index, niche, (niche_start, niche_scalar)) = variants[largest_variant_index]
+ .iter()
+ .enumerate()
+ .filter_map(|(j, field)| Some((j, field.largest_niche?)))
+ .max_by_key(|(_, niche)| niche.available(dl))
+ .and_then(|(j, niche)| Some((j, niche, niche.reserve(dl, count)?)))?;
+ let niche_offset =
+ niche.offset + variant_layouts[largest_variant_index].fields.offset(field_index);
+ let niche_size = niche.value.size(dl);
+ let size = variant_layouts[largest_variant_index].size.align_to(align.abi);
+
+ let all_variants_fit = variant_layouts.iter_enumerated_mut().all(|(i, layout)| {
+ if i == largest_variant_index {
+ return true;
+ }
+
+ layout.largest_niche = None;
+
+ if layout.size <= niche_offset {
+ // This variant will fit before the niche.
+ return true;
+ }
+
+ // Determine if it'll fit after the niche.
+ let this_align = layout.align.abi;
+ let this_offset = (niche_offset + niche_size).align_to(this_align);
+
+ if this_offset + layout.size > size {
+ return false;
+ }
+
+ // It'll fit, but we need to make some adjustments.
+ match layout.fields {
+ FieldsShape::Arbitrary { ref mut offsets, .. } => {
+ for (j, offset) in offsets.iter_mut().enumerate() {
+ if !variants[i][j].is_zst() {
+ *offset += this_offset;
+ }
+ }
+ }
+ _ => {
+ panic!("Layout of fields should be Arbitrary for variants")
+ }
+ }
+
+ // It can't be a Scalar or ScalarPair because the offset isn't 0.
+ if !layout.abi.is_uninhabited() {
+ layout.abi = Abi::Aggregate { sized: true };
+ }
+ layout.size += this_offset;
+
+ true
+ });
+
+ if !all_variants_fit {
+ return None;
+ }
+
+ let largest_niche = Niche::from_scalar(dl, niche_offset, niche_scalar);
+
+ let others_zst = variant_layouts
+ .iter_enumerated()
+ .all(|(i, layout)| i == largest_variant_index || layout.size == Size::ZERO);
+ let same_size = size == variant_layouts[largest_variant_index].size;
+ let same_align = align == variant_layouts[largest_variant_index].align;
+
+ let abi = if variant_layouts.iter().all(|v| v.abi.is_uninhabited()) {
+ Abi::Uninhabited
+ } else if same_size && same_align && others_zst {
+ match variant_layouts[largest_variant_index].abi {
+ // When the total alignment and size match, we can use the
+ // same ABI as the scalar variant with the reserved niche.
+ Abi::Scalar(_) => Abi::Scalar(niche_scalar),
+ Abi::ScalarPair(first, second) => {
+ // Only the niche is guaranteed to be initialised,
+ // so use union layouts for the other primitive.
+ if niche_offset == Size::ZERO {
+ Abi::ScalarPair(niche_scalar, second.to_union())
+ } else {
+ Abi::ScalarPair(first.to_union(), niche_scalar)
+ }
+ }
+ _ => Abi::Aggregate { sized: true },
+ }
+ } else {
+ Abi::Aggregate { sized: true }
+ };
+
+ let layout = LayoutS {
+ variants: Variants::Multiple {
+ tag: niche_scalar,
+ tag_encoding: TagEncoding::Niche {
+ untagged_variant: largest_variant_index,
+ niche_variants: (V::new(*niche_variants.start())
+ ..=V::new(*niche_variants.end())),
+ niche_start,
+ },
+ tag_field: 0,
+ variants: IndexVec::new(),
+ },
+ fields: FieldsShape::Arbitrary {
+ offsets: vec![niche_offset],
+ memory_index: vec![0],
+ },
+ abi,
+ largest_niche,
+ size,
+ align,
+ };
+
+ Some(TmpLayout { layout, variants: variant_layouts })
+ };
+
+ let niche_filling_layout = calculate_niche_filling_layout();
+
+ let (mut min, mut max) = (i128::MAX, i128::MIN);
+ let discr_type = repr.discr_type();
+ let bits = Integer::from_attr(dl, discr_type).size().bits();
+ for (i, mut val) in discriminants {
+ if variants[i].iter().any(|f| f.abi.is_uninhabited()) {
+ continue;
+ }
+ if discr_type.is_signed() {
+ // sign extend the raw representation to be an i128
+ val = (val << (128 - bits)) >> (128 - bits);
+ }
+ if val < min {
+ min = val;
+ }
+ if val > max {
+ max = val;
+ }
+ }
+ // We might have no inhabited variants, so pretend there's at least one.
+ if (min, max) == (i128::MAX, i128::MIN) {
+ min = 0;
+ max = 0;
+ }
+ assert!(min <= max, "discriminant range is {}...{}", min, max);
+ let (min_ity, signed) = discr_range_of_repr(min, max); //Integer::repr_discr(tcx, ty, &repr, min, max);
+
+ let mut align = dl.aggregate_align;
+ let mut size = Size::ZERO;
+
+ // We're interested in the smallest alignment, so start large.
+ let mut start_align = Align::from_bytes(256).unwrap();
+ assert_eq!(Integer::for_align(dl, start_align), None);
+
+ // repr(C) on an enum tells us to make a (tag, union) layout,
+ // so we need to grow the prefix alignment to be at least
+ // the alignment of the union. (This value is used both for
+ // determining the alignment of the overall enum, and for
+ // determining the alignment of the payload after the tag.)
+ let mut prefix_align = min_ity.align(dl).abi;
+ if repr.c() {
+ for fields in variants {
+ for field in fields {
+ prefix_align = prefix_align.max(field.align.abi);
+ }
+ }
+ }
+
+ // Create the set of structs that represent each variant.
+ let mut layout_variants = variants
+ .iter_enumerated()
+ .map(|(i, field_layouts)| {
+ let mut st = self.univariant(
+ dl,
+ field_layouts,
+ repr,
+ StructKind::Prefixed(min_ity.size(), prefix_align),
+ )?;
+ st.variants = Variants::Single { index: i };
+ // Find the first field we can't move later
+ // to make room for a larger discriminant.
+ for field in st.fields.index_by_increasing_offset().map(|j| &field_layouts[j]) {
+ if !field.is_zst() || field.align.abi.bytes() != 1 {
+ start_align = start_align.min(field.align.abi);
+ break;
+ }
+ }
+ size = cmp::max(size, st.size);
+ align = align.max(st.align);
+ Some(st)
+ })
+ .collect::<Option<IndexVec<V, _>>>()?;
+
+ // Align the maximum variant size to the largest alignment.
+ size = size.align_to(align.abi);
+
+ if size.bytes() >= dl.obj_size_bound() {
+ return None;
+ }
+
+ let typeck_ity = Integer::from_attr(dl, repr.discr_type());
+ if typeck_ity < min_ity {
+ // It is a bug if Layout decided on a greater discriminant size than typeck for
+ // some reason at this point (based on the values the discriminant can take on).
+ // Mostly because this discriminant will be loaded, and then stored into a variable
+ // of the type calculated by typeck. Consider such a case (a bug): typeck decided on
+ // a byte-sized discriminant, but layout thinks we need 16 bits to store all
+ // discriminant values. That would be a bug, because then, in codegen, in order
+ // to store this 16-bit discriminant into an 8-bit sized temporary, some of the
+ // space necessary to represent it would have to be discarded (or layout is wrong
+ // in thinking it needs 16 bits).
+ panic!(
+ "layout decided on a larger discriminant type ({:?}) than typeck ({:?})",
+ min_ity, typeck_ity
+ );
+ // However, it is fine to make discr type however large (as an optimisation)
+ // after this point – we’ll just truncate the value we load in codegen.
+ }
+
+ // Check to see if we should use a different type for the
+ // discriminant. We can safely use a type with the same size
+ // as the alignment of the first field of each variant.
+ // We increase the size of the discriminant to avoid LLVM copying
+ // padding when it doesn't need to. This normally causes unaligned
+ // load/stores and excessive memcpy/memset operations. By using a
+ // bigger integer size, LLVM can be sure about its contents and
+ // won't be so conservative.
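+ // For example (illustrative): `enum E { A(u32), B }` only needs a 1-byte tag for
+ // its two discriminant values, but widening the tag to fill the 4-byte-aligned
+ // slot in front of the `u32` payload means LLVM never has to reason about the
+ // 3 padding bytes that would otherwise sit there.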
+
+ // Use the initial field alignment
+ let mut ity = if repr.c() || repr.int.is_some() {
+ min_ity
+ } else {
+ Integer::for_align(dl, start_align).unwrap_or(min_ity)
+ };
+
+ // If the alignment is not larger than the chosen discriminant size,
+ // don't use the alignment as the final size.
+ if ity <= min_ity {
+ ity = min_ity;
+ } else {
+ // Patch up the variants' first few fields.
+ let old_ity_size = min_ity.size();
+ let new_ity_size = ity.size();
+ for variant in &mut layout_variants {
+ match variant.fields {
+ FieldsShape::Arbitrary { ref mut offsets, .. } => {
+ for i in offsets {
+ if *i <= old_ity_size {
+ assert_eq!(*i, old_ity_size);
+ *i = new_ity_size;
+ }
+ }
+ // We might be making the struct larger.
+ if variant.size <= old_ity_size {
+ variant.size = new_ity_size;
+ }
+ }
+ _ => panic!(),
+ }
+ }
+ }
+
+ let tag_mask = ity.size().unsigned_int_max();
+ let tag = Scalar::Initialized {
+ value: Int(ity, signed),
+ valid_range: WrappingRange {
+ start: (min as u128 & tag_mask),
+ end: (max as u128 & tag_mask),
+ },
+ };
+ let mut abi = Abi::Aggregate { sized: true };
+
+ if layout_variants.iter().all(|v| v.abi.is_uninhabited()) {
+ abi = Abi::Uninhabited;
+ } else if tag.size(dl) == size {
+ // Make sure we only use scalar layout when the enum is entirely its
+ // own tag (i.e. it has no padding nor any non-ZST variant fields).
+ abi = Abi::Scalar(tag);
+ } else {
+ // Try to use a ScalarPair for all tagged enums.
+ let mut common_prim = None;
+ let mut common_prim_initialized_in_all_variants = true;
+ for (field_layouts, layout_variant) in iter::zip(variants, &layout_variants) {
+ let FieldsShape::Arbitrary { ref offsets, .. } = layout_variant.fields else {
+ panic!();
+ };
+ let mut fields = iter::zip(field_layouts, offsets).filter(|p| !p.0.is_zst());
+ let (field, offset) = match (fields.next(), fields.next()) {
+ (None, None) => {
+ common_prim_initialized_in_all_variants = false;
+ continue;
+ }
+ (Some(pair), None) => pair,
+ _ => {
+ common_prim = None;
+ break;
+ }
+ };
+ let prim = match field.abi {
+ Abi::Scalar(scalar) => {
+ common_prim_initialized_in_all_variants &=
+ matches!(scalar, Scalar::Initialized { .. });
+ scalar.primitive()
+ }
+ _ => {
+ common_prim = None;
+ break;
+ }
+ };
+ if let Some(pair) = common_prim {
+ // This is pretty conservative. We could go fancier
+ // by conflating things like i32 and u32, or even
+ // realising that (u8, u8) could just cohabit with
+ // u16 or even u32.
+ if pair != (prim, offset) {
+ common_prim = None;
+ break;
+ }
+ } else {
+ common_prim = Some((prim, offset));
+ }
+ }
+ if let Some((prim, offset)) = common_prim {
+ let prim_scalar = if common_prim_initialized_in_all_variants {
+ scalar_unit(prim)
+ } else {
+ // Common prim might be uninit.
+ Scalar::Union { value: prim }
+ };
+ let pair = self.scalar_pair::<V>(tag, prim_scalar);
+ let pair_offsets = match pair.fields {
+ FieldsShape::Arbitrary { ref offsets, ref memory_index } => {
+ assert_eq!(memory_index, &[0, 1]);
+ offsets
+ }
+ _ => panic!(),
+ };
+ if pair_offsets[0] == Size::ZERO
+ && pair_offsets[1] == *offset
+ && align == pair.align
+ && size == pair.size
+ {
+ // We can use `ScalarPair` only when it matches our
+ // already computed layout (including `#[repr(C)]`).
+ abi = pair.abi;
+ }
+ }
+ }
+
+ // If we pick a "clever" (by-value) ABI, we might have to adjust the ABI of the
+ // variants to ensure they are consistent. This is because a downcast is
+ // semantically a NOP, and thus should not affect layout.
+ if matches!(abi, Abi::Scalar(..) | Abi::ScalarPair(..)) {
+ for variant in &mut layout_variants {
+ // We only do this for variants with fields; the others are not accessed anyway.
+ // Also do not overwrite any already existing "clever" ABIs.
+ if variant.fields.count() > 0 && matches!(variant.abi, Abi::Aggregate { .. }) {
+ variant.abi = abi;
+ // Also need to bump up the size and alignment, so that the entire value fits in here.
+ variant.size = cmp::max(variant.size, size);
+ variant.align.abi = cmp::max(variant.align.abi, align.abi);
+ }
+ }
+ }
+
+ let largest_niche = Niche::from_scalar(dl, Size::ZERO, tag);
+
+ let tagged_layout = LayoutS {
+ variants: Variants::Multiple {
+ tag,
+ tag_encoding: TagEncoding::Direct,
+ tag_field: 0,
+ variants: IndexVec::new(),
+ },
+ fields: FieldsShape::Arbitrary { offsets: vec![Size::ZERO], memory_index: vec![0] },
+ largest_niche,
+ abi,
+ align,
+ size,
+ };
+
+ let tagged_layout = TmpLayout { layout: tagged_layout, variants: layout_variants };
+
+ let mut best_layout = match (tagged_layout, niche_filling_layout) {
+ (tl, Some(nl)) => {
+ // Pick the smaller layout; otherwise,
+ // pick the layout with the larger niche; otherwise,
+ // pick tagged as it has simpler codegen.
+ use cmp::Ordering::*;
+ let niche_size = |tmp_l: &TmpLayout<V>| {
+ tmp_l.layout.largest_niche.map_or(0, |n| n.available(dl))
+ };
+ match (tl.layout.size.cmp(&nl.layout.size), niche_size(&tl).cmp(&niche_size(&nl))) {
+ (Greater, _) => nl,
+ (Equal, Less) => nl,
+ _ => tl,
+ }
+ }
+ (tl, None) => tl,
+ };
+
+ // Now we can intern the variant layouts and store them in the enum layout.
+ best_layout.layout.variants = match best_layout.layout.variants {
+ Variants::Multiple { tag, tag_encoding, tag_field, .. } => {
+ Variants::Multiple { tag, tag_encoding, tag_field, variants: best_layout.variants }
+ }
+ _ => panic!(),
+ };
+ Some(best_layout.layout)
+ }
+
+ fn layout_of_union<'a, V: Idx, F: Deref<Target = &'a LayoutS<V>> + Debug>(
+ &self,
+ repr: &ReprOptions,
+ variants: &IndexVec<V, Vec<F>>,
+ ) -> Option<LayoutS<V>> {
+ let dl = self.current_data_layout();
+ let dl = dl.borrow();
+ let mut align = if repr.pack.is_some() { dl.i8_align } else { dl.aggregate_align };
+
+ if let Some(repr_align) = repr.align {
+ align = align.max(AbiAndPrefAlign::new(repr_align));
+ }
+
+ let optimize = !repr.inhibit_union_abi_opt();
+ let mut size = Size::ZERO;
+ let mut abi = Abi::Aggregate { sized: true };
+ let index = V::new(0);
+ for field in &variants[index] {
+ assert!(field.is_sized());
+ align = align.max(field.align);
+
+ // If all non-ZST fields have the same ABI, forward this ABI
+ if optimize && !field.is_zst() {
+ // Discard valid range information and allow undef
+ let field_abi = match field.abi {
+ Abi::Scalar(x) => Abi::Scalar(x.to_union()),
+ Abi::ScalarPair(x, y) => Abi::ScalarPair(x.to_union(), y.to_union()),
+ Abi::Vector { element: x, count } => {
+ Abi::Vector { element: x.to_union(), count }
+ }
+ Abi::Uninhabited | Abi::Aggregate { .. } => Abi::Aggregate { sized: true },
+ };
+
+ if size == Size::ZERO {
+ // first non-ZST field: initialize `abi`
+ abi = field_abi;
+ } else if abi != field_abi {
+ // different fields have different ABI: reset to Aggregate
+ abi = Abi::Aggregate { sized: true };
+ }
+ }
+
+ size = cmp::max(size, field.size);
+ }
+
+ if let Some(pack) = repr.pack {
+ align = align.min(AbiAndPrefAlign::new(pack));
+ }
+
+ Some(LayoutS {
+ variants: Variants::Single { index },
+ fields: FieldsShape::Union(NonZeroUsize::new(variants[index].len())?),
+ abi,
+ largest_niche: None,
+ align,
+ size: size.align_to(align.abi),
+ })
+ }
+}
diff --git a/compiler/rustc_abi/src/lib.rs b/compiler/rustc_abi/src/lib.rs
new file mode 100644
index 000000000..e14c9ea9a
--- /dev/null
+++ b/compiler/rustc_abi/src/lib.rs
@@ -0,0 +1,1502 @@
+#![cfg_attr(feature = "nightly", feature(step_trait, rustc_attrs, min_specialization))]
+
+use std::convert::{TryFrom, TryInto};
+use std::fmt;
+#[cfg(feature = "nightly")]
+use std::iter::Step;
+use std::num::{NonZeroUsize, ParseIntError};
+use std::ops::{Add, AddAssign, Mul, RangeInclusive, Sub};
+use std::str::FromStr;
+
+use bitflags::bitflags;
+#[cfg(feature = "nightly")]
+use rustc_data_structures::stable_hasher::StableOrd;
+use rustc_index::vec::{Idx, IndexVec};
+#[cfg(feature = "nightly")]
+use rustc_macros::HashStable_Generic;
+#[cfg(feature = "nightly")]
+use rustc_macros::{Decodable, Encodable};
+
+mod layout;
+
+pub use layout::LayoutCalculator;
+
+/// Requirements for a `StableHashingContext` to be used in this crate.
+/// This is a hack to allow using the `HashStable_Generic` derive macro
+/// instead of implementing everything in `rustc_middle`.
+pub trait HashStableContext {}
+
+use Integer::*;
+use Primitive::*;
+
+bitflags! {
+ #[derive(Default)]
+ #[cfg_attr(feature = "nightly", derive(Encodable, Decodable, HashStable_Generic))]
+ pub struct ReprFlags: u8 {
+ const IS_C = 1 << 0;
+ const IS_SIMD = 1 << 1;
+ const IS_TRANSPARENT = 1 << 2;
+ // Internal only for now. If true, don't reorder fields.
+ const IS_LINEAR = 1 << 3;
+ // If true, the type's layout can be randomized using
+ // the seed stored in `ReprOptions.layout_seed`
+ const RANDOMIZE_LAYOUT = 1 << 4;
+ // Any of these flags being set prevent field reordering optimisation.
+ const IS_UNOPTIMISABLE = ReprFlags::IS_C.bits
+ | ReprFlags::IS_SIMD.bits
+ | ReprFlags::IS_LINEAR.bits;
+ }
+}
+
+#[derive(Copy, Clone, Debug, Eq, PartialEq)]
+#[cfg_attr(feature = "nightly", derive(Encodable, Decodable, HashStable_Generic))]
+pub enum IntegerType {
+ /// Pointer-sized integer type, i.e. `isize` and `usize`. The bool field shows
+ /// signedness; that is, `Pointer(true)` is `isize`.
+ Pointer(bool),
+ /// Fixed-size integer type, e.g. `i8`, `u32`, `i128`. The bool field shows
+ /// signedness; `Fixed(I8, false)` means `u8`.
+ Fixed(Integer, bool),
+}
+
+impl IntegerType {
+ pub fn is_signed(&self) -> bool {
+ match self {
+ IntegerType::Pointer(b) => *b,
+ IntegerType::Fixed(_, b) => *b,
+ }
+ }
+}
+
+ /// Represents the repr options provided by the user.
+#[derive(Copy, Clone, Debug, Eq, PartialEq, Default)]
+#[cfg_attr(feature = "nightly", derive(Encodable, Decodable, HashStable_Generic))]
+pub struct ReprOptions {
+ pub int: Option<IntegerType>,
+ pub align: Option<Align>,
+ pub pack: Option<Align>,
+ pub flags: ReprFlags,
+ /// The seed to be used for randomizing a type's layout
+ ///
+ /// Note: This could technically be a `[u8; 16]` (a `u128`) which would
+ /// be the "most accurate" hash as it'd encompass the item and crate
+ /// hash without loss, but it does pay the price of being larger.
+ /// Everything's a tradeoff, a `u64` seed should be sufficient for our
+ /// purposes (primarily `-Z randomize-layout`)
+ pub field_shuffle_seed: u64,
+}
+
+impl ReprOptions {
+ #[inline]
+ pub fn simd(&self) -> bool {
+ self.flags.contains(ReprFlags::IS_SIMD)
+ }
+
+ #[inline]
+ pub fn c(&self) -> bool {
+ self.flags.contains(ReprFlags::IS_C)
+ }
+
+ #[inline]
+ pub fn packed(&self) -> bool {
+ self.pack.is_some()
+ }
+
+ #[inline]
+ pub fn transparent(&self) -> bool {
+ self.flags.contains(ReprFlags::IS_TRANSPARENT)
+ }
+
+ #[inline]
+ pub fn linear(&self) -> bool {
+ self.flags.contains(ReprFlags::IS_LINEAR)
+ }
+
+ /// Returns the discriminant type, given these `repr` options.
+ /// This must only be called on enums!
+ pub fn discr_type(&self) -> IntegerType {
+ self.int.unwrap_or(IntegerType::Pointer(true))
+ }
+
+ /// Returns `true` if this `#[repr()]` should inhibit "smart enum
+ /// layout" optimizations, such as representing `Foo<&T>` as a
+ /// single pointer.
+ pub fn inhibit_enum_layout_opt(&self) -> bool {
+ self.c() || self.int.is_some()
+ }
+
+ /// Returns `true` if this `#[repr()]` should inhibit struct field reordering
+ /// optimizations, such as with `repr(C)`, `repr(packed(1))`, or `repr(<int>)`.
+ pub fn inhibit_struct_field_reordering_opt(&self) -> bool {
+ if let Some(pack) = self.pack {
+ if pack.bytes() == 1 {
+ return true;
+ }
+ }
+
+ self.flags.intersects(ReprFlags::IS_UNOPTIMISABLE) || self.int.is_some()
+ }
+
+ /// Returns `true` if this type is valid for reordering and `-Z randomize-layout`
+ /// was enabled for its declaration crate
+ pub fn can_randomize_type_layout(&self) -> bool {
+ !self.inhibit_struct_field_reordering_opt()
+ && self.flags.contains(ReprFlags::RANDOMIZE_LAYOUT)
+ }
+
+ /// Returns `true` if this `#[repr()]` should inhibit union ABI optimisations.
+ pub fn inhibit_union_abi_opt(&self) -> bool {
+ self.c()
+ }
+}
+
+/// Parsed [Data layout](https://llvm.org/docs/LangRef.html#data-layout)
+/// for a target, which contains everything needed to compute layouts.
+#[derive(Debug, PartialEq, Eq)]
+pub struct TargetDataLayout {
+ pub endian: Endian,
+ pub i1_align: AbiAndPrefAlign,
+ pub i8_align: AbiAndPrefAlign,
+ pub i16_align: AbiAndPrefAlign,
+ pub i32_align: AbiAndPrefAlign,
+ pub i64_align: AbiAndPrefAlign,
+ pub i128_align: AbiAndPrefAlign,
+ pub f32_align: AbiAndPrefAlign,
+ pub f64_align: AbiAndPrefAlign,
+ pub pointer_size: Size,
+ pub pointer_align: AbiAndPrefAlign,
+ pub aggregate_align: AbiAndPrefAlign,
+
+ /// Alignments for vector types.
+ pub vector_align: Vec<(Size, AbiAndPrefAlign)>,
+
+ pub instruction_address_space: AddressSpace,
+
+ /// Minimum size of `#[repr(C)]` enums (default: `I32`, i.e. 32 bits)
+ pub c_enum_min_size: Integer,
+}
+
+impl Default for TargetDataLayout {
+ /// Creates an instance of `TargetDataLayout`.
+ fn default() -> TargetDataLayout {
+ let align = |bits| Align::from_bits(bits).unwrap();
+ TargetDataLayout {
+ endian: Endian::Big,
+ i1_align: AbiAndPrefAlign::new(align(8)),
+ i8_align: AbiAndPrefAlign::new(align(8)),
+ i16_align: AbiAndPrefAlign::new(align(16)),
+ i32_align: AbiAndPrefAlign::new(align(32)),
+ i64_align: AbiAndPrefAlign { abi: align(32), pref: align(64) },
+ i128_align: AbiAndPrefAlign { abi: align(32), pref: align(64) },
+ f32_align: AbiAndPrefAlign::new(align(32)),
+ f64_align: AbiAndPrefAlign::new(align(64)),
+ pointer_size: Size::from_bits(64),
+ pointer_align: AbiAndPrefAlign::new(align(64)),
+ aggregate_align: AbiAndPrefAlign { abi: align(0), pref: align(64) },
+ vector_align: vec![
+ (Size::from_bits(64), AbiAndPrefAlign::new(align(64))),
+ (Size::from_bits(128), AbiAndPrefAlign::new(align(128))),
+ ],
+ instruction_address_space: AddressSpace::DATA,
+ c_enum_min_size: Integer::I32,
+ }
+ }
+}
+
+pub enum TargetDataLayoutErrors<'a> {
+ InvalidAddressSpace { addr_space: &'a str, cause: &'a str, err: ParseIntError },
+ InvalidBits { kind: &'a str, bit: &'a str, cause: &'a str, err: ParseIntError },
+ MissingAlignment { cause: &'a str },
+ InvalidAlignment { cause: &'a str, err: String },
+ InconsistentTargetArchitecture { dl: &'a str, target: &'a str },
+ InconsistentTargetPointerWidth { pointer_size: u64, target: u32 },
+ InvalidBitsSize { err: String },
+}
+
+impl TargetDataLayout {
+ /// Parse data layout from an [llvm data layout string](https://llvm.org/docs/LangRef.html#data-layout)
+ ///
+ /// This function doesn't fill `c_enum_min_size`; it will always be `I32`, since it
+ /// cannot be determined from the LLVM string.
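+ ///
+ /// For example, an x86_64-style string such as
+ /// `"e-m:e-i64:64-f80:128-n8:16:32:64-S128"` parses here: `e` selects little-endian
+ /// and `i64:64` sets the 64-bit integer alignment, while unrecognized components
+ /// such as `m:e`, `n8:16:32:64` and `S128` are ignored.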
+ pub fn parse_from_llvm_datalayout_string<'a>(
+ input: &'a str,
+ ) -> Result<TargetDataLayout, TargetDataLayoutErrors<'a>> {
+ // Parse an address space index from a string.
+ let parse_address_space = |s: &'a str, cause: &'a str| {
+ s.parse::<u32>().map(AddressSpace).map_err(|err| {
+ TargetDataLayoutErrors::InvalidAddressSpace { addr_space: s, cause, err }
+ })
+ };
+
+ // Parse a bit count from a string.
+ let parse_bits = |s: &'a str, kind: &'a str, cause: &'a str| {
+ s.parse::<u64>().map_err(|err| TargetDataLayoutErrors::InvalidBits {
+ kind,
+ bit: s,
+ cause,
+ err,
+ })
+ };
+
+ // Parse a size string.
+ let size = |s: &'a str, cause: &'a str| parse_bits(s, "size", cause).map(Size::from_bits);
+
+ // Parse an alignment string.
+ let align = |s: &[&'a str], cause: &'a str| {
+ if s.is_empty() {
+ return Err(TargetDataLayoutErrors::MissingAlignment { cause });
+ }
+ let align_from_bits = |bits| {
+ Align::from_bits(bits)
+ .map_err(|err| TargetDataLayoutErrors::InvalidAlignment { cause, err })
+ };
+ let abi = parse_bits(s[0], "alignment", cause)?;
+ let pref = s.get(1).map_or(Ok(abi), |pref| parse_bits(pref, "alignment", cause))?;
+ Ok(AbiAndPrefAlign { abi: align_from_bits(abi)?, pref: align_from_bits(pref)? })
+ };
+
+ let mut dl = TargetDataLayout::default();
+ let mut i128_align_src = 64;
+ for spec in input.split('-') {
+ let spec_parts = spec.split(':').collect::<Vec<_>>();
+
+ match &*spec_parts {
+ ["e"] => dl.endian = Endian::Little,
+ ["E"] => dl.endian = Endian::Big,
+ [p] if p.starts_with('P') => {
+ dl.instruction_address_space = parse_address_space(&p[1..], "P")?
+ }
+ ["a", ref a @ ..] => dl.aggregate_align = align(a, "a")?,
+ ["f32", ref a @ ..] => dl.f32_align = align(a, "f32")?,
+ ["f64", ref a @ ..] => dl.f64_align = align(a, "f64")?,
+ [p @ "p", s, ref a @ ..] | [p @ "p0", s, ref a @ ..] => {
+ dl.pointer_size = size(s, p)?;
+ dl.pointer_align = align(a, p)?;
+ }
+ [s, ref a @ ..] if s.starts_with('i') => {
+ let Ok(bits) = s[1..].parse::<u64>() else {
+ size(&s[1..], "i")?; // For the user error.
+ continue;
+ };
+ let a = align(a, s)?;
+ match bits {
+ 1 => dl.i1_align = a,
+ 8 => dl.i8_align = a,
+ 16 => dl.i16_align = a,
+ 32 => dl.i32_align = a,
+ 64 => dl.i64_align = a,
+ _ => {}
+ }
+ if bits >= i128_align_src && bits <= 128 {
+ // Default alignment for i128 is decided by taking the alignment of
+ // largest-sized i{64..=128}.
+ i128_align_src = bits;
+ dl.i128_align = a;
+ }
+ }
+ [s, ref a @ ..] if s.starts_with('v') => {
+ let v_size = size(&s[1..], "v")?;
+ let a = align(a, s)?;
+ if let Some(v) = dl.vector_align.iter_mut().find(|v| v.0 == v_size) {
+ v.1 = a;
+ continue;
+ }
+ // No existing entry, add a new one.
+ dl.vector_align.push((v_size, a));
+ }
+ _ => {} // Ignore everything else.
+ }
+ }
+ Ok(dl)
+ }
+
+ /// Returns exclusive upper bound on object size.
+ ///
+ /// The theoretical maximum object size is defined as the maximum positive `isize` value.
+ /// This ensures that the `offset` semantics remain well-defined by allowing it to correctly
+ /// index every address within an object along with one byte past the end, along with allowing
+ /// `isize` to store the difference between any two pointers into an object.
+ ///
+ /// The upper bound on 64-bit currently needs to be lower because LLVM uses a 64-bit integer
+ /// to represent object size in bits. It would need to be 1 << 61 to account for this, but is
+ /// currently conservatively bounded to 1 << 47 as that is enough to cover the current usable
+ /// address space on 64-bit ARMv8 and x86_64.
+ #[inline]
+ pub fn obj_size_bound(&self) -> u64 {
+ match self.pointer_size.bits() {
+ 16 => 1 << 15,
+ 32 => 1 << 31,
+ 64 => 1 << 47,
+ bits => panic!("obj_size_bound: unknown pointer bit size {}", bits),
+ }
+ }
+
+ #[inline]
+ pub fn ptr_sized_integer(&self) -> Integer {
+ match self.pointer_size.bits() {
+ 16 => I16,
+ 32 => I32,
+ 64 => I64,
+ bits => panic!("ptr_sized_integer: unknown pointer bit size {}", bits),
+ }
+ }
+
+ #[inline]
+ pub fn vector_align(&self, vec_size: Size) -> AbiAndPrefAlign {
+ for &(size, align) in &self.vector_align {
+ if size == vec_size {
+ return align;
+ }
+ }
+ // Default to natural alignment, which is what LLVM does.
+ // That is, use the size, rounded up to a power of 2.
+ AbiAndPrefAlign::new(Align::from_bytes(vec_size.bytes().next_power_of_two()).unwrap())
+ }
+}
+
+pub trait HasDataLayout {
+ fn data_layout(&self) -> &TargetDataLayout;
+}
+
+impl HasDataLayout for TargetDataLayout {
+ #[inline]
+ fn data_layout(&self) -> &TargetDataLayout {
+ self
+ }
+}
+
+/// Endianness of the target, which must match cfg(target-endian).
+#[derive(Copy, Clone, PartialEq, Eq)]
+pub enum Endian {
+ Little,
+ Big,
+}
+
+impl Endian {
+ pub fn as_str(&self) -> &'static str {
+ match self {
+ Self::Little => "little",
+ Self::Big => "big",
+ }
+ }
+}
+
+impl fmt::Debug for Endian {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ f.write_str(self.as_str())
+ }
+}
+
+impl FromStr for Endian {
+ type Err = String;
+
+ fn from_str(s: &str) -> Result<Self, Self::Err> {
+ match s {
+ "little" => Ok(Self::Little),
+ "big" => Ok(Self::Big),
+ _ => Err(format!(r#"unknown endian: "{}""#, s)),
+ }
+ }
+}
+
+/// Size of a type in bytes.
+#[derive(Copy, Clone, PartialEq, Eq, PartialOrd, Ord, Hash)]
+#[cfg_attr(feature = "nightly", derive(Encodable, Decodable, HashStable_Generic))]
+pub struct Size {
+ raw: u64,
+}
+
+ // Safety: Ord is implemented as just comparing numerical values, and numerical values
+// are not changed by (de-)serialization.
+#[cfg(feature = "nightly")]
+unsafe impl StableOrd for Size {}
+
+// This is debug-printed a lot in larger structs, don't waste too much space there
+impl fmt::Debug for Size {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ write!(f, "Size({} bytes)", self.bytes())
+ }
+}
+
+impl Size {
+ pub const ZERO: Size = Size { raw: 0 };
+
+ /// Rounds `bits` up to the next-higher byte boundary, if `bits` is
+ /// not a multiple of 8.
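+ ///
+ /// For example, `Size::from_bits(12)` rounds up to 2 bytes.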
+ pub fn from_bits(bits: impl TryInto<u64>) -> Size {
+ let bits = bits.try_into().ok().unwrap();
+ // Avoid potential overflow from `bits + 7`.
+ Size { raw: bits / 8 + ((bits % 8) + 7) / 8 }
+ }
+
+ #[inline]
+ pub fn from_bytes(bytes: impl TryInto<u64>) -> Size {
+ let bytes: u64 = bytes.try_into().ok().unwrap();
+ Size { raw: bytes }
+ }
+
+ #[inline]
+ pub fn bytes(self) -> u64 {
+ self.raw
+ }
+
+ #[inline]
+ pub fn bytes_usize(self) -> usize {
+ self.bytes().try_into().unwrap()
+ }
+
+ #[inline]
+ pub fn bits(self) -> u64 {
+ #[cold]
+ fn overflow(bytes: u64) -> ! {
+ panic!("Size::bits: {} bytes in bits doesn't fit in u64", bytes)
+ }
+
+ self.bytes().checked_mul(8).unwrap_or_else(|| overflow(self.bytes()))
+ }
+
+ #[inline]
+ pub fn bits_usize(self) -> usize {
+ self.bits().try_into().unwrap()
+ }
+
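+ /// Rounds `self` up to the nearest multiple of `align`; for example, a 5-byte
+ /// size aligned to 4 bytes becomes 8 bytes.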
+ #[inline]
+ pub fn align_to(self, align: Align) -> Size {
+ let mask = align.bytes() - 1;
+ Size::from_bytes((self.bytes() + mask) & !mask)
+ }
+
+ #[inline]
+ pub fn is_aligned(self, align: Align) -> bool {
+ let mask = align.bytes() - 1;
+ self.bytes() & mask == 0
+ }
+
+ #[inline]
+ pub fn checked_add<C: HasDataLayout>(self, offset: Size, cx: &C) -> Option<Size> {
+ let dl = cx.data_layout();
+
+ let bytes = self.bytes().checked_add(offset.bytes())?;
+
+ if bytes < dl.obj_size_bound() { Some(Size::from_bytes(bytes)) } else { None }
+ }
+
+ #[inline]
+ pub fn checked_mul<C: HasDataLayout>(self, count: u64, cx: &C) -> Option<Size> {
+ let dl = cx.data_layout();
+
+ let bytes = self.bytes().checked_mul(count)?;
+ if bytes < dl.obj_size_bound() { Some(Size::from_bytes(bytes)) } else { None }
+ }
+
+ /// Truncates `value` to `self` bits and then sign-extends it to 128 bits
+ /// (i.e., if it is negative, fill with 1's on the left).
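+ ///
+ /// For example, with an 8-bit size, `0xff` sign-extends to `u128::MAX` (the
+ /// 128-bit representation of -1).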
+ #[inline]
+ pub fn sign_extend(self, value: u128) -> u128 {
+ let size = self.bits();
+ if size == 0 {
+ // Truncated until nothing is left.
+ return 0;
+ }
+ // Sign-extend it.
+ let shift = 128 - size;
+ // Shift the unsigned value to the left, then shift back to the right as signed
+ // (essentially fills with sign bit on the left).
+ (((value << shift) as i128) >> shift) as u128
+ }
+
+ /// Truncates `value` to `self` bits.
+ #[inline]
+ pub fn truncate(self, value: u128) -> u128 {
+ let size = self.bits();
+ if size == 0 {
+ // Truncated until nothing is left.
+ return 0;
+ }
+ let shift = 128 - size;
+ // Truncate (shift left to drop out leftover values, shift right to fill with zeroes).
+ (value << shift) >> shift
+ }
+
+ #[inline]
+ pub fn signed_int_min(&self) -> i128 {
+ self.sign_extend(1_u128 << (self.bits() - 1)) as i128
+ }
+
+ #[inline]
+ pub fn signed_int_max(&self) -> i128 {
+ i128::MAX >> (128 - self.bits())
+ }
+
+ #[inline]
+ pub fn unsigned_int_max(&self) -> u128 {
+ u128::MAX >> (128 - self.bits())
+ }
+}
+
+// Panicking addition, subtraction and multiplication for convenience.
+// Avoid during layout computation, return `LayoutError` instead.
+
+impl Add for Size {
+ type Output = Size;
+ #[inline]
+ fn add(self, other: Size) -> Size {
+ Size::from_bytes(self.bytes().checked_add(other.bytes()).unwrap_or_else(|| {
+ panic!("Size::add: {} + {} doesn't fit in u64", self.bytes(), other.bytes())
+ }))
+ }
+}
+
+impl Sub for Size {
+ type Output = Size;
+ #[inline]
+ fn sub(self, other: Size) -> Size {
+ Size::from_bytes(self.bytes().checked_sub(other.bytes()).unwrap_or_else(|| {
+ panic!("Size::sub: {} - {} would result in negative size", self.bytes(), other.bytes())
+ }))
+ }
+}
+
+impl Mul<Size> for u64 {
+ type Output = Size;
+ #[inline]
+ fn mul(self, size: Size) -> Size {
+ size * self
+ }
+}
+
+impl Mul<u64> for Size {
+ type Output = Size;
+ #[inline]
+ fn mul(self, count: u64) -> Size {
+ match self.bytes().checked_mul(count) {
+ Some(bytes) => Size::from_bytes(bytes),
+ None => panic!("Size::mul: {} * {} doesn't fit in u64", self.bytes(), count),
+ }
+ }
+}
+
+impl AddAssign for Size {
+ #[inline]
+ fn add_assign(&mut self, other: Size) {
+ *self = *self + other;
+ }
+}
+
+#[cfg(feature = "nightly")]
+impl Step for Size {
+ #[inline]
+ fn steps_between(start: &Self, end: &Self) -> Option<usize> {
+ u64::steps_between(&start.bytes(), &end.bytes())
+ }
+
+ #[inline]
+ fn forward_checked(start: Self, count: usize) -> Option<Self> {
+ u64::forward_checked(start.bytes(), count).map(Self::from_bytes)
+ }
+
+ #[inline]
+ fn forward(start: Self, count: usize) -> Self {
+ Self::from_bytes(u64::forward(start.bytes(), count))
+ }
+
+ #[inline]
+ unsafe fn forward_unchecked(start: Self, count: usize) -> Self {
+ Self::from_bytes(u64::forward_unchecked(start.bytes(), count))
+ }
+
+ #[inline]
+ fn backward_checked(start: Self, count: usize) -> Option<Self> {
+ u64::backward_checked(start.bytes(), count).map(Self::from_bytes)
+ }
+
+ #[inline]
+ fn backward(start: Self, count: usize) -> Self {
+ Self::from_bytes(u64::backward(start.bytes(), count))
+ }
+
+ #[inline]
+ unsafe fn backward_unchecked(start: Self, count: usize) -> Self {
+ Self::from_bytes(u64::backward_unchecked(start.bytes(), count))
+ }
+}
+
+/// Alignment of a type in bytes (always a power of two).
+#[derive(Copy, Clone, PartialEq, Eq, PartialOrd, Ord, Hash)]
+#[cfg_attr(feature = "nightly", derive(Encodable, Decodable, HashStable_Generic))]
+pub struct Align {
+ pow2: u8,
+}
+
+// This is debug-printed a lot in larger structs, don't waste too much space there
+impl fmt::Debug for Align {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ write!(f, "Align({} bytes)", self.bytes())
+ }
+}
+
+impl Align {
+ pub const ONE: Align = Align { pow2: 0 };
+ pub const MAX: Align = Align { pow2: 29 };
+
+ #[inline]
+ pub fn from_bits(bits: u64) -> Result<Align, String> {
+ Align::from_bytes(Size::from_bits(bits).bytes())
+ }
+
+ #[inline]
+ pub fn from_bytes(align: u64) -> Result<Align, String> {
+ // Treat an alignment of 0 bytes like 1-byte alignment.
+ if align == 0 {
+ return Ok(Align::ONE);
+ }
+
+ #[cold]
+ fn not_power_of_2(align: u64) -> String {
+ format!("`{}` is not a power of 2", align)
+ }
+
+ #[cold]
+ fn too_large(align: u64) -> String {
+ format!("`{}` is too large", align)
+ }
+
+ let mut bytes = align;
+ let mut pow2: u8 = 0;
+ while (bytes & 1) == 0 {
+ pow2 += 1;
+ bytes >>= 1;
+ }
+ if bytes != 1 {
+ return Err(not_power_of_2(align));
+ }
+ if pow2 > Self::MAX.pow2 {
+ return Err(too_large(align));
+ }
+
+ Ok(Align { pow2 })
+ }
+
+ #[inline]
+ pub fn bytes(self) -> u64 {
+ 1 << self.pow2
+ }
+
+ #[inline]
+ pub fn bits(self) -> u64 {
+ self.bytes() * 8
+ }
+
+ /// Computes the best alignment possible for the given offset
+ /// (the largest power of two that the offset is a multiple of).
+ ///
+ /// N.B., for an offset of `0`, this happens to return `2^64`.
+ #[inline]
+ pub fn max_for_offset(offset: Size) -> Align {
+ Align { pow2: offset.bytes().trailing_zeros() as u8 }
+ }
+
+ /// Lower the alignment, if necessary, such that the given offset
+ /// is aligned to it (the offset is a multiple of the alignment).
+ #[inline]
+ pub fn restrict_for_offset(self, offset: Size) -> Align {
+ self.min(Align::max_for_offset(offset))
+ }
+}
+
+/// A pair of alignments, ABI-mandated and preferred.
+#[derive(Copy, Clone, PartialEq, Eq, Hash, Debug)]
+#[cfg_attr(feature = "nightly", derive(HashStable_Generic))]
+
+pub struct AbiAndPrefAlign {
+ pub abi: Align,
+ pub pref: Align,
+}
+
+impl AbiAndPrefAlign {
+ #[inline]
+ pub fn new(align: Align) -> AbiAndPrefAlign {
+ AbiAndPrefAlign { abi: align, pref: align }
+ }
+
+ #[inline]
+ pub fn min(self, other: AbiAndPrefAlign) -> AbiAndPrefAlign {
+ AbiAndPrefAlign { abi: self.abi.min(other.abi), pref: self.pref.min(other.pref) }
+ }
+
+ #[inline]
+ pub fn max(self, other: AbiAndPrefAlign) -> AbiAndPrefAlign {
+ AbiAndPrefAlign { abi: self.abi.max(other.abi), pref: self.pref.max(other.pref) }
+ }
+}
+
+/// Integers, also used for enum discriminants.
+#[derive(Copy, Clone, PartialEq, Eq, PartialOrd, Ord, Hash, Debug)]
+#[cfg_attr(feature = "nightly", derive(Encodable, Decodable, HashStable_Generic))]
+
+pub enum Integer {
+ I8,
+ I16,
+ I32,
+ I64,
+ I128,
+}
+
+impl Integer {
+ #[inline]
+ pub fn size(self) -> Size {
+ match self {
+ I8 => Size::from_bytes(1),
+ I16 => Size::from_bytes(2),
+ I32 => Size::from_bytes(4),
+ I64 => Size::from_bytes(8),
+ I128 => Size::from_bytes(16),
+ }
+ }
+
+ /// Gets the Integer type from an IntegerType.
+ pub fn from_attr<C: HasDataLayout>(cx: &C, ity: IntegerType) -> Integer {
+ let dl = cx.data_layout();
+
+ match ity {
+ IntegerType::Pointer(_) => dl.ptr_sized_integer(),
+ IntegerType::Fixed(x, _) => x,
+ }
+ }
+
+ pub fn align<C: HasDataLayout>(self, cx: &C) -> AbiAndPrefAlign {
+ let dl = cx.data_layout();
+
+ match self {
+ I8 => dl.i8_align,
+ I16 => dl.i16_align,
+ I32 => dl.i32_align,
+ I64 => dl.i64_align,
+ I128 => dl.i128_align,
+ }
+ }
+
+ /// Finds the smallest Integer type which can represent the signed value.
+ #[inline]
+ pub fn fit_signed(x: i128) -> Integer {
+ match x {
+ -0x0000_0000_0000_0080..=0x0000_0000_0000_007f => I8,
+ -0x0000_0000_0000_8000..=0x0000_0000_0000_7fff => I16,
+ -0x0000_0000_8000_0000..=0x0000_0000_7fff_ffff => I32,
+ -0x8000_0000_0000_0000..=0x7fff_ffff_ffff_ffff => I64,
+ _ => I128,
+ }
+ }
+
+ /// Finds the smallest Integer type which can represent the unsigned value.
+ #[inline]
+ pub fn fit_unsigned(x: u128) -> Integer {
+ match x {
+ 0..=0x0000_0000_0000_00ff => I8,
+ 0..=0x0000_0000_0000_ffff => I16,
+ 0..=0x0000_0000_ffff_ffff => I32,
+ 0..=0xffff_ffff_ffff_ffff => I64,
+ _ => I128,
+ }
+ }
+
+ /// Finds the smallest integer with the given alignment.
+ pub fn for_align<C: HasDataLayout>(cx: &C, wanted: Align) -> Option<Integer> {
+ let dl = cx.data_layout();
+
+ for candidate in [I8, I16, I32, I64, I128] {
+ if wanted == candidate.align(dl).abi && wanted.bytes() == candidate.size().bytes() {
+ return Some(candidate);
+ }
+ }
+ None
+ }
+
+ /// Find the largest integer with the given alignment or less.
+ pub fn approximate_align<C: HasDataLayout>(cx: &C, wanted: Align) -> Integer {
+ let dl = cx.data_layout();
+
+ // FIXME(eddyb) maybe include I128 in the future, when it works everywhere.
+ for candidate in [I64, I32, I16] {
+ if wanted >= candidate.align(dl).abi && wanted.bytes() >= candidate.size().bytes() {
+ return candidate;
+ }
+ }
+ I8
+ }
+
+ // FIXME(eddyb) consolidate this and other methods that find the appropriate
+ // `Integer` given some requirements.
+ #[inline]
+ pub fn from_size(size: Size) -> Result<Self, String> {
+ match size.bits() {
+ 8 => Ok(Integer::I8),
+ 16 => Ok(Integer::I16),
+ 32 => Ok(Integer::I32),
+ 64 => Ok(Integer::I64),
+ 128 => Ok(Integer::I128),
+ _ => Err(format!("rust does not support integers with {} bits", size.bits())),
+ }
+ }
+}
+
+/// Fundamental unit of memory access and layout.
+#[derive(Copy, Clone, PartialEq, Eq, Hash, Debug)]
+#[cfg_attr(feature = "nightly", derive(HashStable_Generic))]
+pub enum Primitive {
+ /// The `bool` is the signedness of the `Integer` type.
+ ///
+ /// One would think we would not care about such details this low down,
+ /// but some ABIs are described in terms of C types and ISAs where the
+ /// integer arithmetic is done on {sign,zero}-extended registers, e.g.
+ /// a negative integer passed by zero-extension will appear positive in
+ /// the callee, and most operations on it will produce the wrong values.
+ Int(Integer, bool),
+ F32,
+ F64,
+ Pointer,
+}
+
+impl Primitive {
+ pub fn size<C: HasDataLayout>(self, cx: &C) -> Size {
+ let dl = cx.data_layout();
+
+ match self {
+ Int(i, _) => i.size(),
+ F32 => Size::from_bits(32),
+ F64 => Size::from_bits(64),
+ Pointer => dl.pointer_size,
+ }
+ }
+
+ pub fn align<C: HasDataLayout>(self, cx: &C) -> AbiAndPrefAlign {
+ let dl = cx.data_layout();
+
+ match self {
+ Int(i, _) => i.align(dl),
+ F32 => dl.f32_align,
+ F64 => dl.f64_align,
+ Pointer => dl.pointer_align,
+ }
+ }
+
+ // FIXME(eddyb) remove, it's trivial thanks to `matches!`.
+ #[inline]
+ pub fn is_float(self) -> bool {
+ matches!(self, F32 | F64)
+ }
+
+ // FIXME(eddyb) remove, it's completely unused.
+ #[inline]
+ pub fn is_int(self) -> bool {
+ matches!(self, Int(..))
+ }
+
+ #[inline]
+ pub fn is_ptr(self) -> bool {
+ matches!(self, Pointer)
+ }
+}
+
+/// Inclusive wrap-around range of valid values, that is, if
+/// start > end, it represents `start..=MAX`,
+/// followed by `0..=end`.
+///
+ /// That is, for an i8 primitive, a range of `254..=2` means the following
+/// sequence:
+///
+/// 254 (-2), 255 (-1), 0, 1, 2
+///
+/// This is intended specifically to mirror LLVM’s `!range` metadata semantics.
+#[derive(Clone, Copy, PartialEq, Eq, Hash)]
+#[cfg_attr(feature = "nightly", derive(HashStable_Generic))]
+pub struct WrappingRange {
+ pub start: u128,
+ pub end: u128,
+}
+
+impl WrappingRange {
+ pub fn full(size: Size) -> Self {
+ Self { start: 0, end: size.unsigned_int_max() }
+ }
+
+ /// Returns `true` if `v` is contained in the range.
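+ ///
+ /// For the wrap-around range `254..=2` on a `u8`-sized scalar, `contains(255)` and
+ /// `contains(1)` are `true`, while `contains(3)` is `false`.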
+ #[inline(always)]
+ pub fn contains(&self, v: u128) -> bool {
+ if self.start <= self.end {
+ self.start <= v && v <= self.end
+ } else {
+ self.start <= v || v <= self.end
+ }
+ }
+
+ /// Returns `self` with replaced `start`
+ #[inline(always)]
+ pub fn with_start(mut self, start: u128) -> Self {
+ self.start = start;
+ self
+ }
+
+ /// Returns `self` with replaced `end`
+ #[inline(always)]
+ pub fn with_end(mut self, end: u128) -> Self {
+ self.end = end;
+ self
+ }
+
+ /// Returns `true` if `size` completely fills the range.
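+ ///
+ /// For a 1-byte size, both `0..=255` and the wrap-around `1..=0` are full.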
+ #[inline]
+ pub fn is_full_for(&self, size: Size) -> bool {
+ let max_value = size.unsigned_int_max();
+ debug_assert!(self.start <= max_value && self.end <= max_value);
+ self.start == (self.end.wrapping_add(1) & max_value)
+ }
+}
+
+impl fmt::Debug for WrappingRange {
+ fn fmt(&self, fmt: &mut fmt::Formatter<'_>) -> fmt::Result {
+ if self.start > self.end {
+ write!(fmt, "(..={}) | ({}..)", self.end, self.start)?;
+ } else {
+ write!(fmt, "{}..={}", self.start, self.end)?;
+ }
+ Ok(())
+ }
+}
+
+/// Information about one scalar component of a Rust type.
+#[derive(Clone, Copy, PartialEq, Eq, Hash, Debug)]
+#[cfg_attr(feature = "nightly", derive(HashStable_Generic))]
+pub enum Scalar {
+ Initialized {
+ value: Primitive,
+
+ // FIXME(eddyb) always use the shortest range, e.g., by finding
+ // the largest space between two consecutive valid values and
+ // taking everything else as the (shortest) valid range.
+ valid_range: WrappingRange,
+ },
+ Union {
+ /// Even for unions, we need to use the correct registers for the kind of
+ /// values inside the union, so we keep the `Primitive` type around. We
+ /// also use it to compute the size of the scalar.
+ /// However, unions never have niches and even allow undef,
+ /// so there is no `valid_range`.
+ value: Primitive,
+ },
+}
+
+impl Scalar {
+ #[inline]
+ pub fn is_bool(&self) -> bool {
+ matches!(
+ self,
+ Scalar::Initialized {
+ value: Int(I8, false),
+ valid_range: WrappingRange { start: 0, end: 1 }
+ }
+ )
+ }
+
+ /// Get the primitive representation of this type, ignoring the valid range and whether the
+ /// value is allowed to be undefined (due to being a union).
+ pub fn primitive(&self) -> Primitive {
+ match *self {
+ Scalar::Initialized { value, .. } | Scalar::Union { value } => value,
+ }
+ }
+
+ pub fn align(self, cx: &impl HasDataLayout) -> AbiAndPrefAlign {
+ self.primitive().align(cx)
+ }
+
+ pub fn size(self, cx: &impl HasDataLayout) -> Size {
+ self.primitive().size(cx)
+ }
+
+ #[inline]
+ pub fn to_union(&self) -> Self {
+ Self::Union { value: self.primitive() }
+ }
+
+ #[inline]
+ pub fn valid_range(&self, cx: &impl HasDataLayout) -> WrappingRange {
+ match *self {
+ Scalar::Initialized { valid_range, .. } => valid_range,
+ Scalar::Union { value } => WrappingRange::full(value.size(cx)),
+ }
+ }
+
+ #[inline]
+ /// Allows the caller to mutate the valid range. This operation will panic if attempted on a union.
+ pub fn valid_range_mut(&mut self) -> &mut WrappingRange {
+ match self {
+ Scalar::Initialized { valid_range, .. } => valid_range,
+ Scalar::Union { .. } => panic!("cannot change the valid range of a union"),
+ }
+ }
+
+ /// Returns `true` if all possible numbers are valid, i.e. `valid_range` covers the whole layout.
+ #[inline]
+ pub fn is_always_valid<C: HasDataLayout>(&self, cx: &C) -> bool {
+ match *self {
+ Scalar::Initialized { valid_range, .. } => valid_range.is_full_for(self.size(cx)),
+ Scalar::Union { .. } => true,
+ }
+ }
+
+ /// Returns `true` if this type can be left uninit.
+ #[inline]
+ pub fn is_uninit_valid(&self) -> bool {
+ match *self {
+ Scalar::Initialized { .. } => false,
+ Scalar::Union { .. } => true,
+ }
+ }
+}
+
+/// Describes how the fields of a type are located in memory.
+#[derive(PartialEq, Eq, Hash, Clone, Debug)]
+#[cfg_attr(feature = "nightly", derive(HashStable_Generic))]
+pub enum FieldsShape {
+ /// Scalar primitives and `!`, which never have fields.
+ Primitive,
+
+ /// All fields start at no offset. The `usize` is the field count.
+ Union(NonZeroUsize),
+
+ /// Array/vector-like placement, with all fields of identical types.
+ Array { stride: Size, count: u64 },
+
+ /// Struct-like placement, with precomputed offsets.
+ ///
+ /// Fields are guaranteed to not overlap, but note that gaps
+ /// before, between and after all the fields are NOT always
+ /// padding, and as such their contents may not be discarded.
+ /// For example, enum variants leave a gap at the start,
+ /// where the discriminant field in the enum layout goes.
+ Arbitrary {
+ /// Offsets for the first byte of each field,
+ /// ordered to match the source definition order.
+ /// This vector does not necessarily go in increasing order.
+ // FIXME(eddyb) use small vector optimization for the common case.
+ offsets: Vec<Size>,
+
+ /// Maps source order field indices to memory order indices,
+ /// depending on how the fields were reordered (if at all).
+ /// This is a permutation, with both the source order and the
+ /// memory order using the same (0..n) index ranges.
+ ///
+ /// Note that during computation of `memory_index`, sometimes
+ /// it is easier to operate on the inverse mapping (that is,
+ /// from memory order to source order), and that is usually
+ /// named `inverse_memory_index`.
+ ///
+ // FIXME(eddyb) build a better abstraction for permutations, if possible.
+ // FIXME(camlorn) also consider small vector optimization here.
+ memory_index: Vec<u32>,
+ },
+}
+
+impl FieldsShape {
+ #[inline]
+ pub fn count(&self) -> usize {
+ match *self {
+ FieldsShape::Primitive => 0,
+ FieldsShape::Union(count) => count.get(),
+ FieldsShape::Array { count, .. } => count.try_into().unwrap(),
+ FieldsShape::Arbitrary { ref offsets, .. } => offsets.len(),
+ }
+ }
+
+ #[inline]
+ pub fn offset(&self, i: usize) -> Size {
+ match *self {
+ FieldsShape::Primitive => {
+ unreachable!("FieldsShape::offset: `Primitive`s have no fields")
+ }
+ FieldsShape::Union(count) => {
+ assert!(
+ i < count.get(),
+ "tried to access field {} of union with {} fields",
+ i,
+ count
+ );
+ Size::ZERO
+ }
+ FieldsShape::Array { stride, count } => {
+ let i = u64::try_from(i).unwrap();
+ assert!(i < count);
+ stride * i
+ }
+ FieldsShape::Arbitrary { ref offsets, .. } => offsets[i],
+ }
+ }
+
+ #[inline]
+ pub fn memory_index(&self, i: usize) -> usize {
+ match *self {
+ FieldsShape::Primitive => {
+ unreachable!("FieldsShape::memory_index: `Primitive`s have no fields")
+ }
+ FieldsShape::Union(_) | FieldsShape::Array { .. } => i,
+ FieldsShape::Arbitrary { ref memory_index, .. } => memory_index[i].try_into().unwrap(),
+ }
+ }
+
+ /// Gets source indices of the fields by increasing offsets.
+ #[inline]
+ pub fn index_by_increasing_offset<'a>(&'a self) -> impl Iterator<Item = usize> + 'a {
+ let mut inverse_small = [0u8; 64];
+ let mut inverse_big = vec![];
+ let use_small = self.count() <= inverse_small.len();
+
+ // We have to write this logic twice in order to keep the array small.
+ if let FieldsShape::Arbitrary { ref memory_index, .. } = *self {
+ if use_small {
+ for i in 0..self.count() {
+ inverse_small[memory_index[i] as usize] = i as u8;
+ }
+ } else {
+ inverse_big = vec![0; self.count()];
+ for i in 0..self.count() {
+ inverse_big[memory_index[i] as usize] = i as u32;
+ }
+ }
+ }
+
+ (0..self.count()).map(move |i| match *self {
+ FieldsShape::Primitive | FieldsShape::Union(_) | FieldsShape::Array { .. } => i,
+ FieldsShape::Arbitrary { .. } => {
+ if use_small {
+ inverse_small[i] as usize
+ } else {
+ inverse_big[i] as usize
+ }
+ }
+ })
+ }
+}
+
+/// An identifier that specifies the address space that some operation
+/// should operate on. Special address spaces have an effect on code generation,
+/// depending on the target and the address spaces it implements.
+#[derive(Copy, Clone, Debug, PartialEq, Eq, PartialOrd, Ord)]
+pub struct AddressSpace(pub u32);
+
+impl AddressSpace {
+ /// The default address space, corresponding to data space.
+ pub const DATA: Self = AddressSpace(0);
+}
+
+/// Describes how values of the type are passed by target ABIs,
+/// in terms of categories of C types there are ABI rules for.
+#[derive(Clone, Copy, PartialEq, Eq, Hash, Debug)]
+#[cfg_attr(feature = "nightly", derive(HashStable_Generic))]
+
+pub enum Abi {
+ Uninhabited,
+ Scalar(Scalar),
+ ScalarPair(Scalar, Scalar),
+ Vector {
+ element: Scalar,
+ count: u64,
+ },
+ Aggregate {
+ /// If true, the size is exact, otherwise it's only a lower bound.
+ sized: bool,
+ },
+}
+
+impl Abi {
+ /// Returns `true` if the layout corresponds to an unsized type.
+ #[inline]
+ pub fn is_unsized(&self) -> bool {
+ match *self {
+ Abi::Uninhabited | Abi::Scalar(_) | Abi::ScalarPair(..) | Abi::Vector { .. } => false,
+ Abi::Aggregate { sized } => !sized,
+ }
+ }
+
+ #[inline]
+ pub fn is_sized(&self) -> bool {
+ !self.is_unsized()
+ }
+
+ /// Returns `true` if this is a single signed integer scalar
+ #[inline]
+ pub fn is_signed(&self) -> bool {
+ match self {
+ Abi::Scalar(scal) => match scal.primitive() {
+ Primitive::Int(_, signed) => signed,
+ _ => false,
+ },
+ _ => panic!("`is_signed` on non-scalar ABI {:?}", self),
+ }
+ }
+
+ /// Returns `true` if this is an uninhabited type
+ #[inline]
+ pub fn is_uninhabited(&self) -> bool {
+ matches!(*self, Abi::Uninhabited)
+ }
+
+    /// Returns `true` if this is a scalar type
+ #[inline]
+ pub fn is_scalar(&self) -> bool {
+ matches!(*self, Abi::Scalar(_))
+ }
+}
+
+#[derive(PartialEq, Eq, Hash, Clone, Debug)]
+#[cfg_attr(feature = "nightly", derive(HashStable_Generic))]
+pub enum Variants<V: Idx> {
+ /// Single enum variants, structs/tuples, unions, and all non-ADTs.
+ Single { index: V },
+
+ /// Enum-likes with more than one inhabited variant: each variant comes with
+ /// a *discriminant* (usually the same as the variant index but the user can
+ /// assign explicit discriminant values). That discriminant is encoded
+ /// as a *tag* on the machine. The layout of each variant is
+ /// a struct, and they all have space reserved for the tag.
+ /// For enums, the tag is the sole field of the layout.
+ Multiple {
+ tag: Scalar,
+ tag_encoding: TagEncoding<V>,
+ tag_field: usize,
+ variants: IndexVec<V, LayoutS<V>>,
+ },
+}
+
+#[derive(PartialEq, Eq, Hash, Clone, Debug)]
+#[cfg_attr(feature = "nightly", derive(HashStable_Generic))]
+pub enum TagEncoding<V: Idx> {
+ /// The tag directly stores the discriminant, but possibly with a smaller layout
+ /// (so converting the tag to the discriminant can require sign extension).
+ Direct,
+
+ /// Niche (values invalid for a type) encoding the discriminant:
+ /// Discriminant and variant index coincide.
+ /// The variant `untagged_variant` contains a niche at an arbitrary
+ /// offset (field `tag_field` of the enum), which for a variant with
+ /// discriminant `d` is set to
+ /// `(d - niche_variants.start).wrapping_add(niche_start)`.
+ ///
+ /// For example, `Option<(usize, &T)>` is represented such that
+ /// `None` has a null pointer for the second tuple field, and
+ /// `Some` is the identity function (with a non-null reference).
+ Niche { untagged_variant: V, niche_variants: RangeInclusive<V>, niche_start: u128 },
+}
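A worked instance of the tag formula in the `Niche` doc comment above, using plain arithmetic only; the concrete numbers (a bool-like niche field in something like `Option<bool>`) are illustrative assumptions, not taken from this patch.

fn main() {
    // A bool-like niche field has valid range 0..=1, so its niche starts at 2.
    let niche_start: u128 = 2;
    let niche_variants_start: u128 = 0; // index of the first niched variant, e.g. `None`
    let d: u128 = 0;                    // discriminant of that variant
    // `(d - niche_variants.start).wrapping_add(niche_start)` from the doc comment:
    let tag = (d - niche_variants_start).wrapping_add(niche_start);
    assert_eq!(tag, 2); // the niched variant is stored as the out-of-range value 2
}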
+
+#[derive(Clone, Copy, PartialEq, Eq, Hash, Debug)]
+#[cfg_attr(feature = "nightly", derive(HashStable_Generic))]
+pub struct Niche {
+ pub offset: Size,
+ pub value: Primitive,
+ pub valid_range: WrappingRange,
+}
+
+impl Niche {
+ pub fn from_scalar<C: HasDataLayout>(cx: &C, offset: Size, scalar: Scalar) -> Option<Self> {
+ let Scalar::Initialized { value, valid_range } = scalar else { return None };
+ let niche = Niche { offset, value, valid_range };
+ if niche.available(cx) > 0 { Some(niche) } else { None }
+ }
+
+ pub fn available<C: HasDataLayout>(&self, cx: &C) -> u128 {
+ let Self { value, valid_range: v, .. } = *self;
+ let size = value.size(cx);
+ assert!(size.bits() <= 128);
+ let max_value = size.unsigned_int_max();
+
+ // Find out how many values are outside the valid range.
+ let niche = v.end.wrapping_add(1)..v.start;
+ niche.end.wrapping_sub(niche.start) & max_value
+ }
+
+ pub fn reserve<C: HasDataLayout>(&self, cx: &C, count: u128) -> Option<(u128, Scalar)> {
+ assert!(count > 0);
+
+ let Self { value, valid_range: v, .. } = *self;
+ let size = value.size(cx);
+ assert!(size.bits() <= 128);
+ let max_value = size.unsigned_int_max();
+
+ let niche = v.end.wrapping_add(1)..v.start;
+ let available = niche.end.wrapping_sub(niche.start) & max_value;
+ if count > available {
+ return None;
+ }
+
+ // Extend the range of valid values being reserved by moving either `v.start` or `v.end` bound.
+ // Given an eventual `Option<T>`, we try to maximize the chance for `None` to occupy the niche of zero.
+        // This is accomplished by preferring enums with 2 variants (`count==1`) and always taking the shortest path to niche zero.
+ // Having `None` in niche zero can enable some special optimizations.
+ //
+ // Bound selection criteria:
+ // 1. Select closest to zero given wrapping semantics.
+ // 2. Avoid moving past zero if possible.
+ //
+ // In practice this means that enums with `count > 1` are unlikely to claim niche zero, since they have to fit perfectly.
+        // If niche zero is already reserved, the selection of bounds is of little interest.
+ let move_start = |v: WrappingRange| {
+ let start = v.start.wrapping_sub(count) & max_value;
+ Some((start, Scalar::Initialized { value, valid_range: v.with_start(start) }))
+ };
+ let move_end = |v: WrappingRange| {
+ let start = v.end.wrapping_add(1) & max_value;
+ let end = v.end.wrapping_add(count) & max_value;
+ Some((start, Scalar::Initialized { value, valid_range: v.with_end(end) }))
+ };
+ let distance_end_zero = max_value - v.end;
+ if v.start > v.end {
+ // zero is unavailable because wrapping occurs
+ move_end(v)
+ } else if v.start <= distance_end_zero {
+ if count <= v.start {
+ move_start(v)
+ } else {
+ // moved past zero, use other bound
+ move_end(v)
+ }
+ } else {
+ let end = v.end.wrapping_add(count) & max_value;
+ let overshot_zero = (1..=v.end).contains(&end);
+ if overshot_zero {
+ // moved past zero, use other bound
+ move_start(v)
+ } else {
+ move_end(v)
+ }
+ }
+ }
+}
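To make the wrapping arithmetic in `available` (and the bound selection in `reserve`) concrete, here is a standalone sketch with a `NonZeroU8`-like scalar, i.e. a `u8` whose valid range is 1..=255; the numbers are illustrative, not part of the patch.

fn main() {
    let (start, end): (u128, u128) = (1, 255); // valid_range of a NonZeroU8-like scalar
    let max_value: u128 = 0xff;                // unsigned_int_max() of a u8
    // Same computation as `available`: count the values outside the valid range.
    let available = start.wrapping_sub(end.wrapping_add(1)) & max_value;
    assert_eq!(available, 1); // only 0 lies outside 1..=255
    // `reserve(.., 1)` would hand that single value out by moving a bound,
    // which matches how `Option<NonZeroU8>` ends up with `None` stored as 0.
}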
+
+#[derive(PartialEq, Eq, Hash, Clone)]
+#[cfg_attr(feature = "nightly", derive(HashStable_Generic))]
+pub struct LayoutS<V: Idx> {
+ /// Says where the fields are located within the layout.
+ pub fields: FieldsShape,
+
+ /// Encodes information about multi-variant layouts.
+ /// Even with `Multiple` variants, a layout still has its own fields! Those are then
+ /// shared between all variants. One of them will be the discriminant,
+ /// but e.g. generators can have more.
+ ///
+ /// To access all fields of this layout, both `fields` and the fields of the active variant
+ /// must be taken into account.
+ pub variants: Variants<V>,
+
+ /// The `abi` defines how this data is passed between functions, and it defines
+ /// value restrictions via `valid_range`.
+ ///
+ /// Note that this is entirely orthogonal to the recursive structure defined by
+ /// `variants` and `fields`; for example, `ManuallyDrop<Result<isize, isize>>` has
+ /// `Abi::ScalarPair`! So, even with non-`Aggregate` `abi`, `fields` and `variants`
+ /// have to be taken into account to find all fields of this layout.
+ pub abi: Abi,
+
+ /// The leaf scalar with the largest number of invalid values
+ /// (i.e. outside of its `valid_range`), if it exists.
+ pub largest_niche: Option<Niche>,
+
+ pub align: AbiAndPrefAlign,
+ pub size: Size,
+}
+
+impl<V: Idx> LayoutS<V> {
+ pub fn scalar<C: HasDataLayout>(cx: &C, scalar: Scalar) -> Self {
+ let largest_niche = Niche::from_scalar(cx, Size::ZERO, scalar);
+ let size = scalar.size(cx);
+ let align = scalar.align(cx);
+ LayoutS {
+ variants: Variants::Single { index: V::new(0) },
+ fields: FieldsShape::Primitive,
+ abi: Abi::Scalar(scalar),
+ largest_niche,
+ size,
+ align,
+ }
+ }
+}
+
+impl<V: Idx> fmt::Debug for LayoutS<V> {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        // This is how `Layout` used to print before it became
+ // `Interned<LayoutS>`. We print it like this to avoid having to update
+ // expected output in a lot of tests.
+ let LayoutS { size, align, abi, fields, largest_niche, variants } = self;
+ f.debug_struct("Layout")
+ .field("size", size)
+ .field("align", align)
+ .field("abi", abi)
+ .field("fields", fields)
+ .field("largest_niche", largest_niche)
+ .field("variants", variants)
+ .finish()
+ }
+}
+
+#[derive(Copy, Clone, PartialEq, Eq, Debug)]
+pub enum PointerKind {
+ /// Most general case, we know no restrictions to tell LLVM.
+ SharedMutable,
+
+ /// `&T` where `T` contains no `UnsafeCell`, is `dereferenceable`, `noalias` and `readonly`.
+ Frozen,
+
+ /// `&mut T` which is `dereferenceable` and `noalias` but not `readonly`.
+ UniqueBorrowed,
+
+ /// `&mut !Unpin`, which is `dereferenceable` but neither `noalias` nor `readonly`.
+ UniqueBorrowedPinned,
+
+ /// `Box<T>`, which is `noalias` (even on return types, unlike the above) but neither `readonly`
+ /// nor `dereferenceable`.
+ UniqueOwned,
+}
+
+#[derive(Copy, Clone, Debug)]
+pub struct PointeeInfo {
+ pub size: Size,
+ pub align: Align,
+ pub safe: Option<PointerKind>,
+ pub address_space: AddressSpace,
+}
+
+/// Used in `might_permit_raw_init` to indicate the kind of initialisation
+/// that is checked to be valid
+#[derive(Copy, Clone, Debug, PartialEq, Eq)]
+pub enum InitKind {
+ Zero,
+ UninitMitigated0x01Fill,
+}
+
+impl<V: Idx> LayoutS<V> {
+ /// Returns `true` if the layout corresponds to an unsized type.
+ pub fn is_unsized(&self) -> bool {
+ self.abi.is_unsized()
+ }
+
+ pub fn is_sized(&self) -> bool {
+ self.abi.is_sized()
+ }
+
+ /// Returns `true` if the type is a ZST and not unsized.
+ pub fn is_zst(&self) -> bool {
+ match self.abi {
+ Abi::Scalar(_) | Abi::ScalarPair(..) | Abi::Vector { .. } => false,
+ Abi::Uninhabited => self.size.bytes() == 0,
+ Abi::Aggregate { sized } => sized && self.size.bytes() == 0,
+ }
+ }
+}
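The `is_zst` check above corresponds to the everyday notion of a zero-sized type; a plain-Rust sanity check, independent of the rustc internals:

fn main() {
    // Sized types occupying zero bytes are the ones `is_zst` reports as true.
    assert_eq!(std::mem::size_of::<()>(), 0);
    assert_eq!(std::mem::size_of::<[u64; 0]>(), 0);
    assert_eq!(std::mem::size_of::<std::marker::PhantomData<u8>>(), 0);
    assert_ne!(std::mem::size_of::<u8>(), 0);
}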
+
+#[derive(Copy, Clone, Debug)]
+pub enum StructKind {
+ /// A tuple, closure, or univariant which cannot be coerced to unsized.
+ AlwaysSized,
+ /// A univariant, the last field of which may be coerced to unsized.
+ MaybeUnsized,
+ /// A univariant, but with a prefix of an arbitrary size & alignment (e.g., enum tag).
+ Prefixed(Size, Align),
+}
diff --git a/compiler/rustc_arena/src/tests.rs b/compiler/rustc_arena/src/tests.rs
index ad6146434..49a070bad 100644
--- a/compiler/rustc_arena/src/tests.rs
+++ b/compiler/rustc_arena/src/tests.rs
@@ -52,19 +52,15 @@ fn test_arena_alloc_nested() {
impl<'a> Wrap<'a> {
fn alloc_inner<F: Fn() -> Inner>(&self, f: F) -> &Inner {
- let r: &EI<'_> = self.0.alloc(EI::I(f()));
- if let &EI::I(ref i) = r {
- i
- } else {
- panic!("mismatch");
+ match self.0.alloc(EI::I(f())) {
+ EI::I(i) => i,
+ _ => panic!("mismatch"),
}
}
fn alloc_outer<F: Fn() -> Outer<'a>>(&self, f: F) -> &Outer<'_> {
- let r: &EI<'_> = self.0.alloc(EI::O(f()));
- if let &EI::O(ref o) = r {
- o
- } else {
- panic!("mismatch");
+ match self.0.alloc(EI::O(f())) {
+ EI::O(o) => o,
+ _ => panic!("mismatch"),
}
}
}
diff --git a/compiler/rustc_ast/Cargo.toml b/compiler/rustc_ast/Cargo.toml
index fcbf96818..9253b7e68 100644
--- a/compiler/rustc_ast/Cargo.toml
+++ b/compiler/rustc_ast/Cargo.toml
@@ -14,5 +14,5 @@ rustc_macros = { path = "../rustc_macros" }
rustc_serialize = { path = "../rustc_serialize" }
rustc_span = { path = "../rustc_span" }
smallvec = { version = "1.8.1", features = ["union", "may_dangle"] }
-thin-vec = "0.2.8"
+thin-vec = "0.2.9"
tracing = "0.1"
diff --git a/compiler/rustc_ast/src/ast.rs b/compiler/rustc_ast/src/ast.rs
index 4ef43735a..4d80f904a 100644
--- a/compiler/rustc_ast/src/ast.rs
+++ b/compiler/rustc_ast/src/ast.rs
@@ -13,7 +13,7 @@
//! - [`FnDecl`], [`FnHeader`] and [`Param`]: Metadata associated with a function declaration.
//! - [`Generics`], [`GenericParam`], [`WhereClause`]: Metadata associated with generic parameters.
//! - [`EnumDef`] and [`Variant`]: Enum declaration.
-//! - [`Lit`] and [`LitKind`]: Literal expressions.
+//! - [`MetaItemLit`] and [`LitKind`]: Literal expressions.
//! - [`MacroDef`], [`MacStmtStyle`], [`MacCall`], [`MacDelimiter`]: Macro definition and invocation.
//! - [`Attribute`]: Metadata associated with item.
//! - [`UnOp`], [`BinOp`], and [`BinOpKind`]: Unary and binary operators.
@@ -36,7 +36,7 @@ use rustc_span::{Span, DUMMY_SP};
use std::convert::TryFrom;
use std::fmt;
use std::mem;
-use thin_vec::ThinVec;
+use thin_vec::{thin_vec, ThinVec};
/// A "Label" is an identifier of some point in sources,
/// e.g. in the following code:
@@ -90,7 +90,7 @@ pub struct Path {
pub span: Span,
/// The segments in the path: the things separated by `::`.
/// Global paths begin with `kw::PathRoot`.
- pub segments: Vec<PathSegment>,
+ pub segments: ThinVec<PathSegment>,
pub tokens: Option<LazyAttrTokenStream>,
}
@@ -111,10 +111,10 @@ impl<CTX: rustc_span::HashStableContext> HashStable<CTX> for Path {
}
impl Path {
- // Convert a span and an identifier to the corresponding
- // one-segment path.
+ /// Convert a span and an identifier to the corresponding
+ /// one-segment path.
pub fn from_ident(ident: Ident) -> Path {
- Path { segments: vec![PathSegment::from_ident(ident)], span: ident.span, tokens: None }
+ Path { segments: thin_vec![PathSegment::from_ident(ident)], span: ident.span, tokens: None }
}
pub fn is_global(&self) -> bool {
@@ -175,9 +175,9 @@ impl GenericArgs {
}
pub fn span(&self) -> Span {
- match *self {
- AngleBracketed(ref data) => data.span,
- Parenthesized(ref data) => data.span,
+ match self {
+ AngleBracketed(data) => data.span,
+ Parenthesized(data) => data.span,
}
}
}
@@ -312,8 +312,8 @@ pub enum GenericBound {
impl GenericBound {
pub fn span(&self) -> Span {
match self {
- GenericBound::Trait(ref t, ..) => t.span,
- GenericBound::Outlives(ref l) => l.ident.span,
+ GenericBound::Trait(t, ..) => t.span,
+ GenericBound::Outlives(l) => l.ident.span,
}
}
}
@@ -392,15 +392,7 @@ pub struct Generics {
impl Default for Generics {
/// Creates an instance of `Generics`.
fn default() -> Generics {
- Generics {
- params: Vec::new(),
- where_clause: WhereClause {
- has_where_token: false,
- predicates: Vec::new(),
- span: DUMMY_SP,
- },
- span: DUMMY_SP,
- }
+ Generics { params: Vec::new(), where_clause: Default::default(), span: DUMMY_SP }
}
}
@@ -415,6 +407,12 @@ pub struct WhereClause {
pub span: Span,
}
+impl Default for WhereClause {
+ fn default() -> WhereClause {
+ WhereClause { has_where_token: false, predicates: Vec::new(), span: DUMMY_SP }
+ }
+}
+
/// A single predicate in a where-clause.
#[derive(Clone, Encodable, Decodable, Debug)]
pub enum WherePredicate {
@@ -481,20 +479,10 @@ pub struct Crate {
pub is_placeholder: bool,
}
-/// Possible values inside of compile-time attribute lists.
-///
-/// E.g., the '..' in `#[name(..)]`.
-#[derive(Clone, Encodable, Decodable, Debug, HashStable_Generic)]
-pub enum NestedMetaItem {
- /// A full MetaItem, for recursive meta items.
- MetaItem(MetaItem),
- /// A literal.
- ///
- /// E.g., `"foo"`, `64`, `true`.
- Literal(Lit),
-}
-
-/// A spanned compile-time attribute item.
+/// A semantic representation of a meta item. A meta item is a slightly
+/// restricted form of an attribute -- it can only contain expressions in
+/// certain leaf positions, rather than arbitrary token streams -- that is used
+/// for most built-in attributes.
///
/// E.g., `#[test]`, `#[derive(..)]`, `#[rustfmt::skip]` or `#[feature = "foo"]`.
#[derive(Clone, Encodable, Decodable, Debug, HashStable_Generic)]
@@ -504,23 +492,37 @@ pub struct MetaItem {
pub span: Span,
}
-/// A compile-time attribute item.
-///
-/// E.g., `#[test]`, `#[derive(..)]` or `#[feature = "foo"]`.
+/// The meta item kind, containing the data after the initial path.
#[derive(Clone, Encodable, Decodable, Debug, HashStable_Generic)]
pub enum MetaItemKind {
/// Word meta item.
///
- /// E.g., `test` as in `#[test]`.
+ /// E.g., `#[test]`, which lacks any arguments after `test`.
Word,
+
/// List meta item.
///
- /// E.g., `derive(..)` as in `#[derive(..)]`.
+ /// E.g., `#[derive(..)]`, where the field represents the `..`.
List(Vec<NestedMetaItem>),
+
/// Name value meta item.
///
- /// E.g., `feature = "foo"` as in `#[feature = "foo"]`.
- NameValue(Lit),
+ /// E.g., `#[feature = "foo"]`, where the field represents the `"foo"`.
+ NameValue(MetaItemLit),
+}
+
+/// Values inside meta item lists.
+///
+/// E.g., each of `Clone`, `Copy` in `#[derive(Clone, Copy)]`.
+#[derive(Clone, Encodable, Decodable, Debug, HashStable_Generic)]
+pub enum NestedMetaItem {
+ /// A full MetaItem, for recursive meta items.
+ MetaItem(MetaItem),
+
+ /// A literal.
+ ///
+ /// E.g., `"foo"`, `64`, `true`.
+ Lit(MetaItemLit),
}
/// A block (`{ .. }`).
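Stepping back from the hunk above: a simplified mirror of the new meta item shapes (not the real `rustc_ast` types; the names are reused only for illustration) shows how common attributes decompose.

#[derive(Debug)]
enum MetaItemKind { Word, List(Vec<NestedMetaItem>), NameValue(String) }

#[derive(Debug)]
enum NestedMetaItem { MetaItem(String, MetaItemKind), Lit(String) }

fn main() {
    // `#[derive(Clone, Copy)]`: a `List` of nested `Word` meta items.
    let derive = MetaItemKind::List(vec![
        NestedMetaItem::MetaItem("Clone".into(), MetaItemKind::Word),
        NestedMetaItem::MetaItem("Copy".into(), MetaItemKind::Word),
    ]);
    // `#[feature = "foo"]`: a `NameValue` carrying the literal.
    let feature = MetaItemKind::NameValue("foo".into());
    // A bare literal entry (e.g. the `64` in a hypothetical `#[attr(64)]`)
    // would appear as `NestedMetaItem::Lit`.
    let lit = NestedMetaItem::Lit("64".into());
    println!("{derive:?} {feature:?} {lit:?}");
}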
@@ -720,10 +722,10 @@ pub enum PatKind {
/// A struct or struct variant pattern (e.g., `Variant {x, y, ..}`).
/// The `bool` is `true` in the presence of a `..`.
- Struct(Option<QSelf>, Path, Vec<PatField>, /* recovered */ bool),
+ Struct(Option<P<QSelf>>, Path, Vec<PatField>, /* recovered */ bool),
/// A tuple struct/variant pattern (`Variant(x, y, .., z)`).
- TupleStruct(Option<QSelf>, Path, Vec<P<Pat>>),
+ TupleStruct(Option<P<QSelf>>, Path, Vec<P<Pat>>),
/// An or-pattern `A | B | C`.
/// Invariant: `pats.len() >= 2`.
@@ -733,7 +735,7 @@ pub enum PatKind {
/// Unqualified path patterns `A::B::C` can legally refer to variants, structs, constants
/// or associated constants. Qualified path patterns `<A>::B::C`/`<A as Trait>::B::C` can
/// only legally refer to associated constants.
- Path(Option<QSelf>, Path),
+ Path(Option<P<QSelf>>, Path),
/// A tuple pattern (`(a, b)`).
Tuple(Vec<P<Pat>>),
@@ -777,8 +779,9 @@ pub enum PatKind {
#[derive(Clone, PartialEq, Eq, PartialOrd, Ord, Hash, Debug, Copy)]
#[derive(HashStable_Generic, Encodable, Decodable)]
pub enum Mutability {
- Mut,
+ // N.B. Order is deliberate, so that Not < Mut
Not,
+ Mut,
}
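The reordering above matters because a derived `Ord` follows declaration order; a minimal standalone check of that property (a mirrored enum, not the rustc one):

#[derive(PartialEq, Eq, PartialOrd, Ord, Debug)]
enum Mutability { Not, Mut }

fn main() {
    // With `Not` declared first, `Not < Mut`, as the N.B. comment intends.
    assert!(Mutability::Not < Mutability::Mut);
}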
impl Mutability {
@@ -789,12 +792,39 @@ impl Mutability {
}
}
- pub fn prefix_str(&self) -> &'static str {
+ /// Returns `""` (empty string) or `"mut "` depending on the mutability.
+ pub fn prefix_str(self) -> &'static str {
match self {
Mutability::Mut => "mut ",
Mutability::Not => "",
}
}
+
+ /// Returns `"&"` or `"&mut "` depending on the mutability.
+ pub fn ref_prefix_str(self) -> &'static str {
+ match self {
+ Mutability::Not => "&",
+ Mutability::Mut => "&mut ",
+ }
+ }
+
+ /// Returns `""` (empty string) or `"mutably "` depending on the mutability.
+ pub fn mutably_str(self) -> &'static str {
+ match self {
+ Mutability::Not => "",
+ Mutability::Mut => "mutably ",
+ }
+ }
+
+ /// Return `true` if self is mutable
+ pub fn is_mut(self) -> bool {
+ matches!(self, Self::Mut)
+ }
+
+ /// Return `true` if self is **not** mutable
+ pub fn is_not(self) -> bool {
+ matches!(self, Self::Not)
+ }
}
/// The kind of borrow in an `AddrOf` expression,
@@ -1117,23 +1147,23 @@ impl Expr {
/// If this is not the case, name resolution does not resolve `N` when using
/// `min_const_generics` as more complex expressions are not supported.
pub fn is_potential_trivial_const_param(&self) -> bool {
- let this = if let ExprKind::Block(ref block, None) = self.kind {
- if block.stmts.len() == 1 {
- if let StmtKind::Expr(ref expr) = block.stmts[0].kind { expr } else { self }
- } else {
- self
- }
+ let this = if let ExprKind::Block(block, None) = &self.kind
+ && block.stmts.len() == 1
+ && let StmtKind::Expr(expr) = &block.stmts[0].kind
+ {
+ expr
} else {
self
};
- if let ExprKind::Path(None, ref path) = this.kind {
- if path.segments.len() == 1 && path.segments[0].args.is_none() {
- return true;
- }
+ if let ExprKind::Path(None, path) = &this.kind
+ && path.segments.len() == 1
+ && path.segments[0].args.is_none()
+ {
+ true
+ } else {
+ false
}
-
- false
}
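For context on what the rewritten check accepts: a const generic argument may be a bare parameter `N` or a block containing just that path, which is the "trivial" shape `is_potential_trivial_const_param` looks for. A standalone illustration (my own example, not from the patch):

struct Buf<const N: usize>([u8; N]);

// Both `N` and `{ N }` are trivial const arguments and resolve on stable;
// anything more complex (e.g. `{ N + 1 }`) needs `generic_const_exprs`.
fn make<const N: usize>() -> Buf<N> { Buf([0; N]) }
fn make_block<const N: usize>() -> Buf<{ N }> { Buf([0; N]) }

fn main() {
    let _a: Buf<4> = make::<4>();
    let _b: Buf<4> = make_block::<4>();
}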
pub fn to_bound(&self) -> Option<GenericBound> {
@@ -1149,7 +1179,7 @@ impl Expr {
pub fn peel_parens(&self) -> &Expr {
let mut expr = self;
while let ExprKind::Paren(inner) = &expr.kind {
- expr = &inner;
+ expr = inner;
}
expr
}
@@ -1208,7 +1238,7 @@ impl Expr {
ExprKind::Tup(_) => ExprPrecedence::Tup,
ExprKind::Binary(op, ..) => ExprPrecedence::Binary(op.node),
ExprKind::Unary(..) => ExprPrecedence::Unary,
- ExprKind::Lit(_) => ExprPrecedence::Lit,
+ ExprKind::Lit(_) | ExprKind::IncludedBytes(..) => ExprPrecedence::Lit,
ExprKind::Type(..) | ExprKind::Cast(..) => ExprPrecedence::Cast,
ExprKind::Let(..) => ExprPrecedence::Let,
ExprKind::If(..) => ExprPrecedence::If,
@@ -1257,7 +1287,7 @@ impl Expr {
)
}
- // To a first-order approximation, is this a pattern
+ /// To a first-order approximation, is this a pattern?
pub fn is_approximately_pattern(&self) -> bool {
match &self.peel_parens().kind {
ExprKind::Box(_)
@@ -1274,6 +1304,20 @@ impl Expr {
}
}
+#[derive(Clone, Encodable, Decodable, Debug)]
+pub struct Closure {
+ pub binder: ClosureBinder,
+ pub capture_clause: CaptureBy,
+ pub asyncness: Async,
+ pub movability: Movability,
+ pub fn_decl: P<FnDecl>,
+ pub body: P<Expr>,
+ /// The span of the declaration block: 'move |...| -> ...'
+ pub fn_decl_span: Span,
+ /// The span of the argument block `|...|`
+ pub fn_arg_span: Span,
+}
+
/// Limit types of a range (inclusive or exclusive)
#[derive(Copy, Clone, PartialEq, Encodable, Decodable, Debug)]
pub enum RangeLimits {
@@ -1283,6 +1327,20 @@ pub enum RangeLimits {
Closed,
}
+/// A method call (e.g. `x.foo::<Bar, Baz>(a, b, c)`).
+#[derive(Clone, Encodable, Decodable, Debug)]
+pub struct MethodCall {
+ /// The method name and its generic arguments, e.g. `foo::<Bar, Baz>`.
+ pub seg: PathSegment,
+ /// The receiver, e.g. `x`.
+ pub receiver: P<Expr>,
+ /// The arguments, e.g. `a, b, c`.
+ pub args: Vec<P<Expr>>,
+ /// The span of the function, without the dot and receiver e.g. `foo::<Bar,
+ /// Baz>(a, b, c)`.
+ pub span: Span,
+}
+
#[derive(Clone, Encodable, Decodable, Debug)]
pub enum StructRest {
/// `..x`.
@@ -1295,7 +1353,7 @@ pub enum StructRest {
#[derive(Clone, Encodable, Decodable, Debug)]
pub struct StructExpr {
- pub qself: Option<QSelf>,
+ pub qself: Option<P<QSelf>>,
pub path: Path,
pub fields: Vec<ExprField>,
pub rest: StructRest,
@@ -1316,17 +1374,8 @@ pub enum ExprKind {
/// This also represents calling the constructor of
/// tuple-like ADTs such as tuple structs and enum variants.
Call(P<Expr>, Vec<P<Expr>>),
- /// A method call (`x.foo::<'static, Bar, Baz>(a, b, c, d)`)
- ///
- /// The `PathSegment` represents the method name and its generic arguments
- /// (within the angle brackets).
- /// The standalone `Expr` is the receiver expression.
- /// The vector of `Expr` is the arguments.
- /// `x.foo::<Bar, Baz>(a, b, c, d)` is represented as
- /// `ExprKind::MethodCall(PathSegment { foo, [Bar, Baz] }, x, [a, b, c, d])`.
- /// This `Span` is the span of the function, without the dot and receiver
- /// (e.g. `foo(a, b)` in `x.foo(a, b)`
- MethodCall(PathSegment, P<Expr>, Vec<P<Expr>>, Span),
+ /// A method call (e.g. `x.foo::<Bar, Baz>(a, b, c)`).
+ MethodCall(Box<MethodCall>),
/// A tuple (e.g., `(a, b, c, d)`).
Tup(Vec<P<Expr>>),
/// A binary operation (e.g., `a + b`, `a * b`).
@@ -1334,7 +1383,7 @@ pub enum ExprKind {
/// A unary operation (e.g., `!x`, `*x`).
Unary(UnOp, P<Expr>),
/// A literal (e.g., `1`, `"foo"`).
- Lit(Lit),
+ Lit(token::Lit),
/// A cast (e.g., `foo as f64`).
Cast(P<Expr>, P<Ty>),
/// A type ascription (e.g., `42: usize`).
@@ -1361,13 +1410,11 @@ pub enum ExprKind {
/// Conditionless loop (can be exited with `break`, `continue`, or `return`).
///
/// `'label: loop { block }`
- Loop(P<Block>, Option<Label>),
+ Loop(P<Block>, Option<Label>, Span),
/// A `match` block.
Match(P<Expr>, Vec<Arm>),
/// A closure (e.g., `move |a, b, c| a + b + c`).
- ///
- /// The final span is the span of the argument block `|...|`.
- Closure(ClosureBinder, CaptureBy, Async, Movability, P<FnDecl>, P<Expr>, Span),
+ Closure(Box<Closure>),
/// A block (`'label: { ... }`).
Block(P<Block>, Option<Label>),
/// An async block (`async move { ... }`).
@@ -1405,7 +1452,7 @@ pub enum ExprKind {
/// parameters (e.g., `foo::bar::<baz>`).
///
/// Optionally "qualified" (e.g., `<Vec<T> as SomeTrait>::SomeType`).
- Path(Option<QSelf>, Path),
+ Path(Option<P<QSelf>>, Path),
/// A referencing operation (`&a`, `&mut a`, `&raw const a` or `&raw mut a`).
AddrOf(BorrowKind, Mutability, P<Expr>),
@@ -1446,6 +1493,12 @@ pub enum ExprKind {
/// with an optional value to be returned.
Yeet(Option<P<Expr>>),
+ /// Bytes included via `include_bytes!`
+ /// Added for optimization purposes to avoid the need to escape
+ /// large binary blobs - should always behave like [`ExprKind::Lit`]
+ /// with a `ByteStr` literal.
+ IncludedBytes(Lrc<[u8]>),
+
/// Placeholder for an expression that wasn't syntactically well formed in some way.
Err,
}
@@ -1525,55 +1578,48 @@ pub enum ClosureBinder {
#[derive(Clone, Encodable, Decodable, Debug)]
pub struct MacCall {
pub path: Path,
- pub args: P<MacArgs>,
+ pub args: P<DelimArgs>,
pub prior_type_ascription: Option<(Span, bool)>,
}
impl MacCall {
pub fn span(&self) -> Span {
- self.path.span.to(self.args.span().unwrap_or(self.path.span))
+ self.path.span.to(self.args.dspan.entire())
}
}
-/// Arguments passed to an attribute or a function-like macro.
+/// Arguments passed to an attribute macro.
#[derive(Clone, Encodable, Decodable, Debug)]
-pub enum MacArgs {
- /// No arguments - `#[attr]`.
+pub enum AttrArgs {
+ /// No arguments: `#[attr]`.
Empty,
- /// Delimited arguments - `#[attr()/[]/{}]` or `mac!()/[]/{}`.
- Delimited(DelimSpan, MacDelimiter, TokenStream),
- /// Arguments of a key-value attribute - `#[attr = "value"]`.
+ /// Delimited arguments: `#[attr()/[]/{}]`.
+ Delimited(DelimArgs),
+ /// Arguments of a key-value attribute: `#[attr = "value"]`.
Eq(
/// Span of the `=` token.
Span,
/// The "value".
- MacArgsEq,
+ AttrArgsEq,
),
}
-// The RHS of a `MacArgs::Eq` starts out as an expression. Once macro expansion
-// is completed, all cases end up either as a literal, which is the form used
-// after lowering to HIR, or as an error.
+// The RHS of an `AttrArgs::Eq` starts out as an expression. Once macro
+// expansion is completed, all cases end up either as a meta item literal,
+// which is the form used after lowering to HIR, or as an error.
#[derive(Clone, Encodable, Decodable, Debug)]
-pub enum MacArgsEq {
+pub enum AttrArgsEq {
Ast(P<Expr>),
- Hir(Lit),
+ Hir(MetaItemLit),
}
-impl MacArgs {
- pub fn delim(&self) -> Option<Delimiter> {
- match self {
- MacArgs::Delimited(_, delim, _) => Some(delim.to_token()),
- MacArgs::Empty | MacArgs::Eq(..) => None,
- }
- }
-
+impl AttrArgs {
pub fn span(&self) -> Option<Span> {
match self {
- MacArgs::Empty => None,
- MacArgs::Delimited(dspan, ..) => Some(dspan.entire()),
- MacArgs::Eq(eq_span, MacArgsEq::Ast(expr)) => Some(eq_span.to(expr.span)),
- MacArgs::Eq(_, MacArgsEq::Hir(lit)) => {
+ AttrArgs::Empty => None,
+ AttrArgs::Delimited(args) => Some(args.dspan.entire()),
+ AttrArgs::Eq(eq_span, AttrArgsEq::Ast(expr)) => Some(eq_span.to(expr.span)),
+ AttrArgs::Eq(_, AttrArgsEq::Hir(lit)) => {
unreachable!("in literal form when getting span: {:?}", lit);
}
}
@@ -1583,39 +1629,29 @@ impl MacArgs {
/// Proc macros see these tokens, for example.
pub fn inner_tokens(&self) -> TokenStream {
match self {
- MacArgs::Empty => TokenStream::default(),
- MacArgs::Delimited(.., tokens) => tokens.clone(),
- MacArgs::Eq(_, MacArgsEq::Ast(expr)) => TokenStream::from_ast(expr),
- MacArgs::Eq(_, MacArgsEq::Hir(lit)) => {
+ AttrArgs::Empty => TokenStream::default(),
+ AttrArgs::Delimited(args) => args.tokens.clone(),
+ AttrArgs::Eq(_, AttrArgsEq::Ast(expr)) => TokenStream::from_ast(expr),
+ AttrArgs::Eq(_, AttrArgsEq::Hir(lit)) => {
unreachable!("in literal form when getting inner tokens: {:?}", lit)
}
}
}
-
- /// Whether a macro with these arguments needs a semicolon
- /// when used as a standalone item or statement.
- pub fn need_semicolon(&self) -> bool {
- !matches!(self, MacArgs::Delimited(_, MacDelimiter::Brace, _))
- }
}
-impl<CTX> HashStable<CTX> for MacArgs
+impl<CTX> HashStable<CTX> for AttrArgs
where
CTX: crate::HashStableContext,
{
fn hash_stable(&self, ctx: &mut CTX, hasher: &mut StableHasher) {
mem::discriminant(self).hash_stable(ctx, hasher);
match self {
- MacArgs::Empty => {}
- MacArgs::Delimited(dspan, delim, tokens) => {
- dspan.hash_stable(ctx, hasher);
- delim.hash_stable(ctx, hasher);
- tokens.hash_stable(ctx, hasher);
- }
- MacArgs::Eq(_eq_span, MacArgsEq::Ast(expr)) => {
+ AttrArgs::Empty => {}
+ AttrArgs::Delimited(args) => args.hash_stable(ctx, hasher),
+ AttrArgs::Eq(_eq_span, AttrArgsEq::Ast(expr)) => {
unreachable!("hash_stable {:?}", expr);
}
- MacArgs::Eq(eq_span, MacArgsEq::Hir(lit)) => {
+ AttrArgs::Eq(eq_span, AttrArgsEq::Hir(lit)) => {
eq_span.hash_stable(ctx, hasher);
lit.hash_stable(ctx, hasher);
}
@@ -1623,6 +1659,34 @@ where
}
}
+/// Delimited arguments, as used in `#[attr()/[]/{}]` or `mac!()/[]/{}`.
+#[derive(Clone, Encodable, Decodable, Debug)]
+pub struct DelimArgs {
+ pub dspan: DelimSpan,
+ pub delim: MacDelimiter,
+ pub tokens: TokenStream,
+}
+
+impl DelimArgs {
+ /// Whether a macro with these arguments needs a semicolon
+ /// when used as a standalone item or statement.
+ pub fn need_semicolon(&self) -> bool {
+ !matches!(self, DelimArgs { delim: MacDelimiter::Brace, .. })
+ }
+}
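The `need_semicolon` rule above mirrors ordinary macro-call syntax; a plain-Rust illustration with no rustc internals:

macro_rules! noop { () => {}; }

fn main() {
    noop!();   // parenthesized and bracketed calls need the trailing semicolon...
    noop![];
    noop! {}   // ...while the brace-delimited form stands on its own
}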
+
+impl<CTX> HashStable<CTX> for DelimArgs
+where
+ CTX: crate::HashStableContext,
+{
+ fn hash_stable(&self, ctx: &mut CTX, hasher: &mut StableHasher) {
+ let DelimArgs { dspan, delim, tokens } = self;
+ dspan.hash_stable(ctx, hasher);
+ delim.hash_stable(ctx, hasher);
+ tokens.hash_stable(ctx, hasher);
+ }
+}
+
#[derive(Copy, Clone, PartialEq, Eq, Encodable, Decodable, Debug, HashStable_Generic)]
pub enum MacDelimiter {
Parenthesis,
@@ -1652,7 +1716,7 @@ impl MacDelimiter {
/// Represents a macro definition.
#[derive(Clone, Encodable, Decodable, Debug, HashStable_Generic)]
pub struct MacroDef {
- pub body: P<MacArgs>,
+ pub body: P<DelimArgs>,
/// `true` if macro was defined with `macro_rules`.
pub macro_rules: bool,
}
@@ -1668,19 +1732,18 @@ pub enum StrStyle {
Raw(u8),
}
-/// An AST literal.
+/// A literal in a meta item.
#[derive(Clone, Encodable, Decodable, Debug, HashStable_Generic)]
-pub struct Lit {
+pub struct MetaItemLit {
/// The original literal token as written in source code.
pub token_lit: token::Lit,
/// The "semantic" representation of the literal lowered from the original tokens.
/// Strings are unescaped, hexadecimal forms are eliminated, etc.
- /// FIXME: Remove this and only create the semantic representation during lowering to HIR.
pub kind: LitKind,
pub span: Span,
}
-/// Same as `Lit`, but restricted to string literals.
+/// Similar to `MetaItemLit`, but restricted to string literals.
#[derive(Clone, Copy, Encodable, Decodable, Debug)]
pub struct StrLit {
/// The original literal token as written in source code.
@@ -1689,21 +1752,16 @@ pub struct StrLit {
pub suffix: Option<Symbol>,
pub span: Span,
/// The unescaped "semantic" representation of the literal lowered from the original token.
- /// FIXME: Remove this and only create the semantic representation during lowering to HIR.
pub symbol_unescaped: Symbol,
}
impl StrLit {
- pub fn as_lit(&self) -> Lit {
+ pub fn as_token_lit(&self) -> token::Lit {
let token_kind = match self.style {
StrStyle::Cooked => token::Str,
StrStyle::Raw(n) => token::StrRaw(n),
};
- Lit {
- token_lit: token::Lit::new(token_kind, self.symbol, self.suffix),
- span: self.span,
- kind: LitKind::Str(self.symbol_unescaped, self.style),
- }
+ token::Lit::new(token_kind, self.symbol, self.suffix)
}
}
@@ -1729,9 +1787,12 @@ pub enum LitFloatType {
Unsuffixed,
}
-/// Literal kind.
+/// This type is used within both `ast::MetaItemLit` and `hir::Lit`.
///
-/// E.g., `"foo"`, `42`, `12.34`, or `bool`.
+/// Note that the entire literal (including the suffix) is considered when
+/// deciding the `LitKind`. This means that float literals like `1f32` are
+/// classified by this type as `Float`. This is different to `token::LitKind`
+/// which does *not* consider the suffix.
#[derive(Clone, Encodable, Decodable, Debug, Hash, Eq, PartialEq, HashStable_Generic)]
pub enum LitKind {
/// A string literal (`"foo"`). The symbol is unescaped, and so may differ
@@ -1745,10 +1806,11 @@ pub enum LitKind {
Char(char),
/// An integer literal (`1`).
Int(u128, LitIntType),
- /// A float literal (`1f64` or `1E10f64`). Stored as a symbol rather than
- /// `f64` so that `LitKind` can impl `Eq` and `Hash`.
+ /// A float literal (`1.0`, `1f64` or `1E10f64`). The pre-suffix part is
+ /// stored as a symbol rather than `f64` so that `LitKind` can impl `Eq`
+ /// and `Hash`.
Float(Symbol, LitFloatType),
- /// A boolean literal.
+ /// A boolean literal (`true`, `false`).
Bool(bool),
/// Placeholder for a literal that wasn't well-formed in some way.
Err,
@@ -1967,7 +2029,7 @@ impl Ty {
pub fn peel_refs(&self) -> &Self {
let mut final_ty = self;
while let TyKind::Rptr(_, MutTy { ty, .. }) = &final_ty.kind {
- final_ty = &ty;
+ final_ty = ty;
}
final_ty
}
@@ -2004,7 +2066,7 @@ pub enum TyKind {
/// "qualified", e.g., `<Vec<T> as SomeTrait>::SomeType`.
///
/// Type parameters are stored in the `Path` itself.
- Path(Option<QSelf>, Path),
+ Path(Option<P<QSelf>>, Path),
/// A trait object type `Bound1 + Bound2 + Bound3`
/// where `Bound` is a trait or a lifetime.
TraitObject(GenericBounds, TraitObjectSyntax),
@@ -2136,7 +2198,7 @@ impl InlineAsmTemplatePiece {
#[derive(Clone, Encodable, Decodable, Debug)]
pub struct InlineAsmSym {
pub id: NodeId,
- pub qself: Option<QSelf>,
+ pub qself: Option<P<QSelf>>,
pub path: Path,
}
@@ -2376,9 +2438,9 @@ pub enum FnRetTy {
impl FnRetTy {
pub fn span(&self) -> Span {
- match *self {
- FnRetTy::Default(span) => span,
- FnRetTy::Ty(ref ty) => ty.span,
+ match self {
+ &FnRetTy::Default(span) => span,
+ FnRetTy::Ty(ty) => ty.span,
}
}
}
@@ -2457,10 +2519,7 @@ pub struct Variant {
#[derive(Clone, Encodable, Decodable, Debug)]
pub enum UseTreeKind {
/// `use prefix` or `use prefix as rename`
- ///
- /// The extra `NodeId`s are for HIR lowering, when additional statements are created for each
- /// namespace.
- Simple(Option<Ident>, NodeId, NodeId),
+ Simple(Option<Ident>),
/// `use prefix::{...}`
Nested(Vec<(UseTree, NodeId)>),
/// `use prefix::*`
@@ -2479,8 +2538,8 @@ pub struct UseTree {
impl UseTree {
pub fn ident(&self) -> Ident {
match self.kind {
- UseTreeKind::Simple(Some(rename), ..) => rename,
- UseTreeKind::Simple(None, ..) => {
+ UseTreeKind::Simple(Some(rename)) => rename,
+ UseTreeKind::Simple(None) => {
self.prefix.segments.last().expect("empty prefix in a simple import").ident
}
_ => panic!("`UseTree::ident` can only be used on a simple import"),
@@ -2514,17 +2573,10 @@ impl<D: Decoder> Decodable<D> for AttrId {
}
}
-#[derive(Clone, Encodable, Decodable, Debug, HashStable_Generic)]
-pub struct AttrItem {
- pub path: Path,
- pub args: MacArgs,
- pub tokens: Option<LazyAttrTokenStream>,
-}
-
/// A list of attributes.
pub type AttrVec = ThinVec<Attribute>;
-/// Metadata associated with an item.
+/// A syntax-level representation of an attribute.
#[derive(Clone, Encodable, Decodable, Debug)]
pub struct Attribute {
pub kind: AttrKind,
@@ -2536,12 +2588,6 @@ pub struct Attribute {
}
#[derive(Clone, Encodable, Decodable, Debug)]
-pub struct NormalAttr {
- pub item: AttrItem,
- pub tokens: Option<LazyAttrTokenStream>,
-}
-
-#[derive(Clone, Encodable, Decodable, Debug)]
pub enum AttrKind {
/// A normal attribute.
Normal(P<NormalAttr>),
@@ -2552,6 +2598,19 @@ pub enum AttrKind {
DocComment(CommentKind, Symbol),
}
+#[derive(Clone, Encodable, Decodable, Debug)]
+pub struct NormalAttr {
+ pub item: AttrItem,
+ pub tokens: Option<LazyAttrTokenStream>,
+}
+
+#[derive(Clone, Encodable, Decodable, Debug, HashStable_Generic)]
+pub struct AttrItem {
+ pub path: Path,
+ pub args: AttrArgs,
+ pub tokens: Option<LazyAttrTokenStream>,
+}
+
/// `TraitRef`s appear in impls.
///
/// Resolution maps each `TraitRef`'s `ref_id` to its defining trait; that's all
@@ -2640,14 +2699,14 @@ pub enum VariantData {
impl VariantData {
/// Return the fields of this variant.
pub fn fields(&self) -> &[FieldDef] {
- match *self {
- VariantData::Struct(ref fields, ..) | VariantData::Tuple(ref fields, _) => fields,
+ match self {
+ VariantData::Struct(fields, ..) | VariantData::Tuple(fields, _) => fields,
_ => &[],
}
}
/// Return the `NodeId` of this variant's constructor, if it has one.
- pub fn ctor_id(&self) -> Option<NodeId> {
+ pub fn ctor_node_id(&self) -> Option<NodeId> {
match *self {
VariantData::Struct(..) => None,
VariantData::Tuple(_, id) | VariantData::Unit(id) => Some(id),
@@ -3029,28 +3088,28 @@ mod size_asserts {
static_assert_size!(AssocItemKind, 32);
static_assert_size!(Attribute, 32);
static_assert_size!(Block, 48);
- static_assert_size!(Expr, 104);
- static_assert_size!(ExprKind, 72);
+ static_assert_size!(Expr, 72);
+ static_assert_size!(ExprKind, 40);
static_assert_size!(Fn, 184);
static_assert_size!(ForeignItem, 96);
static_assert_size!(ForeignItemKind, 24);
static_assert_size!(GenericArg, 24);
- static_assert_size!(GenericBound, 88);
+ static_assert_size!(GenericBound, 72);
static_assert_size!(Generics, 72);
- static_assert_size!(Impl, 200);
+ static_assert_size!(Impl, 184);
static_assert_size!(Item, 184);
static_assert_size!(ItemKind, 112);
- static_assert_size!(Lit, 48);
static_assert_size!(LitKind, 24);
static_assert_size!(Local, 72);
+ static_assert_size!(MetaItemLit, 48);
static_assert_size!(Param, 40);
- static_assert_size!(Pat, 120);
- static_assert_size!(Path, 40);
+ static_assert_size!(Pat, 88);
+ static_assert_size!(Path, 24);
static_assert_size!(PathSegment, 24);
- static_assert_size!(PatKind, 96);
+ static_assert_size!(PatKind, 64);
static_assert_size!(Stmt, 32);
static_assert_size!(StmtKind, 16);
- static_assert_size!(Ty, 96);
- static_assert_size!(TyKind, 72);
+ static_assert_size!(Ty, 64);
+ static_assert_size!(TyKind, 40);
// tidy-alphabetical-end
}
diff --git a/compiler/rustc_ast/src/attr/mod.rs b/compiler/rustc_ast/src/attr/mod.rs
index 990f4f8f1..057cc26b5 100644
--- a/compiler/rustc_ast/src/attr/mod.rs
+++ b/compiler/rustc_ast/src/attr/mod.rs
@@ -1,35 +1,33 @@
//! Functions dealing with attributes and meta items.
use crate::ast;
-use crate::ast::{AttrId, AttrItem, AttrKind, AttrStyle, Attribute};
-use crate::ast::{Lit, LitKind};
-use crate::ast::{MacArgs, MacArgsEq, MacDelimiter, MetaItem, MetaItemKind, NestedMetaItem};
-use crate::ast::{Path, PathSegment};
+use crate::ast::{AttrArgs, AttrArgsEq, AttrId, AttrItem, AttrKind, AttrStyle, AttrVec, Attribute};
+use crate::ast::{DelimArgs, Expr, ExprKind, LitKind, MetaItemLit};
+use crate::ast::{MacDelimiter, MetaItem, MetaItemKind, NestedMetaItem, NormalAttr};
+use crate::ast::{Path, PathSegment, StrStyle, DUMMY_NODE_ID};
use crate::ptr::P;
use crate::token::{self, CommentKind, Delimiter, Token};
use crate::tokenstream::{DelimSpan, Spacing, TokenTree};
use crate::tokenstream::{LazyAttrTokenStream, TokenStream};
use crate::util::comments;
-
use rustc_data_structures::sync::WorkerLocal;
use rustc_index::bit_set::GrowableBitSet;
-use rustc_span::source_map::BytePos;
use rustc_span::symbol::{sym, Ident, Symbol};
use rustc_span::Span;
-
use std::cell::Cell;
use std::iter;
#[cfg(debug_assertions)]
use std::ops::BitXor;
#[cfg(debug_assertions)]
use std::sync::atomic::{AtomicU32, Ordering};
+use thin_vec::thin_vec;
pub struct MarkedAttrs(GrowableBitSet<AttrId>);
impl MarkedAttrs {
- // We have no idea how many attributes there will be, so just
- // initiate the vectors with 0 bits. We'll grow them as necessary.
pub fn new() -> Self {
+ // We have no idea how many attributes there will be, so just
+ // initiate the vectors with 0 bits. We'll grow them as necessary.
MarkedAttrs(GrowableBitSet::new_empty())
}
@@ -45,16 +43,16 @@ impl MarkedAttrs {
impl NestedMetaItem {
/// Returns the `MetaItem` if `self` is a `NestedMetaItem::MetaItem`.
pub fn meta_item(&self) -> Option<&MetaItem> {
- match *self {
- NestedMetaItem::MetaItem(ref item) => Some(item),
+ match self {
+ NestedMetaItem::MetaItem(item) => Some(item),
_ => None,
}
}
- /// Returns the `Lit` if `self` is a `NestedMetaItem::Literal`s.
- pub fn literal(&self) -> Option<&Lit> {
- match *self {
- NestedMetaItem::Literal(ref lit) => Some(lit),
+    /// Returns the `MetaItemLit` if `self` is a `NestedMetaItem::Lit`.
+ pub fn lit(&self) -> Option<&MetaItemLit> {
+ match self {
+ NestedMetaItem::Lit(lit) => Some(lit),
_ => None,
}
}
@@ -79,12 +77,12 @@ impl NestedMetaItem {
}
/// Returns a name and single literal value tuple of the `MetaItem`.
- pub fn name_value_literal(&self) -> Option<(Symbol, &Lit)> {
+ pub fn name_value_literal(&self) -> Option<(Symbol, &MetaItemLit)> {
self.meta_item().and_then(|meta_item| {
meta_item.meta_item_list().and_then(|meta_item_list| {
if meta_item_list.len() == 1
&& let Some(ident) = meta_item.ident()
- && let Some(lit) = meta_item_list[0].literal()
+ && let Some(lit) = meta_item_list[0].lit()
{
return Some((ident.name, lit));
}
@@ -117,18 +115,18 @@ impl NestedMetaItem {
impl Attribute {
#[inline]
pub fn has_name(&self, name: Symbol) -> bool {
- match self.kind {
- AttrKind::Normal(ref normal) => normal.item.path == name,
+ match &self.kind {
+ AttrKind::Normal(normal) => normal.item.path == name,
AttrKind::DocComment(..) => false,
}
}
/// For a single-segment attribute, returns its name; otherwise, returns `None`.
pub fn ident(&self) -> Option<Ident> {
- match self.kind {
- AttrKind::Normal(ref normal) => {
- if normal.item.path.segments.len() == 1 {
- Some(normal.item.path.segments[0].ident)
+ match &self.kind {
+ AttrKind::Normal(normal) => {
+ if let [ident] = &*normal.item.path.segments {
+ Some(ident.ident)
} else {
None
}
@@ -141,17 +139,15 @@ impl Attribute {
}
pub fn value_str(&self) -> Option<Symbol> {
- match self.kind {
- AttrKind::Normal(ref normal) => {
- normal.item.meta_kind().and_then(|kind| kind.value_str())
- }
+ match &self.kind {
+ AttrKind::Normal(normal) => normal.item.meta_kind().and_then(|kind| kind.value_str()),
AttrKind::DocComment(..) => None,
}
}
pub fn meta_item_list(&self) -> Option<Vec<NestedMetaItem>> {
- match self.kind {
- AttrKind::Normal(ref normal) => match normal.item.meta_kind() {
+ match &self.kind {
+ AttrKind::Normal(normal) => match normal.item.meta_kind() {
Some(MetaItemKind::List(list)) => Some(list),
_ => None,
},
@@ -161,7 +157,7 @@ impl Attribute {
pub fn is_word(&self) -> bool {
if let AttrKind::Normal(normal) = &self.kind {
- matches!(normal.item.args, MacArgs::Empty)
+ matches!(normal.item.args, AttrArgs::Empty)
} else {
false
}
@@ -177,10 +173,12 @@ impl MetaItem {
self.ident().unwrap_or_else(Ident::empty).name
}
- // Example:
- // #[attribute(name = "value")]
- // ^^^^^^^^^^^^^^
- pub fn name_value_literal(&self) -> Option<&Lit> {
+ /// ```text
+ /// Example:
+ /// #[attribute(name = "value")]
+ /// ^^^^^^^^^^^^^^
+ /// ```
+ pub fn name_value_literal(&self) -> Option<&MetaItemLit> {
match &self.kind {
MetaItemKind::NameValue(v) => Some(v),
_ => None,
@@ -192,8 +190,8 @@ impl MetaItem {
}
pub fn meta_item_list(&self) -> Option<&[NestedMetaItem]> {
- match self.kind {
- MetaItemKind::List(ref l) => Some(&l[..]),
+ match &self.kind {
+ MetaItemKind::List(l) => Some(&**l),
_ => None,
}
}
@@ -224,15 +222,11 @@ impl AttrItem {
}
pub fn meta(&self, span: Span) -> Option<MetaItem> {
- Some(MetaItem {
- path: self.path.clone(),
- kind: MetaItemKind::from_mac_args(&self.args)?,
- span,
- })
+ Some(MetaItem { path: self.path.clone(), kind: self.meta_kind()?, span })
}
pub fn meta_kind(&self) -> Option<MetaItemKind> {
- MetaItemKind::from_mac_args(&self.args)
+ MetaItemKind::from_attr_args(&self.args)
}
}
@@ -269,9 +263,9 @@ impl Attribute {
/// * `#[doc = "doc"]` returns `Some("doc")`.
/// * `#[doc(...)]` returns `None`.
pub fn doc_str(&self) -> Option<Symbol> {
- match self.kind {
- AttrKind::DocComment(.., data) => Some(data),
- AttrKind::Normal(ref normal) if normal.item.path == sym::doc => {
+ match &self.kind {
+ AttrKind::DocComment(.., data) => Some(*data),
+ AttrKind::Normal(normal) if normal.item.path == sym::doc => {
normal.item.meta_kind().and_then(|kind| kind.value_str())
}
_ => None,
@@ -283,8 +277,8 @@ impl Attribute {
}
pub fn get_normal_item(&self) -> &AttrItem {
- match self.kind {
- AttrKind::Normal(ref normal) => &normal.item,
+ match &self.kind {
+ AttrKind::Normal(normal) => &normal.item,
AttrKind::DocComment(..) => panic!("unexpected doc comment"),
}
}
@@ -298,28 +292,28 @@ impl Attribute {
/// Extracts the MetaItem from inside this Attribute.
pub fn meta(&self) -> Option<MetaItem> {
- match self.kind {
- AttrKind::Normal(ref normal) => normal.item.meta(self.span),
+ match &self.kind {
+ AttrKind::Normal(normal) => normal.item.meta(self.span),
AttrKind::DocComment(..) => None,
}
}
pub fn meta_kind(&self) -> Option<MetaItemKind> {
- match self.kind {
- AttrKind::Normal(ref normal) => normal.item.meta_kind(),
+ match &self.kind {
+ AttrKind::Normal(normal) => normal.item.meta_kind(),
AttrKind::DocComment(..) => None,
}
}
pub fn tokens(&self) -> TokenStream {
- match self.kind {
- AttrKind::Normal(ref normal) => normal
+ match &self.kind {
+ AttrKind::Normal(normal) => normal
.tokens
.as_ref()
.unwrap_or_else(|| panic!("attribute is missing tokens: {:?}", self))
.to_attr_token_stream()
.to_tokenstream(),
- AttrKind::DocComment(comment_kind, data) => TokenStream::new(vec![TokenTree::Token(
+ &AttrKind::DocComment(comment_kind, data) => TokenStream::new(vec![TokenTree::Token(
Token::new(token::DocComment(comment_kind, self.style, data), self.span),
Spacing::Alone,
)]),
@@ -330,26 +324,13 @@ impl Attribute {
/* Constructors */
pub fn mk_name_value_item_str(ident: Ident, str: Symbol, str_span: Span) -> MetaItem {
- let lit_kind = LitKind::Str(str, ast::StrStyle::Cooked);
- mk_name_value_item(ident, lit_kind, str_span)
+ mk_name_value_item(ident, LitKind::Str(str, ast::StrStyle::Cooked), str_span)
}
-pub fn mk_name_value_item(ident: Ident, lit_kind: LitKind, lit_span: Span) -> MetaItem {
- let lit = Lit::from_lit_kind(lit_kind, lit_span);
+pub fn mk_name_value_item(ident: Ident, kind: LitKind, lit_span: Span) -> MetaItem {
+ let lit = MetaItemLit { token_lit: kind.to_token_lit(), kind, span: lit_span };
let span = ident.span.to(lit_span);
- MetaItem { path: Path::from_ident(ident), span, kind: MetaItemKind::NameValue(lit) }
-}
-
-pub fn mk_list_item(ident: Ident, items: Vec<NestedMetaItem>) -> MetaItem {
- MetaItem { path: Path::from_ident(ident), span: ident.span, kind: MetaItemKind::List(items) }
-}
-
-pub fn mk_word_item(ident: Ident) -> MetaItem {
- MetaItem { path: Path::from_ident(ident), span: ident.span, kind: MetaItemKind::Word }
-}
-
-pub fn mk_nested_word_item(ident: Ident) -> NestedMetaItem {
- NestedMetaItem::MetaItem(mk_word_item(ident))
+ MetaItem { path: Path::from_ident(ident), kind: MetaItemKind::NameValue(lit), span }
}
pub struct AttrIdGenerator(WorkerLocal<Cell<u32>>);
@@ -393,7 +374,7 @@ pub fn mk_attr(
g: &AttrIdGenerator,
style: AttrStyle,
path: Path,
- args: MacArgs,
+ args: AttrArgs,
span: Span,
) -> Attribute {
mk_attr_from_item(g, AttrItem { path, args, tokens: None }, None, style, span)
@@ -407,21 +388,58 @@ pub fn mk_attr_from_item(
span: Span,
) -> Attribute {
Attribute {
- kind: AttrKind::Normal(P(ast::NormalAttr { item, tokens })),
+ kind: AttrKind::Normal(P(NormalAttr { item, tokens })),
id: g.mk_attr_id(),
style,
span,
}
}
-/// Returns an inner attribute with the given value and span.
-pub fn mk_attr_inner(g: &AttrIdGenerator, item: MetaItem) -> Attribute {
- mk_attr(g, AttrStyle::Inner, item.path, item.kind.mac_args(item.span), item.span)
+pub fn mk_attr_word(g: &AttrIdGenerator, style: AttrStyle, name: Symbol, span: Span) -> Attribute {
+ let path = Path::from_ident(Ident::new(name, span));
+ let args = AttrArgs::Empty;
+ mk_attr(g, style, path, args, span)
+}
+
+pub fn mk_attr_name_value_str(
+ g: &AttrIdGenerator,
+ style: AttrStyle,
+ name: Symbol,
+ val: Symbol,
+ span: Span,
+) -> Attribute {
+ let lit = LitKind::Str(val, StrStyle::Cooked).to_token_lit();
+ let expr = P(Expr {
+ id: DUMMY_NODE_ID,
+ kind: ExprKind::Lit(lit),
+ span,
+ attrs: AttrVec::new(),
+ tokens: None,
+ });
+ let path = Path::from_ident(Ident::new(name, span));
+ let args = AttrArgs::Eq(span, AttrArgsEq::Ast(expr));
+ mk_attr(g, style, path, args, span)
}
-/// Returns an outer attribute with the given value and span.
-pub fn mk_attr_outer(g: &AttrIdGenerator, item: MetaItem) -> Attribute {
- mk_attr(g, AttrStyle::Outer, item.path, item.kind.mac_args(item.span), item.span)
+pub fn mk_attr_nested_word(
+ g: &AttrIdGenerator,
+ style: AttrStyle,
+ outer: Symbol,
+ inner: Symbol,
+ span: Span,
+) -> Attribute {
+ let inner_tokens = TokenStream::new(vec![TokenTree::Token(
+ Token::from_ast_ident(Ident::new(inner, span)),
+ Spacing::Alone,
+ )]);
+ let outer_ident = Ident::new(outer, span);
+ let path = Path::from_ident(outer_ident);
+ let attr_args = AttrArgs::Delimited(DelimArgs {
+ dspan: DelimSpan::from_single(span),
+ delim: MacDelimiter::Parenthesis,
+ tokens: inner_tokens,
+ });
+ mk_attr(g, style, path, attr_args, span)
}
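For orientation, the surface syntax each new constructor corresponds to, read off the bodies above; this is not the `rustc_ast` API itself (which also needs `Symbol`s, `Span`s and an `AttrIdGenerator`), and the attribute names are just examples.

fn main() {
    let examples = [
        ("mk_attr_word",           "#[ignore]"),
        ("mk_attr_name_value_str", r#"#[doc = "hello"]"#),
        ("mk_attr_nested_word",    "#[allow(dead_code)]"),
    ];
    for (ctor, shape) in examples {
        println!("{ctor} builds an attribute shaped like {shape}");
    }
}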
pub fn mk_doc_comment(
@@ -439,23 +457,6 @@ pub fn list_contains_name(items: &[NestedMetaItem], name: Symbol) -> bool {
}
impl MetaItem {
- fn token_trees(&self) -> Vec<TokenTree> {
- let mut idents = vec![];
- let mut last_pos = BytePos(0_u32);
- for (i, segment) in self.path.segments.iter().enumerate() {
- let is_first = i == 0;
- if !is_first {
- let mod_sep_span =
- Span::new(last_pos, segment.ident.span.lo(), segment.ident.span.ctxt(), None);
- idents.push(TokenTree::token_alone(token::ModSep, mod_sep_span));
- }
- idents.push(TokenTree::Token(Token::from_ast_ident(segment.ident), Spacing::Alone));
- last_pos = segment.ident.span.hi();
- }
- idents.extend(self.kind.token_trees(self.span));
- idents
- }
-
fn from_tokens<I>(tokens: &mut iter::Peekable<I>) -> Option<MetaItem>
where
I: Iterator<Item = TokenTree>,
@@ -471,12 +472,12 @@ impl MetaItem {
tokens.peek()
{
tokens.next();
- vec![PathSegment::from_ident(Ident::new(name, span))]
+ thin_vec![PathSegment::from_ident(Ident::new(name, span))]
} else {
break 'arm Path::from_ident(Ident::new(name, span));
}
} else {
- vec![PathSegment::path_root(span)]
+ thin_vec![PathSegment::path_root(span)]
};
loop {
if let Some(TokenTree::Token(Token { kind: token::Ident(name, _), span }, _)) =
@@ -497,17 +498,17 @@ impl MetaItem {
let span = span.with_hi(segments.last().unwrap().ident.span.hi());
Path { span, segments, tokens: None }
}
- Some(TokenTree::Token(Token { kind: token::Interpolated(nt), .. }, _)) => match *nt {
- token::Nonterminal::NtMeta(ref item) => return item.meta(item.path.span),
- token::Nonterminal::NtPath(ref path) => (**path).clone(),
+ Some(TokenTree::Token(Token { kind: token::Interpolated(nt), .. }, _)) => match &*nt {
+ token::Nonterminal::NtMeta(item) => return item.meta(item.path.span),
+ token::Nonterminal::NtPath(path) => (**path).clone(),
_ => return None,
},
_ => return None,
};
let list_closing_paren_pos = tokens.peek().map(|tt| tt.span().hi());
let kind = MetaItemKind::from_tokens(tokens)?;
- let hi = match kind {
- MetaItemKind::NameValue(ref lit) => lit.span.hi(),
+ let hi = match &kind {
+ MetaItemKind::NameValue(lit) => lit.span.hi(),
MetaItemKind::List(..) => list_closing_paren_pos.unwrap_or(path.span.hi()),
_ => path.span.hi(),
};
@@ -519,70 +520,14 @@ impl MetaItem {
impl MetaItemKind {
pub fn value_str(&self) -> Option<Symbol> {
match self {
- MetaItemKind::NameValue(ref v) => match v.kind {
- LitKind::Str(ref s, _) => Some(*s),
+ MetaItemKind::NameValue(v) => match v.kind {
+ LitKind::Str(s, _) => Some(s),
_ => None,
},
_ => None,
}
}
- pub fn mac_args(&self, span: Span) -> MacArgs {
- match self {
- MetaItemKind::Word => MacArgs::Empty,
- MetaItemKind::NameValue(lit) => {
- let expr = P(ast::Expr {
- id: ast::DUMMY_NODE_ID,
- kind: ast::ExprKind::Lit(lit.clone()),
- span: lit.span,
- attrs: ast::AttrVec::new(),
- tokens: None,
- });
- MacArgs::Eq(span, MacArgsEq::Ast(expr))
- }
- MetaItemKind::List(list) => {
- let mut tts = Vec::new();
- for (i, item) in list.iter().enumerate() {
- if i > 0 {
- tts.push(TokenTree::token_alone(token::Comma, span));
- }
- tts.extend(item.token_trees())
- }
- MacArgs::Delimited(
- DelimSpan::from_single(span),
- MacDelimiter::Parenthesis,
- TokenStream::new(tts),
- )
- }
- }
- }
-
- fn token_trees(&self, span: Span) -> Vec<TokenTree> {
- match *self {
- MetaItemKind::Word => vec![],
- MetaItemKind::NameValue(ref lit) => {
- vec![
- TokenTree::token_alone(token::Eq, span),
- TokenTree::Token(lit.to_token(), Spacing::Alone),
- ]
- }
- MetaItemKind::List(ref list) => {
- let mut tokens = Vec::new();
- for (i, item) in list.iter().enumerate() {
- if i > 0 {
- tokens.push(TokenTree::token_alone(token::Comma, span));
- }
- tokens.extend(item.token_trees())
- }
- vec![TokenTree::Delimited(
- DelimSpan::from_single(span),
- Delimiter::Parenthesis,
- TokenStream::new(tokens),
- )]
- }
- }
- }
-
fn list_from_tokens(tokens: TokenStream) -> Option<MetaItemKind> {
let mut tokens = tokens.into_trees().peekable();
let mut result = Vec::new();
@@ -605,24 +550,31 @@ impl MetaItemKind {
MetaItemKind::name_value_from_tokens(&mut inner_tokens.into_trees())
}
Some(TokenTree::Token(token, _)) => {
- Lit::from_token(&token).ok().map(MetaItemKind::NameValue)
+ MetaItemLit::from_token(&token).map(MetaItemKind::NameValue)
}
_ => None,
}
}
- fn from_mac_args(args: &MacArgs) -> Option<MetaItemKind> {
+ fn from_attr_args(args: &AttrArgs) -> Option<MetaItemKind> {
match args {
- MacArgs::Empty => Some(MetaItemKind::Word),
- MacArgs::Delimited(_, MacDelimiter::Parenthesis, tokens) => {
- MetaItemKind::list_from_tokens(tokens.clone())
- }
- MacArgs::Delimited(..) => None,
- MacArgs::Eq(_, MacArgsEq::Ast(expr)) => match &expr.kind {
- ast::ExprKind::Lit(lit) => Some(MetaItemKind::NameValue(lit.clone())),
+ AttrArgs::Empty => Some(MetaItemKind::Word),
+ AttrArgs::Delimited(DelimArgs {
+ dspan: _,
+ delim: MacDelimiter::Parenthesis,
+ tokens,
+ }) => MetaItemKind::list_from_tokens(tokens.clone()),
+ AttrArgs::Delimited(..) => None,
+ AttrArgs::Eq(_, AttrArgsEq::Ast(expr)) => match expr.kind {
+ ExprKind::Lit(token_lit) => {
+ // Turn failures to `None`, we'll get parse errors elsewhere.
+ MetaItemLit::from_token_lit(token_lit, expr.span)
+ .ok()
+ .map(|lit| MetaItemKind::NameValue(lit))
+ }
_ => None,
},
- MacArgs::Eq(_, MacArgsEq::Hir(lit)) => Some(MetaItemKind::NameValue(lit.clone())),
+ AttrArgs::Eq(_, AttrArgsEq::Hir(lit)) => Some(MetaItemKind::NameValue(lit.clone())),
}
}
@@ -647,18 +599,9 @@ impl MetaItemKind {
impl NestedMetaItem {
pub fn span(&self) -> Span {
- match *self {
- NestedMetaItem::MetaItem(ref item) => item.span,
- NestedMetaItem::Literal(ref lit) => lit.span,
- }
- }
-
- fn token_trees(&self) -> Vec<TokenTree> {
- match *self {
- NestedMetaItem::MetaItem(ref item) => item.token_trees(),
- NestedMetaItem::Literal(ref lit) => {
- vec![TokenTree::Token(lit.to_token(), Spacing::Alone)]
- }
+ match self {
+ NestedMetaItem::MetaItem(item) => item.span,
+ NestedMetaItem::Lit(lit) => lit.span,
}
}
@@ -668,10 +611,10 @@ impl NestedMetaItem {
{
match tokens.peek() {
Some(TokenTree::Token(token, _))
- if let Ok(lit) = Lit::from_token(token) =>
+ if let Some(lit) = MetaItemLit::from_token(token) =>
{
tokens.next();
- return Some(NestedMetaItem::Literal(lit));
+ return Some(NestedMetaItem::Lit(lit));
}
Some(TokenTree::Delimited(_, Delimiter::Invisible, inner_tokens)) => {
let inner_tokens = inner_tokens.clone();
diff --git a/compiler/rustc_ast/src/lib.rs b/compiler/rustc_ast/src/lib.rs
index eeb7e56e2..9c1dfeb1a 100644
--- a/compiler/rustc_ast/src/lib.rs
+++ b/compiler/rustc_ast/src/lib.rs
@@ -29,6 +29,7 @@ extern crate rustc_macros;
extern crate tracing;
pub mod util {
+ pub mod case;
pub mod classify;
pub mod comments;
pub mod literal;
diff --git a/compiler/rustc_ast/src/mut_visit.rs b/compiler/rustc_ast/src/mut_visit.rs
index b970e57e0..a45ee6067 100644
--- a/compiler/rustc_ast/src/mut_visit.rs
+++ b/compiler/rustc_ast/src/mut_visit.rs
@@ -194,7 +194,7 @@ pub trait MutVisitor: Sized {
noop_visit_path(p, self);
}
- fn visit_qself(&mut self, qs: &mut Option<QSelf>) {
+ fn visit_qself(&mut self, qs: &mut Option<P<QSelf>>) {
noop_visit_qself(qs, self);
}
@@ -367,23 +367,27 @@ pub fn visit_fn_sig<T: MutVisitor>(FnSig { header, decl, span }: &mut FnSig, vis
}
// No `noop_` prefix because there isn't a corresponding method in `MutVisitor`.
-pub fn visit_mac_args<T: MutVisitor>(args: &mut MacArgs, vis: &mut T) {
+pub fn visit_attr_args<T: MutVisitor>(args: &mut AttrArgs, vis: &mut T) {
match args {
- MacArgs::Empty => {}
- MacArgs::Delimited(dspan, _delim, tokens) => {
- visit_delim_span(dspan, vis);
- visit_tts(tokens, vis);
- }
- MacArgs::Eq(eq_span, MacArgsEq::Ast(expr)) => {
+ AttrArgs::Empty => {}
+ AttrArgs::Delimited(args) => visit_delim_args(args, vis),
+ AttrArgs::Eq(eq_span, AttrArgsEq::Ast(expr)) => {
vis.visit_span(eq_span);
vis.visit_expr(expr);
}
- MacArgs::Eq(_, MacArgsEq::Hir(lit)) => {
+ AttrArgs::Eq(_, AttrArgsEq::Hir(lit)) => {
unreachable!("in literal form when visiting mac args eq: {:?}", lit)
}
}
}
+// No `noop_` prefix because there isn't a corresponding method in `MutVisitor`.
+pub fn visit_delim_args<T: MutVisitor>(args: &mut DelimArgs, vis: &mut T) {
+ let DelimArgs { dspan, delim: _, tokens } = args;
+ visit_delim_span(dspan, vis);
+ visit_tts(tokens, vis);
+}
+
pub fn visit_delim_span<T: MutVisitor>(dspan: &mut DelimSpan, vis: &mut T) {
vis.visit_span(&mut dspan.open);
vis.visit_span(&mut dspan.close);
@@ -406,11 +410,7 @@ pub fn noop_visit_use_tree<T: MutVisitor>(use_tree: &mut UseTree, vis: &mut T) {
let UseTree { prefix, kind, span } = use_tree;
vis.visit_path(prefix);
match kind {
- UseTreeKind::Simple(rename, id1, id2) => {
- visit_opt(rename, |rename| vis.visit_ident(rename));
- vis.visit_id(id1);
- vis.visit_id(id2);
- }
+ UseTreeKind::Simple(rename) => visit_opt(rename, |rename| vis.visit_ident(rename)),
UseTreeKind::Nested(items) => {
for (tree, id) in items {
vis.visit_use_tree(tree);
@@ -439,15 +439,15 @@ pub fn noop_visit_constraint<T: MutVisitor>(
) {
vis.visit_id(id);
vis.visit_ident(ident);
- if let Some(ref mut gen_args) = gen_args {
+ if let Some(gen_args) = gen_args {
vis.visit_generic_args(gen_args);
}
match kind {
- AssocConstraintKind::Equality { ref mut term } => match term {
+ AssocConstraintKind::Equality { term } => match term {
Term::Ty(ty) => vis.visit_ty(ty),
Term::Const(c) => vis.visit_anon_const(c),
},
- AssocConstraintKind::Bound { ref mut bounds } => visit_bounds(bounds, vis),
+ AssocConstraintKind::Bound { bounds } => visit_bounds(bounds, vis),
}
vis.visit_span(span);
}
@@ -529,8 +529,9 @@ pub fn noop_visit_path<T: MutVisitor>(Path { segments, span, tokens }: &mut Path
visit_lazy_tts(tokens, vis);
}
-pub fn noop_visit_qself<T: MutVisitor>(qself: &mut Option<QSelf>, vis: &mut T) {
- visit_opt(qself, |QSelf { ty, path_span, position: _ }| {
+pub fn noop_visit_qself<T: MutVisitor>(qself: &mut Option<P<QSelf>>, vis: &mut T) {
+ visit_opt(qself, |qself| {
+ let QSelf { ty, path_span, position: _ } = &mut **qself;
vis.visit_ty(ty);
vis.visit_span(path_span);
})
@@ -600,7 +601,7 @@ pub fn noop_visit_attribute<T: MutVisitor>(attr: &mut Attribute, vis: &mut T) {
let NormalAttr { item: AttrItem { path, args, tokens }, tokens: attr_tokens } =
&mut **normal;
vis.visit_path(path);
- visit_mac_args(args, vis);
+ visit_attr_args(args, vis);
visit_lazy_tts(tokens, vis);
visit_lazy_tts(attr_tokens, vis);
}
@@ -612,18 +613,18 @@ pub fn noop_visit_attribute<T: MutVisitor>(attr: &mut Attribute, vis: &mut T) {
pub fn noop_visit_mac<T: MutVisitor>(mac: &mut MacCall, vis: &mut T) {
let MacCall { path, args, prior_type_ascription: _ } = mac;
vis.visit_path(path);
- visit_mac_args(args, vis);
+ visit_delim_args(args, vis);
}
pub fn noop_visit_macro_def<T: MutVisitor>(macro_def: &mut MacroDef, vis: &mut T) {
let MacroDef { body, macro_rules: _ } = macro_def;
- visit_mac_args(body, vis);
+ visit_delim_args(body, vis);
}
pub fn noop_visit_meta_list_item<T: MutVisitor>(li: &mut NestedMetaItem, vis: &mut T) {
match li {
NestedMetaItem::MetaItem(mi) => vis.visit_meta_item(mi),
- NestedMetaItem::Literal(_lit) => {}
+ NestedMetaItem::Lit(_lit) => {}
}
}
@@ -720,10 +721,10 @@ pub fn visit_lazy_tts<T: MutVisitor>(lazy_tts: &mut Option<LazyAttrTokenStream>,
visit_lazy_tts_opt_mut(lazy_tts.as_mut(), vis);
}
+/// Applies the ident visitor if the token is an ident; applies other visits to interpolated nodes.
+/// In practice the ident part is not actually used by specific visitors right now,
+/// but there's a test below checking that it works.
// No `noop_` prefix because there isn't a corresponding method in `MutVisitor`.
-// Applies ident visitor if it's an ident; applies other visits to interpolated nodes.
-// In practice the ident part is not actually used by specific visitors right now,
-// but there's a test below checking that it works.
pub fn visit_token<T: MutVisitor>(t: &mut Token, vis: &mut T) {
let Token { kind, span } = t;
match kind {
@@ -735,8 +736,7 @@ pub fn visit_token<T: MutVisitor>(t: &mut Token, vis: &mut T) {
return; // Avoid visiting the span for the second time.
}
token::Interpolated(nt) => {
- let mut nt = Lrc::make_mut(nt);
- visit_nonterminal(&mut nt, vis);
+ visit_nonterminal(Lrc::make_mut(nt), vis);
}
_ => {}
}
@@ -791,7 +791,7 @@ pub fn visit_nonterminal<T: MutVisitor>(nt: &mut token::Nonterminal, vis: &mut T
token::NtMeta(item) => {
let AttrItem { path, args, tokens } = item.deref_mut();
vis.visit_path(path);
- visit_mac_args(args, vis);
+ visit_attr_args(args, vis);
visit_lazy_tts(tokens, vis);
}
token::NtPath(path) => vis.visit_path(path),
@@ -879,7 +879,7 @@ pub fn noop_flat_map_generic_param<T: MutVisitor>(
let GenericParam { id, ident, attrs, bounds, kind, colon_span, is_placeholder: _ } = &mut param;
vis.visit_id(id);
vis.visit_ident(ident);
- if let Some(ref mut colon_span) = colon_span {
+ if let Some(colon_span) = colon_span {
vis.visit_span(colon_span);
}
visit_attrs(attrs, vis);
@@ -1303,12 +1303,17 @@ pub fn noop_visit_expr<T: MutVisitor>(
vis.visit_expr(f);
visit_exprs(args, vis);
}
- ExprKind::MethodCall(PathSegment { ident, id, args }, receiver, exprs, span) => {
+ ExprKind::MethodCall(box MethodCall {
+ seg: PathSegment { ident, id, args: seg_args },
+ receiver,
+ args: call_args,
+ span,
+ }) => {
vis.visit_ident(ident);
vis.visit_id(id);
- visit_opt(args, |args| vis.visit_generic_args(args));
+ visit_opt(seg_args, |args| vis.visit_generic_args(args));
vis.visit_method_receiver_expr(receiver);
- visit_exprs(exprs, vis);
+ visit_exprs(call_args, vis);
vis.visit_span(span);
}
ExprKind::Binary(_binop, lhs, rhs) => {
@@ -1345,20 +1350,30 @@ pub fn noop_visit_expr<T: MutVisitor>(
vis.visit_block(body);
visit_opt(label, |label| vis.visit_label(label));
}
- ExprKind::Loop(body, label) => {
+ ExprKind::Loop(body, label, span) => {
vis.visit_block(body);
visit_opt(label, |label| vis.visit_label(label));
+ vis.visit_span(span);
}
ExprKind::Match(expr, arms) => {
vis.visit_expr(expr);
arms.flat_map_in_place(|arm| vis.flat_map_arm(arm));
}
- ExprKind::Closure(binder, _capture_by, asyncness, _movability, decl, body, span) => {
+ ExprKind::Closure(box Closure {
+ binder,
+ capture_clause: _,
+ asyncness,
+ movability: _,
+ fn_decl,
+ body,
+ fn_decl_span,
+ fn_arg_span: _,
+ }) => {
vis.visit_closure_binder(binder);
vis.visit_asyncness(asyncness);
- vis.visit_fn_decl(decl);
+ vis.visit_fn_decl(fn_decl);
vis.visit_expr(body);
- vis.visit_span(span);
+ vis.visit_span(fn_decl_span);
}
ExprKind::Block(blk, label) => {
vis.visit_block(blk);
@@ -1428,7 +1443,7 @@ pub fn noop_visit_expr<T: MutVisitor>(
}
ExprKind::Try(expr) => vis.visit_expr(expr),
ExprKind::TryBlock(body) => vis.visit_block(body),
- ExprKind::Lit(_) | ExprKind::Err => {}
+ ExprKind::Lit(_) | ExprKind::IncludedBytes(..) | ExprKind::Err => {}
}
vis.visit_id(id);
vis.visit_span(span);
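
Several `MutVisitor` arms now destructure boxed struct variants such as `ExprKind::MethodCall(box MethodCall { .. })`. The `box` pattern is a compiler-internal unstable feature; the same destructuring on stable Rust goes through `&**`, as in this simplified model:

    // Simplified model of a boxed-struct enum variant; not rustc's real AST.
    struct MethodCall {
        name: String,
        args: Vec<i32>,
    }

    enum ExprKind {
        MethodCall(Box<MethodCall>),
        Lit(i32),
    }

    fn describe(kind: &ExprKind) -> String {
        match kind {
            ExprKind::MethodCall(call) => {
                // Stable-Rust equivalent of the diff's `box MethodCall { .. }` pattern.
                let MethodCall { name, args } = &**call;
                format!("call to `{name}` with {} args", args.len())
            }
            ExprKind::Lit(n) => format!("literal {n}"),
        }
    }

    fn main() {
        let call = ExprKind::MethodCall(Box::new(MethodCall { name: "len".into(), args: vec![] }));
        println!("{}", describe(&call));
        println!("{}", describe(&ExprKind::Lit(7)));
    }
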
diff --git a/compiler/rustc_ast/src/token.rs b/compiler/rustc_ast/src/token.rs
index 83b10d906..c0cc4e79a 100644
--- a/compiler/rustc_ast/src/token.rs
+++ b/compiler/rustc_ast/src/token.rs
@@ -5,6 +5,7 @@ pub use TokenKind::*;
use crate::ast;
use crate::ptr::P;
+use crate::util::case::Case;
use rustc_data_structures::stable_hasher::{HashStable, StableHasher};
use rustc_data_structures::sync::Lrc;
@@ -58,13 +59,17 @@ pub enum Delimiter {
Invisible,
}
+// Note that the suffix is *not* considered when deciding the `LitKind` in this
+// type. This means that float literals like `1f32` are classified by this type
+// as `Int`. Only upon conversion to `ast::LitKind` will such a literal be
+// given the `Float` kind.
#[derive(Clone, Copy, PartialEq, Encodable, Decodable, Debug, HashStable_Generic)]
pub enum LitKind {
Bool, // AST only, must never appear in a `Token`
Byte,
Char,
- Integer,
- Float,
+ Integer, // e.g. `1`, `1u8`, `1f32`
+ Float, // e.g. `1.`, `1.0`, `1e3f32`
Str,
StrRaw(u8), // raw string delimited by `n` hash symbols
ByteStr,
@@ -80,6 +85,42 @@ pub struct Lit {
pub suffix: Option<Symbol>,
}
+impl Lit {
+ pub fn new(kind: LitKind, symbol: Symbol, suffix: Option<Symbol>) -> Lit {
+ Lit { kind, symbol, suffix }
+ }
+
+ /// Returns `true` if this is semantically a float literal. This includes
+ /// ones like `1f32` that have an `Integer` kind but a float suffix.
+ pub fn is_semantic_float(&self) -> bool {
+ match self.kind {
+ LitKind::Float => true,
+ LitKind::Integer => match self.suffix {
+ Some(sym) => sym == sym::f32 || sym == sym::f64,
+ None => false,
+ },
+ _ => false,
+ }
+ }
+
+ /// Keep this in sync with `Token::can_begin_literal_or_bool` excluding unary negation.
+ pub fn from_token(token: &Token) -> Option<Lit> {
+ match token.uninterpolate().kind {
+ Ident(name, false) if name.is_bool_lit() => {
+ Some(Lit::new(Bool, name, None))
+ }
+ Literal(token_lit) => Some(token_lit),
+ Interpolated(ref nt)
+ if let NtExpr(expr) | NtLiteral(expr) = &**nt
+ && let ast::ExprKind::Lit(token_lit) = expr.kind =>
+ {
+                Some(token_lit)
+ }
+ _ => None,
+ }
+ }
+}
+
impl fmt::Display for Lit {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
let Lit { kind, symbol, suffix } = *self;
@@ -138,12 +179,6 @@ impl LitKind {
}
}
-impl Lit {
- pub fn new(kind: LitKind, symbol: Symbol, suffix: Option<Symbol>) -> Lit {
- Lit { kind, symbol, suffix }
- }
-}
-
pub fn ident_can_begin_expr(name: Symbol, span: Span, is_raw: bool) -> bool {
let ident_token = Token::new(Ident(name, is_raw), span);
@@ -267,9 +302,9 @@ impl TokenKind {
Literal(Lit::new(kind, symbol, suffix))
}
- // An approximation to proc-macro-style single-character operators used by rustc parser.
- // If the operator token can be broken into two tokens, the first of which is single-character,
- // then this function performs that operation, otherwise it returns `None`.
+ /// An approximation to proc-macro-style single-character operators used by the rustc parser.
+ /// If the operator token can be broken into two tokens, the first of which is single-character,
+ /// then this function performs that operation, otherwise it returns `None`.
pub fn break_two_token_op(&self) -> Option<(TokenKind, TokenKind)> {
Some(match *self {
Le => (Lt, Eq),
@@ -503,10 +538,10 @@ impl Token {
}
}
- // A convenience function for matching on identifiers during parsing.
- // Turns interpolated identifier (`$i: ident`) or lifetime (`$l: lifetime`) token
- // into the regular identifier or lifetime token it refers to,
- // otherwise returns the original token.
+ /// A convenience function for matching on identifiers during parsing.
+ /// Turns an interpolated identifier (`$i: ident`) or lifetime (`$l: lifetime`) token
+ /// into the regular identifier or lifetime token it refers to,
+ /// otherwise returns the original token.
pub fn uninterpolate(&self) -> Cow<'_, Token> {
match &self.kind {
Interpolated(nt) => match **nt {
@@ -566,9 +601,10 @@ impl Token {
/// Returns `true` if the token is an interpolated path.
fn is_path(&self) -> bool {
- if let Interpolated(ref nt) = self.kind && let NtPath(..) = **nt {
+ if let Interpolated(nt) = &self.kind && let NtPath(..) = **nt {
return true;
}
+
false
}
@@ -576,7 +612,7 @@ impl Token {
/// That is, is this a pre-parsed expression dropped into the token stream
/// (which happens while parsing the result of macro expansion)?
pub fn is_whole_expr(&self) -> bool {
- if let Interpolated(ref nt) = self.kind
+ if let Interpolated(nt) = &self.kind
&& let NtExpr(_) | NtLiteral(_) | NtPath(_) | NtBlock(_) = **nt
{
return true;
@@ -585,11 +621,12 @@ impl Token {
false
}
- // Is the token an interpolated block (`$b:block`)?
+ /// Is the token an interpolated block (`$b:block`)?
pub fn is_whole_block(&self) -> bool {
- if let Interpolated(ref nt) = self.kind && let NtBlock(..) = **nt {
+ if let Interpolated(nt) = &self.kind && let NtBlock(..) = **nt {
return true;
}
+
false
}
@@ -615,12 +652,21 @@ impl Token {
self.is_non_raw_ident_where(|id| id.name == kw)
}
+ /// Returns `true` if the token is the given keyword `kw`, or, when `case` is `Insensitive`, an identifier equal to `kw` ignoring case.
+ pub fn is_keyword_case(&self, kw: Symbol, case: Case) -> bool {
+ self.is_keyword(kw)
+ || (case == Case::Insensitive
+ && self.is_non_raw_ident_where(|id| {
+ id.name.as_str().to_lowercase() == kw.as_str().to_lowercase()
+ }))
+ }
+
pub fn is_path_segment_keyword(&self) -> bool {
self.is_non_raw_ident_where(Ident::is_path_segment_keyword)
}
- // Returns true for reserved identifiers used internally for elided lifetimes,
- // unnamed method parameters, crate root module, error recovery etc.
+ /// Returns true for reserved identifiers used internally for elided lifetimes,
+ /// unnamed method parameters, crate root module, error recovery etc.
pub fn is_special_ident(&self) -> bool {
self.is_non_raw_ident_where(Ident::is_special)
}
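
The new suffix note and `is_semantic_float` can be mirrored in a small standalone model (plain `&str` suffixes stand in for interned `Symbol`s):

    // The suffix does not influence `kind`: `1f32` is lexed as `Integer`,
    // and only `is_semantic_float` reveals that it is semantically a float.
    enum LitKind { Integer, Float }

    struct Lit {
        kind: LitKind,
        suffix: Option<&'static str>,
    }

    impl Lit {
        fn is_semantic_float(&self) -> bool {
            match self.kind {
                LitKind::Float => true,
                LitKind::Integer => matches!(self.suffix, Some("f32") | Some("f64")),
            }
        }
    }

    fn main() {
        let one_f32 = Lit { kind: LitKind::Integer, suffix: Some("f32") }; // lexed from `1f32`
        let one_u8 = Lit { kind: LitKind::Integer, suffix: Some("u8") };   // lexed from `1u8`
        let one_point = Lit { kind: LitKind::Float, suffix: None };        // lexed from `1.0`
        assert!(one_f32.is_semantic_float());
        assert!(!one_u8.is_semantic_float());
        assert!(one_point.is_semantic_float());
    }
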
diff --git a/compiler/rustc_ast/src/tokenstream.rs b/compiler/rustc_ast/src/tokenstream.rs
index 015f5c1ee..482c30295 100644
--- a/compiler/rustc_ast/src/tokenstream.rs
+++ b/compiler/rustc_ast/src/tokenstream.rs
@@ -64,7 +64,7 @@ impl TokenTree {
match (self, other) {
(TokenTree::Token(token, _), TokenTree::Token(token2, _)) => token.kind == token2.kind,
(TokenTree::Delimited(_, delim, tts), TokenTree::Delimited(_, delim2, tts2)) => {
- delim == delim2 && tts.eq_unspanned(&tts2)
+ delim == delim2 && tts.eq_unspanned(tts2)
}
_ => false,
}
@@ -86,12 +86,12 @@ impl TokenTree {
}
}
- // Create a `TokenTree::Token` with alone spacing.
+ /// Create a `TokenTree::Token` with alone spacing.
pub fn token_alone(kind: TokenKind, span: Span) -> TokenTree {
TokenTree::Token(Token::new(kind, span), Spacing::Alone)
}
- // Create a `TokenTree::Token` with joint spacing.
+ /// Create a `TokenTree::Token` with joint spacing.
pub fn token_joint(kind: TokenKind, span: Span) -> TokenTree {
TokenTree::Token(Token::new(kind, span), Spacing::Joint)
}
@@ -402,7 +402,7 @@ impl TokenStream {
let mut t1 = self.trees();
let mut t2 = other.trees();
for (t1, t2) in iter::zip(&mut t1, &mut t2) {
- if !t1.eq_unspanned(&t2) {
+ if !t1.eq_unspanned(t2) {
return false;
}
}
@@ -413,17 +413,17 @@ impl TokenStream {
TokenStream(Lrc::new(self.0.iter().enumerate().map(|(i, tree)| f(i, tree)).collect()))
}
- // Create a token stream containing a single token with alone spacing.
+ /// Create a token stream containing a single token with alone spacing.
pub fn token_alone(kind: TokenKind, span: Span) -> TokenStream {
TokenStream::new(vec![TokenTree::token_alone(kind, span)])
}
- // Create a token stream containing a single token with joint spacing.
+ /// Create a token stream containing a single token with joint spacing.
pub fn token_joint(kind: TokenKind, span: Span) -> TokenStream {
TokenStream::new(vec![TokenTree::token_joint(kind, span)])
}
- // Create a token stream containing a single `Delimited`.
+ /// Create a token stream containing a single `Delimited`.
pub fn delimited(span: DelimSpan, delim: Delimiter, tts: TokenStream) -> TokenStream {
TokenStream::new(vec![TokenTree::Delimited(span, delim, tts)])
}
@@ -475,7 +475,7 @@ impl TokenStream {
token::Interpolated(nt) => TokenTree::Delimited(
DelimSpan::from_single(token.span),
Delimiter::Invisible,
- TokenStream::from_nonterminal_ast(&nt).flattened(),
+ TokenStream::from_nonterminal_ast(nt).flattened(),
),
_ => TokenTree::Token(token.clone(), spacing),
}
@@ -511,7 +511,7 @@ impl TokenStream {
fn try_glue_to_last(vec: &mut Vec<TokenTree>, tt: &TokenTree) -> bool {
if let Some(TokenTree::Token(last_tok, Spacing::Joint)) = vec.last()
&& let TokenTree::Token(tok, spacing) = tt
- && let Some(glued_tok) = last_tok.glue(&tok)
+ && let Some(glued_tok) = last_tok.glue(tok)
{
// ...then overwrite the last token tree in `vec` with the
// glued token, and skip the first token tree from `stream`.
@@ -522,8 +522,8 @@ impl TokenStream {
}
}
- // Push `tt` onto the end of the stream, possibly gluing it to the last
- // token. Uses `make_mut` to maximize efficiency.
+ /// Push `tt` onto the end of the stream, possibly gluing it to the last
+ /// token. Uses `make_mut` to maximize efficiency.
pub fn push_tree(&mut self, tt: TokenTree) {
let vec_mut = Lrc::make_mut(&mut self.0);
@@ -534,9 +534,9 @@ impl TokenStream {
}
}
- // Push `stream` onto the end of the stream, possibly gluing the first
- // token tree to the last token. (No other token trees will be glued.)
- // Uses `make_mut` to maximize efficiency.
+ /// Push `stream` onto the end of the stream, possibly gluing the first
+ /// token tree to the last token. (No other token trees will be glued.)
+ /// Uses `make_mut` to maximize efficiency.
pub fn push_stream(&mut self, stream: TokenStream) {
let vec_mut = Lrc::make_mut(&mut self.0);
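
`push_tree` and `push_stream` lean on `Lrc::make_mut`, which clones the underlying buffer only when it is shared. The same copy-on-write behaviour, shown with `std::sync::Arc`:

    use std::sync::Arc;

    fn push(stream: &mut Arc<Vec<&'static str>>, token: &'static str) {
        // Clones the Vec only if `stream` is shared; otherwise mutates in place.
        Arc::make_mut(stream).push(token);
    }

    fn main() {
        let mut a = Arc::new(vec!["fn", "main"]);
        let b = Arc::clone(&a); // shared: the next push must clone first
        push(&mut a, "(");
        assert_eq!(*b, ["fn", "main"]);      // the other owner is untouched
        assert_eq!(*a, ["fn", "main", "("]); // `a` now has its own buffer
        push(&mut a, ")");                   // unique now: mutated in place
        assert_eq!(*a, ["fn", "main", "(", ")"]);
    }
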
diff --git a/compiler/rustc_ast/src/util/case.rs b/compiler/rustc_ast/src/util/case.rs
new file mode 100644
index 000000000..1afd7dea7
--- /dev/null
+++ b/compiler/rustc_ast/src/util/case.rs
@@ -0,0 +1,6 @@
+/// Whether to ignore case (`fn` vs `Fn` vs `FN`) or not. Used for recovery.
+#[derive(Copy, Clone, Debug, Eq, PartialEq)]
+pub enum Case {
+ Sensitive,
+ Insensitive,
+}
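
A standalone sketch of the check `Token::is_keyword_case` builds on this enum: exact comparison for `Case::Sensitive`, lowercase comparison for `Case::Insensitive` (plain `&str` in place of `Symbol`/`Token`):

    #[derive(Copy, Clone, PartialEq)]
    enum Case { Sensitive, Insensitive }

    fn is_keyword_case(ident: &str, kw: &str, case: Case) -> bool {
        ident == kw
            || (case == Case::Insensitive && ident.to_lowercase() == kw.to_lowercase())
    }

    fn main() {
        assert!(is_keyword_case("fn", "fn", Case::Sensitive));
        assert!(!is_keyword_case("Fn", "fn", Case::Sensitive));
        assert!(is_keyword_case("FN", "fn", Case::Insensitive)); // recovered as a mistyped keyword
    }
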
diff --git a/compiler/rustc_ast/src/util/classify.rs b/compiler/rustc_ast/src/util/classify.rs
index 6ea3db6d3..cdc244c12 100644
--- a/compiler/rustc_ast/src/util/classify.rs
+++ b/compiler/rustc_ast/src/util/classify.rs
@@ -21,6 +21,7 @@ pub fn expr_requires_semi_to_be_stmt(e: &ast::Expr) -> bool {
| ast::ExprKind::Loop(..)
| ast::ExprKind::ForLoop(..)
| ast::ExprKind::TryBlock(..)
+ | ast::ExprKind::ConstBlock(..)
)
}
@@ -36,7 +37,6 @@ pub fn expr_trailing_brace(mut expr: &ast::Expr) -> Option<&ast::Expr> {
| Binary(_, _, e)
| Box(e)
| Break(_, Some(e))
- | Closure(.., e, _)
| Let(_, e, _)
| Range(_, Some(e), _)
| Ret(Some(e))
@@ -44,6 +44,9 @@ pub fn expr_trailing_brace(mut expr: &ast::Expr) -> Option<&ast::Expr> {
| Yield(Some(e)) => {
expr = e;
}
+ Closure(closure) => {
+ expr = &closure.body;
+ }
Async(..) | Block(..) | ForLoop(..) | If(..) | Loop(..) | Match(..) | Struct(..)
| TryBlock(..) | While(..) => break Some(expr),
_ => break None,
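
The closure arm matters because a closure's body can itself end in `}` (e.g. `|| match x { .. }`), which is what `expr_trailing_brace` now finds by recursing into `closure.body`. Plain Rust showing such a closure:

    fn main() {
        let x = 1;
        // The closure expression's last token is `}` only because its body's is;
        // detecting that requires looking through the closure into the body.
        let f = || match x {
            0 => "zero",
            _ => "other",
        };
        println!("{}", f());
    }
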
diff --git a/compiler/rustc_ast/src/util/comments.rs b/compiler/rustc_ast/src/util/comments.rs
index c96474ccb..35454c3a6 100644
--- a/compiler/rustc_ast/src/util/comments.rs
+++ b/compiler/rustc_ast/src/util/comments.rs
@@ -110,7 +110,7 @@ pub fn beautify_doc_string(data: Symbol, kind: CommentKind) -> Symbol {
} else {
&mut lines
};
- if let Some(horizontal) = get_horizontal_trim(&lines, kind) {
+ if let Some(horizontal) = get_horizontal_trim(lines, kind) {
changes = true;
// remove a "[ \t]*\*" block from each line, if possible
for line in lines.iter_mut() {
@@ -147,7 +147,7 @@ fn all_whitespace(s: &str, col: CharPos) -> Option<usize> {
fn trim_whitespace_prefix(s: &str, col: CharPos) -> &str {
let len = s.len();
- match all_whitespace(&s, col) {
+ match all_whitespace(s, col) {
Some(col) => {
if col < len {
&s[col..]
diff --git a/compiler/rustc_ast/src/util/literal.rs b/compiler/rustc_ast/src/util/literal.rs
index 536b38560..f6f186b51 100644
--- a/compiler/rustc_ast/src/util/literal.rs
+++ b/compiler/rustc_ast/src/util/literal.rs
@@ -1,17 +1,14 @@
//! Code related to parsing literals.
-use crate::ast::{self, Lit, LitKind};
+use crate::ast::{self, LitKind, MetaItemLit};
use crate::token::{self, Token};
-
-use rustc_lexer::unescape::{unescape_byte, unescape_char};
-use rustc_lexer::unescape::{unescape_byte_literal, unescape_literal, Mode};
+use rustc_lexer::unescape::{byte_from_char, unescape_byte, unescape_char, unescape_literal, Mode};
use rustc_span::symbol::{kw, sym, Symbol};
use rustc_span::Span;
-
use std::ascii;
+#[derive(Debug)]
pub enum LitError {
- NotLiteral,
LexerError,
InvalidSuffix,
InvalidIntSuffix,
@@ -55,14 +52,14 @@ impl LitKind {
// new symbol because the string in the LitKind is different to the
// string in the token.
let s = symbol.as_str();
- let symbol = if s.contains(&['\\', '\r']) {
+ let symbol = if s.contains(['\\', '\r']) {
let mut buf = String::with_capacity(s.len());
let mut error = Ok(());
// Force-inlining here is aggressive but the closure is
// called on every char in the string, so it can be
// hot in programs with many long strings.
unescape_literal(
- &s,
+ s,
Mode::Str,
&mut #[inline(always)]
|_, unescaped_char| match unescaped_char {
@@ -88,7 +85,7 @@ impl LitKind {
if s.contains('\r') {
let mut buf = String::with_capacity(s.len());
let mut error = Ok(());
- unescape_literal(&s, Mode::RawStr, &mut |_, unescaped_char| {
+ unescape_literal(s, Mode::RawStr, &mut |_, unescaped_char| {
match unescaped_char {
Ok(c) => buf.push(c),
Err(err) => {
@@ -109,13 +106,11 @@ impl LitKind {
let s = symbol.as_str();
let mut buf = Vec::with_capacity(s.len());
let mut error = Ok(());
- unescape_byte_literal(&s, Mode::ByteStr, &mut |_, unescaped_byte| {
- match unescaped_byte {
- Ok(c) => buf.push(c),
- Err(err) => {
- if err.is_fatal() {
- error = Err(LitError::LexerError);
- }
+ unescape_literal(s, Mode::ByteStr, &mut |_, c| match c {
+ Ok(c) => buf.push(byte_from_char(c)),
+ Err(err) => {
+ if err.is_fatal() {
+ error = Err(LitError::LexerError);
}
}
});
@@ -127,13 +122,11 @@ impl LitKind {
let bytes = if s.contains('\r') {
let mut buf = Vec::with_capacity(s.len());
let mut error = Ok(());
- unescape_byte_literal(&s, Mode::RawByteStr, &mut |_, unescaped_byte| {
- match unescaped_byte {
- Ok(c) => buf.push(c),
- Err(err) => {
- if err.is_fatal() {
- error = Err(LitError::LexerError);
- }
+ unescape_literal(s, Mode::RawByteStr, &mut |_, c| match c {
+ Ok(c) => buf.push(byte_from_char(c)),
+ Err(err) => {
+ if err.is_fatal() {
+ error = Err(LitError::LexerError);
}
}
});
@@ -202,49 +195,16 @@ impl LitKind {
}
}
-impl Lit {
- /// Converts literal token into an AST literal.
- pub fn from_token_lit(token_lit: token::Lit, span: Span) -> Result<Lit, LitError> {
- Ok(Lit { token_lit, kind: LitKind::from_token_lit(token_lit)?, span })
- }
-
- /// Converts arbitrary token into an AST literal.
- ///
- /// Keep this in sync with `Token::can_begin_literal_or_bool` excluding unary negation.
- pub fn from_token(token: &Token) -> Result<Lit, LitError> {
- let lit = match token.uninterpolate().kind {
- token::Ident(name, false) if name.is_bool_lit() => {
- token::Lit::new(token::Bool, name, None)
- }
- token::Literal(lit) => lit,
- token::Interpolated(ref nt) => {
- if let token::NtExpr(expr) | token::NtLiteral(expr) = &**nt
- && let ast::ExprKind::Lit(lit) = &expr.kind
- {
- return Ok(lit.clone());
- }
- return Err(LitError::NotLiteral);
- }
- _ => return Err(LitError::NotLiteral),
- };
-
- Lit::from_token_lit(lit, token.span)
- }
-
- /// Attempts to recover an AST literal from semantic literal.
- /// This function is used when the original token doesn't exist (e.g. the literal is created
- /// by an AST-based macro) or unavailable (e.g. from HIR pretty-printing).
- pub fn from_lit_kind(kind: LitKind, span: Span) -> Lit {
- Lit { token_lit: kind.to_token_lit(), kind, span }
+impl MetaItemLit {
+ /// Converts a token literal into a meta item literal.
+ pub fn from_token_lit(token_lit: token::Lit, span: Span) -> Result<MetaItemLit, LitError> {
+ Ok(MetaItemLit { token_lit, kind: LitKind::from_token_lit(token_lit)?, span })
}
- /// Losslessly convert an AST literal into a token.
- pub fn to_token(&self) -> Token {
- let kind = match self.token_lit.kind {
- token::Bool => token::Ident(self.token_lit.symbol, false),
- _ => token::Literal(self.token_lit),
- };
- Token::new(kind, self.span)
+ /// Converts an arbitrary token into a meta item literal.
+ pub fn from_token(token: &Token) -> Option<MetaItemLit> {
+ token::Lit::from_token(token)
+ .and_then(|token_lit| MetaItemLit::from_token_lit(token_lit, token.span).ok())
}
}
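
The reworked byte-string arms collect what they can and only latch an error for fatal problems. A standalone sketch of that pattern, with a plain slice of results standing in for `rustc_lexer`'s callback-based `unescape_literal`:

    // Accumulate decoded bytes, remember only whether a fatal error occurred,
    // and keep going so later code still has a value to work with.
    #[derive(Debug)]
    enum LitError { LexerError }

    fn decode(input: &[Result<char, /* is_fatal */ bool>]) -> (Vec<u8>, Result<(), LitError>) {
        let mut buf = Vec::new();
        let mut error = Ok(());
        for item in input {
            match item {
                Ok(c) => buf.push(*c as u8), // stand-in for `byte_from_char`
                Err(is_fatal) => {
                    if *is_fatal {
                        error = Err(LitError::LexerError);
                    }
                }
            }
        }
        (buf, error)
    }

    fn main() {
        let (bytes, err) = decode(&[Ok('h'), Err(false), Ok('i')]);
        assert_eq!(bytes, vec![b'h', b'i']); // non-fatal problems don't abort decoding
        assert!(err.is_ok());
        let (_, err) = decode(&[Ok('h'), Err(true)]);
        assert!(err.is_err());               // fatal problems are reported once, later
    }
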
diff --git a/compiler/rustc_ast/src/util/parser.rs b/compiler/rustc_ast/src/util/parser.rs
index b40ad6f70..819f1884a 100644
--- a/compiler/rustc_ast/src/util/parser.rs
+++ b/compiler/rustc_ast/src/util/parser.rs
@@ -377,28 +377,28 @@ pub fn needs_par_as_let_scrutinee(order: i8) -> bool {
/// parens or other delimiters, e.g., `X { y: 1 }`, `X { y: 1 }.method()`, `foo == X { y: 1 }` and
/// `X { y: 1 } == foo` all do, but `(X { y: 1 }) == foo` does not.
pub fn contains_exterior_struct_lit(value: &ast::Expr) -> bool {
- match value.kind {
+ match &value.kind {
ast::ExprKind::Struct(..) => true,
- ast::ExprKind::Assign(ref lhs, ref rhs, _)
- | ast::ExprKind::AssignOp(_, ref lhs, ref rhs)
- | ast::ExprKind::Binary(_, ref lhs, ref rhs) => {
+ ast::ExprKind::Assign(lhs, rhs, _)
+ | ast::ExprKind::AssignOp(_, lhs, rhs)
+ | ast::ExprKind::Binary(_, lhs, rhs) => {
// X { y: 1 } + X { y: 2 }
- contains_exterior_struct_lit(&lhs) || contains_exterior_struct_lit(&rhs)
+ contains_exterior_struct_lit(lhs) || contains_exterior_struct_lit(rhs)
}
- ast::ExprKind::Await(ref x)
- | ast::ExprKind::Unary(_, ref x)
- | ast::ExprKind::Cast(ref x, _)
- | ast::ExprKind::Type(ref x, _)
- | ast::ExprKind::Field(ref x, _)
- | ast::ExprKind::Index(ref x, _) => {
+ ast::ExprKind::Await(x)
+ | ast::ExprKind::Unary(_, x)
+ | ast::ExprKind::Cast(x, _)
+ | ast::ExprKind::Type(x, _)
+ | ast::ExprKind::Field(x, _)
+ | ast::ExprKind::Index(x, _) => {
// &X { y: 1 }, X { y: 1 }.y
- contains_exterior_struct_lit(&x)
+ contains_exterior_struct_lit(x)
}
- ast::ExprKind::MethodCall(_, ref receiver, _, _) => {
+ ast::ExprKind::MethodCall(box ast::MethodCall { receiver, .. }) => {
// X { y: 1 }.bar(...)
- contains_exterior_struct_lit(&receiver)
+ contains_exterior_struct_lit(receiver)
}
_ => false,
diff --git a/compiler/rustc_ast/src/util/unicode.rs b/compiler/rustc_ast/src/util/unicode.rs
index f009f7b30..0eae791b2 100644
--- a/compiler/rustc_ast/src/util/unicode.rs
+++ b/compiler/rustc_ast/src/util/unicode.rs
@@ -17,7 +17,7 @@ pub fn contains_text_flow_control_chars(s: &str) -> bool {
// U+2069 - E2 81 A9
let mut bytes = s.as_bytes();
loop {
- match core::slice::memchr::memchr(0xE2, &bytes) {
+ match core::slice::memchr::memchr(0xE2, bytes) {
Some(idx) => {
// bytes are valid UTF-8 -> E2 must be followed by two bytes
let ch = &bytes[idx..idx + 3];
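
The scan looks for the `0xE2` lead byte and then checks for the bidirectional-override code points U+202A–U+202E and U+2066–U+2069. A slower, char-based check over the same set, as a standalone reference:

    // Equivalent "Trojan Source" text-flow-control check written over chars
    // instead of scanning for the 0xE2 UTF-8 lead byte.
    fn contains_text_flow_control_chars(s: &str) -> bool {
        s.chars().any(|c| matches!(c, '\u{202A}'..='\u{202E}' | '\u{2066}'..='\u{2069}'))
    }

    fn main() {
        assert!(!contains_text_flow_control_chars("plain ascii // comment"));
        assert!(contains_text_flow_control_chars("evil \u{202E} comment"));
    }
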
diff --git a/compiler/rustc_ast/src/visit.rs b/compiler/rustc_ast/src/visit.rs
index 6f56c1ef0..991eb489f 100644
--- a/compiler/rustc_ast/src/visit.rs
+++ b/compiler/rustc_ast/src/visit.rs
@@ -251,7 +251,7 @@ pub trait Visitor<'ast>: Sized {
macro_rules! walk_list {
($visitor: expr, $method: ident, $list: expr $(, $($extra_args: expr),* )?) => {
{
- #[cfg_attr(not(bootstrap), allow(for_loops_over_fallibles))]
+ #[allow(for_loops_over_fallibles)]
for elem in $list {
$visitor.$method(elem $(, $($extra_args,)* )?)
}
@@ -299,74 +299,68 @@ pub fn walk_trait_ref<'a, V: Visitor<'a>>(visitor: &mut V, trait_ref: &'a TraitR
pub fn walk_item<'a, V: Visitor<'a>>(visitor: &mut V, item: &'a Item) {
visitor.visit_vis(&item.vis);
visitor.visit_ident(item.ident);
- match item.kind {
+ match &item.kind {
ItemKind::ExternCrate(_) => {}
- ItemKind::Use(ref use_tree) => visitor.visit_use_tree(use_tree, item.id, false),
- ItemKind::Static(ref typ, _, ref expr) | ItemKind::Const(_, ref typ, ref expr) => {
+ ItemKind::Use(use_tree) => visitor.visit_use_tree(use_tree, item.id, false),
+ ItemKind::Static(typ, _, expr) | ItemKind::Const(_, typ, expr) => {
visitor.visit_ty(typ);
walk_list!(visitor, visit_expr, expr);
}
- ItemKind::Fn(box Fn { defaultness: _, ref generics, ref sig, ref body }) => {
+ ItemKind::Fn(box Fn { defaultness: _, generics, sig, body }) => {
let kind =
FnKind::Fn(FnCtxt::Free, item.ident, sig, &item.vis, generics, body.as_deref());
visitor.visit_fn(kind, item.span, item.id)
}
- ItemKind::Mod(_unsafety, ref mod_kind) => match mod_kind {
+ ItemKind::Mod(_unsafety, mod_kind) => match mod_kind {
ModKind::Loaded(items, _inline, _inner_span) => {
walk_list!(visitor, visit_item, items)
}
ModKind::Unloaded => {}
},
- ItemKind::ForeignMod(ref foreign_module) => {
+ ItemKind::ForeignMod(foreign_module) => {
walk_list!(visitor, visit_foreign_item, &foreign_module.items);
}
- ItemKind::GlobalAsm(ref asm) => visitor.visit_inline_asm(asm),
- ItemKind::TyAlias(box TyAlias { ref generics, ref bounds, ref ty, .. }) => {
+ ItemKind::GlobalAsm(asm) => visitor.visit_inline_asm(asm),
+ ItemKind::TyAlias(box TyAlias { generics, bounds, ty, .. }) => {
visitor.visit_generics(generics);
walk_list!(visitor, visit_param_bound, bounds, BoundKind::Bound);
walk_list!(visitor, visit_ty, ty);
}
- ItemKind::Enum(ref enum_definition, ref generics) => {
+ ItemKind::Enum(enum_definition, generics) => {
visitor.visit_generics(generics);
visitor.visit_enum_def(enum_definition)
}
ItemKind::Impl(box Impl {
defaultness: _,
unsafety: _,
- ref generics,
+ generics,
constness: _,
polarity: _,
- ref of_trait,
- ref self_ty,
- ref items,
+ of_trait,
+ self_ty,
+ items,
}) => {
visitor.visit_generics(generics);
walk_list!(visitor, visit_trait_ref, of_trait);
visitor.visit_ty(self_ty);
walk_list!(visitor, visit_assoc_item, items, AssocCtxt::Impl);
}
- ItemKind::Struct(ref struct_definition, ref generics)
- | ItemKind::Union(ref struct_definition, ref generics) => {
+ ItemKind::Struct(struct_definition, generics)
+ | ItemKind::Union(struct_definition, generics) => {
visitor.visit_generics(generics);
visitor.visit_variant_data(struct_definition);
}
- ItemKind::Trait(box Trait {
- unsafety: _,
- is_auto: _,
- ref generics,
- ref bounds,
- ref items,
- }) => {
+ ItemKind::Trait(box Trait { unsafety: _, is_auto: _, generics, bounds, items }) => {
visitor.visit_generics(generics);
walk_list!(visitor, visit_param_bound, bounds, BoundKind::SuperTraits);
walk_list!(visitor, visit_assoc_item, items, AssocCtxt::Trait);
}
- ItemKind::TraitAlias(ref generics, ref bounds) => {
+ ItemKind::TraitAlias(generics, bounds) => {
visitor.visit_generics(generics);
walk_list!(visitor, visit_param_bound, bounds, BoundKind::Bound);
}
- ItemKind::MacCall(ref mac) => visitor.visit_mac_call(mac),
- ItemKind::MacroDef(ref ts) => visitor.visit_mac_def(ts, item.id),
+ ItemKind::MacCall(mac) => visitor.visit_mac_call(mac),
+ ItemKind::MacroDef(ts) => visitor.visit_mac_def(ts, item.id),
}
walk_list!(visitor, visit_attribute, &item.attrs);
}
@@ -399,39 +393,39 @@ pub fn walk_pat_field<'a, V: Visitor<'a>>(visitor: &mut V, fp: &'a PatField) {
}
pub fn walk_ty<'a, V: Visitor<'a>>(visitor: &mut V, typ: &'a Ty) {
- match typ.kind {
- TyKind::Slice(ref ty) | TyKind::Paren(ref ty) => visitor.visit_ty(ty),
- TyKind::Ptr(ref mutable_type) => visitor.visit_ty(&mutable_type.ty),
- TyKind::Rptr(ref opt_lifetime, ref mutable_type) => {
+ match &typ.kind {
+ TyKind::Slice(ty) | TyKind::Paren(ty) => visitor.visit_ty(ty),
+ TyKind::Ptr(mutable_type) => visitor.visit_ty(&mutable_type.ty),
+ TyKind::Rptr(opt_lifetime, mutable_type) => {
walk_list!(visitor, visit_lifetime, opt_lifetime, LifetimeCtxt::Rptr);
visitor.visit_ty(&mutable_type.ty)
}
- TyKind::Tup(ref tuple_element_types) => {
+ TyKind::Tup(tuple_element_types) => {
walk_list!(visitor, visit_ty, tuple_element_types);
}
- TyKind::BareFn(ref function_declaration) => {
+ TyKind::BareFn(function_declaration) => {
walk_list!(visitor, visit_generic_param, &function_declaration.generic_params);
walk_fn_decl(visitor, &function_declaration.decl);
}
- TyKind::Path(ref maybe_qself, ref path) => {
- if let Some(ref qself) = *maybe_qself {
+ TyKind::Path(maybe_qself, path) => {
+ if let Some(qself) = maybe_qself {
visitor.visit_ty(&qself.ty);
}
visitor.visit_path(path, typ.id);
}
- TyKind::Array(ref ty, ref length) => {
+ TyKind::Array(ty, length) => {
visitor.visit_ty(ty);
visitor.visit_anon_const(length)
}
- TyKind::TraitObject(ref bounds, ..) => {
+ TyKind::TraitObject(bounds, ..) => {
walk_list!(visitor, visit_param_bound, bounds, BoundKind::TraitObject);
}
- TyKind::ImplTrait(_, ref bounds) => {
+ TyKind::ImplTrait(_, bounds) => {
walk_list!(visitor, visit_param_bound, bounds, BoundKind::Impl);
}
- TyKind::Typeof(ref expression) => visitor.visit_anon_const(expression),
+ TyKind::Typeof(expression) => visitor.visit_anon_const(expression),
TyKind::Infer | TyKind::ImplicitSelf | TyKind::Err => {}
- TyKind::MacCall(ref mac) => visitor.visit_mac_call(mac),
+ TyKind::MacCall(mac) => visitor.visit_mac_call(mac),
TyKind::Never | TyKind::CVarArgs => {}
}
}
@@ -444,15 +438,15 @@ pub fn walk_path<'a, V: Visitor<'a>>(visitor: &mut V, path: &'a Path) {
pub fn walk_use_tree<'a, V: Visitor<'a>>(visitor: &mut V, use_tree: &'a UseTree, id: NodeId) {
visitor.visit_path(&use_tree.prefix, id);
- match use_tree.kind {
- UseTreeKind::Simple(rename, ..) => {
+ match &use_tree.kind {
+ UseTreeKind::Simple(rename) => {
// The extra IDs are handled during HIR lowering.
- if let Some(rename) = rename {
+ if let &Some(rename) = rename {
visitor.visit_ident(rename);
}
}
UseTreeKind::Glob => {}
- UseTreeKind::Nested(ref use_trees) => {
+ UseTreeKind::Nested(use_trees) => {
for &(ref nested_tree, nested_id) in use_trees {
visitor.visit_use_tree(nested_tree, nested_id, true);
}
@@ -462,7 +456,7 @@ pub fn walk_use_tree<'a, V: Visitor<'a>>(visitor: &mut V, use_tree: &'a UseTree,
pub fn walk_path_segment<'a, V: Visitor<'a>>(visitor: &mut V, segment: &'a PathSegment) {
visitor.visit_ident(segment.ident);
- if let Some(ref args) = segment.args {
+ if let Some(args) = &segment.args {
visitor.visit_generic_args(args);
}
}
@@ -471,8 +465,8 @@ pub fn walk_generic_args<'a, V>(visitor: &mut V, generic_args: &'a GenericArgs)
where
V: Visitor<'a>,
{
- match *generic_args {
- GenericArgs::AngleBracketed(ref data) => {
+ match generic_args {
+ GenericArgs::AngleBracketed(data) => {
for arg in &data.args {
match arg {
AngleBracketedArg::Arg(a) => visitor.visit_generic_arg(a),
@@ -480,7 +474,7 @@ where
}
}
}
- GenericArgs::Parenthesized(ref data) => {
+ GenericArgs::Parenthesized(data) => {
walk_list!(visitor, visit_ty, &data.inputs);
walk_fn_ret_ty(visitor, &data.output);
}
@@ -500,64 +494,64 @@ where
pub fn walk_assoc_constraint<'a, V: Visitor<'a>>(visitor: &mut V, constraint: &'a AssocConstraint) {
visitor.visit_ident(constraint.ident);
- if let Some(ref gen_args) = constraint.gen_args {
+ if let Some(gen_args) = &constraint.gen_args {
visitor.visit_generic_args(gen_args);
}
- match constraint.kind {
- AssocConstraintKind::Equality { ref term } => match term {
+ match &constraint.kind {
+ AssocConstraintKind::Equality { term } => match term {
Term::Ty(ty) => visitor.visit_ty(ty),
Term::Const(c) => visitor.visit_anon_const(c),
},
- AssocConstraintKind::Bound { ref bounds } => {
+ AssocConstraintKind::Bound { bounds } => {
walk_list!(visitor, visit_param_bound, bounds, BoundKind::Bound);
}
}
}
pub fn walk_pat<'a, V: Visitor<'a>>(visitor: &mut V, pattern: &'a Pat) {
- match pattern.kind {
- PatKind::TupleStruct(ref opt_qself, ref path, ref elems) => {
- if let Some(ref qself) = *opt_qself {
+ match &pattern.kind {
+ PatKind::TupleStruct(opt_qself, path, elems) => {
+ if let Some(qself) = opt_qself {
visitor.visit_ty(&qself.ty);
}
visitor.visit_path(path, pattern.id);
walk_list!(visitor, visit_pat, elems);
}
- PatKind::Path(ref opt_qself, ref path) => {
- if let Some(ref qself) = *opt_qself {
+ PatKind::Path(opt_qself, path) => {
+ if let Some(qself) = opt_qself {
visitor.visit_ty(&qself.ty);
}
visitor.visit_path(path, pattern.id)
}
- PatKind::Struct(ref opt_qself, ref path, ref fields, _) => {
- if let Some(ref qself) = *opt_qself {
+ PatKind::Struct(opt_qself, path, fields, _) => {
+ if let Some(qself) = opt_qself {
visitor.visit_ty(&qself.ty);
}
visitor.visit_path(path, pattern.id);
walk_list!(visitor, visit_pat_field, fields);
}
- PatKind::Box(ref subpattern)
- | PatKind::Ref(ref subpattern, _)
- | PatKind::Paren(ref subpattern) => visitor.visit_pat(subpattern),
- PatKind::Ident(_, ident, ref optional_subpattern) => {
- visitor.visit_ident(ident);
+ PatKind::Box(subpattern) | PatKind::Ref(subpattern, _) | PatKind::Paren(subpattern) => {
+ visitor.visit_pat(subpattern)
+ }
+ PatKind::Ident(_, ident, optional_subpattern) => {
+ visitor.visit_ident(*ident);
walk_list!(visitor, visit_pat, optional_subpattern);
}
- PatKind::Lit(ref expression) => visitor.visit_expr(expression),
- PatKind::Range(ref lower_bound, ref upper_bound, _) => {
+ PatKind::Lit(expression) => visitor.visit_expr(expression),
+ PatKind::Range(lower_bound, upper_bound, _) => {
walk_list!(visitor, visit_expr, lower_bound);
walk_list!(visitor, visit_expr, upper_bound);
}
PatKind::Wild | PatKind::Rest => {}
- PatKind::Tuple(ref elems) | PatKind::Slice(ref elems) | PatKind::Or(ref elems) => {
+ PatKind::Tuple(elems) | PatKind::Slice(elems) | PatKind::Or(elems) => {
walk_list!(visitor, visit_pat, elems);
}
- PatKind::MacCall(ref mac) => visitor.visit_mac_call(mac),
+ PatKind::MacCall(mac) => visitor.visit_mac_call(mac),
}
}
pub fn walk_foreign_item<'a, V: Visitor<'a>>(visitor: &mut V, item: &'a ForeignItem) {
- let Item { id, span, ident, ref vis, ref attrs, ref kind, tokens: _ } = *item;
+ let &Item { id, span, ident, ref vis, ref attrs, ref kind, tokens: _ } = item;
visitor.visit_vis(vis);
visitor.visit_ident(ident);
walk_list!(visitor, visit_attribute, attrs);
@@ -566,7 +560,7 @@ pub fn walk_foreign_item<'a, V: Visitor<'a>>(visitor: &mut V, item: &'a ForeignI
visitor.visit_ty(ty);
walk_list!(visitor, visit_expr, expr);
}
- ForeignItemKind::Fn(box Fn { defaultness: _, ref generics, ref sig, ref body }) => {
+ ForeignItemKind::Fn(box Fn { defaultness: _, generics, sig, body }) => {
let kind = FnKind::Fn(FnCtxt::Foreign, ident, sig, vis, generics, body.as_deref());
visitor.visit_fn(kind, span, id);
}
@@ -582,11 +576,9 @@ pub fn walk_foreign_item<'a, V: Visitor<'a>>(visitor: &mut V, item: &'a ForeignI
}
pub fn walk_param_bound<'a, V: Visitor<'a>>(visitor: &mut V, bound: &'a GenericBound) {
- match *bound {
- GenericBound::Trait(ref typ, ref _modifier) => visitor.visit_poly_trait_ref(typ),
- GenericBound::Outlives(ref lifetime) => {
- visitor.visit_lifetime(lifetime, LifetimeCtxt::Bound)
- }
+ match bound {
+ GenericBound::Trait(typ, _modifier) => visitor.visit_poly_trait_ref(typ),
+ GenericBound::Outlives(lifetime) => visitor.visit_lifetime(lifetime, LifetimeCtxt::Bound),
}
}
@@ -594,10 +586,10 @@ pub fn walk_generic_param<'a, V: Visitor<'a>>(visitor: &mut V, param: &'a Generi
visitor.visit_ident(param.ident);
walk_list!(visitor, visit_attribute, param.attrs.iter());
walk_list!(visitor, visit_param_bound, &param.bounds, BoundKind::Bound);
- match param.kind {
+ match &param.kind {
GenericParamKind::Lifetime => (),
- GenericParamKind::Type { ref default } => walk_list!(visitor, visit_ty, default),
- GenericParamKind::Const { ref ty, ref default, .. } => {
+ GenericParamKind::Type { default } => walk_list!(visitor, visit_ty, default),
+ GenericParamKind::Const { ty, default, .. } => {
visitor.visit_ty(ty);
if let Some(default) = default {
visitor.visit_anon_const(default);
@@ -621,24 +613,22 @@ pub fn walk_closure_binder<'a, V: Visitor<'a>>(visitor: &mut V, binder: &'a Clos
}
pub fn walk_where_predicate<'a, V: Visitor<'a>>(visitor: &mut V, predicate: &'a WherePredicate) {
- match *predicate {
+ match predicate {
WherePredicate::BoundPredicate(WhereBoundPredicate {
- ref bounded_ty,
- ref bounds,
- ref bound_generic_params,
+ bounded_ty,
+ bounds,
+ bound_generic_params,
..
}) => {
visitor.visit_ty(bounded_ty);
walk_list!(visitor, visit_param_bound, bounds, BoundKind::Bound);
walk_list!(visitor, visit_generic_param, bound_generic_params);
}
- WherePredicate::RegionPredicate(WhereRegionPredicate {
- ref lifetime, ref bounds, ..
- }) => {
+ WherePredicate::RegionPredicate(WhereRegionPredicate { lifetime, bounds, .. }) => {
visitor.visit_lifetime(lifetime, LifetimeCtxt::Bound);
walk_list!(visitor, visit_param_bound, bounds, BoundKind::Bound);
}
- WherePredicate::EqPredicate(WhereEqPredicate { ref lhs_ty, ref rhs_ty, .. }) => {
+ WherePredicate::EqPredicate(WhereEqPredicate { lhs_ty, rhs_ty, .. }) => {
visitor.visit_ty(lhs_ty);
visitor.visit_ty(rhs_ty);
}
@@ -646,7 +636,7 @@ pub fn walk_where_predicate<'a, V: Visitor<'a>>(visitor: &mut V, predicate: &'a
}
pub fn walk_fn_ret_ty<'a, V: Visitor<'a>>(visitor: &mut V, ret_ty: &'a FnRetTy) {
- if let FnRetTy::Ty(ref output_ty) = *ret_ty {
+ if let FnRetTy::Ty(output_ty) = ret_ty {
visitor.visit_ty(output_ty)
}
}
@@ -675,7 +665,7 @@ pub fn walk_fn<'a, V: Visitor<'a>>(visitor: &mut V, kind: FnKind<'a>) {
}
pub fn walk_assoc_item<'a, V: Visitor<'a>>(visitor: &mut V, item: &'a AssocItem, ctxt: AssocCtxt) {
- let Item { id, span, ident, ref vis, ref attrs, ref kind, tokens: _ } = *item;
+ let &Item { id, span, ident, ref vis, ref attrs, ref kind, tokens: _ } = item;
visitor.visit_vis(vis);
visitor.visit_ident(ident);
walk_list!(visitor, visit_attribute, attrs);
@@ -684,7 +674,7 @@ pub fn walk_assoc_item<'a, V: Visitor<'a>>(visitor: &mut V, item: &'a AssocItem,
visitor.visit_ty(ty);
walk_list!(visitor, visit_expr, expr);
}
- AssocItemKind::Fn(box Fn { defaultness: _, ref generics, ref sig, ref body }) => {
+ AssocItemKind::Fn(box Fn { defaultness: _, generics, sig, body }) => {
let kind = FnKind::Fn(FnCtxt::Assoc(ctxt), ident, sig, vis, generics, body.as_deref());
visitor.visit_fn(kind, span, id);
}
@@ -717,13 +707,13 @@ pub fn walk_block<'a, V: Visitor<'a>>(visitor: &mut V, block: &'a Block) {
}
pub fn walk_stmt<'a, V: Visitor<'a>>(visitor: &mut V, statement: &'a Stmt) {
- match statement.kind {
- StmtKind::Local(ref local) => visitor.visit_local(local),
- StmtKind::Item(ref item) => visitor.visit_item(item),
- StmtKind::Expr(ref expr) | StmtKind::Semi(ref expr) => visitor.visit_expr(expr),
+ match &statement.kind {
+ StmtKind::Local(local) => visitor.visit_local(local),
+ StmtKind::Item(item) => visitor.visit_item(item),
+ StmtKind::Expr(expr) | StmtKind::Semi(expr) => visitor.visit_expr(expr),
StmtKind::Empty => {}
- StmtKind::MacCall(ref mac) => {
- let MacCallStmt { ref mac, style: _, ref attrs, tokens: _ } = **mac;
+ StmtKind::MacCall(mac) => {
+ let MacCallStmt { mac, attrs, style: _, tokens: _ } = &**mac;
visitor.visit_mac_call(mac);
for attr in attrs.iter() {
visitor.visit_attribute(attr);
@@ -760,7 +750,7 @@ pub fn walk_inline_asm<'a, V: Visitor<'a>>(visitor: &mut V, asm: &'a InlineAsm)
}
pub fn walk_inline_asm_sym<'a, V: Visitor<'a>>(visitor: &mut V, sym: &'a InlineAsmSym) {
- if let Some(ref qself) = sym.qself {
+ if let Some(qself) = &sym.qself {
visitor.visit_ty(&qself.ty);
}
visitor.visit_path(&sym.path, sym.id);
@@ -769,18 +759,18 @@ pub fn walk_inline_asm_sym<'a, V: Visitor<'a>>(visitor: &mut V, sym: &'a InlineA
pub fn walk_expr<'a, V: Visitor<'a>>(visitor: &mut V, expression: &'a Expr) {
walk_list!(visitor, visit_attribute, expression.attrs.iter());
- match expression.kind {
- ExprKind::Box(ref subexpression) => visitor.visit_expr(subexpression),
- ExprKind::Array(ref subexpressions) => {
+ match &expression.kind {
+ ExprKind::Box(subexpression) => visitor.visit_expr(subexpression),
+ ExprKind::Array(subexpressions) => {
walk_list!(visitor, visit_expr, subexpressions);
}
- ExprKind::ConstBlock(ref anon_const) => visitor.visit_anon_const(anon_const),
- ExprKind::Repeat(ref element, ref count) => {
+ ExprKind::ConstBlock(anon_const) => visitor.visit_anon_const(anon_const),
+ ExprKind::Repeat(element, count) => {
visitor.visit_expr(element);
visitor.visit_anon_const(count)
}
- ExprKind::Struct(ref se) => {
- if let Some(ref qself) = se.qself {
+ ExprKind::Struct(se) => {
+ if let Some(qself) = &se.qself {
visitor.visit_ty(&qself.ty);
}
visitor.visit_path(&se.path, expression.id);
@@ -791,117 +781,126 @@ pub fn walk_expr<'a, V: Visitor<'a>>(visitor: &mut V, expression: &'a Expr) {
StructRest::None => {}
}
}
- ExprKind::Tup(ref subexpressions) => {
+ ExprKind::Tup(subexpressions) => {
walk_list!(visitor, visit_expr, subexpressions);
}
- ExprKind::Call(ref callee_expression, ref arguments) => {
+ ExprKind::Call(callee_expression, arguments) => {
visitor.visit_expr(callee_expression);
walk_list!(visitor, visit_expr, arguments);
}
- ExprKind::MethodCall(ref segment, ref receiver, ref arguments, _span) => {
- visitor.visit_path_segment(segment);
+ ExprKind::MethodCall(box MethodCall { seg, receiver, args, span: _ }) => {
+ visitor.visit_path_segment(seg);
visitor.visit_expr(receiver);
- walk_list!(visitor, visit_expr, arguments);
+ walk_list!(visitor, visit_expr, args);
}
- ExprKind::Binary(_, ref left_expression, ref right_expression) => {
+ ExprKind::Binary(_, left_expression, right_expression) => {
visitor.visit_expr(left_expression);
visitor.visit_expr(right_expression)
}
- ExprKind::AddrOf(_, _, ref subexpression) | ExprKind::Unary(_, ref subexpression) => {
+ ExprKind::AddrOf(_, _, subexpression) | ExprKind::Unary(_, subexpression) => {
visitor.visit_expr(subexpression)
}
- ExprKind::Cast(ref subexpression, ref typ) | ExprKind::Type(ref subexpression, ref typ) => {
+ ExprKind::Cast(subexpression, typ) | ExprKind::Type(subexpression, typ) => {
visitor.visit_expr(subexpression);
visitor.visit_ty(typ)
}
- ExprKind::Let(ref pat, ref expr, _) => {
+ ExprKind::Let(pat, expr, _) => {
visitor.visit_pat(pat);
visitor.visit_expr(expr);
}
- ExprKind::If(ref head_expression, ref if_block, ref optional_else) => {
+ ExprKind::If(head_expression, if_block, optional_else) => {
visitor.visit_expr(head_expression);
visitor.visit_block(if_block);
walk_list!(visitor, visit_expr, optional_else);
}
- ExprKind::While(ref subexpression, ref block, ref opt_label) => {
+ ExprKind::While(subexpression, block, opt_label) => {
walk_list!(visitor, visit_label, opt_label);
visitor.visit_expr(subexpression);
visitor.visit_block(block);
}
- ExprKind::ForLoop(ref pattern, ref subexpression, ref block, ref opt_label) => {
+ ExprKind::ForLoop(pattern, subexpression, block, opt_label) => {
walk_list!(visitor, visit_label, opt_label);
visitor.visit_pat(pattern);
visitor.visit_expr(subexpression);
visitor.visit_block(block);
}
- ExprKind::Loop(ref block, ref opt_label) => {
+ ExprKind::Loop(block, opt_label, _) => {
walk_list!(visitor, visit_label, opt_label);
visitor.visit_block(block);
}
- ExprKind::Match(ref subexpression, ref arms) => {
+ ExprKind::Match(subexpression, arms) => {
visitor.visit_expr(subexpression);
walk_list!(visitor, visit_arm, arms);
}
- ExprKind::Closure(ref binder, _, _, _, ref decl, ref body, _decl_span) => {
- visitor.visit_fn(FnKind::Closure(binder, decl, body), expression.span, expression.id)
+ ExprKind::Closure(box Closure {
+ binder,
+ capture_clause: _,
+ asyncness: _,
+ movability: _,
+ fn_decl,
+ body,
+ fn_decl_span: _,
+ fn_arg_span: _,
+ }) => {
+ visitor.visit_fn(FnKind::Closure(binder, fn_decl, body), expression.span, expression.id)
}
- ExprKind::Block(ref block, ref opt_label) => {
+ ExprKind::Block(block, opt_label) => {
walk_list!(visitor, visit_label, opt_label);
visitor.visit_block(block);
}
- ExprKind::Async(_, _, ref body) => {
+ ExprKind::Async(_, _, body) => {
visitor.visit_block(body);
}
- ExprKind::Await(ref expr) => visitor.visit_expr(expr),
- ExprKind::Assign(ref lhs, ref rhs, _) => {
+ ExprKind::Await(expr) => visitor.visit_expr(expr),
+ ExprKind::Assign(lhs, rhs, _) => {
visitor.visit_expr(lhs);
visitor.visit_expr(rhs);
}
- ExprKind::AssignOp(_, ref left_expression, ref right_expression) => {
+ ExprKind::AssignOp(_, left_expression, right_expression) => {
visitor.visit_expr(left_expression);
visitor.visit_expr(right_expression);
}
- ExprKind::Field(ref subexpression, ident) => {
+ ExprKind::Field(subexpression, ident) => {
visitor.visit_expr(subexpression);
- visitor.visit_ident(ident);
+ visitor.visit_ident(*ident);
}
- ExprKind::Index(ref main_expression, ref index_expression) => {
+ ExprKind::Index(main_expression, index_expression) => {
visitor.visit_expr(main_expression);
visitor.visit_expr(index_expression)
}
- ExprKind::Range(ref start, ref end, _) => {
+ ExprKind::Range(start, end, _) => {
walk_list!(visitor, visit_expr, start);
walk_list!(visitor, visit_expr, end);
}
ExprKind::Underscore => {}
- ExprKind::Path(ref maybe_qself, ref path) => {
- if let Some(ref qself) = *maybe_qself {
+ ExprKind::Path(maybe_qself, path) => {
+ if let Some(qself) = maybe_qself {
visitor.visit_ty(&qself.ty);
}
visitor.visit_path(path, expression.id)
}
- ExprKind::Break(ref opt_label, ref opt_expr) => {
+ ExprKind::Break(opt_label, opt_expr) => {
walk_list!(visitor, visit_label, opt_label);
walk_list!(visitor, visit_expr, opt_expr);
}
- ExprKind::Continue(ref opt_label) => {
+ ExprKind::Continue(opt_label) => {
walk_list!(visitor, visit_label, opt_label);
}
- ExprKind::Ret(ref optional_expression) => {
+ ExprKind::Ret(optional_expression) => {
walk_list!(visitor, visit_expr, optional_expression);
}
- ExprKind::Yeet(ref optional_expression) => {
+ ExprKind::Yeet(optional_expression) => {
walk_list!(visitor, visit_expr, optional_expression);
}
- ExprKind::MacCall(ref mac) => visitor.visit_mac_call(mac),
- ExprKind::Paren(ref subexpression) => visitor.visit_expr(subexpression),
- ExprKind::InlineAsm(ref asm) => visitor.visit_inline_asm(asm),
- ExprKind::Yield(ref optional_expression) => {
+ ExprKind::MacCall(mac) => visitor.visit_mac_call(mac),
+ ExprKind::Paren(subexpression) => visitor.visit_expr(subexpression),
+ ExprKind::InlineAsm(asm) => visitor.visit_inline_asm(asm),
+ ExprKind::Yield(optional_expression) => {
walk_list!(visitor, visit_expr, optional_expression);
}
- ExprKind::Try(ref subexpression) => visitor.visit_expr(subexpression),
- ExprKind::TryBlock(ref body) => visitor.visit_block(body),
- ExprKind::Lit(_) | ExprKind::Err => {}
+ ExprKind::Try(subexpression) => visitor.visit_expr(subexpression),
+ ExprKind::TryBlock(body) => visitor.visit_block(body),
+ ExprKind::Lit(_) | ExprKind::IncludedBytes(..) | ExprKind::Err => {}
}
visitor.visit_expr_post(expression)
@@ -927,18 +926,18 @@ pub fn walk_vis<'a, V: Visitor<'a>>(visitor: &mut V, vis: &'a Visibility) {
}
pub fn walk_attribute<'a, V: Visitor<'a>>(visitor: &mut V, attr: &'a Attribute) {
- match attr.kind {
- AttrKind::Normal(ref normal) => walk_mac_args(visitor, &normal.item.args),
+ match &attr.kind {
+ AttrKind::Normal(normal) => walk_attr_args(visitor, &normal.item.args),
AttrKind::DocComment(..) => {}
}
}
-pub fn walk_mac_args<'a, V: Visitor<'a>>(visitor: &mut V, args: &'a MacArgs) {
+pub fn walk_attr_args<'a, V: Visitor<'a>>(visitor: &mut V, args: &'a AttrArgs) {
match args {
- MacArgs::Empty => {}
- MacArgs::Delimited(_dspan, _delim, _tokens) => {}
- MacArgs::Eq(_eq_span, MacArgsEq::Ast(expr)) => visitor.visit_expr(expr),
- MacArgs::Eq(_, MacArgsEq::Hir(lit)) => {
+ AttrArgs::Empty => {}
+ AttrArgs::Delimited(_) => {}
+ AttrArgs::Eq(_eq_span, AttrArgsEq::Ast(expr)) => visitor.visit_expr(expr),
+ AttrArgs::Eq(_, AttrArgsEq::Hir(lit)) => {
unreachable!("in literal form when walking mac args eq: {:?}", lit)
}
}
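
Most of the visitor churn swaps `match foo.kind { K::X(ref y) => .. }` for `match &foo.kind { K::X(y) => .. }`; match ergonomics bind the same references either way, so behaviour is unchanged. A small standalone illustration:

    // Both styles bind `items` as `&Vec<i32>`; the second needs no `ref`.
    enum Kind {
        List(Vec<i32>),
        Empty,
    }

    fn len_old_style(k: &Kind) -> usize {
        match *k {
            Kind::List(ref items) => items.len(),
            Kind::Empty => 0,
        }
    }

    fn len_new_style(k: &Kind) -> usize {
        match k {
            Kind::List(items) => items.len(), // match ergonomics: `items: &Vec<i32>`
            Kind::Empty => 0,
        }
    }

    fn main() {
        let k = Kind::List(vec![1, 2, 3]);
        assert_eq!(len_old_style(&k), len_new_style(&k));
        assert_eq!(len_new_style(&Kind::Empty), 0);
    }
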
diff --git a/compiler/rustc_ast_lowering/Cargo.toml b/compiler/rustc_ast_lowering/Cargo.toml
index ce1c8d499..6a59b9e61 100644
--- a/compiler/rustc_ast_lowering/Cargo.toml
+++ b/compiler/rustc_ast_lowering/Cargo.toml
@@ -21,5 +21,5 @@ rustc_session = { path = "../rustc_session" }
rustc_span = { path = "../rustc_span" }
rustc_target = { path = "../rustc_target" }
smallvec = { version = "1.8.1", features = ["union", "may_dangle"] }
-thin-vec = "0.2.8"
+thin-vec = "0.2.9"
tracing = "0.1"
diff --git a/compiler/rustc_ast_lowering/src/asm.rs b/compiler/rustc_ast_lowering/src/asm.rs
index 450cdf246..dfef6ec70 100644
--- a/compiler/rustc_ast_lowering/src/asm.rs
+++ b/compiler/rustc_ast_lowering/src/asm.rs
@@ -11,7 +11,7 @@ use super::LoweringContext;
use rustc_ast::ptr::P;
use rustc_ast::*;
-use rustc_data_structures::fx::{FxHashMap, FxHashSet};
+use rustc_data_structures::fx::{FxHashMap, FxHashSet, FxIndexMap};
use rustc_hir as hir;
use rustc_hir::def::{DefKind, Res};
use rustc_hir::definitions::DefPathData;
@@ -71,7 +71,7 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> {
.emit();
}
- let mut clobber_abis = FxHashMap::default();
+ let mut clobber_abis = FxIndexMap::default();
if let Some(asm_arch) = asm_arch {
for (abi_name, abi_span) in &asm.clobber_abis {
match asm::InlineAsmClobberAbi::parse(asm_arch, &self.tcx.sess.target, *abi_name) {
@@ -123,7 +123,7 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> {
.operands
.iter()
.map(|(op, op_sp)| {
- let lower_reg = |reg| match reg {
+ let lower_reg = |&reg: &_| match reg {
InlineAsmRegOrRegClass::Reg(reg) => {
asm::InlineAsmRegOrRegClass::Reg(if let Some(asm_arch) = asm_arch {
asm::InlineAsmReg::parse(asm_arch, reg).unwrap_or_else(|error| {
@@ -152,32 +152,30 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> {
}
};
- let op = match *op {
- InlineAsmOperand::In { reg, ref expr } => hir::InlineAsmOperand::In {
+ let op = match op {
+ InlineAsmOperand::In { reg, expr } => hir::InlineAsmOperand::In {
reg: lower_reg(reg),
expr: self.lower_expr(expr),
},
- InlineAsmOperand::Out { reg, late, ref expr } => hir::InlineAsmOperand::Out {
+ InlineAsmOperand::Out { reg, late, expr } => hir::InlineAsmOperand::Out {
reg: lower_reg(reg),
- late,
+ late: *late,
expr: expr.as_ref().map(|expr| self.lower_expr(expr)),
},
- InlineAsmOperand::InOut { reg, late, ref expr } => {
- hir::InlineAsmOperand::InOut {
- reg: lower_reg(reg),
- late,
- expr: self.lower_expr(expr),
- }
- }
- InlineAsmOperand::SplitInOut { reg, late, ref in_expr, ref out_expr } => {
+ InlineAsmOperand::InOut { reg, late, expr } => hir::InlineAsmOperand::InOut {
+ reg: lower_reg(reg),
+ late: *late,
+ expr: self.lower_expr(expr),
+ },
+ InlineAsmOperand::SplitInOut { reg, late, in_expr, out_expr } => {
hir::InlineAsmOperand::SplitInOut {
reg: lower_reg(reg),
- late,
+ late: *late,
in_expr: self.lower_expr(in_expr),
out_expr: out_expr.as_ref().map(|expr| self.lower_expr(expr)),
}
}
- InlineAsmOperand::Const { ref anon_const } => {
+ InlineAsmOperand::Const { anon_const } => {
if !self.tcx.features().asm_const {
feature_err(
&sess.parse_sess,
@@ -191,7 +189,7 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> {
anon_const: self.lower_anon_const(anon_const),
}
}
- InlineAsmOperand::Sym { ref sym } => {
+ InlineAsmOperand::Sym { sym } => {
let static_def_id = self
.resolver
.get_partial_res(sym.id)
@@ -224,7 +222,12 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> {
// Wrap the expression in an AnonConst.
let parent_def_id = self.current_hir_id_owner;
let node_id = self.next_node_id();
- self.create_def(parent_def_id.def_id, node_id, DefPathData::AnonConst);
+ self.create_def(
+ parent_def_id.def_id,
+ node_id,
+ DefPathData::AnonConst,
+ *op_sp,
+ );
let anon_const = AnonConst { id: node_id, value: P(expr) };
hir::InlineAsmOperand::SymFn {
anon_const: self.lower_anon_const(&anon_const),
@@ -347,7 +350,7 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> {
skip = true;
let idx2 = *o.get();
- let &(ref op2, op_sp2) = &operands[idx2];
+ let (ref op2, op_sp2) = operands[idx2];
let Some(asm::InlineAsmRegOrRegClass::Reg(reg2)) = op2.reg() else {
unreachable!();
};
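
`clobber_abis` switches from `FxHashMap` to `FxIndexMap` so iteration follows insertion (i.e. source) order, keeping the lowered operands and diagnostics deterministic. A standalone comparison, assuming the `indexmap` crate that `FxIndexMap` wraps is available as a dependency:

    use indexmap::IndexMap;
    use std::collections::HashMap;

    fn main() {
        let abis = ["C", "system", "efiapi"];

        let mut hashed = HashMap::new();
        let mut indexed = IndexMap::new();
        for (i, abi) in abis.iter().enumerate() {
            hashed.insert(*abi, i);
            indexed.insert(*abi, i);
        }

        // HashMap iteration order is unspecified and can differ between runs...
        println!("{:?}", hashed.keys().collect::<Vec<_>>());
        // ...while IndexMap yields keys in insertion order: ["C", "system", "efiapi"].
        assert_eq!(indexed.keys().copied().collect::<Vec<_>>(), abis);
    }
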
diff --git a/compiler/rustc_ast_lowering/src/block.rs b/compiler/rustc_ast_lowering/src/block.rs
index 12a0cc0d2..d310f72f7 100644
--- a/compiler/rustc_ast_lowering/src/block.rs
+++ b/compiler/rustc_ast_lowering/src/block.rs
@@ -31,8 +31,8 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> {
let mut stmts = SmallVec::<[hir::Stmt<'hir>; 8]>::new();
let mut expr = None;
while let [s, tail @ ..] = ast_stmts {
- match s.kind {
- StmtKind::Local(ref local) => {
+ match &s.kind {
+ StmtKind::Local(local) => {
let hir_id = self.lower_node_id(s.id);
let local = self.lower_local(local);
self.alias_attrs(hir_id, local.hir_id);
@@ -40,7 +40,7 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> {
let span = self.lower_span(s.span);
stmts.push(hir::Stmt { hir_id, kind, span });
}
- StmtKind::Item(ref it) => {
+ StmtKind::Item(it) => {
stmts.extend(self.lower_item_ref(it).into_iter().enumerate().map(
|(i, item_id)| {
let hir_id = match i {
@@ -53,7 +53,7 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> {
},
));
}
- StmtKind::Expr(ref e) => {
+ StmtKind::Expr(e) => {
let e = self.lower_expr(e);
if tail.is_empty() {
expr = Some(e);
@@ -65,7 +65,7 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> {
stmts.push(hir::Stmt { hir_id, kind, span });
}
}
- StmtKind::Semi(ref e) => {
+ StmtKind::Semi(e) => {
let e = self.lower_expr(e);
let hir_id = self.lower_node_id(s.id);
self.alias_attrs(hir_id, e.hir_id);
diff --git a/compiler/rustc_ast_lowering/src/errors.rs b/compiler/rustc_ast_lowering/src/errors.rs
index 157f46501..21c6a2d26 100644
--- a/compiler/rustc_ast_lowering/src/errors.rs
+++ b/compiler/rustc_ast_lowering/src/errors.rs
@@ -277,8 +277,9 @@ pub struct RegisterConflict<'a> {
pub struct SubTupleBinding<'a> {
#[primary_span]
#[label]
- #[suggestion_verbose(
+ #[suggestion(
ast_lowering_sub_tuple_binding_suggestion,
+ style = "verbose",
code = "..",
applicability = "maybe-incorrect"
)]
diff --git a/compiler/rustc_ast_lowering/src/expr.rs b/compiler/rustc_ast_lowering/src/expr.rs
index ec9c39350..7dd6d5acc 100644
--- a/compiler/rustc_ast_lowering/src/expr.rs
+++ b/compiler/rustc_ast_lowering/src/expr.rs
@@ -14,6 +14,7 @@ use rustc_data_structures::stack::ensure_sufficient_stack;
use rustc_hir as hir;
use rustc_hir::def::Res;
use rustc_hir::definitions::DefPathData;
+use rustc_session::errors::report_lit_error;
use rustc_span::source_map::{respan, DesugaringKind, Span, Spanned};
use rustc_span::symbol::{sym, Ident};
use rustc_span::DUMMY_SP;
@@ -30,20 +31,20 @@ impl<'hir> LoweringContext<'_, 'hir> {
pub(super) fn lower_expr_mut(&mut self, e: &Expr) -> hir::Expr<'hir> {
ensure_sufficient_stack(|| {
- let kind = match e.kind {
- ExprKind::Box(ref inner) => hir::ExprKind::Box(self.lower_expr(inner)),
- ExprKind::Array(ref exprs) => hir::ExprKind::Array(self.lower_exprs(exprs)),
- ExprKind::ConstBlock(ref anon_const) => {
+ let kind = match &e.kind {
+ ExprKind::Box(inner) => hir::ExprKind::Box(self.lower_expr(inner)),
+ ExprKind::Array(exprs) => hir::ExprKind::Array(self.lower_exprs(exprs)),
+ ExprKind::ConstBlock(anon_const) => {
let anon_const = self.lower_anon_const(anon_const);
hir::ExprKind::ConstBlock(anon_const)
}
- ExprKind::Repeat(ref expr, ref count) => {
+ ExprKind::Repeat(expr, count) => {
let expr = self.lower_expr(expr);
let count = self.lower_array_length(count);
hir::ExprKind::Repeat(expr, count)
}
- ExprKind::Tup(ref elts) => hir::ExprKind::Tup(self.lower_exprs(elts)),
- ExprKind::Call(ref f, ref args) => {
+ ExprKind::Tup(elts) => hir::ExprKind::Tup(self.lower_exprs(elts)),
+ ExprKind::Call(f, args) => {
if e.attrs.get(0).map_or(false, |a| a.has_name(sym::rustc_box)) {
if let [inner] = &args[..] && e.attrs.len() == 1 {
let kind = hir::ExprKind::Box(self.lower_expr(&inner));
@@ -60,7 +61,7 @@ impl<'hir> LoweringContext<'_, 'hir> {
hir::ExprKind::Call(f, self.lower_exprs(args))
}
}
- ExprKind::MethodCall(ref seg, ref receiver, ref args, span) => {
+ ExprKind::MethodCall(box MethodCall { seg, receiver, args, span }) => {
let hir_seg = self.arena.alloc(self.lower_path_segment(
e.span,
seg,
@@ -71,81 +72,89 @@ impl<'hir> LoweringContext<'_, 'hir> {
let receiver = self.lower_expr(receiver);
let args =
self.arena.alloc_from_iter(args.iter().map(|x| self.lower_expr_mut(x)));
- hir::ExprKind::MethodCall(hir_seg, receiver, args, self.lower_span(span))
+ hir::ExprKind::MethodCall(hir_seg, receiver, args, self.lower_span(*span))
}
- ExprKind::Binary(binop, ref lhs, ref rhs) => {
- let binop = self.lower_binop(binop);
+ ExprKind::Binary(binop, lhs, rhs) => {
+ let binop = self.lower_binop(*binop);
let lhs = self.lower_expr(lhs);
let rhs = self.lower_expr(rhs);
hir::ExprKind::Binary(binop, lhs, rhs)
}
- ExprKind::Unary(op, ref ohs) => {
- let op = self.lower_unop(op);
+ ExprKind::Unary(op, ohs) => {
+ let op = self.lower_unop(*op);
let ohs = self.lower_expr(ohs);
hir::ExprKind::Unary(op, ohs)
}
- ExprKind::Lit(ref l) => {
- hir::ExprKind::Lit(respan(self.lower_span(l.span), l.kind.clone()))
+ ExprKind::Lit(token_lit) => {
+ let lit_kind = match LitKind::from_token_lit(*token_lit) {
+ Ok(lit_kind) => lit_kind,
+ Err(err) => {
+ report_lit_error(&self.tcx.sess.parse_sess, err, *token_lit, e.span);
+ LitKind::Err
+ }
+ };
+ hir::ExprKind::Lit(respan(self.lower_span(e.span), lit_kind))
}
- ExprKind::Cast(ref expr, ref ty) => {
+ ExprKind::IncludedBytes(bytes) => hir::ExprKind::Lit(respan(
+ self.lower_span(e.span),
+ LitKind::ByteStr(bytes.clone()),
+ )),
+ ExprKind::Cast(expr, ty) => {
let expr = self.lower_expr(expr);
let ty =
self.lower_ty(ty, &ImplTraitContext::Disallowed(ImplTraitPosition::Type));
hir::ExprKind::Cast(expr, ty)
}
- ExprKind::Type(ref expr, ref ty) => {
+ ExprKind::Type(expr, ty) => {
let expr = self.lower_expr(expr);
let ty =
self.lower_ty(ty, &ImplTraitContext::Disallowed(ImplTraitPosition::Type));
hir::ExprKind::Type(expr, ty)
}
- ExprKind::AddrOf(k, m, ref ohs) => {
+ ExprKind::AddrOf(k, m, ohs) => {
let ohs = self.lower_expr(ohs);
- hir::ExprKind::AddrOf(k, m, ohs)
+ hir::ExprKind::AddrOf(*k, *m, ohs)
}
- ExprKind::Let(ref pat, ref scrutinee, span) => {
+ ExprKind::Let(pat, scrutinee, span) => {
hir::ExprKind::Let(self.arena.alloc(hir::Let {
hir_id: self.next_id(),
- span: self.lower_span(span),
+ span: self.lower_span(*span),
pat: self.lower_pat(pat),
ty: None,
init: self.lower_expr(scrutinee),
}))
}
- ExprKind::If(ref cond, ref then, ref else_opt) => {
+ ExprKind::If(cond, then, else_opt) => {
self.lower_expr_if(cond, then, else_opt.as_deref())
}
- ExprKind::While(ref cond, ref body, opt_label) => {
- self.with_loop_scope(e.id, |this| {
- let span =
- this.mark_span_with_reason(DesugaringKind::WhileLoop, e.span, None);
- this.lower_expr_while_in_loop_scope(span, cond, body, opt_label)
- })
- }
- ExprKind::Loop(ref body, opt_label) => self.with_loop_scope(e.id, |this| {
+ ExprKind::While(cond, body, opt_label) => self.with_loop_scope(e.id, |this| {
+ let span = this.mark_span_with_reason(DesugaringKind::WhileLoop, e.span, None);
+ this.lower_expr_while_in_loop_scope(span, cond, body, *opt_label)
+ }),
+ ExprKind::Loop(body, opt_label, span) => self.with_loop_scope(e.id, |this| {
hir::ExprKind::Loop(
this.lower_block(body, false),
- this.lower_label(opt_label),
+ this.lower_label(*opt_label),
hir::LoopSource::Loop,
- DUMMY_SP,
+ this.lower_span(*span),
)
}),
- ExprKind::TryBlock(ref body) => self.lower_expr_try_block(body),
- ExprKind::Match(ref expr, ref arms) => hir::ExprKind::Match(
+ ExprKind::TryBlock(body) => self.lower_expr_try_block(body),
+ ExprKind::Match(expr, arms) => hir::ExprKind::Match(
self.lower_expr(expr),
self.arena.alloc_from_iter(arms.iter().map(|x| self.lower_arm(x))),
hir::MatchSource::Normal,
),
- ExprKind::Async(capture_clause, closure_node_id, ref block) => self
- .make_async_expr(
- capture_clause,
- closure_node_id,
- None,
- block.span,
- hir::AsyncGeneratorKind::Block,
- |this| this.with_new_scopes(|this| this.lower_block_expr(block)),
- ),
- ExprKind::Await(ref expr) => {
+ ExprKind::Async(capture_clause, closure_node_id, block) => self.make_async_expr(
+ *capture_clause,
+ None,
+ *closure_node_id,
+ None,
+ e.span,
+ hir::AsyncGeneratorKind::Block,
+ |this| this.with_new_scopes(|this| this.lower_block_expr(block)),
+ ),
+ ExprKind::Await(expr) => {
let dot_await_span = if expr.span.hi() < e.span.hi() {
let span_with_whitespace = self
.tcx
@@ -160,66 +169,67 @@ impl<'hir> LoweringContext<'_, 'hir> {
};
self.lower_expr_await(dot_await_span, expr)
}
- ExprKind::Closure(
- ref binder,
+ ExprKind::Closure(box Closure {
+ binder,
capture_clause,
asyncness,
movability,
- ref decl,
- ref body,
+ fn_decl,
+ body,
fn_decl_span,
- ) => {
+ fn_arg_span,
+ }) => {
if let Async::Yes { closure_id, .. } = asyncness {
self.lower_expr_async_closure(
binder,
- capture_clause,
+ *capture_clause,
e.id,
- closure_id,
- decl,
+ *closure_id,
+ fn_decl,
body,
- fn_decl_span,
+ *fn_decl_span,
+ *fn_arg_span,
)
} else {
self.lower_expr_closure(
binder,
- capture_clause,
+ *capture_clause,
e.id,
- movability,
- decl,
+ *movability,
+ fn_decl,
body,
- fn_decl_span,
+ *fn_decl_span,
+ *fn_arg_span,
)
}
}
- ExprKind::Block(ref blk, opt_label) => {
- let opt_label = self.lower_label(opt_label);
+ ExprKind::Block(blk, opt_label) => {
+ let opt_label = self.lower_label(*opt_label);
hir::ExprKind::Block(self.lower_block(blk, opt_label.is_some()), opt_label)
}
- ExprKind::Assign(ref el, ref er, span) => {
- self.lower_expr_assign(el, er, span, e.span)
- }
- ExprKind::AssignOp(op, ref el, ref er) => hir::ExprKind::AssignOp(
- self.lower_binop(op),
+ ExprKind::Assign(el, er, span) => self.lower_expr_assign(el, er, *span, e.span),
+ ExprKind::AssignOp(op, el, er) => hir::ExprKind::AssignOp(
+ self.lower_binop(*op),
self.lower_expr(el),
self.lower_expr(er),
),
- ExprKind::Field(ref el, ident) => {
- hir::ExprKind::Field(self.lower_expr(el), self.lower_ident(ident))
+ ExprKind::Field(el, ident) => {
+ hir::ExprKind::Field(self.lower_expr(el), self.lower_ident(*ident))
}
- ExprKind::Index(ref el, ref er) => {
+ ExprKind::Index(el, er) => {
hir::ExprKind::Index(self.lower_expr(el), self.lower_expr(er))
}
- ExprKind::Range(Some(ref e1), Some(ref e2), RangeLimits::Closed) => {
+ ExprKind::Range(Some(e1), Some(e2), RangeLimits::Closed) => {
self.lower_expr_range_closed(e.span, e1, e2)
}
- ExprKind::Range(ref e1, ref e2, lims) => {
- self.lower_expr_range(e.span, e1.as_deref(), e2.as_deref(), lims)
+ ExprKind::Range(e1, e2, lims) => {
+ self.lower_expr_range(e.span, e1.as_deref(), e2.as_deref(), *lims)
}
ExprKind::Underscore => {
self.tcx.sess.emit_err(UnderscoreExprLhsAssign { span: e.span });
hir::ExprKind::Err
}
- ExprKind::Path(ref qself, ref path) => {
+ ExprKind::Path(qself, path) => {
let qpath = self.lower_qpath(
e.id,
qself,
@@ -229,22 +239,22 @@ impl<'hir> LoweringContext<'_, 'hir> {
);
hir::ExprKind::Path(qpath)
}
- ExprKind::Break(opt_label, ref opt_expr) => {
+ ExprKind::Break(opt_label, opt_expr) => {
let opt_expr = opt_expr.as_ref().map(|x| self.lower_expr(x));
- hir::ExprKind::Break(self.lower_jump_destination(e.id, opt_label), opt_expr)
+ hir::ExprKind::Break(self.lower_jump_destination(e.id, *opt_label), opt_expr)
}
ExprKind::Continue(opt_label) => {
- hir::ExprKind::Continue(self.lower_jump_destination(e.id, opt_label))
+ hir::ExprKind::Continue(self.lower_jump_destination(e.id, *opt_label))
}
- ExprKind::Ret(ref e) => {
+ ExprKind::Ret(e) => {
let e = e.as_ref().map(|x| self.lower_expr(x));
hir::ExprKind::Ret(e)
}
- ExprKind::Yeet(ref sub_expr) => self.lower_expr_yeet(e.span, sub_expr.as_deref()),
- ExprKind::InlineAsm(ref asm) => {
+ ExprKind::Yeet(sub_expr) => self.lower_expr_yeet(e.span, sub_expr.as_deref()),
+ ExprKind::InlineAsm(asm) => {
hir::ExprKind::InlineAsm(self.lower_inline_asm(e.span, asm))
}
- ExprKind::Struct(ref se) => {
+ ExprKind::Struct(se) => {
let rest = match &se.rest {
StructRest::Base(e) => Some(self.lower_expr(e)),
StructRest::Rest(sp) => {
@@ -266,10 +276,10 @@ impl<'hir> LoweringContext<'_, 'hir> {
rest,
)
}
- ExprKind::Yield(ref opt_expr) => self.lower_expr_yield(e.span, opt_expr.as_deref()),
+ ExprKind::Yield(opt_expr) => self.lower_expr_yield(e.span, opt_expr.as_deref()),
ExprKind::Err => hir::ExprKind::Err,
- ExprKind::Try(ref sub_expr) => self.lower_expr_try(e.span, sub_expr),
- ExprKind::Paren(ref ex) => {
+ ExprKind::Try(sub_expr) => self.lower_expr_try(e.span, sub_expr),
+ ExprKind::Paren(ex) => {
let mut ex = self.lower_expr_mut(ex);
// Include parens in span, but only if it is a super-span.
if e.span.contains(ex.span) {
@@ -294,8 +304,8 @@ impl<'hir> LoweringContext<'_, 'hir> {
// Desugar `ExprForLoop`
// from: `[opt_ident]: for <pat> in <head> <body>`
- ExprKind::ForLoop(ref pat, ref head, ref body, opt_label) => {
- return self.lower_expr_for(e, pat, head, body, opt_label);
+ ExprKind::ForLoop(pat, head, body, opt_label) => {
+ return self.lower_expr_for(e, pat, head, body, *opt_label);
}
ExprKind::MacCall(_) => panic!("{:?} shouldn't exist here", e.span),
};
@@ -346,7 +356,7 @@ impl<'hir> LoweringContext<'_, 'hir> {
args: Vec<AstP<Expr>>,
legacy_args_idx: &[usize],
) -> hir::ExprKind<'hir> {
- let ExprKind::Path(None, ref mut path) = f.kind else {
+ let ExprKind::Path(None, path) = &mut f.kind else {
unreachable!();
};
@@ -359,7 +369,7 @@ impl<'hir> LoweringContext<'_, 'hir> {
let node_id = self.next_node_id();
// Add a definition for the in-band const def.
- self.create_def(parent_def_id.def_id, node_id, DefPathData::AnonConst);
+ self.create_def(parent_def_id.def_id, node_id, DefPathData::AnonConst, f.span);
let anon_const = AnonConst { id: node_id, value: arg };
generic_args.push(AngleBracketedArg::Arg(GenericArg::Const(anon_const)));
@@ -426,18 +436,14 @@ impl<'hir> LoweringContext<'_, 'hir> {
let lhs = self.lower_cond(lhs);
let rhs = self.lower_cond(rhs);
- self.arena.alloc(self.expr(
- cond.span,
- hir::ExprKind::Binary(op, lhs, rhs),
- AttrVec::new(),
- ))
+ self.arena.alloc(self.expr(cond.span, hir::ExprKind::Binary(op, lhs, rhs)))
}
ExprKind::Let(..) => self.lower_expr(cond),
_ => {
let cond = self.lower_expr(cond);
let reason = DesugaringKind::CondTemporary;
let span_block = self.mark_span_with_reason(reason, cond.span, None);
- self.expr_drop_temps(span_block, cond, AttrVec::new())
+ self.expr_drop_temps(span_block, cond)
}
}
}
@@ -467,12 +473,12 @@ impl<'hir> LoweringContext<'_, 'hir> {
) -> hir::ExprKind<'hir> {
let lowered_cond = self.with_loop_condition_scope(|t| t.lower_cond(cond));
let then = self.lower_block_expr(body);
- let expr_break = self.expr_break(span, AttrVec::new());
+ let expr_break = self.expr_break(span);
let stmt_break = self.stmt_expr(span, expr_break);
let else_blk = self.block_all(span, arena_vec![self; stmt_break], None);
- let else_expr = self.arena.alloc(self.expr_block(else_blk, AttrVec::new()));
+ let else_expr = self.arena.alloc(self.expr_block(else_blk));
let if_kind = hir::ExprKind::If(lowered_cond, self.arena.alloc(then), Some(else_expr));
- let if_expr = self.expr(span, if_kind, AttrVec::new());
+ let if_expr = self.expr(span, if_kind);
let block = self.block_expr(self.arena.alloc(if_expr));
let span = self.lower_span(span.with_hi(cond.span.hi()));
let opt_label = self.lower_label(opt_label);
@@ -528,22 +534,17 @@ impl<'hir> LoweringContext<'_, 'hir> {
expr: &'hir hir::Expr<'hir>,
overall_span: Span,
) -> &'hir hir::Expr<'hir> {
- let constructor = self.arena.alloc(self.expr_lang_item_path(
- method_span,
- lang_item,
- AttrVec::new(),
- None,
- ));
+ let constructor = self.arena.alloc(self.expr_lang_item_path(method_span, lang_item, None));
self.expr_call(overall_span, constructor, std::slice::from_ref(expr))
}
fn lower_arm(&mut self, arm: &Arm) -> hir::Arm<'hir> {
let pat = self.lower_pat(&arm.pat);
let guard = arm.guard.as_ref().map(|cond| {
- if let ExprKind::Let(ref pat, ref scrutinee, span) = cond.kind {
+ if let ExprKind::Let(pat, scrutinee, span) = &cond.kind {
hir::Guard::IfLet(self.arena.alloc(hir::Let {
hir_id: self.next_id(),
- span: self.lower_span(span),
+ span: self.lower_span(*span),
pat: self.lower_pat(pat),
ty: None,
init: self.lower_expr(scrutinee),
@@ -563,37 +564,35 @@ impl<'hir> LoweringContext<'_, 'hir> {
}
}
- /// Lower an `async` construct to a generator that is then wrapped so it implements `Future`.
+ /// Lower an `async` construct to a generator that implements `Future`.
///
/// This results in:
///
/// ```text
- /// std::future::from_generator(static move? |_task_context| -> <ret_ty> {
+ /// std::future::identity_future(static move? |_task_context| -> <ret_ty> {
/// <body>
/// })
/// ```
pub(super) fn make_async_expr(
&mut self,
capture_clause: CaptureBy,
+ outer_hir_id: Option<hir::HirId>,
closure_node_id: NodeId,
- ret_ty: Option<AstP<Ty>>,
+ ret_ty: Option<hir::FnRetTy<'hir>>,
span: Span,
async_gen_kind: hir::AsyncGeneratorKind,
body: impl FnOnce(&mut Self) -> hir::Expr<'hir>,
) -> hir::ExprKind<'hir> {
- let output = match ret_ty {
- Some(ty) => hir::FnRetTy::Return(
- self.lower_ty(&ty, &ImplTraitContext::Disallowed(ImplTraitPosition::AsyncBlock)),
- ),
- None => hir::FnRetTy::DefaultReturn(self.lower_span(span)),
- };
+ let output = ret_ty.unwrap_or_else(|| hir::FnRetTy::DefaultReturn(self.lower_span(span)));
- // Resume argument type. We let the compiler infer this to simplify the lowering. It is
- // fully constrained by `future::from_generator`.
+ // Resume argument type: `ResumeTy`
+ let unstable_span =
+ self.mark_span_with_reason(DesugaringKind::Async, span, self.allow_gen_future.clone());
+ let resume_ty = hir::QPath::LangItem(hir::LangItem::ResumeTy, unstable_span, None);
let input_ty = hir::Ty {
hir_id: self.next_id(),
- kind: hir::TyKind::Infer,
- span: self.lower_span(span),
+ kind: hir::TyKind::Path(resume_ty),
+ span: unstable_span,
};
// The closure/generator `FnDecl` takes a single (resume) argument of type `input_ty`.
@@ -602,6 +601,7 @@ impl<'hir> LoweringContext<'_, 'hir> {
output,
c_variadic: false,
implicit_self: hir::ImplicitSelfKind::None,
+ lifetime_elision_allowed: false,
});
// Lower the argument pattern/ident. The ident is used again in the `.await` lowering.
@@ -631,35 +631,63 @@ impl<'hir> LoweringContext<'_, 'hir> {
// `static |_task_context| -> <ret_ty> { body }`:
let generator_kind = {
let c = self.arena.alloc(hir::Closure {
+ def_id: self.local_def_id(closure_node_id),
binder: hir::ClosureBinder::Default,
capture_clause,
bound_generic_params: &[],
fn_decl,
body,
fn_decl_span: self.lower_span(span),
+ fn_arg_span: None,
movability: Some(hir::Movability::Static),
});
hir::ExprKind::Closure(c)
};
- let generator = hir::Expr {
- hir_id: self.lower_node_id(closure_node_id),
- kind: generator_kind,
- span: self.lower_span(span),
- };
- // `future::from_generator`:
+ let hir_id = self.lower_node_id(closure_node_id);
let unstable_span =
self.mark_span_with_reason(DesugaringKind::Async, span, self.allow_gen_future.clone());
- let gen_future = self.expr_lang_item_path(
- unstable_span,
- hir::LangItem::FromGenerator,
- AttrVec::new(),
- None,
- );
- // `future::from_generator(generator)`:
- hir::ExprKind::Call(self.arena.alloc(gen_future), arena_vec![self; generator])
+ if self.tcx.features().closure_track_caller
+ && let Some(outer_hir_id) = outer_hir_id
+ && let Some(attrs) = self.attrs.get(&outer_hir_id.local_id)
+ && attrs.into_iter().any(|attr| attr.has_name(sym::track_caller))
+ {
+ self.lower_attrs(
+ hir_id,
+ &[Attribute {
+ kind: AttrKind::Normal(ptr::P(NormalAttr {
+ item: AttrItem {
+ path: Path::from_ident(Ident::new(sym::track_caller, span)),
+ args: AttrArgs::Empty,
+ tokens: None,
+ },
+ tokens: None,
+ })),
+ id: self.tcx.sess.parse_sess.attr_id_generator.mk_attr_id(),
+ style: AttrStyle::Outer,
+ span: unstable_span,
+ }],
+ );
+ }
+
+ let generator = hir::Expr { hir_id, kind: generator_kind, span: self.lower_span(span) };
+
+ // FIXME(swatinem):
+ // For some reason, the async block needs to flow through *any*
+ // call (like the identity function), as otherwise type and lifetime
+ // inference have a hard time figuring things out.
+ // Without this, we would get:
+ // E0720 in src/test/ui/impl-trait/in-trait/default-body-with-rpit.rs
+ // E0700 in src/test/ui/self/self_lifetime-async.rs
+
+ // `future::identity_future`:
+ let identity_future =
+ self.expr_lang_item_path(unstable_span, hir::LangItem::IdentityFuture, None);
+
+ // `future::identity_future(generator)`:
+ hir::ExprKind::Call(self.arena.alloc(identity_future), arena_vec![self; generator])
}
/// Desugar `<expr>.await` into:
@@ -759,7 +787,7 @@ impl<'hir> LoweringContext<'_, 'hir> {
let break_x = self.with_loop_scope(loop_node_id, move |this| {
let expr_break =
hir::ExprKind::Break(this.lower_loop_destination(None), Some(x_expr));
- this.arena.alloc(this.expr(gen_future_span, expr_break, AttrVec::new()))
+ this.arena.alloc(this.expr(gen_future_span, expr_break))
});
self.arm(ready_pat, break_x)
};
@@ -792,17 +820,13 @@ impl<'hir> LoweringContext<'_, 'hir> {
let yield_expr = self.expr(
span,
hir::ExprKind::Yield(unit, hir::YieldSource::Await { expr: Some(expr_hir_id) }),
- AttrVec::new(),
);
let yield_expr = self.arena.alloc(yield_expr);
if let Some(task_context_hid) = self.task_context {
let lhs = self.expr_ident(span, task_context_ident, task_context_hid);
- let assign = self.expr(
- span,
- hir::ExprKind::Assign(lhs, yield_expr, self.lower_span(span)),
- AttrVec::new(),
- );
+ let assign =
+ self.expr(span, hir::ExprKind::Assign(lhs, yield_expr, self.lower_span(span)));
self.stmt_expr(span, assign)
} else {
// Use of `await` outside of an async context. Return `yield_expr` so that we can
@@ -860,6 +884,7 @@ impl<'hir> LoweringContext<'_, 'hir> {
decl: &FnDecl,
body: &Expr,
fn_decl_span: Span,
+ fn_arg_span: Span,
) -> hir::ExprKind<'hir> {
let (binder_clause, generic_params) = self.lower_closure_binder(binder);
@@ -880,15 +905,17 @@ impl<'hir> LoweringContext<'_, 'hir> {
let bound_generic_params = self.lower_lifetime_binder(closure_id, generic_params);
// Lower outside new scope to preserve `is_in_loop_condition`.
- let fn_decl = self.lower_fn_decl(decl, None, fn_decl_span, FnDeclKind::Closure, None);
+ let fn_decl = self.lower_fn_decl(decl, closure_id, fn_decl_span, FnDeclKind::Closure, None);
let c = self.arena.alloc(hir::Closure {
+ def_id: self.local_def_id(closure_id),
binder: binder_clause,
capture_clause,
bound_generic_params,
fn_decl,
body: body_id,
fn_decl_span: self.lower_span(fn_decl_span),
+ fn_arg_span: Some(self.lower_span(fn_arg_span)),
movability: generator_option,
});
@@ -927,8 +954,8 @@ impl<'hir> LoweringContext<'_, 'hir> {
) -> (hir::ClosureBinder, &'c [GenericParam]) {
let (binder, params) = match binder {
ClosureBinder::NotPresent => (hir::ClosureBinder::Default, &[][..]),
- &ClosureBinder::For { span, ref generic_params } => {
- let span = self.lower_span(span);
+ ClosureBinder::For { span, generic_params } => {
+ let span = self.lower_span(*span);
(hir::ClosureBinder::For { span }, &**generic_params)
}
};
@@ -945,6 +972,7 @@ impl<'hir> LoweringContext<'_, 'hir> {
decl: &FnDecl,
body: &Expr,
fn_decl_span: Span,
+ fn_arg_span: Span,
) -> hir::ExprKind<'hir> {
if let &ClosureBinder::For { span, .. } = binder {
self.tcx.sess.emit_err(NotSupportedForLifetimeBinderAsyncClosure { span });
@@ -962,19 +990,27 @@ impl<'hir> LoweringContext<'_, 'hir> {
}
// Transform `async |x: u8| -> X { ... }` into
- // `|x: u8| future_from_generator(|| -> X { ... })`.
+ // `|x: u8| identity_future(|| -> X { ... })`.
let body_id = this.lower_fn_body(&outer_decl, |this| {
- let async_ret_ty =
- if let FnRetTy::Ty(ty) = &decl.output { Some(ty.clone()) } else { None };
+ let async_ret_ty = if let FnRetTy::Ty(ty) = &decl.output {
+ let itctx = ImplTraitContext::Disallowed(ImplTraitPosition::AsyncBlock);
+ Some(hir::FnRetTy::Return(this.lower_ty(&ty, &itctx)))
+ } else {
+ None
+ };
+
let async_body = this.make_async_expr(
capture_clause,
+ // FIXME(nbdd0121): This should also use a proper HIR id so `#[track_caller]`
+ // can be applied on async closures as well.
+ None,
inner_closure_id,
async_ret_ty,
body.span,
hir::AsyncGeneratorKind::Closure,
|this| this.with_new_scopes(|this| this.lower_expr_mut(body)),
);
- this.expr(fn_decl_span, async_body, AttrVec::new())
+ this.expr(fn_decl_span, async_body)
});
body_id
});
@@ -984,15 +1020,17 @@ impl<'hir> LoweringContext<'_, 'hir> {
// have to conserve the state of being inside a loop condition for the
// closure argument types.
let fn_decl =
- self.lower_fn_decl(&outer_decl, None, fn_decl_span, FnDeclKind::Closure, None);
+ self.lower_fn_decl(&outer_decl, closure_id, fn_decl_span, FnDeclKind::Closure, None);
let c = self.arena.alloc(hir::Closure {
+ def_id: self.local_def_id(closure_id),
binder: binder_clause,
capture_clause,
bound_generic_params,
fn_decl,
body,
fn_decl_span: self.lower_span(fn_decl_span),
+ fn_arg_span: Some(self.lower_span(fn_arg_span)),
movability: None,
});
hir::ExprKind::Closure(c)
@@ -1065,7 +1103,7 @@ impl<'hir> LoweringContext<'_, 'hir> {
fn extract_tuple_struct_path<'a>(
&mut self,
expr: &'a Expr,
- ) -> Option<(&'a Option<QSelf>, &'a Path)> {
+ ) -> Option<(&'a Option<AstP<QSelf>>, &'a Path)> {
if let ExprKind::Path(qself, path) = &expr.kind {
// Does the path resolve to something disallowed in a tuple struct/variant pattern?
if let Some(partial_res) = self.resolver.get_partial_res(expr.id) {
@@ -1085,7 +1123,7 @@ impl<'hir> LoweringContext<'_, 'hir> {
fn extract_unit_struct_path<'a>(
&mut self,
expr: &'a Expr,
- ) -> Option<(&'a Option<QSelf>, &'a Path)> {
+ ) -> Option<(&'a Option<AstP<QSelf>>, &'a Path)> {
if let ExprKind::Path(qself, path) = &expr.kind {
// Does the path resolve to something disallowed in a unit struct/variant pattern?
if let Some(partial_res) = self.resolver.get_partial_res(expr.id) {
@@ -1232,7 +1270,7 @@ impl<'hir> LoweringContext<'_, 'hir> {
let ident = self.expr_ident(lhs.span, ident, binding);
let assign =
hir::ExprKind::Assign(self.lower_expr(lhs), ident, self.lower_span(eq_sign_span));
- let expr = self.expr(lhs.span, assign, AttrVec::new());
+ let expr = self.expr(lhs.span, assign);
assignments.push(self.stmt_expr(lhs.span, expr));
pat
}
@@ -1273,8 +1311,7 @@ impl<'hir> LoweringContext<'_, 'hir> {
let e2 = self.lower_expr_mut(e2);
let fn_path =
hir::QPath::LangItem(hir::LangItem::RangeInclusiveNew, self.lower_span(span), None);
- let fn_expr =
- self.arena.alloc(self.expr(span, hir::ExprKind::Path(fn_path), AttrVec::new()));
+ let fn_expr = self.arena.alloc(self.expr(span, hir::ExprKind::Path(fn_path)));
hir::ExprKind::Call(fn_expr, arena_vec![self; e1, e2])
}
@@ -1446,8 +1483,7 @@ impl<'hir> LoweringContext<'_, 'hir> {
// `None => break`
let none_arm = {
- let break_expr =
- self.with_loop_scope(e.id, |this| this.expr_break_alloc(for_span, AttrVec::new()));
+ let break_expr = self.with_loop_scope(e.id, |this| this.expr_break_alloc(for_span));
let pat = self.pat_none(for_span);
self.arm(pat, break_expr)
};
@@ -1456,7 +1492,7 @@ impl<'hir> LoweringContext<'_, 'hir> {
let some_arm = {
let some_pat = self.pat_some(pat_span, pat);
let body_block = self.with_loop_scope(e.id, |this| this.lower_block(body, false));
- let body_expr = self.arena.alloc(self.expr_block(body_block, AttrVec::new()));
+ let body_expr = self.arena.alloc(self.expr_block(body_block));
self.arm(some_pat, body_expr)
};
@@ -1519,7 +1555,9 @@ impl<'hir> LoweringContext<'_, 'hir> {
// surrounding scope of the `match` since the `match` is not a terminating scope.
//
// Also, add the attributes to the outer returned expr node.
- self.expr_drop_temps_mut(for_span, match_expr, e.attrs.clone())
+ let expr = self.expr_drop_temps_mut(for_span, match_expr);
+ self.lower_attrs(expr.hir_id, &e.attrs);
+ expr
}
/// Desugar `ExprKind::Try` from: `<expr>?` into:
@@ -1561,28 +1599,21 @@ impl<'hir> LoweringContext<'_, 'hir> {
};
// `#[allow(unreachable_code)]`
- let attr = {
- // `allow(unreachable_code)`
- let allow = {
- let allow_ident = Ident::new(sym::allow, self.lower_span(span));
- let uc_ident = Ident::new(sym::unreachable_code, self.lower_span(span));
- let uc_nested = attr::mk_nested_word_item(uc_ident);
- attr::mk_list_item(allow_ident, vec![uc_nested])
- };
- attr::mk_attr_outer(&self.tcx.sess.parse_sess.attr_id_generator, allow)
- };
+ let attr = attr::mk_attr_nested_word(
+ &self.tcx.sess.parse_sess.attr_id_generator,
+ AttrStyle::Outer,
+ sym::allow,
+ sym::unreachable_code,
+ self.lower_span(span),
+ );
let attrs: AttrVec = thin_vec![attr];
// `ControlFlow::Continue(val) => #[allow(unreachable_code)] val,`
let continue_arm = {
let val_ident = Ident::with_dummy_span(sym::val);
let (val_pat, val_pat_nid) = self.pat_ident(span, val_ident);
- let val_expr = self.arena.alloc(self.expr_ident_with_attrs(
- span,
- val_ident,
- val_pat_nid,
- attrs.clone(),
- ));
+ let val_expr = self.expr_ident(span, val_ident, val_pat_nid);
+ self.lower_attrs(val_expr.hir_id, &attrs);
let continue_pat = self.pat_cf_continue(unstable_span, val_pat);
self.arm(continue_pat, val_expr)
};
@@ -1608,15 +1639,11 @@ impl<'hir> LoweringContext<'_, 'hir> {
hir::Destination { label: None, target_id },
Some(from_residual_expr),
),
- attrs,
))
} else {
- self.arena.alloc(self.expr(
- try_span,
- hir::ExprKind::Ret(Some(from_residual_expr)),
- attrs,
- ))
+ self.arena.alloc(self.expr(try_span, hir::ExprKind::Ret(Some(from_residual_expr))))
};
+ self.lower_attrs(ret_expr.hir_id, &attrs);
let break_pat = self.pat_cf_break(try_span, residual_local);
self.arm(break_pat, ret_expr)
@@ -1681,18 +1708,16 @@ impl<'hir> LoweringContext<'_, 'hir> {
&mut self,
span: Span,
expr: &'hir hir::Expr<'hir>,
- attrs: AttrVec,
) -> &'hir hir::Expr<'hir> {
- self.arena.alloc(self.expr_drop_temps_mut(span, expr, attrs))
+ self.arena.alloc(self.expr_drop_temps_mut(span, expr))
}
pub(super) fn expr_drop_temps_mut(
&mut self,
span: Span,
expr: &'hir hir::Expr<'hir>,
- attrs: AttrVec,
) -> hir::Expr<'hir> {
- self.expr(span, hir::ExprKind::DropTemps(expr), attrs)
+ self.expr(span, hir::ExprKind::DropTemps(expr))
}
fn expr_match(
@@ -1702,29 +1727,25 @@ impl<'hir> LoweringContext<'_, 'hir> {
arms: &'hir [hir::Arm<'hir>],
source: hir::MatchSource,
) -> hir::Expr<'hir> {
- self.expr(span, hir::ExprKind::Match(arg, arms, source), AttrVec::new())
+ self.expr(span, hir::ExprKind::Match(arg, arms, source))
}
- fn expr_break(&mut self, span: Span, attrs: AttrVec) -> hir::Expr<'hir> {
+ fn expr_break(&mut self, span: Span) -> hir::Expr<'hir> {
let expr_break = hir::ExprKind::Break(self.lower_loop_destination(None), None);
- self.expr(span, expr_break, attrs)
+ self.expr(span, expr_break)
}
- fn expr_break_alloc(&mut self, span: Span, attrs: AttrVec) -> &'hir hir::Expr<'hir> {
- let expr_break = self.expr_break(span, attrs);
+ fn expr_break_alloc(&mut self, span: Span) -> &'hir hir::Expr<'hir> {
+ let expr_break = self.expr_break(span);
self.arena.alloc(expr_break)
}
fn expr_mut_addr_of(&mut self, span: Span, e: &'hir hir::Expr<'hir>) -> hir::Expr<'hir> {
- self.expr(
- span,
- hir::ExprKind::AddrOf(hir::BorrowKind::Ref, hir::Mutability::Mut, e),
- AttrVec::new(),
- )
+ self.expr(span, hir::ExprKind::AddrOf(hir::BorrowKind::Ref, hir::Mutability::Mut, e))
}
fn expr_unit(&mut self, sp: Span) -> &'hir hir::Expr<'hir> {
- self.arena.alloc(self.expr(sp, hir::ExprKind::Tup(&[]), AttrVec::new()))
+ self.arena.alloc(self.expr(sp, hir::ExprKind::Tup(&[])))
}
fn expr_call_mut(
@@ -1733,7 +1754,7 @@ impl<'hir> LoweringContext<'_, 'hir> {
e: &'hir hir::Expr<'hir>,
args: &'hir [hir::Expr<'hir>],
) -> hir::Expr<'hir> {
- self.expr(span, hir::ExprKind::Call(e, args), AttrVec::new())
+ self.expr(span, hir::ExprKind::Call(e, args))
}
fn expr_call(
@@ -1752,8 +1773,7 @@ impl<'hir> LoweringContext<'_, 'hir> {
args: &'hir [hir::Expr<'hir>],
hir_id: Option<hir::HirId>,
) -> hir::Expr<'hir> {
- let path =
- self.arena.alloc(self.expr_lang_item_path(span, lang_item, AttrVec::new(), hir_id));
+ let path = self.arena.alloc(self.expr_lang_item_path(span, lang_item, hir_id));
self.expr_call_mut(span, path, args)
}
@@ -1771,13 +1791,11 @@ impl<'hir> LoweringContext<'_, 'hir> {
&mut self,
span: Span,
lang_item: hir::LangItem,
- attrs: AttrVec,
hir_id: Option<hir::HirId>,
) -> hir::Expr<'hir> {
self.expr(
span,
hir::ExprKind::Path(hir::QPath::LangItem(lang_item, self.lower_span(span), hir_id)),
- attrs,
)
}
@@ -1792,19 +1810,9 @@ impl<'hir> LoweringContext<'_, 'hir> {
pub(super) fn expr_ident_mut(
&mut self,
- sp: Span,
- ident: Ident,
- binding: hir::HirId,
- ) -> hir::Expr<'hir> {
- self.expr_ident_with_attrs(sp, ident, binding, AttrVec::new())
- }
-
- fn expr_ident_with_attrs(
- &mut self,
span: Span,
ident: Ident,
binding: hir::HirId,
- attrs: AttrVec,
) -> hir::Expr<'hir> {
let hir_id = self.next_id();
let res = Res::Local(binding);
@@ -1817,7 +1825,7 @@ impl<'hir> LoweringContext<'_, 'hir> {
}),
));
- self.expr(span, expr_path, attrs)
+ self.expr(span, expr_path)
}
fn expr_unsafe(&mut self, expr: &'hir hir::Expr<'hir>) -> hir::Expr<'hir> {
@@ -1836,32 +1844,21 @@ impl<'hir> LoweringContext<'_, 'hir> {
}),
None,
),
- AttrVec::new(),
)
}
fn expr_block_empty(&mut self, span: Span) -> &'hir hir::Expr<'hir> {
let blk = self.block_all(span, &[], None);
- let expr = self.expr_block(blk, AttrVec::new());
+ let expr = self.expr_block(blk);
self.arena.alloc(expr)
}
- pub(super) fn expr_block(
- &mut self,
- b: &'hir hir::Block<'hir>,
- attrs: AttrVec,
- ) -> hir::Expr<'hir> {
- self.expr(b.span, hir::ExprKind::Block(b, None), attrs)
+ pub(super) fn expr_block(&mut self, b: &'hir hir::Block<'hir>) -> hir::Expr<'hir> {
+ self.expr(b.span, hir::ExprKind::Block(b, None))
}
- pub(super) fn expr(
- &mut self,
- span: Span,
- kind: hir::ExprKind<'hir>,
- attrs: AttrVec,
- ) -> hir::Expr<'hir> {
+ pub(super) fn expr(&mut self, span: Span, kind: hir::ExprKind<'hir>) -> hir::Expr<'hir> {
let hir_id = self.next_id();
- self.lower_attrs(hir_id, &attrs);
hir::Expr { hir_id, kind, span: self.lower_span(span) }
}
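
The expr.rs changes above switch the async-block lowering from `future::from_generator` to a call through `future::identity_future`, give the generated generator an explicit `ResumeTy` resume argument, and (behind the `closure_track_caller` feature) copy `#[track_caller]` from the outer item onto the generated closure. The FIXME notes that routing the generator through *some* call helps type and lifetime inference. A stable-Rust sketch of that shape, using a local `identity_future` stand-in rather than the internal lang item, and intended only to mirror the structure of the desugaring:

```rust
// Stable-Rust model of "flow the async block through an identity call";
// `identity_future` below is a local stand-in for the unstable lang item
// the lowering actually uses, and the whole function is illustrative only.
use std::future::Future;

fn identity_future<F: Future>(fut: F) -> F {
    fut
}

fn make() -> impl Future<Output = u8> {
    // Mirrors the shape of the desugaring: the generated future is passed
    // through a call rather than being returned directly.
    identity_future(async { 42u8 })
}

fn main() {
    // Futures are inert until polled; constructing one is enough here.
    let _fut = make();
}
```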
diff --git a/compiler/rustc_ast_lowering/src/index.rs b/compiler/rustc_ast_lowering/src/index.rs
index f1851d7b4..fe0bd4381 100644
--- a/compiler/rustc_ast_lowering/src/index.rs
+++ b/compiler/rustc_ast_lowering/src/index.rs
@@ -77,7 +77,7 @@ impl<'a, 'hir> NodeCollector<'a, 'hir> {
if hir_id.owner != self.owner {
span_bug!(
span,
- "inconsistent DepNode at `{:?}` for `{:?}`: \
+ "inconsistent HirId at `{:?}` for `{:?}`: \
current_dep_node_owner={} ({:?}), hir_id.owner={} ({:?})",
self.source_map.span_to_diagnostic_string(span),
node,
@@ -145,7 +145,7 @@ impl<'a, 'hir> Visitor<'hir> for NodeCollector<'a, 'hir> {
fn visit_item(&mut self, i: &'hir Item<'hir>) {
debug_assert_eq!(i.owner_id, self.owner);
self.with_parent(i.hir_id(), |this| {
- if let ItemKind::Struct(ref struct_def, _) = i.kind {
+ if let ItemKind::Struct(struct_def, _) = &i.kind {
// If this is a tuple or unit-like struct, register the constructor.
if let Some(ctor_hir_id) = struct_def.ctor_hir_id() {
this.insert(i.span, ctor_hir_id, Node::Ctor(struct_def));
@@ -303,12 +303,12 @@ impl<'a, 'hir> Visitor<'hir> for NodeCollector<'a, 'hir> {
}
fn visit_lifetime(&mut self, lifetime: &'hir Lifetime) {
- self.insert(lifetime.span, lifetime.hir_id, Node::Lifetime(lifetime));
+ self.insert(lifetime.ident.span, lifetime.hir_id, Node::Lifetime(lifetime));
}
fn visit_variant(&mut self, v: &'hir Variant<'hir>) {
- self.insert(v.span, v.id, Node::Variant(v));
- self.with_parent(v.id, |this| {
+ self.insert(v.span, v.hir_id, Node::Variant(v));
+ self.with_parent(v.hir_id, |this| {
// Register the constructor of this variant.
if let Some(ctor_hir_id) = v.data.ctor_hir_id() {
this.insert(v.span, ctor_hir_id, Node::Ctor(&v.data));
diff --git a/compiler/rustc_ast_lowering/src/item.rs b/compiler/rustc_ast_lowering/src/item.rs
index 76316a574..d73d6d391 100644
--- a/compiler/rustc_ast_lowering/src/item.rs
+++ b/compiler/rustc_ast_lowering/src/item.rs
@@ -6,7 +6,6 @@ use super::{FnDeclKind, LoweringContext, ParamMode};
use rustc_ast::ptr::P;
use rustc_ast::visit::AssocCtxt;
use rustc_ast::*;
-use rustc_data_structures::fx::FxHashMap;
use rustc_data_structures::sorted_map::SortedMap;
use rustc_hir as hir;
use rustc_hir::def::{DefKind, Res};
@@ -20,8 +19,7 @@ use rustc_span::symbol::{kw, sym, Ident};
use rustc_span::{Span, Symbol};
use rustc_target::spec::abi;
use smallvec::{smallvec, SmallVec};
-
-use std::iter;
+use thin_vec::ThinVec;
pub(super) struct ItemLowerer<'a, 'hir> {
pub(super) tcx: TyCtxt<'hir>,
@@ -67,7 +65,7 @@ impl<'a, 'hir> ItemLowerer<'a, 'hir> {
// HirId handling.
bodies: Vec::new(),
attrs: SortedMap::default(),
- children: FxHashMap::default(),
+ children: Vec::default(),
current_hir_id_owner: hir::CRATE_OWNER_ID,
item_local_id_counter: hir::ItemLocalId::new(0),
node_id_to_local_id: Default::default(),
@@ -86,7 +84,7 @@ impl<'a, 'hir> ItemLowerer<'a, 'hir> {
impl_trait_defs: Vec::new(),
impl_trait_bounds: Vec::new(),
allow_try_trait: Some([sym::try_trait_v2, sym::yeet_desugar_details][..].into()),
- allow_gen_future: Some([sym::gen_future][..].into()),
+ allow_gen_future: Some([sym::gen_future, sym::closure_track_caller][..].into()),
allow_into_future: Some([sym::into_future][..].into()),
generics_def_id_map: Default::default(),
};
@@ -143,7 +141,7 @@ impl<'a, 'hir> ItemLowerer<'a, 'hir> {
// This is used to track which lifetimes have already been defined,
// and which need to be replicated when lowering an async fn.
match parent_hir.node().expect_item().kind {
- hir::ItemKind::Impl(hir::Impl { ref of_trait, .. }) => {
+ hir::ItemKind::Impl(hir::Impl { of_trait, .. }) => {
lctx.is_in_trait_impl = of_trait.is_some();
}
_ => {}
@@ -179,37 +177,23 @@ impl<'hir> LoweringContext<'_, 'hir> {
pub(super) fn lower_item_ref(&mut self, i: &Item) -> SmallVec<[hir::ItemId; 1]> {
let mut node_ids =
smallvec![hir::ItemId { owner_id: hir::OwnerId { def_id: self.local_def_id(i.id) } }];
- if let ItemKind::Use(ref use_tree) = &i.kind {
- self.lower_item_id_use_tree(use_tree, i.id, &mut node_ids);
+ if let ItemKind::Use(use_tree) = &i.kind {
+ self.lower_item_id_use_tree(use_tree, &mut node_ids);
}
node_ids
}
- fn lower_item_id_use_tree(
- &mut self,
- tree: &UseTree,
- base_id: NodeId,
- vec: &mut SmallVec<[hir::ItemId; 1]>,
- ) {
- match tree.kind {
- UseTreeKind::Nested(ref nested_vec) => {
+ fn lower_item_id_use_tree(&mut self, tree: &UseTree, vec: &mut SmallVec<[hir::ItemId; 1]>) {
+ match &tree.kind {
+ UseTreeKind::Nested(nested_vec) => {
for &(ref nested, id) in nested_vec {
vec.push(hir::ItemId {
owner_id: hir::OwnerId { def_id: self.local_def_id(id) },
});
- self.lower_item_id_use_tree(nested, id, vec);
- }
- }
- UseTreeKind::Glob => {}
- UseTreeKind::Simple(_, id1, id2) => {
- for (_, &id) in
- iter::zip(self.expect_full_res_from_use(base_id).skip(1), &[id1, id2])
- {
- vec.push(hir::ItemId {
- owner_id: hir::OwnerId { def_id: self.local_def_id(id) },
- });
+ self.lower_item_id_use_tree(nested, vec);
}
}
+ UseTreeKind::Simple(..) | UseTreeKind::Glob => {}
}
}
@@ -239,26 +223,26 @@ impl<'hir> LoweringContext<'_, 'hir> {
vis_span: Span,
i: &ItemKind,
) -> hir::ItemKind<'hir> {
- match *i {
- ItemKind::ExternCrate(orig_name) => hir::ItemKind::ExternCrate(orig_name),
- ItemKind::Use(ref use_tree) => {
+ match i {
+ ItemKind::ExternCrate(orig_name) => hir::ItemKind::ExternCrate(*orig_name),
+ ItemKind::Use(use_tree) => {
// Start with an empty prefix.
- let prefix = Path { segments: vec![], span: use_tree.span, tokens: None };
+ let prefix = Path { segments: ThinVec::new(), span: use_tree.span, tokens: None };
self.lower_use_tree(use_tree, &prefix, id, vis_span, ident, attrs)
}
- ItemKind::Static(ref t, m, ref e) => {
+ ItemKind::Static(t, m, e) => {
let (ty, body_id) = self.lower_const_item(t, span, e.as_deref());
- hir::ItemKind::Static(ty, m, body_id)
+ hir::ItemKind::Static(ty, *m, body_id)
}
- ItemKind::Const(_, ref t, ref e) => {
+ ItemKind::Const(_, t, e) => {
let (ty, body_id) = self.lower_const_item(t, span, e.as_deref());
hir::ItemKind::Const(ty, body_id)
}
ItemKind::Fn(box Fn {
- sig: FnSig { ref decl, header, span: fn_sig_span },
- ref generics,
- ref body,
+ sig: FnSig { decl, header, span: fn_sig_span },
+ generics,
+ body,
..
}) => {
self.with_new_scopes(|this| {
@@ -269,43 +253,41 @@ impl<'hir> LoweringContext<'_, 'hir> {
// only cares about the input argument patterns in the function
// declaration (decl), not the return types.
let asyncness = header.asyncness;
- let body_id =
- this.lower_maybe_async_body(span, &decl, asyncness, body.as_deref());
+ let body_id = this.lower_maybe_async_body(
+ span,
+ hir_id,
+ &decl,
+ asyncness,
+ body.as_deref(),
+ );
let mut itctx = ImplTraitContext::Universal;
let (generics, decl) = this.lower_generics(generics, id, &mut itctx, |this| {
let ret_id = asyncness.opt_return_id();
- this.lower_fn_decl(&decl, Some(id), fn_sig_span, FnDeclKind::Fn, ret_id)
+ this.lower_fn_decl(&decl, id, *fn_sig_span, FnDeclKind::Fn, ret_id)
});
let sig = hir::FnSig {
decl,
- header: this.lower_fn_header(header),
- span: this.lower_span(fn_sig_span),
+ header: this.lower_fn_header(*header),
+ span: this.lower_span(*fn_sig_span),
};
hir::ItemKind::Fn(sig, generics, body_id)
})
}
- ItemKind::Mod(_, ref mod_kind) => match mod_kind {
+ ItemKind::Mod(_, mod_kind) => match mod_kind {
ModKind::Loaded(items, _, spans) => {
hir::ItemKind::Mod(self.lower_mod(items, spans))
}
ModKind::Unloaded => panic!("`mod` items should have been loaded by now"),
},
- ItemKind::ForeignMod(ref fm) => hir::ItemKind::ForeignMod {
+ ItemKind::ForeignMod(fm) => hir::ItemKind::ForeignMod {
abi: fm.abi.map_or(abi::Abi::FALLBACK, |abi| self.lower_abi(abi)),
items: self
.arena
.alloc_from_iter(fm.items.iter().map(|x| self.lower_foreign_item_ref(x))),
},
- ItemKind::GlobalAsm(ref asm) => {
- hir::ItemKind::GlobalAsm(self.lower_inline_asm(span, asm))
- }
- ItemKind::TyAlias(box TyAlias {
- ref generics,
- where_clauses,
- ty: Some(ref ty),
- ..
- }) => {
+ ItemKind::GlobalAsm(asm) => hir::ItemKind::GlobalAsm(self.lower_inline_asm(span, asm)),
+ ItemKind::TyAlias(box TyAlias { generics, where_clauses, ty: Some(ty), .. }) => {
// We lower
//
// type Foo = impl Trait
@@ -315,7 +297,7 @@ impl<'hir> LoweringContext<'_, 'hir> {
// type Foo = Foo1
// opaque type Foo1: Trait
let mut generics = generics.clone();
- add_ty_alias_where_clause(&mut generics, where_clauses, true);
+ add_ty_alias_where_clause(&mut generics, *where_clauses, true);
let (generics, ty) = self.lower_generics(
&generics,
id,
@@ -324,9 +306,7 @@ impl<'hir> LoweringContext<'_, 'hir> {
);
hir::ItemKind::TyAlias(ty, generics)
}
- ItemKind::TyAlias(box TyAlias {
- ref generics, ref where_clauses, ty: None, ..
- }) => {
+ ItemKind::TyAlias(box TyAlias { generics, where_clauses, ty: None, .. }) => {
let mut generics = generics.clone();
add_ty_alias_where_clause(&mut generics, *where_clauses, true);
let (generics, ty) = self.lower_generics(
@@ -337,7 +317,7 @@ impl<'hir> LoweringContext<'_, 'hir> {
);
hir::ItemKind::TyAlias(ty, generics)
}
- ItemKind::Enum(ref enum_definition, ref generics) => {
+ ItemKind::Enum(enum_definition, generics) => {
let (generics, variants) = self.lower_generics(
generics,
id,
@@ -350,7 +330,7 @@ impl<'hir> LoweringContext<'_, 'hir> {
);
hir::ItemKind::Enum(hir::EnumDef { variants }, generics)
}
- ItemKind::Struct(ref struct_def, ref generics) => {
+ ItemKind::Struct(struct_def, generics) => {
let (generics, struct_def) = self.lower_generics(
generics,
id,
@@ -359,7 +339,7 @@ impl<'hir> LoweringContext<'_, 'hir> {
);
hir::ItemKind::Struct(struct_def, generics)
}
- ItemKind::Union(ref vdata, ref generics) => {
+ ItemKind::Union(vdata, generics) => {
let (generics, vdata) = self.lower_generics(
generics,
id,
@@ -373,10 +353,10 @@ impl<'hir> LoweringContext<'_, 'hir> {
polarity,
defaultness,
constness,
- generics: ref ast_generics,
- of_trait: ref trait_ref,
- self_ty: ref ty,
- items: ref impl_items,
+ generics: ast_generics,
+ of_trait: trait_ref,
+ self_ty: ty,
+ items: impl_items,
}) => {
// Lower the "impl header" first. This ordering is important
// for in-band lifetimes! Consider `'a` here:
@@ -414,30 +394,24 @@ impl<'hir> LoweringContext<'_, 'hir> {
// `defaultness.has_value()` is never called for an `impl`, always `true` in order
// to not cause an assertion failure inside the `lower_defaultness` function.
let has_val = true;
- let (defaultness, defaultness_span) = self.lower_defaultness(defaultness, has_val);
+ let (defaultness, defaultness_span) = self.lower_defaultness(*defaultness, has_val);
let polarity = match polarity {
ImplPolarity::Positive => ImplPolarity::Positive,
- ImplPolarity::Negative(s) => ImplPolarity::Negative(self.lower_span(s)),
+ ImplPolarity::Negative(s) => ImplPolarity::Negative(self.lower_span(*s)),
};
hir::ItemKind::Impl(self.arena.alloc(hir::Impl {
- unsafety: self.lower_unsafety(unsafety),
+ unsafety: self.lower_unsafety(*unsafety),
polarity,
defaultness,
defaultness_span,
- constness: self.lower_constness(constness),
+ constness: self.lower_constness(*constness),
generics,
of_trait: trait_ref,
self_ty: lowered_ty,
items: new_impl_items,
}))
}
- ItemKind::Trait(box Trait {
- is_auto,
- unsafety,
- ref generics,
- ref bounds,
- ref items,
- }) => {
+ ItemKind::Trait(box Trait { is_auto, unsafety, generics, bounds, items }) => {
let (generics, (unsafety, items, bounds)) = self.lower_generics(
generics,
id,
@@ -450,13 +424,13 @@ impl<'hir> LoweringContext<'_, 'hir> {
let items = this.arena.alloc_from_iter(
items.iter().map(|item| this.lower_trait_item_ref(item)),
);
- let unsafety = this.lower_unsafety(unsafety);
+ let unsafety = this.lower_unsafety(*unsafety);
(unsafety, items, bounds)
},
);
- hir::ItemKind::Trait(is_auto, unsafety, generics, bounds, items)
+ hir::ItemKind::Trait(*is_auto, unsafety, generics, bounds, items)
}
- ItemKind::TraitAlias(ref generics, ref bounds) => {
+ ItemKind::TraitAlias(generics, bounds) => {
let (generics, bounds) = self.lower_generics(
generics,
id,
@@ -470,10 +444,10 @@ impl<'hir> LoweringContext<'_, 'hir> {
);
hir::ItemKind::TraitAlias(generics, bounds)
}
- ItemKind::MacroDef(MacroDef { ref body, macro_rules }) => {
- let body = P(self.lower_mac_args(body));
+ ItemKind::MacroDef(MacroDef { body, macro_rules }) => {
+ let body = P(self.lower_delim_args(body));
let macro_kind = self.resolver.decl_macro_kind(self.local_def_id(id));
- hir::ItemKind::Macro(ast::MacroDef { body, macro_rules }, macro_kind)
+ hir::ItemKind::Macro(ast::MacroDef { body, macro_rules: *macro_rules }, macro_kind)
}
ItemKind::MacCall(..) => {
panic!("`TyMac` should have been expanded by now")
@@ -505,7 +479,7 @@ impl<'hir> LoweringContext<'_, 'hir> {
let segments = prefix.segments.iter().chain(path.segments.iter()).cloned().collect();
match tree.kind {
- UseTreeKind::Simple(rename, id1, id2) => {
+ UseTreeKind::Simple(rename) => {
*ident = tree.ident();
// First, apply the prefix to the path.
@@ -521,66 +495,16 @@ impl<'hir> LoweringContext<'_, 'hir> {
}
}
- let mut resolutions = self.expect_full_res_from_use(id).fuse();
- // We want to return *something* from this function, so hold onto the first item
- // for later.
- let ret_res = self.lower_res(resolutions.next().unwrap_or(Res::Err));
-
- // Here, we are looping over namespaces, if they exist for the definition
- // being imported. We only handle type and value namespaces because we
- // won't be dealing with macros in the rest of the compiler.
- // Essentially a single `use` which imports two names is desugared into
- // two imports.
- for new_node_id in [id1, id2] {
- let new_id = self.local_def_id(new_node_id);
- let Some(res) = resolutions.next() else {
- // Associate an HirId to both ids even if there is no resolution.
- let _old = self.children.insert(
- new_id,
- hir::MaybeOwner::NonOwner(hir::HirId::make_owner(new_id)),
- );
- debug_assert!(_old.is_none());
- continue;
- };
- let ident = *ident;
- let mut path = path.clone();
- for seg in &mut path.segments {
- // Give the cloned segment the same resolution information
- // as the old one (this is needed for stability checking).
- let new_id = self.next_node_id();
- self.resolver.clone_res(seg.id, new_id);
- seg.id = new_id;
- }
- let span = path.span;
-
- self.with_hir_id_owner(new_node_id, |this| {
- let res = this.lower_res(res);
- let path = this.lower_path_extra(res, &path, ParamMode::Explicit);
- let kind = hir::ItemKind::Use(path, hir::UseKind::Single);
- if let Some(attrs) = attrs {
- this.attrs.insert(hir::ItemLocalId::new(0), attrs);
- }
-
- let item = hir::Item {
- owner_id: hir::OwnerId { def_id: new_id },
- ident: this.lower_ident(ident),
- kind,
- vis_span,
- span: this.lower_span(span),
- };
- hir::OwnerNode::Item(this.arena.alloc(item))
- });
- }
-
- let path = self.lower_path_extra(ret_res, &path, ParamMode::Explicit);
+ let res =
+ self.expect_full_res_from_use(id).map(|res| self.lower_res(res)).collect();
+ let path = self.lower_use_path(res, &path, ParamMode::Explicit);
hir::ItemKind::Use(path, hir::UseKind::Single)
}
UseTreeKind::Glob => {
- let path = self.lower_path(
- id,
- &Path { segments, span: path.span, tokens: None },
- ParamMode::Explicit,
- );
+ let res = self.expect_full_res(id);
+ let res = smallvec![self.lower_res(res)];
+ let path = Path { segments, span: path.span, tokens: None };
+ let path = self.lower_use_path(res, &path, ParamMode::Explicit);
hir::ItemKind::Use(path, hir::UseKind::Glob)
}
UseTreeKind::Nested(ref trees) => {
@@ -650,9 +574,9 @@ impl<'hir> LoweringContext<'_, 'hir> {
});
}
- let res = self.expect_full_res_from_use(id).next().unwrap_or(Res::Err);
- let res = self.lower_res(res);
- let path = self.lower_path_extra(res, &prefix, ParamMode::Explicit);
+ let res =
+ self.expect_full_res_from_use(id).map(|res| self.lower_res(res)).collect();
+ let path = self.lower_use_path(res, &prefix, ParamMode::Explicit);
hir::ItemKind::Use(path, hir::UseKind::ListStem)
}
}
@@ -665,8 +589,8 @@ impl<'hir> LoweringContext<'_, 'hir> {
let item = hir::ForeignItem {
owner_id,
ident: self.lower_ident(i.ident),
- kind: match i.kind {
- ForeignItemKind::Fn(box Fn { ref sig, ref generics, .. }) => {
+ kind: match &i.kind {
+ ForeignItemKind::Fn(box Fn { sig, generics, .. }) => {
let fdec = &sig.decl;
let mut itctx = ImplTraitContext::Universal;
let (generics, (fn_dec, fn_args)) =
@@ -675,7 +599,7 @@ impl<'hir> LoweringContext<'_, 'hir> {
// Disallow `impl Trait` in foreign items.
this.lower_fn_decl(
fdec,
- None,
+ i.id,
sig.span,
FnDeclKind::ExternFn,
None,
@@ -686,10 +610,10 @@ impl<'hir> LoweringContext<'_, 'hir> {
hir::ForeignItemKind::Fn(fn_dec, fn_args, generics)
}
- ForeignItemKind::Static(ref t, m, _) => {
+ ForeignItemKind::Static(t, m, _) => {
let ty =
self.lower_ty(t, &ImplTraitContext::Disallowed(ImplTraitPosition::Type));
- hir::ForeignItemKind::Static(ty, m)
+ hir::ForeignItemKind::Static(ty, *m)
}
ForeignItemKind::TyAlias(..) => hir::ForeignItemKind::Type,
ForeignItemKind::MacCall(_) => panic!("macro shouldn't exist here"),
@@ -709,11 +633,12 @@ impl<'hir> LoweringContext<'_, 'hir> {
}
fn lower_variant(&mut self, v: &Variant) -> hir::Variant<'hir> {
- let id = self.lower_node_id(v.id);
- self.lower_attrs(id, &v.attrs);
+ let hir_id = self.lower_node_id(v.id);
+ self.lower_attrs(hir_id, &v.attrs);
hir::Variant {
- id,
- data: self.lower_variant_data(id, &v.data),
+ hir_id,
+ def_id: self.local_def_id(v.id),
+ data: self.lower_variant_data(hir_id, &v.data),
disr_expr: v.disr_expr.as_ref().map(|e| self.lower_anon_const(e)),
ident: self.lower_ident(v.ident),
span: self.lower_span(v.span),
@@ -725,32 +650,33 @@ impl<'hir> LoweringContext<'_, 'hir> {
parent_id: hir::HirId,
vdata: &VariantData,
) -> hir::VariantData<'hir> {
- match *vdata {
- VariantData::Struct(ref fields, recovered) => hir::VariantData::Struct(
+ match vdata {
+ VariantData::Struct(fields, recovered) => hir::VariantData::Struct(
self.arena
.alloc_from_iter(fields.iter().enumerate().map(|f| self.lower_field_def(f))),
- recovered,
+ *recovered,
),
- VariantData::Tuple(ref fields, id) => {
- let ctor_id = self.lower_node_id(id);
+ VariantData::Tuple(fields, id) => {
+ let ctor_id = self.lower_node_id(*id);
self.alias_attrs(ctor_id, parent_id);
hir::VariantData::Tuple(
self.arena.alloc_from_iter(
fields.iter().enumerate().map(|f| self.lower_field_def(f)),
),
ctor_id,
+ self.local_def_id(*id),
)
}
VariantData::Unit(id) => {
- let ctor_id = self.lower_node_id(id);
+ let ctor_id = self.lower_node_id(*id);
self.alias_attrs(ctor_id, parent_id);
- hir::VariantData::Unit(ctor_id)
+ hir::VariantData::Unit(ctor_id, self.local_def_id(*id))
}
}
}
fn lower_field_def(&mut self, (index, f): (usize, &FieldDef)) -> hir::FieldDef<'hir> {
- let ty = if let TyKind::Path(ref qself, ref path) = f.ty.kind {
+ let ty = if let TyKind::Path(qself, path) = &f.ty.kind {
let t = self.lower_path_ty(
&f.ty,
qself,
@@ -767,6 +693,7 @@ impl<'hir> LoweringContext<'_, 'hir> {
hir::FieldDef {
span: self.lower_span(f.span),
hir_id,
+ def_id: self.local_def_id(f.id),
ident: match f.ident {
Some(ident) => self.lower_ident(ident),
// FIXME(jseyfried): positional field hygiene.
@@ -779,15 +706,16 @@ impl<'hir> LoweringContext<'_, 'hir> {
fn lower_trait_item(&mut self, i: &AssocItem) -> &'hir hir::TraitItem<'hir> {
let hir_id = self.lower_node_id(i.id);
+ self.lower_attrs(hir_id, &i.attrs);
let trait_item_def_id = hir_id.expect_owner();
- let (generics, kind, has_default) = match i.kind {
- AssocItemKind::Const(_, ref ty, ref default) => {
+ let (generics, kind, has_default) = match &i.kind {
+ AssocItemKind::Const(_, ty, default) => {
let ty = self.lower_ty(ty, &ImplTraitContext::Disallowed(ImplTraitPosition::Type));
let body = default.as_ref().map(|x| self.lower_const_body(i.span, Some(x)));
(hir::Generics::empty(), hir::TraitItemKind::Const(ty, body), body.is_some())
}
- AssocItemKind::Fn(box Fn { ref sig, ref generics, body: None, .. }) => {
+ AssocItemKind::Fn(box Fn { sig, generics, body: None, .. }) => {
let asyncness = sig.header.asyncness;
let names = self.lower_fn_params_to_names(&sig.decl);
let (generics, sig) = self.lower_method_sig(
@@ -799,10 +727,10 @@ impl<'hir> LoweringContext<'_, 'hir> {
);
(generics, hir::TraitItemKind::Fn(sig, hir::TraitFn::Required(names)), false)
}
- AssocItemKind::Fn(box Fn { ref sig, ref generics, body: Some(ref body), .. }) => {
+ AssocItemKind::Fn(box Fn { sig, generics, body: Some(body), .. }) => {
let asyncness = sig.header.asyncness;
let body_id =
- self.lower_maybe_async_body(i.span, &sig.decl, asyncness, Some(&body));
+ self.lower_maybe_async_body(i.span, hir_id, &sig.decl, asyncness, Some(&body));
let (generics, sig) = self.lower_method_sig(
generics,
sig,
@@ -812,15 +740,9 @@ impl<'hir> LoweringContext<'_, 'hir> {
);
(generics, hir::TraitItemKind::Fn(sig, hir::TraitFn::Provided(body_id)), true)
}
- AssocItemKind::Type(box TyAlias {
- ref generics,
- where_clauses,
- ref bounds,
- ref ty,
- ..
- }) => {
+ AssocItemKind::Type(box TyAlias { generics, where_clauses, bounds, ty, .. }) => {
let mut generics = generics.clone();
- add_ty_alias_where_clause(&mut generics, where_clauses, false);
+ add_ty_alias_where_clause(&mut generics, *where_clauses, false);
let (generics, kind) = self.lower_generics(
&generics,
i.id,
@@ -843,7 +765,6 @@ impl<'hir> LoweringContext<'_, 'hir> {
AssocItemKind::MacCall(..) => panic!("macro item shouldn't exist at this point"),
};
- self.lower_attrs(hir_id, &i.attrs);
let item = hir::TraitItem {
owner_id: trait_item_def_id,
ident: self.lower_ident(i.ident),
@@ -875,13 +796,15 @@ impl<'hir> LoweringContext<'_, 'hir> {
/// Construct `ExprKind::Err` for the given `span`.
pub(crate) fn expr_err(&mut self, span: Span) -> hir::Expr<'hir> {
- self.expr(span, hir::ExprKind::Err, AttrVec::new())
+ self.expr(span, hir::ExprKind::Err)
}
fn lower_impl_item(&mut self, i: &AssocItem) -> &'hir hir::ImplItem<'hir> {
// Since `default impl` is not yet implemented, this is always true in impls.
let has_value = true;
let (defaultness, _) = self.lower_defaultness(i.kind.defaultness(), has_value);
+ let hir_id = self.lower_node_id(i.id);
+ self.lower_attrs(hir_id, &i.attrs);
let (generics, kind) = match &i.kind {
AssocItemKind::Const(_, ty, expr) => {
@@ -894,8 +817,13 @@ impl<'hir> LoweringContext<'_, 'hir> {
AssocItemKind::Fn(box Fn { sig, generics, body, .. }) => {
self.current_item = Some(i.span);
let asyncness = sig.header.asyncness;
- let body_id =
- self.lower_maybe_async_body(i.span, &sig.decl, asyncness, body.as_deref());
+ let body_id = self.lower_maybe_async_body(
+ i.span,
+ hir_id,
+ &sig.decl,
+ asyncness,
+ body.as_deref(),
+ );
let (generics, sig) = self.lower_method_sig(
generics,
sig,
@@ -928,8 +856,6 @@ impl<'hir> LoweringContext<'_, 'hir> {
AssocItemKind::MacCall(..) => panic!("`TyMac` should have been expanded by now"),
};
- let hir_id = self.lower_node_id(i.id);
- self.lower_attrs(hir_id, &i.attrs);
let item = hir::ImplItem {
owner_id: hir_id.expect_owner(),
ident: self.lower_ident(i.ident),
@@ -1062,6 +988,7 @@ impl<'hir> LoweringContext<'_, 'hir> {
fn lower_maybe_async_body(
&mut self,
span: Span,
+ fn_id: hir::HirId,
decl: &FnDecl,
asyncness: Async,
body: Option<&Block>,
@@ -1212,6 +1139,7 @@ impl<'hir> LoweringContext<'_, 'hir> {
let async_expr = this.make_async_expr(
CaptureBy::Value,
+ Some(fn_id),
closure_id,
None,
body.span,
@@ -1223,11 +1151,8 @@ impl<'hir> LoweringContext<'_, 'hir> {
// Transform into `drop-temps { <user-body> }`, an expression:
let desugared_span =
this.mark_span_with_reason(DesugaringKind::Async, user_body.span, None);
- let user_body = this.expr_drop_temps(
- desugared_span,
- this.arena.alloc(user_body),
- AttrVec::new(),
- );
+ let user_body =
+ this.expr_drop_temps(desugared_span, this.arena.alloc(user_body));
// As noted above, create the final block like
//
@@ -1244,14 +1169,11 @@ impl<'hir> LoweringContext<'_, 'hir> {
Some(user_body),
);
- this.expr_block(body, AttrVec::new())
+ this.expr_block(body)
},
);
- (
- this.arena.alloc_from_iter(parameters),
- this.expr(body.span, async_expr, AttrVec::new()),
- )
+ (this.arena.alloc_from_iter(parameters), this.expr(body.span, async_expr))
})
}
@@ -1266,7 +1188,7 @@ impl<'hir> LoweringContext<'_, 'hir> {
let header = self.lower_fn_header(sig.header);
let mut itctx = ImplTraitContext::Universal;
let (generics, decl) = self.lower_generics(generics, id, &mut itctx, |this| {
- this.lower_fn_decl(&sig.decl, Some(id), sig.span, kind, is_async)
+ this.lower_fn_decl(&sig.decl, id, sig.span, kind, is_async)
});
(generics, hir::FnSig { header, decl, span: self.lower_span(sig.span) })
}
@@ -1352,7 +1274,7 @@ impl<'hir> LoweringContext<'_, 'hir> {
// keep track of the Span info. Now, `add_implicitly_sized` in `AstConv` checks both param bounds and
// where clauses for `?Sized`.
for pred in &generics.where_clause.predicates {
- let WherePredicate::BoundPredicate(ref bound_pred) = *pred else {
+ let WherePredicate::BoundPredicate(bound_pred) = pred else {
continue;
};
let compute_is_param = || {
@@ -1498,10 +1420,9 @@ impl<'hir> LoweringContext<'_, 'hir> {
}))
}
GenericParamKind::Lifetime => {
- let ident_span = self.lower_span(ident.span);
let ident = self.lower_ident(ident);
let lt_id = self.next_node_id();
- let lifetime = self.new_named_lifetime(id, lt_id, ident_span, ident);
+ let lifetime = self.new_named_lifetime(id, lt_id, ident);
Some(hir::WherePredicate::RegionPredicate(hir::WhereRegionPredicate {
lifetime,
span,
@@ -1513,11 +1434,11 @@ impl<'hir> LoweringContext<'_, 'hir> {
}
fn lower_where_predicate(&mut self, pred: &WherePredicate) -> hir::WherePredicate<'hir> {
- match *pred {
+ match pred {
WherePredicate::BoundPredicate(WhereBoundPredicate {
- ref bound_generic_params,
- ref bounded_ty,
- ref bounds,
+ bound_generic_params,
+ bounded_ty,
+ bounds,
span,
}) => hir::WherePredicate::BoundPredicate(hir::WhereBoundPredicate {
hir_id: self.next_id(),
@@ -1530,29 +1451,27 @@ impl<'hir> LoweringContext<'_, 'hir> {
&ImplTraitContext::Disallowed(ImplTraitPosition::Bound),
)
})),
- span: self.lower_span(span),
+ span: self.lower_span(*span),
origin: PredicateOrigin::WhereClause,
}),
- WherePredicate::RegionPredicate(WhereRegionPredicate {
- ref lifetime,
- ref bounds,
- span,
- }) => hir::WherePredicate::RegionPredicate(hir::WhereRegionPredicate {
- span: self.lower_span(span),
- lifetime: self.lower_lifetime(lifetime),
- bounds: self.lower_param_bounds(
- bounds,
- &ImplTraitContext::Disallowed(ImplTraitPosition::Bound),
- ),
- in_where_clause: true,
- }),
- WherePredicate::EqPredicate(WhereEqPredicate { ref lhs_ty, ref rhs_ty, span }) => {
+ WherePredicate::RegionPredicate(WhereRegionPredicate { lifetime, bounds, span }) => {
+ hir::WherePredicate::RegionPredicate(hir::WhereRegionPredicate {
+ span: self.lower_span(*span),
+ lifetime: self.lower_lifetime(lifetime),
+ bounds: self.lower_param_bounds(
+ bounds,
+ &ImplTraitContext::Disallowed(ImplTraitPosition::Bound),
+ ),
+ in_where_clause: true,
+ })
+ }
+ WherePredicate::EqPredicate(WhereEqPredicate { lhs_ty, rhs_ty, span }) => {
hir::WherePredicate::EqPredicate(hir::WhereEqPredicate {
lhs_ty: self
.lower_ty(lhs_ty, &ImplTraitContext::Disallowed(ImplTraitPosition::Type)),
rhs_ty: self
.lower_ty(rhs_ty, &ImplTraitContext::Disallowed(ImplTraitPosition::Type)),
- span: self.lower_span(span),
+ span: self.lower_span(*span),
})
}
}
diff --git a/compiler/rustc_ast_lowering/src/lib.rs b/compiler/rustc_ast_lowering/src/lib.rs
index ff29d15f1..4fa18907f 100644
--- a/compiler/rustc_ast_lowering/src/lib.rs
+++ b/compiler/rustc_ast_lowering/src/lib.rs
@@ -34,7 +34,6 @@
#![feature(let_chains)]
#![feature(never_type)]
#![recursion_limit = "256"]
-#![allow(rustc::potential_query_instability)]
#![deny(rustc::untranslatable_diagnostic)]
#![deny(rustc::diagnostic_outside_of_impl)]
@@ -61,8 +60,8 @@ use rustc_hir::def_id::{LocalDefId, CRATE_DEF_ID};
use rustc_hir::definitions::DefPathData;
use rustc_hir::{ConstArg, GenericArg, ItemLocalId, ParamName, TraitCandidate};
use rustc_index::vec::{Idx, IndexVec};
+use rustc_middle::span_bug;
use rustc_middle::ty::{ResolverAstLowering, TyCtxt};
-use rustc_middle::{bug, span_bug};
use rustc_session::parse::feature_err;
use rustc_span::hygiene::MacroKind;
use rustc_span::source_map::DesugaringKind;
@@ -107,7 +106,7 @@ struct LoweringContext<'a, 'hir> {
/// Attributes inside the owner being lowered.
attrs: SortedMap<hir::ItemLocalId, &'hir [Attribute]>,
/// Collect items that were created by lowering the current owner.
- children: FxHashMap<LocalDefId, hir::MaybeOwner<&'hir hir::OwnerInfo<'hir>>>,
+ children: Vec<(LocalDefId, hir::MaybeOwner<&'hir hir::OwnerInfo<'hir>>)>,
generator_kind: Option<hir::GeneratorKind>,
@@ -260,6 +259,8 @@ enum ImplTraitContext {
},
/// Impl trait in type aliases.
TypeAliasesOpaqueTy,
+ /// `impl Trait` is unstably accepted in this position.
+ FeatureGated(ImplTraitPosition, Symbol),
/// `impl Trait` is not accepted in this position.
Disallowed(ImplTraitPosition),
}
@@ -328,7 +329,14 @@ enum FnDeclKind {
}
impl FnDeclKind {
- fn impl_trait_allowed(&self, tcx: TyCtxt<'_>) -> bool {
+ fn param_impl_trait_allowed(&self) -> bool {
+ match self {
+ FnDeclKind::Fn | FnDeclKind::Inherent | FnDeclKind::Impl | FnDeclKind::Trait => true,
+ _ => false,
+ }
+ }
+
+ fn return_impl_trait_allowed(&self, tcx: TyCtxt<'_>) -> bool {
match self {
FnDeclKind::Fn | FnDeclKind::Inherent => true,
FnDeclKind::Impl if tcx.features().return_position_impl_trait_in_trait => true,
@@ -481,6 +489,7 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> {
parent: LocalDefId,
node_id: ast::NodeId,
data: DefPathData,
+ span: Span,
) -> LocalDefId {
debug_assert_ne!(node_id, ast::DUMMY_NODE_ID);
assert!(
@@ -491,7 +500,7 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> {
self.tcx.hir().def_key(self.local_def_id(node_id)),
);
- let def_id = self.tcx.create_def(parent, data);
+ let def_id = self.tcx.at(span).create_def(parent, data).def_id();
debug!("create_def: def_id_to_node_id[{:?}] <-> {:?}", def_id, node_id);
self.resolver.node_id_to_def_id.insert(node_id, def_id);
@@ -611,8 +620,8 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> {
self.impl_trait_defs = current_impl_trait_defs;
self.impl_trait_bounds = current_impl_trait_bounds;
- let _old = self.children.insert(def_id, hir::MaybeOwner::Owner(info));
- debug_assert!(_old.is_none())
+ debug_assert!(self.children.iter().find(|(id, _)| id == &def_id).is_none());
+ self.children.push((def_id, hir::MaybeOwner::Owner(info)));
}
/// Installs the remapping `remap` in scope while `f` is being executed.
@@ -719,8 +728,7 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> {
assert_ne!(local_id, hir::ItemLocalId::new(0));
if let Some(def_id) = self.opt_local_def_id(ast_node_id) {
- // Do not override a `MaybeOwner::Owner` that may already here.
- self.children.entry(def_id).or_insert(hir::MaybeOwner::NonOwner(hir_id));
+ self.children.push((def_id, hir::MaybeOwner::NonOwner(hir_id)));
self.local_id_to_def_id.insert(local_id, def_id);
}
@@ -818,6 +826,7 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> {
self.current_hir_id_owner.def_id,
param,
DefPathData::LifetimeNs(kw::UnderscoreLifetime),
+ ident.span,
);
debug!(?_def_id);
@@ -830,8 +839,10 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> {
),
};
let hir_id = self.lower_node_id(node_id);
+ let def_id = self.local_def_id(node_id);
Some(hir::GenericParam {
hir_id,
+ def_id,
name,
span: self.lower_span(ident.span),
pure_wrt_drop: false,
@@ -911,7 +922,7 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> {
AttrKind::Normal(ref normal) => AttrKind::Normal(P(NormalAttr {
item: AttrItem {
path: normal.item.path.clone(),
- args: self.lower_mac_args(&normal.item.args),
+ args: self.lower_attr_args(&normal.item.args),
tokens: None,
},
tokens: None,
@@ -931,51 +942,39 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> {
}
}
- fn lower_mac_args(&self, args: &MacArgs) -> MacArgs {
- match *args {
- MacArgs::Empty => MacArgs::Empty,
- MacArgs::Delimited(dspan, delim, ref tokens) => {
- // This is either a non-key-value attribute, or a `macro_rules!` body.
- // We either not have any nonterminals present (in the case of an attribute),
- // or have tokens available for all nonterminals in the case of a nested
- // `macro_rules`: e.g:
- //
- // ```rust
- // macro_rules! outer {
- // ($e:expr) => {
- // macro_rules! inner {
- // () => { $e }
- // }
- // }
- // }
- // ```
- //
- // In both cases, we don't want to synthesize any tokens
- MacArgs::Delimited(dspan, delim, tokens.flattened())
- }
+ fn lower_attr_args(&self, args: &AttrArgs) -> AttrArgs {
+ match args {
+ AttrArgs::Empty => AttrArgs::Empty,
+ AttrArgs::Delimited(args) => AttrArgs::Delimited(self.lower_delim_args(args)),
// This is an inert key-value attribute - it will never be visible to macros
// after it gets lowered to HIR. Therefore, we can extract literals to handle
// nonterminals in `#[doc]` (e.g. `#[doc = $e]`).
- MacArgs::Eq(eq_span, MacArgsEq::Ast(ref expr)) => {
+ AttrArgs::Eq(eq_span, AttrArgsEq::Ast(expr)) => {
// In valid code the value always ends up as a single literal. Otherwise, a dummy
// literal suffices because the error is handled elsewhere.
- let lit = if let ExprKind::Lit(lit) = &expr.kind {
- lit.clone()
+ let lit = if let ExprKind::Lit(token_lit) = expr.kind
+ && let Ok(lit) = MetaItemLit::from_token_lit(token_lit, expr.span)
+ {
+ lit
} else {
- Lit {
+ MetaItemLit {
token_lit: token::Lit::new(token::LitKind::Err, kw::Empty, None),
kind: LitKind::Err,
span: DUMMY_SP,
}
};
- MacArgs::Eq(eq_span, MacArgsEq::Hir(lit))
+ AttrArgs::Eq(*eq_span, AttrArgsEq::Hir(lit))
}
- MacArgs::Eq(_, MacArgsEq::Hir(ref lit)) => {
+ AttrArgs::Eq(_, AttrArgsEq::Hir(lit)) => {
unreachable!("in literal form when lowering mac args eq: {:?}", lit)
}
}
}
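// Illustrative case for the `AttrArgs::Eq` handling above (a sketch, using a
// made-up macro name):
//
// ```ignore (illustrative)
// macro_rules! document { ($text:expr) => { #[doc = $text] pub struct S; } }
// document!("generated docs");
// ```
//
// After expansion, the attribute arrives here as `AttrArgs::Eq` whose value is
// an `ExprKind::Lit` expression; the branch above folds that into an
// `AttrArgsEq::Hir(MetaItemLit)`, so later consumers of `#[doc]` only ever see
// a plain literal.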
+ fn lower_delim_args(&self, args: &DelimArgs) -> DelimArgs {
+ DelimArgs { dspan: args.dspan, delim: args.delim, tokens: args.tokens.flattened() }
+ }
+
/// Given an associated type constraint like one of these:
///
/// ```ignore (illustrative)
@@ -994,12 +993,12 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> {
) -> hir::TypeBinding<'hir> {
debug!("lower_assoc_ty_constraint(constraint={:?}, itctx={:?})", constraint, itctx);
// lower generic arguments of identifier in constraint
- let gen_args = if let Some(ref gen_args) = constraint.gen_args {
+ let gen_args = if let Some(gen_args) = &constraint.gen_args {
let gen_args_ctor = match gen_args {
- GenericArgs::AngleBracketed(ref data) => {
+ GenericArgs::AngleBracketed(data) => {
self.lower_angle_bracketed_parameter_data(data, ParamMode::Explicit, itctx).0
}
- GenericArgs::Parenthesized(ref data) => {
+ GenericArgs::Parenthesized(data) => {
self.emit_bad_parenthesized_trait_in_assoc_ty(data);
let aba = self.ast_arena.aba.alloc(data.as_angle_bracketed_args());
self.lower_angle_bracketed_parameter_data(aba, ParamMode::Explicit, itctx).0
@@ -1011,15 +1010,15 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> {
};
let itctx_tait = &ImplTraitContext::TypeAliasesOpaqueTy;
- let kind = match constraint.kind {
- AssocConstraintKind::Equality { ref term } => {
+ let kind = match &constraint.kind {
+ AssocConstraintKind::Equality { term } => {
let term = match term {
- Term::Ty(ref ty) => self.lower_ty(ty, itctx).into(),
- Term::Const(ref c) => self.lower_anon_const(c).into(),
+ Term::Ty(ty) => self.lower_ty(ty, itctx).into(),
+ Term::Const(c) => self.lower_anon_const(c).into(),
};
hir::TypeBindingKind::Equality { term }
}
- AssocConstraintKind::Bound { ref bounds } => {
+ AssocConstraintKind::Bound { bounds } => {
// Piggy-back on the `impl Trait` context to figure out the correct behavior.
let (desugar_to_impl_trait, itctx) = match itctx {
// We are in the return position:
@@ -1129,7 +1128,7 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> {
match arg {
ast::GenericArg::Lifetime(lt) => GenericArg::Lifetime(self.lower_lifetime(&lt)),
ast::GenericArg::Type(ty) => {
- match ty.kind {
+ match &ty.kind {
TyKind::Infer if self.tcx.features().generic_arg_infer => {
return GenericArg::Infer(hir::InferArg {
hir_id: self.lower_node_id(ty.id),
@@ -1140,7 +1139,7 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> {
// parsing. We try to resolve that ambiguity by attempting resolution in both the
// type and value namespaces. If we resolved the path in the value namespace, we
// transform it into a generic const argument.
- TyKind::Path(ref qself, ref path) => {
+ TyKind::Path(qself, path) => {
if let Some(res) = self
.resolver
.get_partial_res(ty.id)
@@ -1156,15 +1155,16 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> {
let parent_def_id = self.current_hir_id_owner;
let node_id = self.next_node_id();
+ let span = self.lower_span(ty.span);
// Add a definition for the in-band const def.
- self.create_def(
+ let def_id = self.create_def(
parent_def_id.def_id,
node_id,
DefPathData::AnonConst,
+ span,
);
- let span = self.lower_span(ty.span);
let path_expr = Expr {
id: ty.id,
kind: ExprKind::Path(qself.clone(), path.clone()),
@@ -1174,6 +1174,7 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> {
};
let ct = self.with_new_scopes(|this| hir::AnonConst {
+ def_id,
hir_id: this.lower_node_id(node_id),
body: this.lower_const_body(path_expr.span, Some(&path_expr)),
});
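// Concrete case (a sketch): given `struct Foo<const N: usize>;`, a use such as
// `Foo<M>` parses `M` as a type path. If resolution only finds a constant `M`
// in the value namespace, the code above creates a `DefPathData::AnonConst`
// def, wraps the path in an `ExprKind::Path` expression, and lowers it as an
// anonymous const so `M` ends up as a const generic argument instead of a type.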
@@ -1200,7 +1201,7 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> {
fn lower_path_ty(
&mut self,
t: &Ty,
- qself: &Option<QSelf>,
+ qself: &Option<ptr::P<QSelf>>,
path: &Path,
param_mode: ParamMode,
itctx: &ImplTraitContext,
@@ -1246,12 +1247,12 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> {
}
fn lower_ty_direct(&mut self, t: &Ty, itctx: &ImplTraitContext) -> hir::Ty<'hir> {
- let kind = match t.kind {
+ let kind = match &t.kind {
TyKind::Infer => hir::TyKind::Infer,
TyKind::Err => hir::TyKind::Err,
- TyKind::Slice(ref ty) => hir::TyKind::Slice(self.lower_ty(ty, itctx)),
- TyKind::Ptr(ref mt) => hir::TyKind::Ptr(self.lower_mt(mt, itctx)),
- TyKind::Rptr(ref region, ref mt) => {
+ TyKind::Slice(ty) => hir::TyKind::Slice(self.lower_ty(ty, itctx)),
+ TyKind::Ptr(mt) => hir::TyKind::Ptr(self.lower_mt(mt, itctx)),
+ TyKind::Rptr(region, mt) => {
let region = region.unwrap_or_else(|| {
let id = if let Some(LifetimeRes::ElidedAnchor { start, end }) =
self.resolver.get_lifetime_res(t.id)
@@ -1261,30 +1262,30 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> {
} else {
self.next_node_id()
};
- let span = self.tcx.sess.source_map().start_point(t.span);
+ let span = self.tcx.sess.source_map().start_point(t.span).shrink_to_hi();
Lifetime { ident: Ident::new(kw::UnderscoreLifetime, span), id }
});
let lifetime = self.lower_lifetime(&region);
hir::TyKind::Rptr(lifetime, self.lower_mt(mt, itctx))
}
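// Sketch of the span tweak above: for an elided lifetime in `&str`, the old
// code used the one-character span of the leading `&`; with `.shrink_to_hi()`
// the synthesized `'_` identifier gets a zero-width span sitting just after
// that `&`, which is where an explicit lifetime would be written.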
- TyKind::BareFn(ref f) => {
+ TyKind::BareFn(f) => {
let generic_params = self.lower_lifetime_binder(t.id, &f.generic_params);
hir::TyKind::BareFn(self.arena.alloc(hir::BareFnTy {
generic_params,
unsafety: self.lower_unsafety(f.unsafety),
abi: self.lower_extern(f.ext),
- decl: self.lower_fn_decl(&f.decl, None, t.span, FnDeclKind::Pointer, None),
+ decl: self.lower_fn_decl(&f.decl, t.id, t.span, FnDeclKind::Pointer, None),
param_names: self.lower_fn_params_to_names(&f.decl),
}))
}
TyKind::Never => hir::TyKind::Never,
- TyKind::Tup(ref tys) => hir::TyKind::Tup(
+ TyKind::Tup(tys) => hir::TyKind::Tup(
self.arena.alloc_from_iter(tys.iter().map(|ty| self.lower_ty_direct(ty, itctx))),
),
- TyKind::Paren(ref ty) => {
+ TyKind::Paren(ty) => {
return self.lower_ty_direct(ty, itctx);
}
- TyKind::Path(ref qself, ref path) => {
+ TyKind::Path(qself, path) => {
return self.lower_path_ty(t, qself, path, ParamMode::Explicit, itctx);
}
TyKind::ImplicitSelf => {
@@ -1304,48 +1305,46 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> {
}),
))
}
- TyKind::Array(ref ty, ref length) => {
+ TyKind::Array(ty, length) => {
hir::TyKind::Array(self.lower_ty(ty, itctx), self.lower_array_length(length))
}
- TyKind::Typeof(ref expr) => hir::TyKind::Typeof(self.lower_anon_const(expr)),
- TyKind::TraitObject(ref bounds, kind) => {
+ TyKind::Typeof(expr) => hir::TyKind::Typeof(self.lower_anon_const(expr)),
+ TyKind::TraitObject(bounds, kind) => {
let mut lifetime_bound = None;
let (bounds, lifetime_bound) = self.with_dyn_type_scope(true, |this| {
let bounds =
- this.arena.alloc_from_iter(bounds.iter().filter_map(
- |bound| match *bound {
- GenericBound::Trait(
- ref ty,
- TraitBoundModifier::None | TraitBoundModifier::MaybeConst,
- ) => Some(this.lower_poly_trait_ref(ty, itctx)),
- // `~const ?Bound` will cause an error during AST validation
- // anyways, so treat it like `?Bound` as compilation proceeds.
- GenericBound::Trait(
- _,
- TraitBoundModifier::Maybe | TraitBoundModifier::MaybeConstMaybe,
- ) => None,
- GenericBound::Outlives(ref lifetime) => {
- if lifetime_bound.is_none() {
- lifetime_bound = Some(this.lower_lifetime(lifetime));
- }
- None
+ this.arena.alloc_from_iter(bounds.iter().filter_map(|bound| match bound {
+ GenericBound::Trait(
+ ty,
+ TraitBoundModifier::None | TraitBoundModifier::MaybeConst,
+ ) => Some(this.lower_poly_trait_ref(ty, itctx)),
+ // `~const ?Bound` will cause an error during AST validation
+ // anyways, so treat it like `?Bound` as compilation proceeds.
+ GenericBound::Trait(
+ _,
+ TraitBoundModifier::Maybe | TraitBoundModifier::MaybeConstMaybe,
+ ) => None,
+ GenericBound::Outlives(lifetime) => {
+ if lifetime_bound.is_none() {
+ lifetime_bound = Some(this.lower_lifetime(lifetime));
}
- },
- ));
+ None
+ }
+ }));
let lifetime_bound =
lifetime_bound.unwrap_or_else(|| this.elided_dyn_bound(t.span));
(bounds, lifetime_bound)
});
- hir::TyKind::TraitObject(bounds, lifetime_bound, kind)
+ hir::TyKind::TraitObject(bounds, lifetime_bound, *kind)
}
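// Example of the filtering above (sketch): a bound written as `~const ?Trait`
// (`TraitBoundModifier::MaybeConstMaybe`) is dropped here exactly like a plain
// `?Trait`; AST validation reports the real error for it, so lowering only has
// to build a well-formed trait object from the remaining bounds.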
- TyKind::ImplTrait(def_node_id, ref bounds) => {
+ TyKind::ImplTrait(def_node_id, bounds) => {
let span = t.span;
match itctx {
ImplTraitContext::ReturnPositionOpaqueTy { origin, in_trait } => self
.lower_opaque_impl_trait(
span,
*origin,
- def_node_id,
+ *def_node_id,
bounds,
*in_trait,
itctx,
@@ -1353,38 +1352,37 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> {
ImplTraitContext::TypeAliasesOpaqueTy => self.lower_opaque_impl_trait(
span,
hir::OpaqueTyOrigin::TyAlias,
- def_node_id,
+ *def_node_id,
bounds,
false,
itctx,
),
ImplTraitContext::Universal => {
+ let span = t.span;
self.create_def(
self.current_hir_id_owner.def_id,
- def_node_id,
+ *def_node_id,
DefPathData::ImplTrait,
+ span,
);
- let span = t.span;
let ident = Ident::from_str_and_span(&pprust::ty_to_string(t), span);
let (param, bounds, path) =
- self.lower_generic_and_bounds(def_node_id, span, ident, bounds);
+ self.lower_generic_and_bounds(*def_node_id, span, ident, bounds);
self.impl_trait_defs.push(param);
if let Some(bounds) = bounds {
self.impl_trait_bounds.push(bounds);
}
path
}
- ImplTraitContext::Disallowed(
- position @ (ImplTraitPosition::TraitReturn | ImplTraitPosition::ImplReturn),
- ) => {
+ ImplTraitContext::FeatureGated(position, feature) => {
self.tcx
.sess
.create_feature_err(
MisplacedImplTrait {
span: t.span,
- position: DiagnosticArgFromDisplay(&position),
+ position: DiagnosticArgFromDisplay(position),
},
- sym::return_position_impl_trait_in_trait,
+ *feature,
)
.emit();
hir::TyKind::Err
@@ -1392,7 +1390,7 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> {
ImplTraitContext::Disallowed(position) => {
self.tcx.sess.emit_err(MisplacedImplTrait {
span: t.span,
- position: DiagnosticArgFromDisplay(&position),
+ position: DiagnosticArgFromDisplay(position),
});
hir::TyKind::Err
}
@@ -1457,17 +1455,12 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> {
// frequently opened issues show.
let opaque_ty_span = self.mark_span_with_reason(DesugaringKind::OpaqueTy, span, None);
- let opaque_ty_def_id = match origin {
- hir::OpaqueTyOrigin::TyAlias => self.create_def(
- self.current_hir_id_owner.def_id,
- opaque_ty_node_id,
- DefPathData::ImplTrait,
- ),
- hir::OpaqueTyOrigin::FnReturn(fn_def_id) => {
- self.create_def(fn_def_id, opaque_ty_node_id, DefPathData::ImplTrait)
- }
- hir::OpaqueTyOrigin::AsyncFn(..) => bug!("unreachable"),
- };
+ let opaque_ty_def_id = self.create_def(
+ self.current_hir_id_owner.def_id,
+ opaque_ty_node_id,
+ DefPathData::ImplTrait,
+ opaque_ty_span,
+ );
debug!(?opaque_ty_def_id);
// Contains the new lifetime definitions created for the TAIT (if any).
@@ -1521,6 +1514,7 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> {
hir::GenericParam {
hir_id,
+ def_id: lctx.local_def_id(new_node_id),
name,
span: lifetime.ident.span,
pure_wrt_drop: false,
@@ -1559,15 +1553,7 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> {
let lifetimes =
self.arena.alloc_from_iter(collected_lifetimes.into_iter().map(|(_, lifetime)| {
let id = self.next_node_id();
- let span = lifetime.ident.span;
-
- let ident = if lifetime.ident.name == kw::UnderscoreLifetime {
- Ident::with_dummy_span(kw::UnderscoreLifetime)
- } else {
- lifetime.ident
- };
-
- let l = self.new_named_lifetime(lifetime.id, id, span, ident);
+ let l = self.new_named_lifetime(lifetime.id, id, lifetime.ident);
hir::GenericArg::Lifetime(l)
}));
debug!(?lifetimes);
@@ -1627,6 +1613,7 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> {
parent_def_id,
node_id,
DefPathData::LifetimeNs(lifetime.ident.name),
+ lifetime.ident.span,
);
remapping.insert(old_def_id, new_def_id);
@@ -1643,6 +1630,7 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> {
parent_def_id,
node_id,
DefPathData::LifetimeNs(kw::UnderscoreLifetime),
+ lifetime.ident.span,
);
remapping.insert(old_def_id, new_def_id);
@@ -1692,7 +1680,7 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> {
fn lower_fn_decl(
&mut self,
decl: &FnDecl,
- fn_node_id: Option<NodeId>,
+ fn_node_id: NodeId,
fn_span: Span,
kind: FnDeclKind,
make_ret_async: Option<(NodeId, Span)>,
@@ -1707,23 +1695,21 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> {
inputs = &inputs[..inputs.len() - 1];
}
let inputs = self.arena.alloc_from_iter(inputs.iter().map(|param| {
- if fn_node_id.is_some() {
- self.lower_ty_direct(&param.ty, &ImplTraitContext::Universal)
+ let itctx = if kind.param_impl_trait_allowed() {
+ ImplTraitContext::Universal
} else {
- self.lower_ty_direct(
- &param.ty,
- &ImplTraitContext::Disallowed(match kind {
- FnDeclKind::Fn | FnDeclKind::Inherent => {
- unreachable!("fn should allow in-band lifetimes")
- }
- FnDeclKind::ExternFn => ImplTraitPosition::ExternFnParam,
- FnDeclKind::Closure => ImplTraitPosition::ClosureParam,
- FnDeclKind::Pointer => ImplTraitPosition::PointerParam,
- FnDeclKind::Trait => ImplTraitPosition::TraitParam,
- FnDeclKind::Impl => ImplTraitPosition::ImplParam,
- }),
- )
- }
+ ImplTraitContext::Disallowed(match kind {
+ FnDeclKind::Fn | FnDeclKind::Inherent => {
+ unreachable!("fn should allow APIT")
+ }
+ FnDeclKind::ExternFn => ImplTraitPosition::ExternFnParam,
+ FnDeclKind::Closure => ImplTraitPosition::ClosureParam,
+ FnDeclKind::Pointer => ImplTraitPosition::PointerParam,
+ FnDeclKind::Trait => ImplTraitPosition::TraitParam,
+ FnDeclKind::Impl => ImplTraitPosition::ImplParam,
+ })
+ };
+ self.lower_ty_direct(&param.ty, &itctx)
}));
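// Illustrative consequence (sketch): in a free function, inherent method, or
// trait/impl method, a parameter like `x: impl Display` lowers under
// `ImplTraitContext::Universal` (argument-position impl Trait); the same
// parameter in a function pointer type such as `fn(impl Display)`, in a
// closure, or in an `extern` block takes the `Disallowed` path above and is
// rejected with the matching `ImplTraitPosition`.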
let output = if let Some((ret_id, span)) = make_ret_async {
@@ -1746,22 +1732,21 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> {
self.lower_async_fn_ret_ty(
&decl.output,
- fn_node_id.expect("`make_ret_async` but no `fn_def_id`"),
+ fn_node_id,
ret_id,
matches!(kind, FnDeclKind::Trait),
)
} else {
- match decl.output {
- FnRetTy::Ty(ref ty) => {
- let mut context = match fn_node_id {
- Some(fn_node_id) if kind.impl_trait_allowed(self.tcx) => {
- let fn_def_id = self.local_def_id(fn_node_id);
- ImplTraitContext::ReturnPositionOpaqueTy {
- origin: hir::OpaqueTyOrigin::FnReturn(fn_def_id),
- in_trait: matches!(kind, FnDeclKind::Trait),
- }
+ match &decl.output {
+ FnRetTy::Ty(ty) => {
+ let context = if kind.return_impl_trait_allowed(self.tcx) {
+ let fn_def_id = self.local_def_id(fn_node_id);
+ ImplTraitContext::ReturnPositionOpaqueTy {
+ origin: hir::OpaqueTyOrigin::FnReturn(fn_def_id),
+ in_trait: matches!(kind, FnDeclKind::Trait),
}
- _ => ImplTraitContext::Disallowed(match kind {
+ } else {
+ let position = match kind {
FnDeclKind::Fn | FnDeclKind::Inherent => {
unreachable!("fn should allow in-band lifetimes")
}
@@ -1770,11 +1755,18 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> {
FnDeclKind::Pointer => ImplTraitPosition::PointerReturn,
FnDeclKind::Trait => ImplTraitPosition::TraitReturn,
FnDeclKind::Impl => ImplTraitPosition::ImplReturn,
- }),
+ };
+ match kind {
+ FnDeclKind::Trait | FnDeclKind::Impl => ImplTraitContext::FeatureGated(
+ position,
+ sym::return_position_impl_trait_in_trait,
+ ),
+ _ => ImplTraitContext::Disallowed(position),
+ }
};
- hir::FnRetTy::Return(self.lower_ty(ty, &mut context))
+ hir::FnRetTy::Return(self.lower_ty(ty, &context))
}
- FnRetTy::Default(span) => hir::FnRetTy::DefaultReturn(self.lower_span(span)),
+ FnRetTy::Default(span) => hir::FnRetTy::DefaultReturn(self.lower_span(*span)),
}
};
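// Illustrative consequence of the branch above (sketch): without
// `#![feature(return_position_impl_trait_in_trait)]`, a trait or trait-impl
// method such as `fn make() -> impl Debug` gets a feature-gate error through
// `ImplTraitContext::FeatureGated`; this replaces the old special-casing of
// `Disallowed(TraitReturn | ImplReturn)` removed further up in this file.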
@@ -1782,26 +1774,23 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> {
inputs,
output,
c_variadic,
+ lifetime_elision_allowed: self.resolver.lifetime_elision_allowed.contains(&fn_node_id),
implicit_self: decl.inputs.get(0).map_or(hir::ImplicitSelfKind::None, |arg| {
let is_mutable_pat = matches!(
arg.pat.kind,
PatKind::Ident(hir::BindingAnnotation(_, Mutability::Mut), ..)
);
- match arg.ty.kind {
+ match &arg.ty.kind {
TyKind::ImplicitSelf if is_mutable_pat => hir::ImplicitSelfKind::Mut,
TyKind::ImplicitSelf => hir::ImplicitSelfKind::Imm,
// Given we are only considering `ImplicitSelf` types, we needn't consider
// the case where we have a mutable pattern to a reference as that would
// no longer be an `ImplicitSelf`.
- TyKind::Rptr(_, ref mt)
- if mt.ty.kind.is_implicit_self() && mt.mutbl == ast::Mutability::Mut =>
- {
- hir::ImplicitSelfKind::MutRef
- }
- TyKind::Rptr(_, ref mt) if mt.ty.kind.is_implicit_self() => {
- hir::ImplicitSelfKind::ImmRef
- }
+ TyKind::Rptr(_, mt) if mt.ty.kind.is_implicit_self() => match mt.mutbl {
+ hir::Mutability::Not => hir::ImplicitSelfKind::ImmRef,
+ hir::Mutability::Mut => hir::ImplicitSelfKind::MutRef,
+ },
_ => hir::ImplicitSelfKind::None,
}
}),
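// For reference (sketch), the match above maps receivers as follows:
// `self` -> ImplicitSelfKind::Imm, `mut self` -> Mut, `&self` -> ImmRef,
// `&mut self` -> MutRef, and receivers with an explicit type
// (e.g. `self: Box<Self>`) -> ImplicitSelfKind::None.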
@@ -1828,9 +1817,11 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> {
let opaque_ty_span = self.mark_span_with_reason(DesugaringKind::Async, span, None);
- let opaque_ty_def_id = self.local_def_id(opaque_ty_node_id);
let fn_def_id = self.local_def_id(fn_node_id);
+ let opaque_ty_def_id =
+ self.create_def(fn_def_id, opaque_ty_node_id, DefPathData::ImplTrait, opaque_ty_span);
+
// When we create the opaque type for this async fn, it is going to have
// to capture all the lifetimes involved in the signature (including in the
// return type). This is done by introducing lifetime parameters for:
@@ -1889,6 +1880,7 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> {
opaque_ty_def_id,
inner_node_id,
DefPathData::LifetimeNs(ident.name),
+ ident.span,
);
new_remapping.insert(outer_def_id, inner_def_id);
@@ -1953,7 +1945,10 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> {
output,
span,
if in_trait && !this.tcx.features().return_position_impl_trait_in_trait {
- ImplTraitContext::Disallowed(ImplTraitPosition::TraitReturn)
+ ImplTraitContext::FeatureGated(
+ ImplTraitPosition::TraitReturn,
+ sym::return_position_impl_trait_in_trait,
+ )
} else {
ImplTraitContext::ReturnPositionOpaqueTy {
origin: hir::OpaqueTyOrigin::FnReturn(fn_def_id),
@@ -1978,6 +1973,7 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> {
hir::GenericParam {
hir_id,
+ def_id: this.local_def_id(new_node_id),
name,
span: lifetime.ident.span,
pure_wrt_drop: false,
@@ -2024,18 +2020,10 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> {
let generic_args = self.arena.alloc_from_iter(collected_lifetimes.into_iter().map(
|(_, lifetime, res)| {
let id = self.next_node_id();
- let span = lifetime.ident.span;
-
- let ident = if lifetime.ident.name == kw::UnderscoreLifetime {
- Ident::with_dummy_span(kw::UnderscoreLifetime)
- } else {
- lifetime.ident
- };
-
let res = res.unwrap_or(
self.resolver.get_lifetime_res(lifetime.id).unwrap_or(LifetimeRes::Error),
);
- hir::GenericArg::Lifetime(self.new_named_lifetime_with_res(id, span, ident, res))
+ hir::GenericArg::Lifetime(self.new_named_lifetime_with_res(id, lifetime.ident, res))
},
));
@@ -2105,43 +2093,40 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> {
}
fn lower_lifetime(&mut self, l: &Lifetime) -> &'hir hir::Lifetime {
- let span = self.lower_span(l.ident.span);
let ident = self.lower_ident(l.ident);
- self.new_named_lifetime(l.id, l.id, span, ident)
+ self.new_named_lifetime(l.id, l.id, ident)
}
#[instrument(level = "debug", skip(self))]
fn new_named_lifetime_with_res(
&mut self,
id: NodeId,
- span: Span,
ident: Ident,
res: LifetimeRes,
) -> &'hir hir::Lifetime {
- let name = match res {
+ let res = match res {
LifetimeRes::Param { param, .. } => {
- let p_name = ParamName::Plain(ident);
let param = self.get_remapped_def_id(param);
-
- hir::LifetimeName::Param(param, p_name)
+ hir::LifetimeName::Param(param)
}
LifetimeRes::Fresh { param, .. } => {
- debug_assert_eq!(ident.name, kw::UnderscoreLifetime);
let param = self.local_def_id(param);
-
- hir::LifetimeName::Param(param, ParamName::Fresh)
+ hir::LifetimeName::Param(param)
}
LifetimeRes::Infer => hir::LifetimeName::Infer,
LifetimeRes::Static => hir::LifetimeName::Static,
LifetimeRes::Error => hir::LifetimeName::Error,
- res => panic!("Unexpected lifetime resolution {:?} for {:?} at {:?}", res, ident, span),
+ res => panic!(
+ "Unexpected lifetime resolution {:?} for {:?} at {:?}",
+ res, ident, ident.span
+ ),
};
- debug!(?name);
+ debug!(?res);
self.arena.alloc(hir::Lifetime {
hir_id: self.lower_node_id(id),
- span: self.lower_span(span),
- name,
+ ident: self.lower_ident(ident),
+ res,
})
}
@@ -2150,11 +2135,10 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> {
&mut self,
id: NodeId,
new_id: NodeId,
- span: Span,
ident: Ident,
) -> &'hir hir::Lifetime {
let res = self.resolver.get_lifetime_res(id).unwrap_or(LifetimeRes::Error);
- self.new_named_lifetime_with_res(new_id, span, ident, res)
+ self.new_named_lifetime_with_res(new_id, ident, res)
}
fn lower_generic_params_mut<'s>(
@@ -2176,6 +2160,7 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> {
self.lower_attrs(hir_id, &param.attrs);
hir::GenericParam {
hir_id,
+ def_id: self.local_def_id(param.id),
name,
span: self.lower_span(param.span()),
pure_wrt_drop: self.tcx.sess.contains_name(&param.attrs, sym::may_dangle),
@@ -2188,7 +2173,7 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> {
&mut self,
param: &GenericParam,
) -> (hir::ParamName, hir::GenericParamKind<'hir>) {
- match param.kind {
+ match &param.kind {
GenericParamKind::Lifetime => {
// AST resolution emitted an error on those parameters, so we lower them using
// `ParamName::Error`.
@@ -2204,7 +2189,7 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> {
(param_name, kind)
}
- GenericParamKind::Type { ref default, .. } => {
+ GenericParamKind::Type { default, .. } => {
let kind = hir::GenericParamKind::Type {
default: default.as_ref().map(|x| {
self.lower_ty(x, &ImplTraitContext::Disallowed(ImplTraitPosition::Type))
@@ -2214,7 +2199,7 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> {
(hir::ParamName::Plain(self.lower_ident(param.ident)), kind)
}
- GenericParamKind::Const { ref ty, kw_span: _, ref default } => {
+ GenericParamKind::Const { ty, kw_span: _, default } => {
let ty = self.lower_ty(&ty, &ImplTraitContext::Disallowed(ImplTraitPosition::Type));
let default = default.as_ref().map(|def| self.lower_anon_const(def));
(
@@ -2280,6 +2265,7 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> {
// Set the name to `impl Bound1 + Bound2`.
let param = hir::GenericParam {
hir_id: self.lower_node_id(node_id),
+ def_id,
name: ParamName::Plain(self.lower_ident(ident)),
pure_wrt_drop: false,
span: self.lower_span(span),
@@ -2315,7 +2301,7 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> {
/// has no attributes and is not targeted by a `break`.
fn lower_block_expr(&mut self, b: &Block) -> hir::Expr<'hir> {
let block = self.lower_block(b, false);
- self.expr_block(block, AttrVec::new())
+ self.expr_block(block)
}
fn lower_array_length(&mut self, c: &AnonConst) -> hir::ArrayLen {
@@ -2340,6 +2326,7 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> {
fn lower_anon_const(&mut self, c: &AnonConst) -> hir::AnonConst {
self.with_new_scopes(|this| hir::AnonConst {
+ def_id: this.local_def_id(c.id),
hir_id: this.lower_node_id(c.id),
body: this.lower_const_body(c.value.span, Some(&c.value)),
})
@@ -2563,8 +2550,8 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> {
fn elided_dyn_bound(&mut self, span: Span) -> &'hir hir::Lifetime {
let r = hir::Lifetime {
hir_id: self.next_id(),
- span: self.lower_span(span),
- name: hir::LifetimeName::ImplicitObjectLifetimeDefault,
+ ident: Ident::new(kw::Empty, self.lower_span(span)),
+ res: hir::LifetimeName::ImplicitObjectLifetimeDefault,
};
debug!("elided_dyn_bound: r={:?}", r);
self.arena.alloc(r)
diff --git a/compiler/rustc_ast_lowering/src/pat.rs b/compiler/rustc_ast_lowering/src/pat.rs
index 1af1633b5..16b012630 100644
--- a/compiler/rustc_ast_lowering/src/pat.rs
+++ b/compiler/rustc_ast_lowering/src/pat.rs
@@ -22,16 +22,16 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> {
ensure_sufficient_stack(|| {
// loop here to avoid recursion
let node = loop {
- match pattern.kind {
+ match &pattern.kind {
PatKind::Wild => break hir::PatKind::Wild,
- PatKind::Ident(binding_mode, ident, ref sub) => {
- let lower_sub = |this: &mut Self| sub.as_ref().map(|s| this.lower_pat(&*s));
- break self.lower_pat_ident(pattern, binding_mode, ident, lower_sub);
+ PatKind::Ident(binding_mode, ident, sub) => {
+ let lower_sub = |this: &mut Self| sub.as_ref().map(|s| this.lower_pat(s));
+ break self.lower_pat_ident(pattern, *binding_mode, *ident, lower_sub);
}
- PatKind::Lit(ref e) => {
+ PatKind::Lit(e) => {
break hir::PatKind::Lit(self.lower_expr_within_pat(e, false));
}
- PatKind::TupleStruct(ref qself, ref path, ref pats) => {
+ PatKind::TupleStruct(qself, path, pats) => {
let qpath = self.lower_qpath(
pattern.id,
qself,
@@ -42,12 +42,12 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> {
let (pats, ddpos) = self.lower_pat_tuple(pats, "tuple struct");
break hir::PatKind::TupleStruct(qpath, pats, ddpos);
}
- PatKind::Or(ref pats) => {
+ PatKind::Or(pats) => {
break hir::PatKind::Or(
self.arena.alloc_from_iter(pats.iter().map(|x| self.lower_pat_mut(x))),
);
}
- PatKind::Path(ref qself, ref path) => {
+ PatKind::Path(qself, path) => {
let qpath = self.lower_qpath(
pattern.id,
qself,
@@ -57,7 +57,7 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> {
);
break hir::PatKind::Path(qpath);
}
- PatKind::Struct(ref qself, ref path, ref fields, etc) => {
+ PatKind::Struct(qself, path, fields, etc) => {
let qpath = self.lower_qpath(
pattern.id,
qself,
@@ -78,32 +78,32 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> {
span: self.lower_span(f.span),
}
}));
- break hir::PatKind::Struct(qpath, fs, etc);
+ break hir::PatKind::Struct(qpath, fs, *etc);
}
- PatKind::Tuple(ref pats) => {
+ PatKind::Tuple(pats) => {
let (pats, ddpos) = self.lower_pat_tuple(pats, "tuple");
break hir::PatKind::Tuple(pats, ddpos);
}
- PatKind::Box(ref inner) => {
+ PatKind::Box(inner) => {
break hir::PatKind::Box(self.lower_pat(inner));
}
- PatKind::Ref(ref inner, mutbl) => {
- break hir::PatKind::Ref(self.lower_pat(inner), mutbl);
+ PatKind::Ref(inner, mutbl) => {
+ break hir::PatKind::Ref(self.lower_pat(inner), *mutbl);
}
- PatKind::Range(ref e1, ref e2, Spanned { node: ref end, .. }) => {
+ PatKind::Range(e1, e2, Spanned { node: end, .. }) => {
break hir::PatKind::Range(
e1.as_deref().map(|e| self.lower_expr_within_pat(e, true)),
e2.as_deref().map(|e| self.lower_expr_within_pat(e, true)),
self.lower_range_end(end, e2.is_some()),
);
}
- PatKind::Slice(ref pats) => break self.lower_pat_slice(pats),
+ PatKind::Slice(pats) => break self.lower_pat_slice(pats),
PatKind::Rest => {
// If we reach here the `..` pattern is not semantically allowed.
break self.ban_illegal_rest_pat(pattern.span);
}
// return inner to be processed in next loop
- PatKind::Paren(ref inner) => pattern = inner,
+ PatKind::Paren(inner) => pattern = inner,
PatKind::MacCall(_) => panic!("{:?} shouldn't exist here", pattern.span),
}
};
@@ -126,7 +126,7 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> {
// Note that unlike for slice patterns,
// where `xs @ ..` is a legal sub-slice pattern,
// it is not a legal sub-tuple pattern.
- match pat.kind {
+ match &pat.kind {
// Found a sub-tuple rest pattern
PatKind::Rest => {
rest = Some((idx, pat.span));
@@ -134,12 +134,12 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> {
}
// Found a sub-tuple pattern `$binding_mode $ident @ ..`.
// This is not allowed as a sub-tuple pattern
- PatKind::Ident(ref _bm, ident, Some(ref sub)) if sub.is_rest() => {
+ PatKind::Ident(_, ident, Some(sub)) if sub.is_rest() => {
let sp = pat.span;
self.tcx.sess.emit_err(SubTupleBinding {
span: sp,
ident_name: ident.name,
- ident,
+ ident: *ident,
ctx,
});
}
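// Concrete case (sketch): `let (a, rest @ ..) = tup;` hits the arm above and
// is rejected with `SubTupleBinding`, while the slice form
// `let [a, rest @ ..] = arr;` is legal and handled by `lower_pat_slice`.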
@@ -176,7 +176,7 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> {
let mut prev_rest_span = None;
// Lowers `$bm $ident @ ..` to `$bm $ident @ _`.
- let lower_rest_sub = |this: &mut Self, pat, ann, ident, sub| {
+ let lower_rest_sub = |this: &mut Self, pat, &ann, &ident, sub| {
let lower_sub = |this: &mut Self| Some(this.pat_wild_with_node_id_of(sub));
let node = this.lower_pat_ident(pat, ann, ident, lower_sub);
this.pat_with_node_id_of(pat, node)
@@ -185,7 +185,7 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> {
let mut iter = pats.iter();
// Lower all the patterns until the first occurrence of a sub-slice pattern.
for pat in iter.by_ref() {
- match pat.kind {
+ match &pat.kind {
// Found a sub-slice pattern `..`. Record, lower it to `_`, and stop here.
PatKind::Rest => {
prev_rest_span = Some(pat.span);
@@ -194,7 +194,7 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> {
}
// Found a sub-slice pattern `$binding_mode $ident @ ..`.
// Record, lower it to `$binding_mode $ident @ _`, and stop here.
- PatKind::Ident(ann, ident, Some(ref sub)) if sub.is_rest() => {
+ PatKind::Ident(ann, ident, Some(sub)) if sub.is_rest() => {
prev_rest_span = Some(sub.span);
slice = Some(self.arena.alloc(lower_rest_sub(self, pat, ann, ident, sub)));
break;
@@ -207,9 +207,9 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> {
// Lower all the patterns after the first sub-slice pattern.
for pat in iter {
// There was a previous subslice pattern; make sure we don't allow more.
- let rest_span = match pat.kind {
+ let rest_span = match &pat.kind {
PatKind::Rest => Some(pat.span),
- PatKind::Ident(ann, ident, Some(ref sub)) if sub.is_rest() => {
+ PatKind::Ident(ann, ident, Some(sub)) if sub.is_rest() => {
// #69103: Lower into `binding @ _` as above to avoid ICEs.
after.push(lower_rest_sub(self, pat, ann, ident, sub));
Some(sub.span)
@@ -322,10 +322,13 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> {
// m!(S);
// ```
fn lower_expr_within_pat(&mut self, expr: &Expr, allow_paths: bool) -> &'hir hir::Expr<'hir> {
- match expr.kind {
- ExprKind::Lit(..) | ExprKind::ConstBlock(..) | ExprKind::Err => {}
+ match &expr.kind {
+ ExprKind::Lit(..)
+ | ExprKind::ConstBlock(..)
+ | ExprKind::IncludedBytes(..)
+ | ExprKind::Err => {}
ExprKind::Path(..) if allow_paths => {}
- ExprKind::Unary(UnOp::Neg, ref inner) if matches!(inner.kind, ExprKind::Lit(_)) => {}
+ ExprKind::Unary(UnOp::Neg, inner) if matches!(inner.kind, ExprKind::Lit(_)) => {}
_ => {
self.tcx.sess.emit_err(ArbitraryExpressionInPattern { span: expr.span });
return self.arena.alloc(self.expr_err(expr.span));
diff --git a/compiler/rustc_ast_lowering/src/path.rs b/compiler/rustc_ast_lowering/src/path.rs
index 888776ccc..592fc5aa6 100644
--- a/compiler/rustc_ast_lowering/src/path.rs
+++ b/compiler/rustc_ast_lowering/src/path.rs
@@ -9,17 +9,17 @@ use rustc_ast::{self as ast, *};
use rustc_hir as hir;
use rustc_hir::def::{DefKind, PartialRes, Res};
use rustc_hir::GenericArg;
-use rustc_span::symbol::{kw, Ident};
+use rustc_span::symbol::{kw, sym, Ident};
use rustc_span::{BytePos, Span, DUMMY_SP};
-use smallvec::smallvec;
+use smallvec::{smallvec, SmallVec};
impl<'a, 'hir> LoweringContext<'a, 'hir> {
#[instrument(level = "trace", skip(self))]
pub(crate) fn lower_qpath(
&mut self,
id: NodeId,
- qself: &Option<QSelf>,
+ qself: &Option<ptr::P<QSelf>>,
p: &Path,
param_mode: ParamMode,
itctx: &ImplTraitContext,
@@ -144,13 +144,13 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> {
);
}
- pub(crate) fn lower_path_extra(
+ pub(crate) fn lower_use_path(
&mut self,
- res: Res,
+ res: SmallVec<[Res; 3]>,
p: &Path,
param_mode: ParamMode,
- ) -> &'hir hir::Path<'hir> {
- self.arena.alloc(hir::Path {
+ ) -> &'hir hir::UsePath<'hir> {
+ self.arena.alloc(hir::UsePath {
res,
segments: self.arena.alloc_from_iter(p.segments.iter().map(|segment| {
self.lower_path_segment(
@@ -165,17 +165,6 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> {
})
}
- pub(crate) fn lower_path(
- &mut self,
- id: NodeId,
- p: &Path,
- param_mode: ParamMode,
- ) -> &'hir hir::Path<'hir> {
- let res = self.expect_full_res(id);
- let res = self.lower_res(res);
- self.lower_path_extra(res, p, param_mode)
- }
-
pub(crate) fn lower_path_segment(
&mut self,
path_span: Span,
@@ -185,13 +174,15 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> {
itctx: &ImplTraitContext,
) -> hir::PathSegment<'hir> {
debug!("path_span: {:?}, lower_path_segment(segment: {:?})", path_span, segment,);
- let (mut generic_args, infer_args) = if let Some(ref generic_args) = segment.args {
- match **generic_args {
- GenericArgs::AngleBracketed(ref data) => {
+ let (mut generic_args, infer_args) = if let Some(generic_args) = segment.args.as_deref() {
+ match generic_args {
+ GenericArgs::AngleBracketed(data) => {
self.lower_angle_bracketed_parameter_data(data, param_mode, itctx)
}
- GenericArgs::Parenthesized(ref data) => match parenthesized_generic_args {
- ParenthesizedGenericArgs::Ok => self.lower_parenthesized_parameter_data(data),
+ GenericArgs::Parenthesized(data) => match parenthesized_generic_args {
+ ParenthesizedGenericArgs::Ok => {
+ self.lower_parenthesized_parameter_data(data, itctx)
+ }
ParenthesizedGenericArgs::Err => {
// Suggest replacing parentheses with angle brackets `Trait(params...)` to `Trait<params...>`
let sub = if !data.inputs.is_empty() {
@@ -307,7 +298,7 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> {
let id = NodeId::from_u32(i);
let l = self.lower_lifetime(&Lifetime {
id,
- ident: Ident::new(kw::UnderscoreLifetime, elided_lifetime_span),
+ ident: Ident::new(kw::Empty, elided_lifetime_span),
});
GenericArg::Lifetime(l)
}),
@@ -344,6 +335,7 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> {
fn lower_parenthesized_parameter_data(
&mut self,
data: &ParenthesizedArgs,
+ itctx: &ImplTraitContext,
) -> (GenericArgsCtor<'hir>, bool) {
// Switch to `PassThrough` mode for anonymous lifetimes; this
// means that we permit things like `&Ref<T>`, where `Ref` has
@@ -355,6 +347,24 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> {
self.lower_ty_direct(ty, &ImplTraitContext::Disallowed(ImplTraitPosition::FnTraitParam))
}));
let output_ty = match output {
+ // Only allow `impl Trait` in return position. i.e.:
+ // ```rust
+ // fn f(_: impl Fn() -> impl Debug) -> impl Fn() -> impl Debug
+ // // disallowed --^^^^^^^^^^ allowed --^^^^^^^^^^
+ // ```
+ FnRetTy::Ty(ty) if matches!(itctx, ImplTraitContext::ReturnPositionOpaqueTy { .. }) => {
+ if self.tcx.features().impl_trait_in_fn_trait_return {
+ self.lower_ty(&ty, itctx)
+ } else {
+ self.lower_ty(
+ &ty,
+ &ImplTraitContext::FeatureGated(
+ ImplTraitPosition::FnTraitReturn,
+ sym::impl_trait_in_fn_trait_return,
+ ),
+ )
+ }
+ }
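// Usage sketch: with `#![feature(impl_trait_in_fn_trait_return)]`, a bound
// like `impl Fn() -> impl Debug` in return position lowers the inner
// `impl Debug` as an opaque type; without the feature, the same code takes the
// `FeatureGated` branch above and reports a gated `MisplacedImplTrait` error,
// matching the allowed/disallowed split sketched in the comment at the top of
// this match.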
FnRetTy::Ty(ty) => {
self.lower_ty(&ty, &ImplTraitContext::Disallowed(ImplTraitPosition::FnTraitReturn))
}
diff --git a/compiler/rustc_ast_passes/src/ast_validation.rs b/compiler/rustc_ast_passes/src/ast_validation.rs
index 036643244..eb9c841d8 100644
--- a/compiler/rustc_ast_passes/src/ast_validation.rs
+++ b/compiler/rustc_ast_passes/src/ast_validation.rs
@@ -209,7 +209,7 @@ impl<'a> AstValidator<'a> {
// Mirrors `visit::walk_ty`, but tracks relevant state.
fn walk_ty(&mut self, t: &'a Ty) {
- match t.kind {
+ match &t.kind {
TyKind::ImplTrait(..) => {
self.with_impl_trait(Some(t.span), |this| visit::walk_ty(this, t))
}
@@ -217,7 +217,7 @@ impl<'a> AstValidator<'a> {
.with_banned_tilde_const(DisallowTildeConstContext::TraitObject, |this| {
visit::walk_ty(this, t)
}),
- TyKind::Path(ref qself, ref path) => {
+ TyKind::Path(qself, path) => {
// We allow these:
// - `Option<impl Trait>`
// - `option::Option<impl Trait>`
@@ -231,7 +231,7 @@ impl<'a> AstValidator<'a> {
// (for cases like `<impl Trait>::Foo`)
// but we allow `impl Trait` in `GenericArgs`
// iff there are no more PathSegments.
- if let Some(ref qself) = *qself {
+ if let Some(qself) = qself {
// `impl Trait` in `qself` is always illegal
self.with_banned_impl_trait(|this| this.visit_ty(&qself.ty));
}
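// Concrete case (sketch): in `<impl Trait as Foo>::Out`, the `impl Trait`
// inside the qualified self type is visited with impl-trait banned and gets
// rejected, while `Option<impl Trait>` stays accepted because, per the
// comments above, only the final path segment's generic arguments remain
// permissive.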
@@ -738,8 +738,8 @@ impl<'a> AstValidator<'a> {
}
fn visit_ty_common(&mut self, ty: &'a Ty) {
- match ty.kind {
- TyKind::BareFn(ref bfty) => {
+ match &ty.kind {
+ TyKind::BareFn(bfty) => {
self.check_fn_decl(&bfty.decl, SelfSemantic::No);
Self::check_decl_no_pat(&bfty.decl, |span, _, _| {
struct_span_err!(
@@ -756,10 +756,10 @@ impl<'a> AstValidator<'a> {
self.maybe_lint_missing_abi(sig_span, ty.id);
}
}
- TyKind::TraitObject(ref bounds, ..) => {
+ TyKind::TraitObject(bounds, ..) => {
let mut any_lifetime_bounds = false;
for bound in bounds {
- if let GenericBound::Outlives(ref lifetime) = *bound {
+ if let GenericBound::Outlives(lifetime) = bound {
if any_lifetime_bounds {
struct_span_err!(
self.session,
@@ -774,7 +774,7 @@ impl<'a> AstValidator<'a> {
}
}
}
- TyKind::ImplTrait(_, ref bounds) => {
+ TyKind::ImplTrait(_, bounds) => {
if self.is_impl_trait_banned {
struct_span_err!(
self.session,
@@ -842,8 +842,8 @@ fn validate_generic_param_order(
let (kind, bounds, span) = (&param.kind, &param.bounds, ident.span);
let (ord_kind, ident) = match &param.kind {
GenericParamKind::Lifetime => (ParamKindOrd::Lifetime, ident.to_string()),
- GenericParamKind::Type { default: _ } => (ParamKindOrd::TypeOrConst, ident.to_string()),
- GenericParamKind::Const { ref ty, kw_span: _, default: _ } => {
+ GenericParamKind::Type { .. } => (ParamKindOrd::TypeOrConst, ident.to_string()),
+ GenericParamKind::Const { ty, .. } => {
let ty = pprust::ty_to_string(ty);
(ParamKindOrd::TypeOrConst, format!("const {}: {}", ident, ty))
}
@@ -912,7 +912,7 @@ fn validate_generic_param_order(
impl<'a> Visitor<'a> for AstValidator<'a> {
fn visit_attribute(&mut self, attr: &Attribute) {
- validate_attr::check_meta(&self.session.parse_sess, attr);
+ validate_attr::check_attr(&self.session.parse_sess, attr);
}
fn visit_expr(&mut self, expr: &'a Expr) {
@@ -948,8 +948,8 @@ impl<'a> Visitor<'a> for AstValidator<'a> {
}
ExprKind::Paren(local_expr) => {
fn has_let_expr(expr: &Expr) -> bool {
- match expr.kind {
- ExprKind::Binary(_, ref lhs, ref rhs) => has_let_expr(lhs) || has_let_expr(rhs),
+ match &expr.kind {
+ ExprKind::Binary(_, lhs, rhs) => has_let_expr(lhs) || has_let_expr(rhs),
ExprKind::Let(..) => true,
_ => false,
}
@@ -1005,18 +1005,18 @@ impl<'a> Visitor<'a> for AstValidator<'a> {
self.check_nomangle_item_asciionly(item.ident, item.span);
}
- match item.kind {
+ match &item.kind {
ItemKind::Impl(box Impl {
unsafety,
polarity,
defaultness: _,
constness,
- ref generics,
- of_trait: Some(ref t),
- ref self_ty,
- ref items,
+ generics,
+ of_trait: Some(t),
+ self_ty,
+ items,
}) => {
- self.with_in_trait_impl(true, Some(constness), |this| {
+ self.with_in_trait_impl(true, Some(*constness), |this| {
this.invalid_visibility(&item.vis, None);
if let TyKind::Err = self_ty.kind {
this.err_handler()
@@ -1027,7 +1027,8 @@ impl<'a> Visitor<'a> for AstValidator<'a> {
.help("use `auto trait Trait {}` instead")
.emit();
}
- if let (Unsafe::Yes(span), ImplPolarity::Negative(sp)) = (unsafety, polarity) {
+ if let (&Unsafe::Yes(span), &ImplPolarity::Negative(sp)) = (unsafety, polarity)
+ {
struct_span_err!(
this.session,
sp.to(t.path.span),
@@ -1051,6 +1052,7 @@ impl<'a> Visitor<'a> for AstValidator<'a> {
walk_list!(this, visit_assoc_item, items, AssocCtxt::Impl);
});
+ walk_list!(self, visit_attribute, &item.attrs);
return; // Avoid visiting again.
}
ItemKind::Impl(box Impl {
@@ -1060,7 +1062,7 @@ impl<'a> Visitor<'a> for AstValidator<'a> {
constness,
generics: _,
of_trait: None,
- ref self_ty,
+ self_ty,
items: _,
}) => {
let error = |annotation_span, annotation| {
@@ -1077,25 +1079,25 @@ impl<'a> Visitor<'a> for AstValidator<'a> {
&item.vis,
Some(InvalidVisibilityNote::IndividualImplItems),
);
- if let Unsafe::Yes(span) = unsafety {
+ if let &Unsafe::Yes(span) = unsafety {
error(span, "unsafe").code(error_code!(E0197)).emit();
}
- if let ImplPolarity::Negative(span) = polarity {
+ if let &ImplPolarity::Negative(span) = polarity {
error(span, "negative").emit();
}
- if let Defaultness::Default(def_span) = defaultness {
+ if let &Defaultness::Default(def_span) = defaultness {
error(def_span, "`default`")
.note("only trait implementations may be annotated with `default`")
.emit();
}
- if let Const::Yes(span) = constness {
+ if let &Const::Yes(span) = constness {
error(span, "`const`")
.note("only trait implementations may be annotated with `const`")
.emit();
}
}
- ItemKind::Fn(box Fn { defaultness, ref sig, ref generics, ref body }) => {
- self.check_defaultness(item.span, defaultness);
+ ItemKind::Fn(box Fn { defaultness, sig, generics, body }) => {
+ self.check_defaultness(item.span, *defaultness);
if body.is_none() {
self.session.emit_err(FnWithoutBody {
@@ -1131,7 +1133,7 @@ impl<'a> Visitor<'a> for AstValidator<'a> {
&item.vis,
Some(InvalidVisibilityNote::IndividualForeignItems),
);
- if let Unsafe::Yes(span) = unsafety {
+ if let &Unsafe::Yes(span) = unsafety {
self.err_handler().span_err(span, "extern block cannot be declared unsafe");
}
if abi.is_none() {
@@ -1141,7 +1143,7 @@ impl<'a> Visitor<'a> for AstValidator<'a> {
self.extern_mod = old_item;
return; // Avoid visiting again.
}
- ItemKind::Enum(ref def, _) => {
+ ItemKind::Enum(def, _) => {
for variant in &def.variants {
self.invalid_visibility(&variant.vis, None);
for field in variant.data.fields() {
@@ -1149,8 +1151,8 @@ impl<'a> Visitor<'a> for AstValidator<'a> {
}
}
}
- ItemKind::Trait(box Trait { is_auto, ref generics, ref bounds, ref items, .. }) => {
- if is_auto == IsAuto::Yes {
+ ItemKind::Trait(box Trait { is_auto, generics, bounds, items, .. }) => {
+ if *is_auto == IsAuto::Yes {
// Auto traits cannot have generics, super traits nor contain items.
self.deny_generic_params(generics, item.ident.span);
self.deny_super_traits(bounds, item.ident.span);
@@ -1168,10 +1170,10 @@ impl<'a> Visitor<'a> for AstValidator<'a> {
});
walk_list!(self, visit_assoc_item, items, AssocCtxt::Trait);
walk_list!(self, visit_attribute, &item.attrs);
- return;
+ return; // Avoid visiting again
}
- ItemKind::Mod(unsafety, ref mod_kind) => {
- if let Unsafe::Yes(span) = unsafety {
+ ItemKind::Mod(unsafety, mod_kind) => {
+ if let &Unsafe::Yes(span) = unsafety {
self.err_handler().span_err(span, "module cannot be declared unsafe");
}
// Ensure that `path` attributes on modules are recorded as used (cf. issue #35584).
@@ -1181,13 +1183,13 @@ impl<'a> Visitor<'a> for AstValidator<'a> {
self.check_mod_file_item_asciionly(item.ident);
}
}
- ItemKind::Union(ref vdata, ..) => {
+ ItemKind::Union(vdata, ..) => {
if vdata.fields().is_empty() {
self.err_handler().span_err(item.span, "unions cannot have zero fields");
}
}
ItemKind::Const(def, .., None) => {
- self.check_defaultness(item.span, def);
+ self.check_defaultness(item.span, *def);
self.session.emit_err(ConstWithoutBody {
span: item.span,
replace_span: self.ending_semi_or_hi(item.span),
@@ -1199,14 +1201,8 @@ impl<'a> Visitor<'a> for AstValidator<'a> {
replace_span: self.ending_semi_or_hi(item.span),
});
}
- ItemKind::TyAlias(box TyAlias {
- defaultness,
- where_clauses,
- ref bounds,
- ref ty,
- ..
- }) => {
- self.check_defaultness(item.span, defaultness);
+ ItemKind::TyAlias(box TyAlias { defaultness, where_clauses, bounds, ty, .. }) => {
+ self.check_defaultness(item.span, *defaultness);
if ty.is_none() {
self.session.emit_err(TyAliasWithoutBody {
span: item.span,
@@ -1265,8 +1261,8 @@ impl<'a> Visitor<'a> for AstValidator<'a> {
// Mirrors `visit::walk_generic_args`, but tracks relevant state.
fn visit_generic_args(&mut self, generic_args: &'a GenericArgs) {
- match *generic_args {
- GenericArgs::AngleBracketed(ref data) => {
+ match generic_args {
+ GenericArgs::AngleBracketed(data) => {
self.check_generic_args_before_constraints(data);
for arg in &data.args {
@@ -1282,7 +1278,7 @@ impl<'a> Visitor<'a> for AstValidator<'a> {
}
}
}
- GenericArgs::Parenthesized(ref data) => {
+ GenericArgs::Parenthesized(data) => {
walk_list!(self, visit_ty, &data.inputs);
if let FnRetTy::Ty(ty) = &data.output {
// `-> Foo` syntax is essentially an associated type binding,
@@ -1318,7 +1314,7 @@ impl<'a> Visitor<'a> for AstValidator<'a> {
validate_generic_param_order(self.err_handler(), &generics.params, generics.span);
for predicate in &generics.where_clause.predicates {
- if let WherePredicate::EqPredicate(ref predicate) = *predicate {
+ if let WherePredicate::EqPredicate(predicate) = predicate {
deny_equality_constraints(self, predicate, generics);
}
}
@@ -1367,7 +1363,7 @@ impl<'a> Visitor<'a> for AstValidator<'a> {
}
fn visit_param_bound(&mut self, bound: &'a GenericBound, ctxt: BoundKind) {
- if let GenericBound::Trait(ref poly, modify) = *bound {
+ if let GenericBound::Trait(poly, modify) = bound {
match (ctxt, modify) {
(BoundKind::SuperTraits, TraitBoundModifier::Maybe) => {
let mut err = self
@@ -1572,8 +1568,8 @@ impl<'a> Visitor<'a> for AstValidator<'a> {
self.check_item_named(item.ident, "const");
}
- match item.kind {
- AssocItemKind::Type(box TyAlias { ref generics, ref bounds, ref ty, .. })
+ match &item.kind {
+ AssocItemKind::Type(box TyAlias { generics, bounds, ty, .. })
if ctxt == AssocCtxt::Trait =>
{
self.visit_vis(&item.vis);
@@ -1585,7 +1581,7 @@ impl<'a> Visitor<'a> for AstValidator<'a> {
});
walk_list!(self, visit_ty, ty);
}
- AssocItemKind::Fn(box Fn { ref sig, ref generics, ref body, .. })
+ AssocItemKind::Fn(box Fn { sig, generics, body, .. })
if self.in_const_trait_impl
|| ctxt == AssocCtxt::Trait
|| matches!(sig.header.constness, Const::Yes(_)) =>
@@ -1636,7 +1632,7 @@ fn deny_equality_constraints(
// Remove `Bar` from `Foo::Bar`.
assoc_path.segments.pop();
let len = assoc_path.segments.len() - 1;
- let gen_args = args.as_ref().map(|p| (**p).clone());
+ let gen_args = args.as_deref().cloned();
// Build `<Bar = RhsTy>`.
let arg = AngleBracketedArg::Constraint(AssocConstraint {
id: rustc_ast::node_id::DUMMY_NODE_ID,
diff --git a/compiler/rustc_ast_passes/src/feature_gate.rs b/compiler/rustc_ast_passes/src/feature_gate.rs
index 546010135..32f45f8b5 100644
--- a/compiler/rustc_ast_passes/src/feature_gate.rs
+++ b/compiler/rustc_ast_passes/src/feature_gate.rs
@@ -198,8 +198,8 @@ impl<'a> Visitor<'a> for PostExpansionVisitor<'a> {
}
fn visit_item(&mut self, i: &'a ast::Item) {
- match i.kind {
- ast::ItemKind::ForeignMod(ref foreign_module) => {
+ match &i.kind {
+ ast::ItemKind::ForeignMod(foreign_module) => {
if let Some(abi) = foreign_module.abi {
self.check_abi(abi, ast::Const::No);
}
@@ -233,8 +233,8 @@ impl<'a> Visitor<'a> for PostExpansionVisitor<'a> {
}
}
- ast::ItemKind::Impl(box ast::Impl { polarity, defaultness, ref of_trait, .. }) => {
- if let ast::ImplPolarity::Negative(span) = polarity {
+ ast::ItemKind::Impl(box ast::Impl { polarity, defaultness, of_trait, .. }) => {
+ if let &ast::ImplPolarity::Negative(span) = polarity {
gate_feature_post!(
&self,
negative_impls,
@@ -267,7 +267,7 @@ impl<'a> Visitor<'a> for PostExpansionVisitor<'a> {
gate_feature_post!(&self, decl_macro, i.span, msg);
}
- ast::ItemKind::TyAlias(box ast::TyAlias { ty: Some(ref ty), .. }) => {
+ ast::ItemKind::TyAlias(box ast::TyAlias { ty: Some(ty), .. }) => {
self.check_impl_trait(&ty)
}
@@ -302,8 +302,8 @@ impl<'a> Visitor<'a> for PostExpansionVisitor<'a> {
}
fn visit_ty(&mut self, ty: &'a ast::Ty) {
- match ty.kind {
- ast::TyKind::BareFn(ref bare_fn_ty) => {
+ match &ty.kind {
+ ast::TyKind::BareFn(bare_fn_ty) => {
// Function pointers cannot be `const`
self.check_extern(bare_fn_ty.ext, ast::Const::No);
}
@@ -319,7 +319,7 @@ impl<'a> Visitor<'a> for PostExpansionVisitor<'a> {
}
fn visit_fn_ret_ty(&mut self, ret_ty: &'a ast::FnRetTy) {
- if let ast::FnRetTy::Ty(ref output_ty) = *ret_ty {
+ if let ast::FnRetTy::Ty(output_ty) = ret_ty {
if let ast::TyKind::Never = output_ty.kind {
// Do nothing.
} else {
@@ -455,9 +455,9 @@ impl<'a> Visitor<'a> for PostExpansionVisitor<'a> {
}
fn visit_assoc_item(&mut self, i: &'a ast::AssocItem, ctxt: AssocCtxt) {
- let is_fn = match i.kind {
+ let is_fn = match &i.kind {
ast::AssocItemKind::Fn(_) => true,
- ast::AssocItemKind::Type(box ast::TyAlias { ref ty, .. }) => {
+ ast::AssocItemKind::Type(box ast::TyAlias { ty, .. }) => {
if let (Some(_), AssocCtxt::Trait) = (ty, ctxt) {
gate_feature_post!(
&self,
diff --git a/compiler/rustc_ast_pretty/src/helpers.rs b/compiler/rustc_ast_pretty/src/helpers.rs
index 5ec71cddf..c3e0eccd3 100644
--- a/compiler/rustc_ast_pretty/src/helpers.rs
+++ b/compiler/rustc_ast_pretty/src/helpers.rs
@@ -36,8 +36,8 @@ impl Printer {
self.nbsp()
}
- // Synthesizes a comment that was not textually present in the original
- // source file.
+ /// Synthesizes a comment that was not textually present in the original
+ /// source file.
pub fn synth_comment(&mut self, text: impl Into<Cow<'static, str>>) {
self.word("/*");
self.space();
diff --git a/compiler/rustc_ast_pretty/src/pprust/state.rs b/compiler/rustc_ast_pretty/src/pprust/state.rs
index b87c6f78d..d3d8431c1 100644
--- a/compiler/rustc_ast_pretty/src/pprust/state.rs
+++ b/compiler/rustc_ast_pretty/src/pprust/state.rs
@@ -11,16 +11,15 @@ use rustc_ast::tokenstream::{TokenStream, TokenTree};
use rustc_ast::util::classify;
use rustc_ast::util::comments::{gather_comments, Comment, CommentStyle};
use rustc_ast::util::parser;
-use rustc_ast::{self as ast, BlockCheckMode, Mutability, PatKind, RangeEnd, RangeSyntax};
-use rustc_ast::{attr, BindingAnnotation, ByRef, Term};
-use rustc_ast::{GenericArg, MacArgs, MacArgsEq};
-use rustc_ast::{GenericBound, SelfKind, TraitBoundModifier};
+use rustc_ast::{self as ast, AttrArgs, AttrArgsEq, BlockCheckMode, PatKind};
+use rustc_ast::{attr, BindingAnnotation, ByRef, DelimArgs, RangeEnd, RangeSyntax, Term};
+use rustc_ast::{GenericArg, GenericBound, SelfKind, TraitBoundModifier};
use rustc_ast::{InlineAsmOperand, InlineAsmRegOrRegClass};
use rustc_ast::{InlineAsmOptions, InlineAsmTemplatePiece};
use rustc_span::edition::Edition;
use rustc_span::source_map::{SourceMap, Spanned};
use rustc_span::symbol::{kw, sym, Ident, IdentPrinter, Symbol};
-use rustc_span::{BytePos, FileName, Span};
+use rustc_span::{BytePos, FileName, Span, DUMMY_SP};
use rustc_ast::attr::AttrIdGenerator;
use std::borrow::Cow;
@@ -65,6 +64,7 @@ impl<'a> Comments<'a> {
Comments { sm, comments, current: 0 }
}
+    // FIXME: This probably shouldn't clone.
pub fn next(&self) -> Option<Comment> {
self.comments.get(self.current).cloned()
}
@@ -120,17 +120,20 @@ pub fn print_crate<'a>(
// of the feature gate, so we fake them up here.
// `#![feature(prelude_import)]`
- let pi_nested = attr::mk_nested_word_item(Ident::with_dummy_span(sym::prelude_import));
- let list = attr::mk_list_item(Ident::with_dummy_span(sym::feature), vec![pi_nested]);
- let fake_attr = attr::mk_attr_inner(g, list);
+ let fake_attr = attr::mk_attr_nested_word(
+ g,
+ ast::AttrStyle::Inner,
+ sym::feature,
+ sym::prelude_import,
+ DUMMY_SP,
+ );
s.print_attribute(&fake_attr);
// Currently, in Rust 2018 we don't have `extern crate std;` at the crate
// root, so this is not needed, and actually breaks things.
if edition == Edition::Edition2015 {
// `#![no_std]`
- let no_std_meta = attr::mk_word_item(Ident::with_dummy_span(sym::no_std));
- let fake_attr = attr::mk_attr_inner(g, no_std_meta);
+ let fake_attr = attr::mk_attr_word(g, ast::AttrStyle::Inner, sym::no_std, DUMMY_SP);
s.print_attribute(&fake_attr);
}
}
@@ -269,10 +272,10 @@ pub trait PrintState<'a>: std::ops::Deref<Target = pp::Printer> + std::ops::Dere
fn maybe_print_comment(&mut self, pos: BytePos) -> bool {
let mut has_comment = false;
- while let Some(ref cmnt) = self.next_comment() {
+ while let Some(cmnt) = self.next_comment() {
if cmnt.pos < pos {
has_comment = true;
- self.print_comment(cmnt);
+ self.print_comment(&cmnt);
} else {
break;
}
@@ -367,14 +370,18 @@ pub trait PrintState<'a>: std::ops::Deref<Target = pp::Printer> + std::ops::Dere
if self.next_comment().is_none() {
self.hardbreak();
}
- while let Some(ref cmnt) = self.next_comment() {
- self.print_comment(cmnt)
+ while let Some(cmnt) = self.next_comment() {
+ self.print_comment(&cmnt)
}
}
- fn print_literal(&mut self, lit: &ast::Lit) {
- self.maybe_print_comment(lit.span.lo());
- self.word(lit.token_lit.to_string())
+ fn print_meta_item_lit(&mut self, lit: &ast::MetaItemLit) {
+ self.print_token_literal(lit.token_lit, lit.span)
+ }
+
+ fn print_token_literal(&mut self, token_lit: token::Lit, span: Span) {
+ self.maybe_print_comment(span.lo());
+ self.word(token_lit.to_string())
}
fn print_string(&mut self, st: &str, style: ast::StrStyle) {
@@ -443,8 +450,8 @@ pub trait PrintState<'a>: std::ops::Deref<Target = pp::Printer> + std::ops::Dere
self.hardbreak_if_not_bol();
}
self.maybe_print_comment(attr.span.lo());
- match attr.kind {
- ast::AttrKind::Normal(ref normal) => {
+ match &attr.kind {
+ ast::AttrKind::Normal(normal) => {
match attr.style {
ast::AttrStyle::Inner => self.word("#!["),
ast::AttrStyle::Outer => self.word("#["),
@@ -453,7 +460,7 @@ pub trait PrintState<'a>: std::ops::Deref<Target = pp::Printer> + std::ops::Dere
self.word("]");
}
ast::AttrKind::DocComment(comment_kind, data) => {
- self.word(doc_comment_to_string(comment_kind, attr.style, data));
+ self.word(doc_comment_to_string(*comment_kind, attr.style, *data));
self.hardbreak()
}
}
@@ -462,30 +469,30 @@ pub trait PrintState<'a>: std::ops::Deref<Target = pp::Printer> + std::ops::Dere
fn print_attr_item(&mut self, item: &ast::AttrItem, span: Span) {
self.ibox(0);
match &item.args {
- MacArgs::Delimited(_, delim, tokens) => self.print_mac_common(
+ AttrArgs::Delimited(DelimArgs { dspan: _, delim, tokens }) => self.print_mac_common(
Some(MacHeader::Path(&item.path)),
false,
None,
- Some(delim.to_token()),
+ delim.to_token(),
tokens,
true,
span,
),
- MacArgs::Empty => {
+ AttrArgs::Empty => {
self.print_path(&item.path, false, 0);
}
- MacArgs::Eq(_, MacArgsEq::Ast(expr)) => {
+ AttrArgs::Eq(_, AttrArgsEq::Ast(expr)) => {
self.print_path(&item.path, false, 0);
self.space();
self.word_space("=");
let token_str = self.expr_to_string(expr);
self.word(token_str);
}
- MacArgs::Eq(_, MacArgsEq::Hir(lit)) => {
+ AttrArgs::Eq(_, AttrArgsEq::Hir(lit)) => {
self.print_path(&item.path, false, 0);
self.space();
self.word_space("=");
- let token_str = self.literal_to_string(lit);
+ let token_str = self.meta_item_lit_to_string(lit);
self.word(token_str);
}
}
@@ -494,25 +501,25 @@ pub trait PrintState<'a>: std::ops::Deref<Target = pp::Printer> + std::ops::Dere
fn print_meta_list_item(&mut self, item: &ast::NestedMetaItem) {
match item {
- ast::NestedMetaItem::MetaItem(ref mi) => self.print_meta_item(mi),
- ast::NestedMetaItem::Literal(ref lit) => self.print_literal(lit),
+ ast::NestedMetaItem::MetaItem(mi) => self.print_meta_item(mi),
+ ast::NestedMetaItem::Lit(lit) => self.print_meta_item_lit(lit),
}
}
fn print_meta_item(&mut self, item: &ast::MetaItem) {
self.ibox(INDENT_UNIT);
- match item.kind {
+ match &item.kind {
ast::MetaItemKind::Word => self.print_path(&item.path, false, 0),
- ast::MetaItemKind::NameValue(ref value) => {
+ ast::MetaItemKind::NameValue(value) => {
self.print_path(&item.path, false, 0);
self.space();
self.word_space("=");
- self.print_literal(value);
+ self.print_meta_item_lit(value);
}
- ast::MetaItemKind::List(ref items) => {
+ ast::MetaItemKind::List(items) => {
self.print_path(&item.path, false, 0);
self.popen();
- self.commasep(Consistent, &items, |s, i| s.print_meta_list_item(i));
+ self.commasep(Consistent, items, |s, i| s.print_meta_list_item(i));
self.pclose();
}
}
@@ -529,7 +536,7 @@ pub trait PrintState<'a>: std::ops::Deref<Target = pp::Printer> + std::ops::Dere
fn print_tt(&mut self, tt: &TokenTree, convert_dollar_crate: bool) {
match tt {
TokenTree::Token(token, _) => {
- let token_str = self.token_to_string_ext(&token, convert_dollar_crate);
+ let token_str = self.token_to_string_ext(token, convert_dollar_crate);
self.word(token_str);
if let token::DocComment(..) = token.kind {
self.hardbreak()
@@ -540,7 +547,7 @@ pub trait PrintState<'a>: std::ops::Deref<Target = pp::Printer> + std::ops::Dere
None,
false,
None,
- Some(*delim),
+ *delim,
tts,
convert_dollar_crate,
dspan.entire(),
@@ -566,12 +573,12 @@ pub trait PrintState<'a>: std::ops::Deref<Target = pp::Printer> + std::ops::Dere
header: Option<MacHeader<'_>>,
has_bang: bool,
ident: Option<Ident>,
- delim: Option<Delimiter>,
+ delim: Delimiter,
tts: &TokenStream,
convert_dollar_crate: bool,
span: Span,
) {
- if delim == Some(Delimiter::Brace) {
+ if delim == Delimiter::Brace {
self.cbox(INDENT_UNIT);
}
match header {
@@ -587,7 +594,7 @@ pub trait PrintState<'a>: std::ops::Deref<Target = pp::Printer> + std::ops::Dere
self.print_ident(ident);
}
match delim {
- Some(Delimiter::Brace) => {
+ Delimiter::Brace => {
if header.is_some() || has_bang || ident.is_some() {
self.nbsp();
}
@@ -601,7 +608,7 @@ pub trait PrintState<'a>: std::ops::Deref<Target = pp::Printer> + std::ops::Dere
let empty = tts.is_empty();
self.bclose(span, empty);
}
- Some(delim) => {
+ delim => {
let token_str = self.token_kind_to_string(&token::OpenDelim(delim));
self.word(token_str);
self.ibox(0);
@@ -610,11 +617,6 @@ pub trait PrintState<'a>: std::ops::Deref<Target = pp::Printer> + std::ops::Dere
let token_str = self.token_kind_to_string(&token::CloseDelim(delim));
self.word(token_str);
}
- None => {
- self.ibox(0);
- self.print_tts(tts, convert_dollar_crate);
- self.end();
- }
}
}
@@ -635,8 +637,8 @@ pub trait PrintState<'a>: std::ops::Deref<Target = pp::Printer> + std::ops::Dere
Some(MacHeader::Keyword(kw)),
has_bang,
Some(*ident),
- macro_def.body.delim(),
- &macro_def.body.inner_tokens(),
+ macro_def.body.delim.to_token(),
+ &macro_def.body.tokens.clone(),
true,
sp,
);
@@ -659,7 +661,7 @@ pub trait PrintState<'a>: std::ops::Deref<Target = pp::Printer> + std::ops::Dere
fn print_path_segment(&mut self, segment: &ast::PathSegment, colons_before_params: bool) {
if segment.ident.name != kw::PathRoot {
self.print_ident(segment.ident);
- if let Some(ref args) = segment.args {
+ if let Some(args) = &segment.args {
self.print_generic_args(args, colons_before_params);
}
}
@@ -714,19 +716,19 @@ pub trait PrintState<'a>: std::ops::Deref<Target = pp::Printer> + std::ops::Dere
}
fn nonterminal_to_string(&self, nt: &Nonterminal) -> String {
- match *nt {
- token::NtExpr(ref e) => self.expr_to_string(e),
- token::NtMeta(ref e) => self.attr_item_to_string(e),
- token::NtTy(ref e) => self.ty_to_string(e),
- token::NtPath(ref e) => self.path_to_string(e),
- token::NtItem(ref e) => self.item_to_string(e),
- token::NtBlock(ref e) => self.block_to_string(e),
- token::NtStmt(ref e) => self.stmt_to_string(e),
- token::NtPat(ref e) => self.pat_to_string(e),
- token::NtIdent(e, is_raw) => IdentPrinter::for_ast_ident(e, is_raw).to_string(),
+ match nt {
+ token::NtExpr(e) => self.expr_to_string(e),
+ token::NtMeta(e) => self.attr_item_to_string(e),
+ token::NtTy(e) => self.ty_to_string(e),
+ token::NtPath(e) => self.path_to_string(e),
+ token::NtItem(e) => self.item_to_string(e),
+ token::NtBlock(e) => self.block_to_string(e),
+ token::NtStmt(e) => self.stmt_to_string(e),
+ token::NtPat(e) => self.pat_to_string(e),
+ token::NtIdent(e, is_raw) => IdentPrinter::for_ast_ident(*e, *is_raw).to_string(),
token::NtLifetime(e) => e.to_string(),
- token::NtLiteral(ref e) => self.expr_to_string(e),
- token::NtVis(ref e) => self.vis_to_string(e),
+ token::NtLiteral(e) => self.expr_to_string(e),
+ token::NtVis(e) => self.vis_to_string(e),
}
}
@@ -827,8 +829,8 @@ pub trait PrintState<'a>: std::ops::Deref<Target = pp::Printer> + std::ops::Dere
Self::to_string(|s| s.print_expr(e))
}
- fn literal_to_string(&self, lit: &ast::Lit) -> String {
- Self::to_string(|s| s.print_literal(lit))
+ fn meta_item_lit_to_string(&self, lit: &ast::MetaItemLit) -> String {
+ Self::to_string(|s| s.print_meta_item_lit(lit))
}
fn tt_to_string(&self, tt: &TokenTree) -> String {
@@ -919,8 +921,8 @@ impl<'a> PrintState<'a> for State<'a> {
self.word("::")
}
- match *args {
- ast::GenericArgs::AngleBracketed(ref data) => {
+ match args {
+ ast::GenericArgs::AngleBracketed(data) => {
self.word("<");
self.commasep(Inconsistent, &data.args, |s, arg| match arg {
ast::AngleBracketedArg::Arg(a) => s.print_generic_arg(a),
@@ -929,7 +931,7 @@ impl<'a> PrintState<'a> for State<'a> {
self.word(">")
}
- ast::GenericArgs::Parenthesized(ref data) => {
+ ast::GenericArgs::Parenthesized(data) => {
self.word("(");
self.commasep(Inconsistent, &data.inputs, |s, ty| s.print_type(ty));
self.word(")");
@@ -996,7 +998,7 @@ impl<'a> State<'a> {
ast::AssocConstraintKind::Bound { bounds } => {
if !bounds.is_empty() {
self.word_nbsp(":");
- self.print_type_bounds(&bounds);
+ self.print_type_bounds(bounds);
}
}
}
@@ -1013,17 +1015,17 @@ impl<'a> State<'a> {
pub fn print_type(&mut self, ty: &ast::Ty) {
self.maybe_print_comment(ty.span.lo());
self.ibox(0);
- match ty.kind {
- ast::TyKind::Slice(ref ty) => {
+ match &ty.kind {
+ ast::TyKind::Slice(ty) => {
self.word("[");
self.print_type(ty);
self.word("]");
}
- ast::TyKind::Ptr(ref mt) => {
+ ast::TyKind::Ptr(mt) => {
self.word("*");
self.print_mt(mt, true);
}
- ast::TyKind::Rptr(ref lifetime, ref mt) => {
+ ast::TyKind::Rptr(lifetime, mt) => {
self.word("&");
self.print_opt_lifetime(lifetime);
self.print_mt(mt, false);
@@ -1031,44 +1033,44 @@ impl<'a> State<'a> {
ast::TyKind::Never => {
self.word("!");
}
- ast::TyKind::Tup(ref elts) => {
+ ast::TyKind::Tup(elts) => {
self.popen();
- self.commasep(Inconsistent, &elts, |s, ty| s.print_type(ty));
+ self.commasep(Inconsistent, elts, |s, ty| s.print_type(ty));
if elts.len() == 1 {
self.word(",");
}
self.pclose();
}
- ast::TyKind::Paren(ref typ) => {
+ ast::TyKind::Paren(typ) => {
self.popen();
self.print_type(typ);
self.pclose();
}
- ast::TyKind::BareFn(ref f) => {
+ ast::TyKind::BareFn(f) => {
self.print_ty_fn(f.ext, f.unsafety, &f.decl, None, &f.generic_params);
}
- ast::TyKind::Path(None, ref path) => {
+ ast::TyKind::Path(None, path) => {
self.print_path(path, false, 0);
}
- ast::TyKind::Path(Some(ref qself), ref path) => self.print_qpath(path, qself, false),
- ast::TyKind::TraitObject(ref bounds, syntax) => {
- if syntax == ast::TraitObjectSyntax::Dyn {
+ ast::TyKind::Path(Some(qself), path) => self.print_qpath(path, qself, false),
+ ast::TyKind::TraitObject(bounds, syntax) => {
+ if *syntax == ast::TraitObjectSyntax::Dyn {
self.word_nbsp("dyn");
}
self.print_type_bounds(bounds);
}
- ast::TyKind::ImplTrait(_, ref bounds) => {
+ ast::TyKind::ImplTrait(_, bounds) => {
self.word_nbsp("impl");
self.print_type_bounds(bounds);
}
- ast::TyKind::Array(ref ty, ref length) => {
+ ast::TyKind::Array(ty, length) => {
self.word("[");
self.print_type(ty);
self.word("; ");
self.print_expr(&length.value);
self.word("]");
}
- ast::TyKind::Typeof(ref e) => {
+ ast::TyKind::Typeof(e) => {
self.word("typeof(");
self.print_expr(&e.value);
self.word(")");
@@ -1084,7 +1086,7 @@ impl<'a> State<'a> {
ast::TyKind::ImplicitSelf => {
self.word("Self");
}
- ast::TyKind::MacCall(ref m) => {
+ ast::TyKind::MacCall(m) => {
self.print_mac(m);
}
ast::TyKind::CVarArgs => {
@@ -1113,8 +1115,8 @@ impl<'a> State<'a> {
pub(crate) fn print_stmt(&mut self, st: &ast::Stmt) {
self.maybe_print_comment(st.span.lo());
- match st.kind {
- ast::StmtKind::Local(ref loc) => {
+ match &st.kind {
+ ast::StmtKind::Local(loc) => {
self.print_outer_attributes(&loc.attrs);
self.space_if_not_bol();
self.ibox(INDENT_UNIT);
@@ -1137,15 +1139,15 @@ impl<'a> State<'a> {
self.word(";");
self.end(); // `let` ibox
}
- ast::StmtKind::Item(ref item) => self.print_item(item),
- ast::StmtKind::Expr(ref expr) => {
+ ast::StmtKind::Item(item) => self.print_item(item),
+ ast::StmtKind::Expr(expr) => {
self.space_if_not_bol();
self.print_expr_outer_attr_style(expr, false);
if classify::expr_requires_semi_to_be_stmt(expr) {
self.word(";");
}
}
- ast::StmtKind::Semi(ref expr) => {
+ ast::StmtKind::Semi(expr) => {
self.space_if_not_bol();
self.print_expr_outer_attr_style(expr, false);
self.word(";");
@@ -1154,7 +1156,7 @@ impl<'a> State<'a> {
self.space_if_not_bol();
self.word(";");
}
- ast::StmtKind::MacCall(ref mac) => {
+ ast::StmtKind::MacCall(mac) => {
self.space_if_not_bol();
self.print_outer_attributes(&mac.attrs);
self.print_mac(&mac.mac);
@@ -1195,8 +1197,8 @@ impl<'a> State<'a> {
let has_attrs = self.print_inner_attributes(attrs);
for (i, st) in blk.stmts.iter().enumerate() {
- match st.kind {
- ast::StmtKind::Expr(ref expr) if i == blk.stmts.len() - 1 => {
+ match &st.kind {
+ ast::StmtKind::Expr(expr) if i == blk.stmts.len() - 1 => {
self.maybe_print_comment(st.span.lo());
self.space_if_not_bol();
self.print_expr_outer_attr_style(expr, false);
@@ -1226,8 +1228,8 @@ impl<'a> State<'a> {
Some(MacHeader::Path(&m.path)),
true,
None,
- m.args.delim(),
- &m.args.inner_tokens(),
+ m.args.delim.to_token(),
+ &m.args.tokens.clone(),
true,
m.span(),
);
@@ -1252,7 +1254,7 @@ impl<'a> State<'a> {
self.popen();
self.commasep(Consistent, &args, |s, arg| match arg {
- AsmArg::Template(template) => s.print_string(&template, ast::StrStyle::Cooked),
+ AsmArg::Template(template) => s.print_string(template, ast::StrStyle::Cooked),
AsmArg::Operand(op) => {
let print_reg_or_class = |s: &mut Self, r: &InlineAsmRegOrRegClass| match r {
InlineAsmRegOrRegClass::Reg(r) => s.print_symbol(*r, ast::StrStyle::Cooked),
@@ -1364,7 +1366,7 @@ impl<'a> State<'a> {
pub(crate) fn print_local_decl(&mut self, loc: &ast::Local) {
self.print_pat(&loc.pat);
- if let Some(ref ty) = loc.ty {
+ if let Some(ty) = &loc.ty {
self.word_space(":");
self.print_type(ty);
}
@@ -1388,7 +1390,7 @@ impl<'a> State<'a> {
for item_segment in &path.segments[qself.position..] {
self.word("::");
self.print_ident(item_segment.ident);
- if let Some(ref args) = item_segment.args {
+ if let Some(args) = &item_segment.args {
self.print_generic_args(args, colons_before_params)
}
}
@@ -1399,42 +1401,42 @@ impl<'a> State<'a> {
self.ann.pre(self, AnnNode::Pat(pat));
/* Pat isn't normalized, but the beauty of it
is that it doesn't matter */
- match pat.kind {
+ match &pat.kind {
PatKind::Wild => self.word("_"),
- PatKind::Ident(BindingAnnotation(by_ref, mutbl), ident, ref sub) => {
- if by_ref == ByRef::Yes {
+ PatKind::Ident(BindingAnnotation(by_ref, mutbl), ident, sub) => {
+ if *by_ref == ByRef::Yes {
self.word_nbsp("ref");
}
- if mutbl == Mutability::Mut {
+ if mutbl.is_mut() {
self.word_nbsp("mut");
}
- self.print_ident(ident);
- if let Some(ref p) = *sub {
+ self.print_ident(*ident);
+ if let Some(p) = sub {
self.space();
self.word_space("@");
self.print_pat(p);
}
}
- PatKind::TupleStruct(ref qself, ref path, ref elts) => {
+ PatKind::TupleStruct(qself, path, elts) => {
if let Some(qself) = qself {
self.print_qpath(path, qself, true);
} else {
self.print_path(path, true, 0);
}
self.popen();
- self.commasep(Inconsistent, &elts, |s, p| s.print_pat(p));
+ self.commasep(Inconsistent, elts, |s, p| s.print_pat(p));
self.pclose();
}
- PatKind::Or(ref pats) => {
- self.strsep("|", true, Inconsistent, &pats, |s, p| s.print_pat(p));
+ PatKind::Or(pats) => {
+ self.strsep("|", true, Inconsistent, pats, |s, p| s.print_pat(p));
}
- PatKind::Path(None, ref path) => {
+ PatKind::Path(None, path) => {
self.print_path(path, true, 0);
}
- PatKind::Path(Some(ref qself), ref path) => {
+ PatKind::Path(Some(qself), path) => {
self.print_qpath(path, qself, false);
}
- PatKind::Struct(ref qself, ref path, ref fields, etc) => {
+ PatKind::Struct(qself, path, fields, etc) => {
if let Some(qself) = qself {
self.print_qpath(path, qself, true);
} else {
@@ -1448,7 +1450,7 @@ impl<'a> State<'a> {
}
self.commasep_cmnt(
Consistent,
- &fields,
+ fields,
|s, f| {
s.cbox(INDENT_UNIT);
if !f.is_shorthand {
@@ -1460,7 +1462,7 @@ impl<'a> State<'a> {
},
|f| f.pat.span,
);
- if etc {
+ if *etc {
if !fields.is_empty() {
self.word_space(",");
}
@@ -1471,21 +1473,21 @@ impl<'a> State<'a> {
}
self.word("}");
}
- PatKind::Tuple(ref elts) => {
+ PatKind::Tuple(elts) => {
self.popen();
- self.commasep(Inconsistent, &elts, |s, p| s.print_pat(p));
+ self.commasep(Inconsistent, elts, |s, p| s.print_pat(p));
if elts.len() == 1 {
self.word(",");
}
self.pclose();
}
- PatKind::Box(ref inner) => {
+ PatKind::Box(inner) => {
self.word("box ");
self.print_pat(inner);
}
- PatKind::Ref(ref inner, mutbl) => {
+ PatKind::Ref(inner, mutbl) => {
self.word("&");
- if mutbl == Mutability::Mut {
+ if mutbl.is_mut() {
self.word("mut ");
}
if let PatKind::Ident(ast::BindingAnnotation::MUT, ..) = inner.kind {
@@ -1496,12 +1498,12 @@ impl<'a> State<'a> {
self.print_pat(inner);
}
}
- PatKind::Lit(ref e) => self.print_expr(&**e),
- PatKind::Range(ref begin, ref end, Spanned { node: ref end_kind, .. }) => {
+ PatKind::Lit(e) => self.print_expr(e),
+ PatKind::Range(begin, end, Spanned { node: end_kind, .. }) => {
if let Some(e) = begin {
self.print_expr(e);
}
- match *end_kind {
+ match end_kind {
RangeEnd::Included(RangeSyntax::DotDotDot) => self.word("..."),
RangeEnd::Included(RangeSyntax::DotDotEq) => self.word("..="),
RangeEnd::Excluded => self.word(".."),
@@ -1510,36 +1512,36 @@ impl<'a> State<'a> {
self.print_expr(e);
}
}
- PatKind::Slice(ref elts) => {
+ PatKind::Slice(elts) => {
self.word("[");
- self.commasep(Inconsistent, &elts, |s, p| s.print_pat(p));
+ self.commasep(Inconsistent, elts, |s, p| s.print_pat(p));
self.word("]");
}
PatKind::Rest => self.word(".."),
- PatKind::Paren(ref inner) => {
+ PatKind::Paren(inner) => {
self.popen();
self.print_pat(inner);
self.pclose();
}
- PatKind::MacCall(ref m) => self.print_mac(m),
+ PatKind::MacCall(m) => self.print_mac(m),
}
self.ann.post(self, AnnNode::Pat(pat))
}
fn print_explicit_self(&mut self, explicit_self: &ast::ExplicitSelf) {
- match explicit_self.node {
+ match &explicit_self.node {
SelfKind::Value(m) => {
- self.print_mutability(m, false);
+ self.print_mutability(*m, false);
self.word("self")
}
- SelfKind::Region(ref lt, m) => {
+ SelfKind::Region(lt, m) => {
self.word("&");
self.print_opt_lifetime(lt);
- self.print_mutability(m, false);
+ self.print_mutability(*m, false);
self.word("self")
}
- SelfKind::Explicit(ref typ, m) => {
- self.print_mutability(m, false);
+ SelfKind::Explicit(typ, m) => {
+ self.print_mutability(*m, false);
self.word("self");
self.word_space(":");
self.print_type(typ)
@@ -1598,10 +1600,10 @@ impl<'a> State<'a> {
self.word("<");
- self.commasep(Inconsistent, &generic_params, |s, param| {
+ self.commasep(Inconsistent, generic_params, |s, param| {
s.print_outer_attributes_inline(&param.attrs);
- match param.kind {
+ match &param.kind {
ast::GenericParamKind::Lifetime => {
let lt = ast::Lifetime { id: param.id, ident: param.ident };
s.print_lifetime(lt);
@@ -1610,19 +1612,19 @@ impl<'a> State<'a> {
s.print_lifetime_bounds(&param.bounds)
}
}
- ast::GenericParamKind::Type { ref default } => {
+ ast::GenericParamKind::Type { default } => {
s.print_ident(param.ident);
if !param.bounds.is_empty() {
s.word_nbsp(":");
s.print_type_bounds(&param.bounds);
}
- if let Some(ref default) = default {
+ if let Some(default) = default {
s.space();
s.word_space("=");
s.print_type(default)
}
}
- ast::GenericParamKind::Const { ref ty, kw_span: _, ref default } => {
+ ast::GenericParamKind::Const { ty, default, .. } => {
s.word_space("const");
s.print_ident(param.ident);
s.space();
@@ -1632,7 +1634,7 @@ impl<'a> State<'a> {
s.word_nbsp(":");
s.print_type_bounds(&param.bounds);
}
- if let Some(ref default) = default {
+ if let Some(default) = default {
s.space();
s.word_space("=");
s.print_expr(&default.value);
@@ -1714,9 +1716,9 @@ impl<'a> State<'a> {
where_clause: ast::WhereClause {
has_where_token: false,
predicates: Vec::new(),
- span: rustc_span::DUMMY_SP,
+ span: DUMMY_SP,
},
- span: rustc_span::DUMMY_SP,
+ span: DUMMY_SP,
};
let header = ast::FnHeader { unsafety, ext, ..ast::FnHeader::default() };
self.print_fn(decl, header, name, &generics);
@@ -1735,7 +1737,7 @@ impl<'a> State<'a> {
}
ast::Extern::Explicit(abi, _) => {
self.word_nbsp("extern");
- self.print_literal(&abi.as_lit());
+ self.print_token_literal(abi.as_token_lit(), abi.span);
self.nbsp();
}
}
diff --git a/compiler/rustc_ast_pretty/src/pprust/state/expr.rs b/compiler/rustc_ast_pretty/src/pprust/state/expr.rs
index bcefa8ce0..4ed16e337 100644
--- a/compiler/rustc_ast_pretty/src/pprust/state/expr.rs
+++ b/compiler/rustc_ast_pretty/src/pprust/state/expr.rs
@@ -8,9 +8,9 @@ use rustc_ast::{self as ast, BlockCheckMode};
impl<'a> State<'a> {
fn print_else(&mut self, els: Option<&ast::Expr>) {
if let Some(_else) = els {
- match _else.kind {
+ match &_else.kind {
// Another `else if` block.
- ast::ExprKind::If(ref i, ref then, ref e) => {
+ ast::ExprKind::If(i, then, e) => {
self.cbox(INDENT_UNIT - 1);
self.ibox(0);
self.word(" else if ");
@@ -20,7 +20,7 @@ impl<'a> State<'a> {
self.print_else(e.as_deref())
}
// Final `else` block.
- ast::ExprKind::Block(ref b, _) => {
+ ast::ExprKind::Block(b, _) => {
self.cbox(INDENT_UNIT - 1);
self.ibox(0);
self.word(" else ");
@@ -58,10 +58,10 @@ impl<'a> State<'a> {
self.print_expr_cond_paren(expr, Self::cond_needs_par(expr))
}
- // Does `expr` need parentheses when printed in a condition position?
- //
- // These cases need parens due to the parse error observed in #26461: `if return {}`
- // parses as the erroneous construct `if (return {})`, not `if (return) {}`.
+ /// Does `expr` need parentheses when printed in a condition position?
+ ///
+ /// These cases need parens due to the parse error observed in #26461: `if return {}`
+ /// parses as the erroneous construct `if (return {})`, not `if (return) {}`.
pub(super) fn cond_needs_par(expr: &ast::Expr) -> bool {
match expr.kind {
ast::ExprKind::Break(..)
@@ -121,7 +121,7 @@ impl<'a> State<'a> {
fn print_expr_struct(
&mut self,
- qself: &Option<ast::QSelf>,
+ qself: &Option<P<ast::QSelf>>,
path: &ast::Path,
fields: &[ast::ExprField],
rest: &ast::StructRest,
@@ -202,7 +202,7 @@ impl<'a> State<'a> {
self.print_expr_maybe_paren(receiver, parser::PREC_POSTFIX);
self.word(".");
self.print_ident(segment.ident);
- if let Some(ref args) = segment.args {
+ if let Some(args) = &segment.args {
self.print_generic_args(args, true);
}
self.print_call_post(base_args)
@@ -284,64 +284,66 @@ impl<'a> State<'a> {
self.ibox(INDENT_UNIT);
self.ann.pre(self, AnnNode::Expr(expr));
- match expr.kind {
- ast::ExprKind::Box(ref expr) => {
+ match &expr.kind {
+ ast::ExprKind::Box(expr) => {
self.word_space("box");
self.print_expr_maybe_paren(expr, parser::PREC_PREFIX);
}
- ast::ExprKind::Array(ref exprs) => {
+ ast::ExprKind::Array(exprs) => {
self.print_expr_vec(exprs);
}
- ast::ExprKind::ConstBlock(ref anon_const) => {
+ ast::ExprKind::ConstBlock(anon_const) => {
self.print_expr_anon_const(anon_const, attrs);
}
- ast::ExprKind::Repeat(ref element, ref count) => {
+ ast::ExprKind::Repeat(element, count) => {
self.print_expr_repeat(element, count);
}
- ast::ExprKind::Struct(ref se) => {
+ ast::ExprKind::Struct(se) => {
self.print_expr_struct(&se.qself, &se.path, &se.fields, &se.rest);
}
- ast::ExprKind::Tup(ref exprs) => {
+ ast::ExprKind::Tup(exprs) => {
self.print_expr_tup(exprs);
}
- ast::ExprKind::Call(ref func, ref args) => {
- self.print_expr_call(func, &args);
+ ast::ExprKind::Call(func, args) => {
+ self.print_expr_call(func, args);
+ }
+ ast::ExprKind::MethodCall(box ast::MethodCall { seg, receiver, args, .. }) => {
+ self.print_expr_method_call(seg, receiver, args);
}
- ast::ExprKind::MethodCall(ref segment, ref receiver, ref args, _) => {
- self.print_expr_method_call(segment, &receiver, &args);
+ ast::ExprKind::Binary(op, lhs, rhs) => {
+ self.print_expr_binary(*op, lhs, rhs);
}
- ast::ExprKind::Binary(op, ref lhs, ref rhs) => {
- self.print_expr_binary(op, lhs, rhs);
+ ast::ExprKind::Unary(op, expr) => {
+ self.print_expr_unary(*op, expr);
}
- ast::ExprKind::Unary(op, ref expr) => {
- self.print_expr_unary(op, expr);
+ ast::ExprKind::AddrOf(k, m, expr) => {
+ self.print_expr_addr_of(*k, *m, expr);
}
- ast::ExprKind::AddrOf(k, m, ref expr) => {
- self.print_expr_addr_of(k, m, expr);
+ ast::ExprKind::Lit(token_lit) => {
+ self.print_token_literal(*token_lit, expr.span);
}
- ast::ExprKind::Lit(ref lit) => {
- self.print_literal(lit);
+ ast::ExprKind::IncludedBytes(bytes) => {
+ let lit = ast::LitKind::ByteStr(bytes.clone()).to_token_lit();
+ self.print_token_literal(lit, expr.span)
}
- ast::ExprKind::Cast(ref expr, ref ty) => {
+ ast::ExprKind::Cast(expr, ty) => {
let prec = AssocOp::As.precedence() as i8;
self.print_expr_maybe_paren(expr, prec);
self.space();
self.word_space("as");
self.print_type(ty);
}
- ast::ExprKind::Type(ref expr, ref ty) => {
+ ast::ExprKind::Type(expr, ty) => {
let prec = AssocOp::Colon.precedence() as i8;
self.print_expr_maybe_paren(expr, prec);
self.word_space(":");
self.print_type(ty);
}
- ast::ExprKind::Let(ref pat, ref scrutinee, _) => {
+ ast::ExprKind::Let(pat, scrutinee, _) => {
self.print_let(pat, scrutinee);
}
- ast::ExprKind::If(ref test, ref blk, ref elseopt) => {
- self.print_if(test, blk, elseopt.as_deref())
- }
- ast::ExprKind::While(ref test, ref blk, opt_label) => {
+ ast::ExprKind::If(test, blk, elseopt) => self.print_if(test, blk, elseopt.as_deref()),
+ ast::ExprKind::While(test, blk, opt_label) => {
if let Some(label) = opt_label {
self.print_ident(label.ident);
self.word_space(":");
@@ -353,7 +355,7 @@ impl<'a> State<'a> {
self.space();
self.print_block_with_attrs(blk, attrs);
}
- ast::ExprKind::ForLoop(ref pat, ref iter, ref blk, opt_label) => {
+ ast::ExprKind::ForLoop(pat, iter, blk, opt_label) => {
if let Some(label) = opt_label {
self.print_ident(label.ident);
self.word_space(":");
@@ -368,7 +370,7 @@ impl<'a> State<'a> {
self.space();
self.print_block_with_attrs(blk, attrs);
}
- ast::ExprKind::Loop(ref blk, opt_label) => {
+ ast::ExprKind::Loop(blk, opt_label, _) => {
if let Some(label) = opt_label {
self.print_ident(label.ident);
self.word_space(":");
@@ -378,7 +380,7 @@ impl<'a> State<'a> {
self.word_nbsp("loop");
self.print_block_with_attrs(blk, attrs);
}
- ast::ExprKind::Match(ref expr, ref arms) => {
+ ast::ExprKind::Match(expr, arms) => {
self.cbox(0);
self.ibox(0);
self.word_nbsp("match");
@@ -392,21 +394,22 @@ impl<'a> State<'a> {
let empty = attrs.is_empty() && arms.is_empty();
self.bclose(expr.span, empty);
}
- ast::ExprKind::Closure(
- ref binder,
+ ast::ExprKind::Closure(box ast::Closure {
+ binder,
capture_clause,
asyncness,
movability,
- ref decl,
- ref body,
- _,
- ) => {
+ fn_decl,
+ body,
+ fn_decl_span: _,
+ fn_arg_span: _,
+ }) => {
self.print_closure_binder(binder);
- self.print_movability(movability);
- self.print_asyncness(asyncness);
- self.print_capture_clause(capture_clause);
+ self.print_movability(*movability);
+ self.print_asyncness(*asyncness);
+ self.print_capture_clause(*capture_clause);
- self.print_fn_params_and_ret(decl, true);
+ self.print_fn_params_and_ret(fn_decl, true);
self.space();
self.print_expr(body);
self.end(); // need to close a box
@@ -416,7 +419,7 @@ impl<'a> State<'a> {
// empty box to satisfy the close.
self.ibox(0);
}
- ast::ExprKind::Block(ref blk, opt_label) => {
+ ast::ExprKind::Block(blk, opt_label) => {
if let Some(label) = opt_label {
self.print_ident(label.ident);
self.word_space(":");
@@ -427,26 +430,26 @@ impl<'a> State<'a> {
self.ibox(0);
self.print_block_with_attrs(blk, attrs);
}
- ast::ExprKind::Async(capture_clause, _, ref blk) => {
+ ast::ExprKind::Async(capture_clause, _, blk) => {
self.word_nbsp("async");
- self.print_capture_clause(capture_clause);
+ self.print_capture_clause(*capture_clause);
// cbox/ibox in analogy to the `ExprKind::Block` arm above
self.cbox(0);
self.ibox(0);
self.print_block_with_attrs(blk, attrs);
}
- ast::ExprKind::Await(ref expr) => {
+ ast::ExprKind::Await(expr) => {
self.print_expr_maybe_paren(expr, parser::PREC_POSTFIX);
self.word(".await");
}
- ast::ExprKind::Assign(ref lhs, ref rhs, _) => {
+ ast::ExprKind::Assign(lhs, rhs, _) => {
let prec = AssocOp::Assign.precedence() as i8;
self.print_expr_maybe_paren(lhs, prec + 1);
self.space();
self.word_space("=");
self.print_expr_maybe_paren(rhs, prec);
}
- ast::ExprKind::AssignOp(op, ref lhs, ref rhs) => {
+ ast::ExprKind::AssignOp(op, lhs, rhs) => {
let prec = AssocOp::Assign.precedence() as i8;
self.print_expr_maybe_paren(lhs, prec + 1);
self.space();
@@ -454,45 +457,44 @@ impl<'a> State<'a> {
self.word_space("=");
self.print_expr_maybe_paren(rhs, prec);
}
- ast::ExprKind::Field(ref expr, ident) => {
+ ast::ExprKind::Field(expr, ident) => {
self.print_expr_maybe_paren(expr, parser::PREC_POSTFIX);
self.word(".");
- self.print_ident(ident);
+ self.print_ident(*ident);
}
- ast::ExprKind::Index(ref expr, ref index) => {
+ ast::ExprKind::Index(expr, index) => {
self.print_expr_maybe_paren(expr, parser::PREC_POSTFIX);
self.word("[");
self.print_expr(index);
self.word("]");
}
- ast::ExprKind::Range(ref start, ref end, limits) => {
+ ast::ExprKind::Range(start, end, limits) => {
// Special case for `Range`. `AssocOp` claims that `Range` has higher precedence
// than `Assign`, but `x .. x = x` gives a parse error instead of `x .. (x = x)`.
// Here we use a fake precedence value so that any child with lower precedence than
// a "normal" binop gets parenthesized. (`LOr` is the lowest-precedence binop.)
let fake_prec = AssocOp::LOr.precedence() as i8;
- if let Some(ref e) = *start {
+ if let Some(e) = start {
self.print_expr_maybe_paren(e, fake_prec);
}
- if limits == ast::RangeLimits::HalfOpen {
- self.word("..");
- } else {
- self.word("..=");
+ match limits {
+ ast::RangeLimits::HalfOpen => self.word(".."),
+ ast::RangeLimits::Closed => self.word("..="),
}
- if let Some(ref e) = *end {
+ if let Some(e) = end {
self.print_expr_maybe_paren(e, fake_prec);
}
}
ast::ExprKind::Underscore => self.word("_"),
- ast::ExprKind::Path(None, ref path) => self.print_path(path, true, 0),
- ast::ExprKind::Path(Some(ref qself), ref path) => self.print_qpath(path, qself, true),
- ast::ExprKind::Break(opt_label, ref opt_expr) => {
+ ast::ExprKind::Path(None, path) => self.print_path(path, true, 0),
+ ast::ExprKind::Path(Some(qself), path) => self.print_qpath(path, qself, true),
+ ast::ExprKind::Break(opt_label, opt_expr) => {
self.word("break");
if let Some(label) = opt_label {
self.space();
self.print_ident(label.ident);
}
- if let Some(ref expr) = *opt_expr {
+ if let Some(expr) = opt_expr {
self.space();
self.print_expr_maybe_paren(expr, parser::PREC_JUMP);
}
@@ -504,45 +506,45 @@ impl<'a> State<'a> {
self.print_ident(label.ident);
}
}
- ast::ExprKind::Ret(ref result) => {
+ ast::ExprKind::Ret(result) => {
self.word("return");
- if let Some(ref expr) = *result {
+ if let Some(expr) = result {
self.word(" ");
self.print_expr_maybe_paren(expr, parser::PREC_JUMP);
}
}
- ast::ExprKind::Yeet(ref result) => {
+ ast::ExprKind::Yeet(result) => {
self.word("do");
self.word(" ");
self.word("yeet");
- if let Some(ref expr) = *result {
+ if let Some(expr) = result {
self.word(" ");
self.print_expr_maybe_paren(expr, parser::PREC_JUMP);
}
}
- ast::ExprKind::InlineAsm(ref a) => {
+ ast::ExprKind::InlineAsm(a) => {
self.word("asm!");
self.print_inline_asm(a);
}
- ast::ExprKind::MacCall(ref m) => self.print_mac(m),
- ast::ExprKind::Paren(ref e) => {
+ ast::ExprKind::MacCall(m) => self.print_mac(m),
+ ast::ExprKind::Paren(e) => {
self.popen();
self.print_expr(e);
self.pclose();
}
- ast::ExprKind::Yield(ref e) => {
+ ast::ExprKind::Yield(e) => {
self.word("yield");
- if let Some(ref expr) = *e {
+ if let Some(expr) = e {
self.space();
self.print_expr_maybe_paren(expr, parser::PREC_JUMP);
}
}
- ast::ExprKind::Try(ref e) => {
+ ast::ExprKind::Try(e) => {
self.print_expr_maybe_paren(e, parser::PREC_POSTFIX);
self.word("?")
}
- ast::ExprKind::TryBlock(ref blk) => {
+ ast::ExprKind::TryBlock(blk) => {
self.cbox(0);
self.ibox(0);
self.word_nbsp("try");
@@ -569,15 +571,15 @@ impl<'a> State<'a> {
self.print_outer_attributes(&arm.attrs);
self.print_pat(&arm.pat);
self.space();
- if let Some(ref e) = arm.guard {
+ if let Some(e) = &arm.guard {
self.word_space("if");
self.print_expr(e);
self.space();
}
self.word_space("=>");
- match arm.body.kind {
- ast::ExprKind::Block(ref blk, opt_label) => {
+ match &arm.body.kind {
+ ast::ExprKind::Block(blk, opt_label) => {
if let Some(label) = opt_label {
self.print_ident(label.ident);
self.word_space(":");
@@ -604,7 +606,7 @@ impl<'a> State<'a> {
match binder {
ast::ClosureBinder::NotPresent => {}
ast::ClosureBinder::For { generic_params, .. } => {
- self.print_formal_generic_params(&generic_params)
+ self.print_formal_generic_params(generic_params)
}
}
}
diff --git a/compiler/rustc_ast_pretty/src/pprust/state/item.rs b/compiler/rustc_ast_pretty/src/pprust/state/item.rs
index 159853c9e..5b6a07721 100644
--- a/compiler/rustc_ast_pretty/src/pprust/state/item.rs
+++ b/compiler/rustc_ast_pretty/src/pprust/state/item.rs
@@ -136,10 +136,10 @@ impl<'a> State<'a> {
self.maybe_print_comment(item.span.lo());
self.print_outer_attributes(&item.attrs);
self.ann.pre(self, AnnNode::Item(item));
- match item.kind {
+ match &item.kind {
ast::ItemKind::ExternCrate(orig_name) => {
self.head(visibility_qualified(&item.vis, "extern crate"));
- if let Some(orig_name) = orig_name {
+ if let &Some(orig_name) = orig_name {
self.print_name(orig_name);
self.space();
self.word("as");
@@ -150,35 +150,41 @@ impl<'a> State<'a> {
self.end(); // end inner head-block
self.end(); // end outer head-block
}
- ast::ItemKind::Use(ref tree) => {
+ ast::ItemKind::Use(tree) => {
self.print_visibility(&item.vis);
self.word_nbsp("use");
self.print_use_tree(tree);
self.word(";");
}
- ast::ItemKind::Static(ref ty, mutbl, ref body) => {
+ ast::ItemKind::Static(ty, mutbl, body) => {
let def = ast::Defaultness::Final;
- self.print_item_const(item.ident, Some(mutbl), ty, body.as_deref(), &item.vis, def);
+ self.print_item_const(
+ item.ident,
+ Some(*mutbl),
+ ty,
+ body.as_deref(),
+ &item.vis,
+ def,
+ );
}
- ast::ItemKind::Const(def, ref ty, ref body) => {
- self.print_item_const(item.ident, None, ty, body.as_deref(), &item.vis, def);
+ ast::ItemKind::Const(def, ty, body) => {
+ self.print_item_const(item.ident, None, ty, body.as_deref(), &item.vis, *def);
}
- ast::ItemKind::Fn(box ast::Fn { defaultness, ref sig, ref generics, ref body }) => {
- let body = body.as_deref();
+ ast::ItemKind::Fn(box ast::Fn { defaultness, sig, generics, body }) => {
self.print_fn_full(
sig,
item.ident,
generics,
&item.vis,
- defaultness,
- body,
+ *defaultness,
+ body.as_deref(),
&item.attrs,
);
}
- ast::ItemKind::Mod(unsafety, ref mod_kind) => {
+ ast::ItemKind::Mod(unsafety, mod_kind) => {
self.head(Self::to_string(|s| {
s.print_visibility(&item.vis);
- s.print_unsafety(unsafety);
+ s.print_unsafety(*unsafety);
s.word("mod");
}));
self.print_ident(item.ident);
@@ -201,13 +207,13 @@ impl<'a> State<'a> {
}
}
}
- ast::ItemKind::ForeignMod(ref nmod) => {
+ ast::ItemKind::ForeignMod(nmod) => {
self.head(Self::to_string(|s| {
s.print_unsafety(nmod.unsafety);
s.word("extern");
}));
if let Some(abi) = nmod.abi {
- self.print_literal(&abi.as_lit());
+ self.print_token_literal(abi.as_token_lit(), abi.span);
self.nbsp();
}
self.bopen();
@@ -215,7 +221,7 @@ impl<'a> State<'a> {
let empty = item.attrs.is_empty() && nmod.items.is_empty();
self.bclose(item.span, empty);
}
- ast::ItemKind::GlobalAsm(ref asm) => {
+ ast::ItemKind::GlobalAsm(asm) => {
self.head(visibility_qualified(&item.vis, "global_asm!"));
self.print_inline_asm(asm);
self.word(";");
@@ -224,32 +230,31 @@ impl<'a> State<'a> {
}
ast::ItemKind::TyAlias(box ast::TyAlias {
defaultness,
- ref generics,
+ generics,
where_clauses,
where_predicates_split,
- ref bounds,
- ref ty,
+ bounds,
+ ty,
}) => {
- let ty = ty.as_deref();
self.print_associated_type(
item.ident,
generics,
- where_clauses,
- where_predicates_split,
+ *where_clauses,
+ *where_predicates_split,
bounds,
- ty,
+ ty.as_deref(),
&item.vis,
- defaultness,
+ *defaultness,
);
}
- ast::ItemKind::Enum(ref enum_definition, ref params) => {
+ ast::ItemKind::Enum(enum_definition, params) => {
self.print_enum_def(enum_definition, params, item.ident, item.span, &item.vis);
}
- ast::ItemKind::Struct(ref struct_def, ref generics) => {
+ ast::ItemKind::Struct(struct_def, generics) => {
self.head(visibility_qualified(&item.vis, "struct"));
self.print_struct(struct_def, generics, item.ident, item.span, true);
}
- ast::ItemKind::Union(ref struct_def, ref generics) => {
+ ast::ItemKind::Union(struct_def, generics) => {
self.head(visibility_qualified(&item.vis, "union"));
self.print_struct(struct_def, generics, item.ident, item.span, true);
}
@@ -258,15 +263,15 @@ impl<'a> State<'a> {
polarity,
defaultness,
constness,
- ref generics,
- ref of_trait,
- ref self_ty,
- ref items,
+ generics,
+ of_trait,
+ self_ty,
+ items,
}) => {
self.head("");
self.print_visibility(&item.vis);
- self.print_defaultness(defaultness);
- self.print_unsafety(unsafety);
+ self.print_defaultness(*defaultness);
+ self.print_unsafety(*unsafety);
self.word("impl");
if generics.params.is_empty() {
@@ -276,13 +281,13 @@ impl<'a> State<'a> {
self.space();
}
- self.print_constness(constness);
+ self.print_constness(*constness);
if let ast::ImplPolarity::Negative(_) = polarity {
self.word("!");
}
- if let Some(ref t) = *of_trait {
+ if let Some(t) = of_trait {
self.print_trait_ref(t);
self.space();
self.word_space("for");
@@ -303,21 +308,21 @@ impl<'a> State<'a> {
ast::ItemKind::Trait(box ast::Trait {
is_auto,
unsafety,
- ref generics,
- ref bounds,
- ref items,
+ generics,
+ bounds,
+ items,
..
}) => {
self.head("");
self.print_visibility(&item.vis);
- self.print_unsafety(unsafety);
- self.print_is_auto(is_auto);
+ self.print_unsafety(*unsafety);
+ self.print_is_auto(*is_auto);
self.word_nbsp("trait");
self.print_ident(item.ident);
self.print_generic_params(&generics.params);
let mut real_bounds = Vec::with_capacity(bounds.len());
for b in bounds.iter() {
- if let GenericBound::Trait(ref ptr, ast::TraitBoundModifier::Maybe) = *b {
+ if let GenericBound::Trait(ptr, ast::TraitBoundModifier::Maybe) = b {
self.space();
self.word_space("for ?");
self.print_trait_ref(&ptr.trait_ref);
@@ -339,38 +344,27 @@ impl<'a> State<'a> {
let empty = item.attrs.is_empty() && items.is_empty();
self.bclose(item.span, empty);
}
- ast::ItemKind::TraitAlias(ref generics, ref bounds) => {
+ ast::ItemKind::TraitAlias(generics, bounds) => {
self.head(visibility_qualified(&item.vis, "trait"));
self.print_ident(item.ident);
self.print_generic_params(&generics.params);
- let mut real_bounds = Vec::with_capacity(bounds.len());
- // FIXME(durka) this seems to be some quite outdated syntax
- for b in bounds.iter() {
- if let GenericBound::Trait(ref ptr, ast::TraitBoundModifier::Maybe) = *b {
- self.space();
- self.word_space("for ?");
- self.print_trait_ref(&ptr.trait_ref);
- } else {
- real_bounds.push(b.clone());
- }
- }
self.nbsp();
- if !real_bounds.is_empty() {
+ if !bounds.is_empty() {
self.word_nbsp("=");
- self.print_type_bounds(&real_bounds);
+ self.print_type_bounds(&bounds);
}
self.print_where_clause(&generics.where_clause);
self.word(";");
self.end(); // end inner head-block
self.end(); // end outer head-block
}
- ast::ItemKind::MacCall(ref mac) => {
+ ast::ItemKind::MacCall(mac) => {
self.print_mac(mac);
if mac.args.need_semicolon() {
self.word(";");
}
}
- ast::ItemKind::MacroDef(ref macro_def) => {
+ ast::ItemKind::MacroDef(macro_def) => {
self.print_mac_def(macro_def, &item.ident, item.span, |state| {
state.print_visibility(&item.vis)
});
@@ -412,11 +406,11 @@ impl<'a> State<'a> {
}
pub(crate) fn print_visibility(&mut self, vis: &ast::Visibility) {
- match vis.kind {
+ match &vis.kind {
ast::VisibilityKind::Public => self.word_nbsp("pub"),
- ast::VisibilityKind::Restricted { ref path, id: _, shorthand } => {
+ ast::VisibilityKind::Restricted { path, shorthand, .. } => {
let path = Self::to_string(|s| s.print_path(path, false, 0));
- if shorthand && (path == "crate" || path == "self" || path == "super") {
+ if *shorthand && (path == "crate" || path == "self" || path == "super") {
self.word_nbsp(format!("pub({})", path))
} else {
self.word_nbsp(format!("pub(in {})", path))
@@ -465,7 +459,7 @@ impl<'a> State<'a> {
) {
self.print_ident(ident);
self.print_generic_params(&generics.params);
- match struct_def {
+ match &struct_def {
ast::VariantData::Tuple(..) | ast::VariantData::Unit(..) => {
if let ast::VariantData::Tuple(..) = struct_def {
self.popen();
@@ -484,7 +478,7 @@ impl<'a> State<'a> {
self.end();
self.end(); // Close the outer-box.
}
- ast::VariantData::Struct(ref fields, ..) => {
+ ast::VariantData::Struct(fields, ..) => {
self.print_where_clause(&generics.where_clause);
self.print_record_struct_body(fields, span);
}
@@ -496,7 +490,7 @@ impl<'a> State<'a> {
self.print_visibility(&v.vis);
let generics = ast::Generics::default();
self.print_struct(&v.data, &generics, v.ident, v.span, false);
- if let Some(ref d) = v.disr_expr {
+ if let Some(d) = &v.disr_expr {
self.space();
self.word_space("=");
self.print_expr(&d.value)
@@ -657,10 +651,10 @@ impl<'a> State<'a> {
}
fn print_use_tree(&mut self, tree: &ast::UseTree) {
- match tree.kind {
- ast::UseTreeKind::Simple(rename, ..) => {
+ match &tree.kind {
+ ast::UseTreeKind::Simple(rename) => {
self.print_path(&tree.prefix, false, 0);
- if let Some(rename) = rename {
+ if let &Some(rename) = rename {
self.nbsp();
self.word_nbsp("as");
self.print_ident(rename);
@@ -673,7 +667,7 @@ impl<'a> State<'a> {
}
self.word("*");
}
- ast::UseTreeKind::Nested(ref items) => {
+ ast::UseTreeKind::Nested(items) => {
if !tree.prefix.segments.is_empty() {
self.print_path(&tree.prefix, false, 0);
self.word("::");
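The pretty-printer changes above (state.rs, expr.rs, item.rs) mostly follow one pattern: match on `&node.kind` instead of `node.kind`, drop the `ref` keywords from the bindings, and dereference `Copy` fields at their use sites. A minimal standalone sketch of that pattern, using made-up `Expr`/`ExprKind` types rather than rustc's own:

    struct Expr { kind: ExprKind }

    enum ExprKind {
        Paren(Box<Expr>), // non-Copy payload: bound by reference
        Lit(i64),         // Copy payload: dereferenced where it is used
    }

    // Old style: match the kind by value and mark non-Copy fields with `ref`.
    fn describe_old(expr: &Expr) -> String {
        match expr.kind {
            ExprKind::Paren(ref inner) => format!("({})", describe_old(inner)),
            ExprKind::Lit(n) => n.to_string(),
        }
    }

    // New style, as in this patch: match on `&expr.kind`; bindings become
    // references automatically, and Copy fields are read with `*`.
    fn describe_new(expr: &Expr) -> String {
        match &expr.kind {
            ExprKind::Paren(inner) => format!("({})", describe_new(inner)),
            ExprKind::Lit(n) => (*n).to_string(),
        }
    }

    fn main() {
        let e = Expr { kind: ExprKind::Paren(Box::new(Expr { kind: ExprKind::Lit(7) })) };
        assert_eq!(describe_old(&e), describe_new(&e)); // both render "(7)"
    }

Both forms compile; the second is what the hunks above migrate to, since it avoids sprinkling `ref` through every pattern.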
diff --git a/compiler/rustc_attr/src/builtin.rs b/compiler/rustc_attr/src/builtin.rs
index 753f62dd5..ab5e19050 100644
--- a/compiler/rustc_attr/src/builtin.rs
+++ b/compiler/rustc_attr/src/builtin.rs
@@ -1,7 +1,7 @@
//! Parsing and validation of builtin attributes
use rustc_ast as ast;
-use rustc_ast::{Attribute, Lit, LitKind, MetaItem, MetaItemKind, NestedMetaItem, NodeId};
+use rustc_ast::{Attribute, LitKind, MetaItem, MetaItemKind, MetaItemLit, NestedMetaItem, NodeId};
use rustc_ast_pretty::pprust;
use rustc_feature::{find_gated_cfg, is_builtin_attr_name, Features, GatedCfg};
use rustc_macros::HashStable_Generic;
@@ -277,8 +277,7 @@ where
allowed_through_unstable_modules = true;
}
// attributes with data
- else if let Some(MetaItem { kind: MetaItemKind::List(ref metas), .. }) = meta {
- let meta = meta.as_ref().unwrap();
+ else if let Some(meta @ MetaItem { kind: MetaItemKind::List(metas), .. }) = &meta {
let get = |meta: &MetaItem, item: &mut Option<Symbol>| {
if item.is_some() {
handle_errors(
@@ -486,7 +485,7 @@ where
continue 'outer;
}
},
- NestedMetaItem::Literal(lit) => {
+ NestedMetaItem::Lit(lit) => {
handle_errors(
&sess.parse_sess,
lit.span,
@@ -533,25 +532,24 @@ where
// Merge the const-unstable info into the stability info
if promotable {
- if let Some((ref mut stab, _)) = const_stab {
- stab.promotable = promotable;
- } else {
- sess.emit_err(session_diagnostics::RustcPromotablePairing { span: item_sp });
+ match &mut const_stab {
+ Some((stab, _)) => stab.promotable = promotable,
+ _ => _ = sess.emit_err(session_diagnostics::RustcPromotablePairing { span: item_sp }),
}
}
if allowed_through_unstable_modules {
- if let Some((
- Stability {
- level: StabilityLevel::Stable { ref mut allowed_through_unstable_modules, .. },
- ..
- },
- _,
- )) = stab
- {
- *allowed_through_unstable_modules = true;
- } else {
- sess.emit_err(session_diagnostics::RustcAllowedUnstablePairing { span: item_sp });
+ match &mut stab {
+ Some((
+ Stability {
+ level: StabilityLevel::Stable { allowed_through_unstable_modules, .. },
+ ..
+ },
+ _,
+ )) => *allowed_through_unstable_modules = true,
+ _ => {
+ sess.emit_err(session_diagnostics::RustcAllowedUnstablePairing { span: item_sp });
+ }
}
}
@@ -654,15 +652,15 @@ pub fn eval_condition(
features: Option<&Features>,
eval: &mut impl FnMut(Condition) -> bool,
) -> bool {
- match cfg.kind {
- ast::MetaItemKind::List(ref mis) if cfg.name_or_empty() == sym::version => {
+ match &cfg.kind {
+ ast::MetaItemKind::List(mis) if cfg.name_or_empty() == sym::version => {
try_gate_cfg(sym::version, cfg.span, sess, features);
let (min_version, span) = match &mis[..] {
- [NestedMetaItem::Literal(Lit { kind: LitKind::Str(sym, ..), span, .. })] => {
+ [NestedMetaItem::Lit(MetaItemLit { kind: LitKind::Str(sym, ..), span, .. })] => {
(sym, span)
}
[
- NestedMetaItem::Literal(Lit { span, .. })
+ NestedMetaItem::Lit(MetaItemLit { span, .. })
| NestedMetaItem::MetaItem(MetaItem { span, .. }),
] => {
sess.emit_err(session_diagnostics::ExpectedVersionLiteral { span: *span });
@@ -688,7 +686,7 @@ pub fn eval_condition(
rustc_version >= min_version
}
}
- ast::MetaItemKind::List(ref mis) => {
+ ast::MetaItemKind::List(mis) => {
for mi in mis.iter() {
if !mi.is_meta_item() {
handle_errors(
@@ -759,7 +757,7 @@ pub fn eval_condition(
sess.emit_err(session_diagnostics::CfgPredicateIdentifier { span: cfg.path.span });
true
}
- MetaItemKind::NameValue(ref lit) if !lit.kind.is_str() => {
+ MetaItemKind::NameValue(lit) if !lit.kind.is_str() => {
handle_errors(
sess,
lit.span,
@@ -899,7 +897,7 @@ where
continue 'outer;
}
},
- NestedMetaItem::Literal(lit) => {
+ NestedMetaItem::Lit(lit) => {
handle_errors(
&sess.parse_sess,
lit.span,
@@ -1036,52 +1034,58 @@ pub fn parse_repr_attr(sess: &Session, attr: &Attribute) -> Vec<ReprAttr> {
});
}
} else if let Some(meta_item) = item.meta_item() {
- if let MetaItemKind::NameValue(ref value) = meta_item.kind {
- if meta_item.has_name(sym::align) || meta_item.has_name(sym::packed) {
- let name = meta_item.name_or_empty().to_ident_string();
- recognised = true;
- sess.emit_err(session_diagnostics::IncorrectReprFormatGeneric {
- span: item.span(),
- repr_arg: &name,
- cause: IncorrectReprFormatGenericCause::from_lit_kind(
- item.span(),
- &value.kind,
- &name,
- ),
- });
- } else if matches!(
- meta_item.name_or_empty(),
- sym::C | sym::simd | sym::transparent
- ) || int_type_of_word(meta_item.name_or_empty()).is_some()
- {
- recognised = true;
- sess.emit_err(session_diagnostics::InvalidReprHintNoValue {
- span: meta_item.span,
- name: meta_item.name_or_empty().to_ident_string(),
- });
+ match &meta_item.kind {
+ MetaItemKind::NameValue(value) => {
+ if meta_item.has_name(sym::align) || meta_item.has_name(sym::packed) {
+ let name = meta_item.name_or_empty().to_ident_string();
+ recognised = true;
+ sess.emit_err(session_diagnostics::IncorrectReprFormatGeneric {
+ span: item.span(),
+ repr_arg: &name,
+ cause: IncorrectReprFormatGenericCause::from_lit_kind(
+ item.span(),
+ &value.kind,
+ &name,
+ ),
+ });
+ } else if matches!(
+ meta_item.name_or_empty(),
+ sym::C | sym::simd | sym::transparent
+ ) || int_type_of_word(meta_item.name_or_empty()).is_some()
+ {
+ recognised = true;
+ sess.emit_err(session_diagnostics::InvalidReprHintNoValue {
+ span: meta_item.span,
+ name: meta_item.name_or_empty().to_ident_string(),
+ });
+ }
}
- } else if let MetaItemKind::List(_) = meta_item.kind {
- if meta_item.has_name(sym::align) {
- recognised = true;
- sess.emit_err(session_diagnostics::IncorrectReprFormatAlignOneArg {
- span: meta_item.span,
- });
- } else if meta_item.has_name(sym::packed) {
- recognised = true;
- sess.emit_err(session_diagnostics::IncorrectReprFormatPackedOneOrZeroArg {
- span: meta_item.span,
- });
- } else if matches!(
- meta_item.name_or_empty(),
- sym::C | sym::simd | sym::transparent
- ) || int_type_of_word(meta_item.name_or_empty()).is_some()
- {
- recognised = true;
- sess.emit_err(session_diagnostics::InvalidReprHintNoParen {
- span: meta_item.span,
- name: meta_item.name_or_empty().to_ident_string(),
- });
+ MetaItemKind::List(_) => {
+ if meta_item.has_name(sym::align) {
+ recognised = true;
+ sess.emit_err(session_diagnostics::IncorrectReprFormatAlignOneArg {
+ span: meta_item.span,
+ });
+ } else if meta_item.has_name(sym::packed) {
+ recognised = true;
+ sess.emit_err(
+ session_diagnostics::IncorrectReprFormatPackedOneOrZeroArg {
+ span: meta_item.span,
+ },
+ );
+ } else if matches!(
+ meta_item.name_or_empty(),
+ sym::C | sym::simd | sym::transparent
+ ) || int_type_of_word(meta_item.name_or_empty()).is_some()
+ {
+ recognised = true;
+ sess.emit_err(session_diagnostics::InvalidReprHintNoParen {
+ span: meta_item.span,
+ name: meta_item.name_or_empty().to_ident_string(),
+ });
+ }
}
+ _ => (),
}
}
if !recognised {
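The rustc_attr rewrite above replaces `if let ... else` chains with a `match` on a mutable reference, so the success arm can mutate in place while the error path stays in the same expression. A minimal standalone sketch of that shape, with a simplified stand-in `Stability` type and error message rather than the compiler's own:

    struct Stability { promotable: bool }

    fn main() {
        let mut const_stab: Option<(Stability, u32)> = Some((Stability { promotable: false }, 0));
        // Old shape: `if let Some((ref mut stab, _)) = const_stab { ... } else { ... }`.
        // New shape, as in the builtin.rs hunk: matching on `&mut const_stab` hands
        // the success arm a mutable borrow of the payload.
        match &mut const_stab {
            Some((stab, _)) => stab.promotable = true,
            None => eprintln!("promotable attribute without a const stability attribute"),
        }
        assert!(matches!(const_stab, Some((Stability { promotable: true }, _))));
    }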
diff --git a/compiler/rustc_attr/src/session_diagnostics.rs b/compiler/rustc_attr/src/session_diagnostics.rs
index edccfa1c8..91c6bcb08 100644
--- a/compiler/rustc_attr/src/session_diagnostics.rs
+++ b/compiler/rustc_attr/src/session_diagnostics.rs
@@ -41,7 +41,7 @@ pub(crate) struct IncorrectMetaItem {
pub span: Span,
}
-// Error code: E0541
+/// Error code: E0541
pub(crate) struct UnknownMetaItem<'a> {
pub span: Span,
pub item: String,
@@ -200,7 +200,7 @@ pub(crate) struct InvalidReprHintNoValue {
pub name: String,
}
-// Error code: E0565
+/// Error code: E0565
pub(crate) struct UnsupportedLiteral {
pub span: Span,
pub reason: UnsupportedLiteralReason,
diff --git a/compiler/rustc_baked_icu_data/Cargo.toml b/compiler/rustc_baked_icu_data/Cargo.toml
new file mode 100644
index 000000000..3477306db
--- /dev/null
+++ b/compiler/rustc_baked_icu_data/Cargo.toml
@@ -0,0 +1,15 @@
+[package]
+name = "rustc_baked_icu_data"
+version = "0.0.0"
+edition = "2021"
+
+[dependencies]
+icu_list = "1.0.0"
+icu_locid = "1.0.0"
+icu_provider = "1.0.1"
+icu_provider_adapters = "1.0.0"
+litemap = "0.6.0"
+zerovec = "0.9.0"
+
+[features]
+rustc_use_parallel_compiler = ['icu_provider/sync']
diff --git a/compiler/rustc_baked_icu_data/src/data/any.rs b/compiler/rustc_baked_icu_data/src/data/any.rs
new file mode 100644
index 000000000..e8e99be93
--- /dev/null
+++ b/compiler/rustc_baked_icu_data/src/data/any.rs
@@ -0,0 +1,42 @@
+// @generated
+impl AnyProvider for BakedDataProvider {
+ fn load_any(&self, key: DataKey, req: DataRequest) -> Result<AnyResponse, DataError> {
+ const ANDLISTV1MARKER: ::icu_provider::DataKeyHash =
+ ::icu_list::provider::AndListV1Marker::KEY.hashed();
+ const COLLATIONFALLBACKSUPPLEMENTV1MARKER: ::icu_provider::DataKeyHash =
+ ::icu_provider_adapters::fallback::provider::CollationFallbackSupplementV1Marker::KEY
+ .hashed();
+ const LOCALEFALLBACKLIKELYSUBTAGSV1MARKER: ::icu_provider::DataKeyHash =
+ ::icu_provider_adapters::fallback::provider::LocaleFallbackLikelySubtagsV1Marker::KEY
+ .hashed();
+ const LOCALEFALLBACKPARENTSV1MARKER: ::icu_provider::DataKeyHash =
+ ::icu_provider_adapters::fallback::provider::LocaleFallbackParentsV1Marker::KEY
+ .hashed();
+ #[allow(clippy::match_single_binding)]
+ match key.hashed() {
+ ANDLISTV1MARKER => list::and_v1::DATA
+ .get_by(|k| req.locale.strict_cmp(k.as_bytes()).reverse())
+ .copied()
+ .map(AnyPayload::from_static_ref)
+ .ok_or(DataErrorKind::MissingLocale),
+ COLLATIONFALLBACKSUPPLEMENTV1MARKER => fallback::supplement::co_v1::DATA
+ .get_by(|k| req.locale.strict_cmp(k.as_bytes()).reverse())
+ .copied()
+ .map(AnyPayload::from_static_ref)
+ .ok_or(DataErrorKind::MissingLocale),
+ LOCALEFALLBACKLIKELYSUBTAGSV1MARKER => fallback::likelysubtags_v1::DATA
+ .get_by(|k| req.locale.strict_cmp(k.as_bytes()).reverse())
+ .copied()
+ .map(AnyPayload::from_static_ref)
+ .ok_or(DataErrorKind::MissingLocale),
+ LOCALEFALLBACKPARENTSV1MARKER => fallback::parents_v1::DATA
+ .get_by(|k| req.locale.strict_cmp(k.as_bytes()).reverse())
+ .copied()
+ .map(AnyPayload::from_static_ref)
+ .ok_or(DataErrorKind::MissingLocale),
+ _ => Err(DataErrorKind::MissingDataKey),
+ }
+ .map_err(|e| e.with_req(key, req))
+ .map(|payload| AnyResponse { payload: Some(payload), metadata: Default::default() })
+ }
+}
diff --git a/compiler/rustc_baked_icu_data/src/data/fallback/likelysubtags_v1.rs b/compiler/rustc_baked_icu_data/src/data/fallback/likelysubtags_v1.rs
new file mode 100644
index 000000000..0a90c832e
--- /dev/null
+++ b/compiler/rustc_baked_icu_data/src/data/fallback/likelysubtags_v1.rs
@@ -0,0 +1,733 @@
+// @generated
+type DataStruct = < :: icu_provider_adapters :: fallback :: provider :: LocaleFallbackLikelySubtagsV1Marker as :: icu_provider :: DataMarker > :: Yokeable ;
+pub static DATA: litemap::LiteMap<&str, &DataStruct, &[(&str, &DataStruct)]> =
+ litemap::LiteMap::from_sorted_store_unchecked(&[("und", UND)]);
+static UND: &DataStruct =
+ &::icu_provider_adapters::fallback::provider::LocaleFallbackLikelySubtagsV1 {
+ l2s: unsafe {
+ #[allow(unused_unsafe)]
+ ::zerovec::ZeroMap::from_parts_unchecked(
+ unsafe {
+ ::zerovec::ZeroVec::from_bytes_unchecked(&[
+ 97u8, 98u8, 0u8, 97u8, 98u8, 113u8, 97u8, 100u8, 112u8, 97u8, 100u8, 121u8,
+ 97u8, 101u8, 0u8, 97u8, 101u8, 98u8, 97u8, 104u8, 111u8, 97u8, 106u8,
+ 116u8, 97u8, 107u8, 107u8, 97u8, 108u8, 116u8, 97u8, 109u8, 0u8, 97u8,
+ 112u8, 99u8, 97u8, 112u8, 100u8, 97u8, 114u8, 0u8, 97u8, 114u8, 99u8, 97u8,
+ 114u8, 113u8, 97u8, 114u8, 115u8, 97u8, 114u8, 121u8, 97u8, 114u8, 122u8,
+ 97u8, 115u8, 0u8, 97u8, 115u8, 101u8, 97u8, 118u8, 0u8, 97u8, 118u8, 108u8,
+ 97u8, 119u8, 97u8, 98u8, 97u8, 0u8, 98u8, 97u8, 108u8, 98u8, 97u8, 112u8,
+ 98u8, 97u8, 120u8, 98u8, 99u8, 113u8, 98u8, 101u8, 0u8, 98u8, 101u8, 106u8,
+ 98u8, 102u8, 113u8, 98u8, 102u8, 116u8, 98u8, 102u8, 121u8, 98u8, 103u8,
+ 0u8, 98u8, 103u8, 99u8, 98u8, 103u8, 110u8, 98u8, 103u8, 120u8, 98u8,
+ 104u8, 98u8, 98u8, 104u8, 105u8, 98u8, 104u8, 111u8, 98u8, 106u8, 105u8,
+ 98u8, 106u8, 106u8, 98u8, 108u8, 116u8, 98u8, 110u8, 0u8, 98u8, 111u8, 0u8,
+ 98u8, 112u8, 121u8, 98u8, 113u8, 105u8, 98u8, 114u8, 97u8, 98u8, 114u8,
+ 104u8, 98u8, 114u8, 120u8, 98u8, 115u8, 113u8, 98u8, 115u8, 116u8, 98u8,
+ 116u8, 118u8, 98u8, 117u8, 97u8, 98u8, 121u8, 110u8, 99u8, 99u8, 112u8,
+ 99u8, 101u8, 0u8, 99u8, 104u8, 109u8, 99u8, 104u8, 114u8, 99u8, 106u8,
+ 97u8, 99u8, 106u8, 109u8, 99u8, 107u8, 98u8, 99u8, 109u8, 103u8, 99u8,
+ 111u8, 112u8, 99u8, 114u8, 0u8, 99u8, 114u8, 104u8, 99u8, 114u8, 107u8,
+ 99u8, 114u8, 108u8, 99u8, 115u8, 119u8, 99u8, 116u8, 100u8, 99u8, 117u8,
+ 0u8, 99u8, 118u8, 0u8, 100u8, 97u8, 114u8, 100u8, 99u8, 99u8, 100u8, 103u8,
+ 108u8, 100u8, 109u8, 102u8, 100u8, 111u8, 105u8, 100u8, 114u8, 104u8,
+ 100u8, 114u8, 115u8, 100u8, 116u8, 121u8, 100u8, 118u8, 0u8, 100u8, 122u8,
+ 0u8, 101u8, 103u8, 121u8, 101u8, 107u8, 121u8, 101u8, 108u8, 0u8, 101u8,
+ 115u8, 103u8, 101u8, 116u8, 116u8, 102u8, 97u8, 0u8, 102u8, 105u8, 97u8,
+ 102u8, 117u8, 98u8, 103u8, 97u8, 110u8, 103u8, 98u8, 109u8, 103u8, 98u8,
+ 122u8, 103u8, 101u8, 122u8, 103u8, 103u8, 110u8, 103u8, 106u8, 107u8,
+ 103u8, 106u8, 117u8, 103u8, 108u8, 107u8, 103u8, 109u8, 118u8, 103u8,
+ 111u8, 102u8, 103u8, 111u8, 109u8, 103u8, 111u8, 110u8, 103u8, 111u8,
+ 116u8, 103u8, 114u8, 99u8, 103u8, 114u8, 116u8, 103u8, 117u8, 0u8, 103u8,
+ 118u8, 114u8, 103u8, 119u8, 99u8, 103u8, 119u8, 116u8, 104u8, 97u8, 107u8,
+ 104u8, 97u8, 122u8, 104u8, 100u8, 121u8, 104u8, 101u8, 0u8, 104u8, 105u8,
+ 0u8, 104u8, 108u8, 117u8, 104u8, 109u8, 100u8, 104u8, 110u8, 100u8, 104u8,
+ 110u8, 101u8, 104u8, 110u8, 106u8, 104u8, 110u8, 111u8, 104u8, 111u8, 99u8,
+ 104u8, 111u8, 106u8, 104u8, 115u8, 110u8, 104u8, 121u8, 0u8, 105u8, 105u8,
+ 0u8, 105u8, 110u8, 104u8, 105u8, 117u8, 0u8, 105u8, 119u8, 0u8, 106u8,
+ 97u8, 0u8, 106u8, 105u8, 0u8, 106u8, 109u8, 108u8, 107u8, 97u8, 0u8, 107u8,
+ 97u8, 97u8, 107u8, 97u8, 119u8, 107u8, 98u8, 100u8, 107u8, 98u8, 121u8,
+ 107u8, 100u8, 116u8, 107u8, 102u8, 114u8, 107u8, 102u8, 121u8, 107u8,
+ 104u8, 98u8, 107u8, 104u8, 110u8, 107u8, 104u8, 116u8, 107u8, 104u8, 119u8,
+ 107u8, 106u8, 103u8, 107u8, 107u8, 0u8, 107u8, 109u8, 0u8, 107u8, 110u8,
+ 0u8, 107u8, 111u8, 0u8, 107u8, 111u8, 105u8, 107u8, 111u8, 107u8, 107u8,
+ 113u8, 121u8, 107u8, 114u8, 99u8, 107u8, 114u8, 117u8, 107u8, 115u8, 0u8,
+ 107u8, 116u8, 98u8, 107u8, 117u8, 109u8, 107u8, 118u8, 0u8, 107u8, 118u8,
+ 120u8, 107u8, 120u8, 99u8, 107u8, 120u8, 108u8, 107u8, 120u8, 109u8, 107u8,
+ 120u8, 112u8, 107u8, 121u8, 0u8, 107u8, 122u8, 104u8, 108u8, 97u8, 98u8,
+ 108u8, 97u8, 100u8, 108u8, 97u8, 104u8, 108u8, 98u8, 101u8, 108u8, 99u8,
+ 112u8, 108u8, 101u8, 112u8, 108u8, 101u8, 122u8, 108u8, 105u8, 102u8,
+ 108u8, 105u8, 115u8, 108u8, 107u8, 105u8, 108u8, 109u8, 110u8, 108u8,
+ 111u8, 0u8, 108u8, 114u8, 99u8, 108u8, 117u8, 122u8, 108u8, 119u8, 108u8,
+ 108u8, 122u8, 104u8, 109u8, 97u8, 103u8, 109u8, 97u8, 105u8, 109u8, 100u8,
+ 101u8, 109u8, 100u8, 102u8, 109u8, 100u8, 120u8, 109u8, 102u8, 97u8, 109u8,
+ 103u8, 112u8, 109u8, 107u8, 0u8, 109u8, 107u8, 105u8, 109u8, 108u8, 0u8,
+ 109u8, 110u8, 0u8, 109u8, 110u8, 105u8, 109u8, 110u8, 119u8, 109u8, 114u8,
+ 0u8, 109u8, 114u8, 100u8, 109u8, 114u8, 106u8, 109u8, 114u8, 111u8, 109u8,
+ 116u8, 114u8, 109u8, 118u8, 121u8, 109u8, 119u8, 114u8, 109u8, 119u8,
+ 119u8, 109u8, 121u8, 0u8, 109u8, 121u8, 109u8, 109u8, 121u8, 118u8, 109u8,
+ 121u8, 122u8, 109u8, 122u8, 110u8, 110u8, 97u8, 110u8, 110u8, 101u8, 0u8,
+ 110u8, 101u8, 119u8, 110u8, 110u8, 112u8, 110u8, 111u8, 100u8, 110u8,
+ 111u8, 101u8, 110u8, 111u8, 110u8, 110u8, 113u8, 111u8, 110u8, 115u8,
+ 107u8, 110u8, 115u8, 116u8, 111u8, 106u8, 0u8, 111u8, 106u8, 115u8, 111u8,
+ 114u8, 0u8, 111u8, 114u8, 117u8, 111u8, 115u8, 0u8, 111u8, 115u8, 97u8,
+ 111u8, 116u8, 97u8, 111u8, 116u8, 107u8, 111u8, 117u8, 105u8, 112u8, 97u8,
+ 0u8, 112u8, 97u8, 108u8, 112u8, 101u8, 111u8, 112u8, 104u8, 108u8, 112u8,
+ 104u8, 110u8, 112u8, 107u8, 97u8, 112u8, 110u8, 116u8, 112u8, 112u8, 97u8,
+ 112u8, 114u8, 97u8, 112u8, 114u8, 100u8, 112u8, 115u8, 0u8, 114u8, 97u8,
+ 106u8, 114u8, 104u8, 103u8, 114u8, 105u8, 102u8, 114u8, 106u8, 115u8,
+ 114u8, 107u8, 116u8, 114u8, 109u8, 116u8, 114u8, 117u8, 0u8, 114u8, 117u8,
+ 101u8, 114u8, 121u8, 117u8, 115u8, 97u8, 0u8, 115u8, 97u8, 104u8, 115u8,
+ 97u8, 116u8, 115u8, 97u8, 122u8, 115u8, 99u8, 107u8, 115u8, 99u8, 108u8,
+ 115u8, 100u8, 0u8, 115u8, 100u8, 104u8, 115u8, 103u8, 97u8, 115u8, 103u8,
+ 119u8, 115u8, 104u8, 105u8, 115u8, 104u8, 110u8, 115u8, 104u8, 117u8,
+ 115u8, 105u8, 0u8, 115u8, 107u8, 114u8, 115u8, 109u8, 112u8, 115u8, 111u8,
+ 103u8, 115u8, 111u8, 117u8, 115u8, 114u8, 0u8, 115u8, 114u8, 98u8, 115u8,
+ 114u8, 120u8, 115u8, 119u8, 98u8, 115u8, 119u8, 118u8, 115u8, 121u8, 108u8,
+ 115u8, 121u8, 114u8, 116u8, 97u8, 0u8, 116u8, 97u8, 106u8, 116u8, 99u8,
+ 121u8, 116u8, 100u8, 100u8, 116u8, 100u8, 103u8, 116u8, 100u8, 104u8,
+ 116u8, 101u8, 0u8, 116u8, 103u8, 0u8, 116u8, 104u8, 0u8, 116u8, 104u8,
+ 108u8, 116u8, 104u8, 113u8, 116u8, 104u8, 114u8, 116u8, 105u8, 0u8, 116u8,
+ 105u8, 103u8, 116u8, 107u8, 116u8, 116u8, 114u8, 119u8, 116u8, 115u8,
+ 100u8, 116u8, 115u8, 102u8, 116u8, 115u8, 106u8, 116u8, 116u8, 0u8, 116u8,
+ 116u8, 115u8, 116u8, 120u8, 103u8, 116u8, 120u8, 111u8, 116u8, 121u8,
+ 118u8, 117u8, 100u8, 105u8, 117u8, 100u8, 109u8, 117u8, 103u8, 0u8, 117u8,
+ 103u8, 97u8, 117u8, 107u8, 0u8, 117u8, 110u8, 114u8, 117u8, 110u8, 120u8,
+ 117u8, 114u8, 0u8, 118u8, 97u8, 105u8, 119u8, 97u8, 108u8, 119u8, 98u8,
+ 113u8, 119u8, 98u8, 114u8, 119u8, 110u8, 105u8, 119u8, 115u8, 103u8, 119u8,
+ 116u8, 109u8, 119u8, 117u8, 117u8, 120u8, 99u8, 111u8, 120u8, 99u8, 114u8,
+ 120u8, 108u8, 99u8, 120u8, 108u8, 100u8, 120u8, 109u8, 102u8, 120u8, 109u8,
+ 110u8, 120u8, 109u8, 114u8, 120u8, 110u8, 97u8, 120u8, 110u8, 114u8, 120u8,
+ 112u8, 114u8, 120u8, 115u8, 97u8, 120u8, 115u8, 114u8, 121u8, 105u8, 0u8,
+ 121u8, 117u8, 101u8, 122u8, 100u8, 106u8, 122u8, 103u8, 104u8, 122u8,
+ 104u8, 0u8, 122u8, 104u8, 120u8, 122u8, 107u8, 116u8,
+ ])
+ },
+ unsafe {
+ ::zerovec::ZeroVec::from_bytes_unchecked(&[
+ 67u8, 121u8, 114u8, 108u8, 67u8, 121u8, 114u8, 108u8, 84u8, 105u8, 98u8,
+ 116u8, 67u8, 121u8, 114u8, 108u8, 65u8, 118u8, 115u8, 116u8, 65u8, 114u8,
+ 97u8, 98u8, 65u8, 104u8, 111u8, 109u8, 65u8, 114u8, 97u8, 98u8, 88u8,
+ 115u8, 117u8, 120u8, 67u8, 121u8, 114u8, 108u8, 69u8, 116u8, 104u8, 105u8,
+ 65u8, 114u8, 97u8, 98u8, 65u8, 114u8, 97u8, 98u8, 65u8, 114u8, 97u8, 98u8,
+ 65u8, 114u8, 109u8, 105u8, 65u8, 114u8, 97u8, 98u8, 65u8, 114u8, 97u8,
+ 98u8, 65u8, 114u8, 97u8, 98u8, 65u8, 114u8, 97u8, 98u8, 66u8, 101u8, 110u8,
+ 103u8, 83u8, 103u8, 110u8, 119u8, 67u8, 121u8, 114u8, 108u8, 65u8, 114u8,
+ 97u8, 98u8, 68u8, 101u8, 118u8, 97u8, 67u8, 121u8, 114u8, 108u8, 65u8,
+ 114u8, 97u8, 98u8, 68u8, 101u8, 118u8, 97u8, 66u8, 97u8, 109u8, 117u8,
+ 69u8, 116u8, 104u8, 105u8, 67u8, 121u8, 114u8, 108u8, 65u8, 114u8, 97u8,
+ 98u8, 84u8, 97u8, 109u8, 108u8, 65u8, 114u8, 97u8, 98u8, 68u8, 101u8,
+ 118u8, 97u8, 67u8, 121u8, 114u8, 108u8, 68u8, 101u8, 118u8, 97u8, 65u8,
+ 114u8, 97u8, 98u8, 71u8, 114u8, 101u8, 107u8, 68u8, 101u8, 118u8, 97u8,
+ 68u8, 101u8, 118u8, 97u8, 68u8, 101u8, 118u8, 97u8, 69u8, 116u8, 104u8,
+ 105u8, 68u8, 101u8, 118u8, 97u8, 84u8, 97u8, 118u8, 116u8, 66u8, 101u8,
+ 110u8, 103u8, 84u8, 105u8, 98u8, 116u8, 66u8, 101u8, 110u8, 103u8, 65u8,
+ 114u8, 97u8, 98u8, 68u8, 101u8, 118u8, 97u8, 65u8, 114u8, 97u8, 98u8, 68u8,
+ 101u8, 118u8, 97u8, 66u8, 97u8, 115u8, 115u8, 69u8, 116u8, 104u8, 105u8,
+ 68u8, 101u8, 118u8, 97u8, 67u8, 121u8, 114u8, 108u8, 69u8, 116u8, 104u8,
+ 105u8, 67u8, 97u8, 107u8, 109u8, 67u8, 121u8, 114u8, 108u8, 67u8, 121u8,
+ 114u8, 108u8, 67u8, 104u8, 101u8, 114u8, 65u8, 114u8, 97u8, 98u8, 67u8,
+ 104u8, 97u8, 109u8, 65u8, 114u8, 97u8, 98u8, 83u8, 111u8, 121u8, 111u8,
+ 67u8, 111u8, 112u8, 116u8, 67u8, 97u8, 110u8, 115u8, 67u8, 121u8, 114u8,
+ 108u8, 67u8, 97u8, 110u8, 115u8, 67u8, 97u8, 110u8, 115u8, 67u8, 97u8,
+ 110u8, 115u8, 80u8, 97u8, 117u8, 99u8, 67u8, 121u8, 114u8, 108u8, 67u8,
+ 121u8, 114u8, 108u8, 67u8, 121u8, 114u8, 108u8, 65u8, 114u8, 97u8, 98u8,
+ 65u8, 114u8, 97u8, 98u8, 77u8, 101u8, 100u8, 102u8, 68u8, 101u8, 118u8,
+ 97u8, 77u8, 111u8, 110u8, 103u8, 69u8, 116u8, 104u8, 105u8, 68u8, 101u8,
+ 118u8, 97u8, 84u8, 104u8, 97u8, 97u8, 84u8, 105u8, 98u8, 116u8, 69u8,
+ 103u8, 121u8, 112u8, 75u8, 97u8, 108u8, 105u8, 71u8, 114u8, 101u8, 107u8,
+ 71u8, 111u8, 110u8, 109u8, 73u8, 116u8, 97u8, 108u8, 65u8, 114u8, 97u8,
+ 98u8, 65u8, 114u8, 97u8, 98u8, 65u8, 114u8, 97u8, 98u8, 72u8, 97u8, 110u8,
+ 115u8, 68u8, 101u8, 118u8, 97u8, 65u8, 114u8, 97u8, 98u8, 69u8, 116u8,
+ 104u8, 105u8, 68u8, 101u8, 118u8, 97u8, 65u8, 114u8, 97u8, 98u8, 65u8,
+ 114u8, 97u8, 98u8, 65u8, 114u8, 97u8, 98u8, 69u8, 116u8, 104u8, 105u8,
+ 69u8, 116u8, 104u8, 105u8, 68u8, 101u8, 118u8, 97u8, 84u8, 101u8, 108u8,
+ 117u8, 71u8, 111u8, 116u8, 104u8, 67u8, 112u8, 114u8, 116u8, 66u8, 101u8,
+ 110u8, 103u8, 71u8, 117u8, 106u8, 114u8, 68u8, 101u8, 118u8, 97u8, 65u8,
+ 114u8, 97u8, 98u8, 65u8, 114u8, 97u8, 98u8, 72u8, 97u8, 110u8, 115u8, 65u8,
+ 114u8, 97u8, 98u8, 69u8, 116u8, 104u8, 105u8, 72u8, 101u8, 98u8, 114u8,
+ 68u8, 101u8, 118u8, 97u8, 72u8, 108u8, 117u8, 119u8, 80u8, 108u8, 114u8,
+ 100u8, 65u8, 114u8, 97u8, 98u8, 68u8, 101u8, 118u8, 97u8, 72u8, 109u8,
+ 110u8, 112u8, 65u8, 114u8, 97u8, 98u8, 68u8, 101u8, 118u8, 97u8, 68u8,
+ 101u8, 118u8, 97u8, 72u8, 97u8, 110u8, 115u8, 65u8, 114u8, 109u8, 110u8,
+ 89u8, 105u8, 105u8, 105u8, 67u8, 121u8, 114u8, 108u8, 67u8, 97u8, 110u8,
+ 115u8, 72u8, 101u8, 98u8, 114u8, 74u8, 112u8, 97u8, 110u8, 72u8, 101u8,
+ 98u8, 114u8, 68u8, 101u8, 118u8, 97u8, 71u8, 101u8, 111u8, 114u8, 67u8,
+ 121u8, 114u8, 108u8, 75u8, 97u8, 119u8, 105u8, 67u8, 121u8, 114u8, 108u8,
+ 65u8, 114u8, 97u8, 98u8, 84u8, 104u8, 97u8, 105u8, 68u8, 101u8, 118u8,
+ 97u8, 68u8, 101u8, 118u8, 97u8, 84u8, 97u8, 108u8, 117u8, 68u8, 101u8,
+ 118u8, 97u8, 77u8, 121u8, 109u8, 114u8, 65u8, 114u8, 97u8, 98u8, 76u8,
+ 97u8, 111u8, 111u8, 67u8, 121u8, 114u8, 108u8, 75u8, 104u8, 109u8, 114u8,
+ 75u8, 110u8, 100u8, 97u8, 75u8, 111u8, 114u8, 101u8, 67u8, 121u8, 114u8,
+ 108u8, 68u8, 101u8, 118u8, 97u8, 69u8, 116u8, 104u8, 105u8, 67u8, 121u8,
+ 114u8, 108u8, 68u8, 101u8, 118u8, 97u8, 65u8, 114u8, 97u8, 98u8, 69u8,
+ 116u8, 104u8, 105u8, 67u8, 121u8, 114u8, 108u8, 67u8, 121u8, 114u8, 108u8,
+ 65u8, 114u8, 97u8, 98u8, 69u8, 116u8, 104u8, 105u8, 68u8, 101u8, 118u8,
+ 97u8, 84u8, 104u8, 97u8, 105u8, 65u8, 114u8, 97u8, 98u8, 67u8, 121u8,
+ 114u8, 108u8, 65u8, 114u8, 97u8, 98u8, 76u8, 105u8, 110u8, 97u8, 72u8,
+ 101u8, 98u8, 114u8, 65u8, 114u8, 97u8, 98u8, 67u8, 121u8, 114u8, 108u8,
+ 84u8, 104u8, 97u8, 105u8, 76u8, 101u8, 112u8, 99u8, 67u8, 121u8, 114u8,
+ 108u8, 68u8, 101u8, 118u8, 97u8, 76u8, 105u8, 115u8, 117u8, 65u8, 114u8,
+ 97u8, 98u8, 84u8, 101u8, 108u8, 117u8, 76u8, 97u8, 111u8, 111u8, 65u8,
+ 114u8, 97u8, 98u8, 65u8, 114u8, 97u8, 98u8, 84u8, 104u8, 97u8, 105u8, 72u8,
+ 97u8, 110u8, 115u8, 68u8, 101u8, 118u8, 97u8, 68u8, 101u8, 118u8, 97u8,
+ 65u8, 114u8, 97u8, 98u8, 67u8, 121u8, 114u8, 108u8, 69u8, 116u8, 104u8,
+ 105u8, 65u8, 114u8, 97u8, 98u8, 68u8, 101u8, 118u8, 97u8, 67u8, 121u8,
+ 114u8, 108u8, 65u8, 114u8, 97u8, 98u8, 77u8, 108u8, 121u8, 109u8, 67u8,
+ 121u8, 114u8, 108u8, 66u8, 101u8, 110u8, 103u8, 77u8, 121u8, 109u8, 114u8,
+ 68u8, 101u8, 118u8, 97u8, 68u8, 101u8, 118u8, 97u8, 67u8, 121u8, 114u8,
+ 108u8, 77u8, 114u8, 111u8, 111u8, 68u8, 101u8, 118u8, 97u8, 65u8, 114u8,
+ 97u8, 98u8, 68u8, 101u8, 118u8, 97u8, 72u8, 109u8, 110u8, 112u8, 77u8,
+ 121u8, 109u8, 114u8, 69u8, 116u8, 104u8, 105u8, 67u8, 121u8, 114u8, 108u8,
+ 77u8, 97u8, 110u8, 100u8, 65u8, 114u8, 97u8, 98u8, 72u8, 97u8, 110u8,
+ 115u8, 68u8, 101u8, 118u8, 97u8, 68u8, 101u8, 118u8, 97u8, 87u8, 99u8,
+ 104u8, 111u8, 76u8, 97u8, 110u8, 97u8, 68u8, 101u8, 118u8, 97u8, 82u8,
+ 117u8, 110u8, 114u8, 78u8, 107u8, 111u8, 111u8, 67u8, 97u8, 110u8, 115u8,
+ 84u8, 110u8, 115u8, 97u8, 67u8, 97u8, 110u8, 115u8, 67u8, 97u8, 110u8,
+ 115u8, 79u8, 114u8, 121u8, 97u8, 65u8, 114u8, 97u8, 98u8, 67u8, 121u8,
+ 114u8, 108u8, 79u8, 115u8, 103u8, 101u8, 65u8, 114u8, 97u8, 98u8, 79u8,
+ 114u8, 107u8, 104u8, 79u8, 117u8, 103u8, 114u8, 71u8, 117u8, 114u8, 117u8,
+ 80u8, 104u8, 108u8, 105u8, 88u8, 112u8, 101u8, 111u8, 65u8, 114u8, 97u8,
+ 98u8, 80u8, 104u8, 110u8, 120u8, 66u8, 114u8, 97u8, 104u8, 71u8, 114u8,
+ 101u8, 107u8, 68u8, 101u8, 118u8, 97u8, 75u8, 104u8, 97u8, 114u8, 65u8,
+ 114u8, 97u8, 98u8, 65u8, 114u8, 97u8, 98u8, 68u8, 101u8, 118u8, 97u8, 82u8,
+ 111u8, 104u8, 103u8, 84u8, 102u8, 110u8, 103u8, 68u8, 101u8, 118u8, 97u8,
+ 66u8, 101u8, 110u8, 103u8, 65u8, 114u8, 97u8, 98u8, 67u8, 121u8, 114u8,
+ 108u8, 67u8, 121u8, 114u8, 108u8, 75u8, 97u8, 110u8, 97u8, 68u8, 101u8,
+ 118u8, 97u8, 67u8, 121u8, 114u8, 108u8, 79u8, 108u8, 99u8, 107u8, 83u8,
+ 97u8, 117u8, 114u8, 68u8, 101u8, 118u8, 97u8, 65u8, 114u8, 97u8, 98u8,
+ 65u8, 114u8, 97u8, 98u8, 65u8, 114u8, 97u8, 98u8, 79u8, 103u8, 97u8, 109u8,
+ 69u8, 116u8, 104u8, 105u8, 84u8, 102u8, 110u8, 103u8, 77u8, 121u8, 109u8,
+ 114u8, 65u8, 114u8, 97u8, 98u8, 83u8, 105u8, 110u8, 104u8, 65u8, 114u8,
+ 97u8, 98u8, 83u8, 97u8, 109u8, 114u8, 83u8, 111u8, 103u8, 100u8, 84u8,
+ 104u8, 97u8, 105u8, 67u8, 121u8, 114u8, 108u8, 83u8, 111u8, 114u8, 97u8,
+ 68u8, 101u8, 118u8, 97u8, 65u8, 114u8, 97u8, 98u8, 68u8, 101u8, 118u8,
+ 97u8, 66u8, 101u8, 110u8, 103u8, 83u8, 121u8, 114u8, 99u8, 84u8, 97u8,
+ 109u8, 108u8, 68u8, 101u8, 118u8, 97u8, 75u8, 110u8, 100u8, 97u8, 84u8,
+ 97u8, 108u8, 101u8, 68u8, 101u8, 118u8, 97u8, 68u8, 101u8, 118u8, 97u8,
+ 84u8, 101u8, 108u8, 117u8, 67u8, 121u8, 114u8, 108u8, 84u8, 104u8, 97u8,
+ 105u8, 68u8, 101u8, 118u8, 97u8, 68u8, 101u8, 118u8, 97u8, 68u8, 101u8,
+ 118u8, 97u8, 69u8, 116u8, 104u8, 105u8, 69u8, 116u8, 104u8, 105u8, 68u8,
+ 101u8, 118u8, 97u8, 65u8, 114u8, 97u8, 98u8, 71u8, 114u8, 101u8, 107u8,
+ 68u8, 101u8, 118u8, 97u8, 84u8, 105u8, 98u8, 116u8, 67u8, 121u8, 114u8,
+ 108u8, 84u8, 104u8, 97u8, 105u8, 84u8, 97u8, 110u8, 103u8, 84u8, 111u8,
+ 116u8, 111u8, 67u8, 121u8, 114u8, 108u8, 65u8, 103u8, 104u8, 98u8, 67u8,
+ 121u8, 114u8, 108u8, 65u8, 114u8, 97u8, 98u8, 85u8, 103u8, 97u8, 114u8,
+ 67u8, 121u8, 114u8, 108u8, 66u8, 101u8, 110u8, 103u8, 66u8, 101u8, 110u8,
+ 103u8, 65u8, 114u8, 97u8, 98u8, 86u8, 97u8, 105u8, 105u8, 69u8, 116u8,
+ 104u8, 105u8, 84u8, 101u8, 108u8, 117u8, 68u8, 101u8, 118u8, 97u8, 65u8,
+ 114u8, 97u8, 98u8, 71u8, 111u8, 110u8, 103u8, 68u8, 101u8, 118u8, 97u8,
+ 72u8, 97u8, 110u8, 115u8, 67u8, 104u8, 114u8, 115u8, 67u8, 97u8, 114u8,
+ 105u8, 76u8, 121u8, 99u8, 105u8, 76u8, 121u8, 100u8, 105u8, 71u8, 101u8,
+ 111u8, 114u8, 77u8, 97u8, 110u8, 105u8, 77u8, 101u8, 114u8, 99u8, 78u8,
+ 97u8, 114u8, 98u8, 68u8, 101u8, 118u8, 97u8, 80u8, 114u8, 116u8, 105u8,
+ 83u8, 97u8, 114u8, 98u8, 68u8, 101u8, 118u8, 97u8, 72u8, 101u8, 98u8,
+ 114u8, 72u8, 97u8, 110u8, 116u8, 65u8, 114u8, 97u8, 98u8, 84u8, 102u8,
+ 110u8, 103u8, 72u8, 97u8, 110u8, 115u8, 78u8, 115u8, 104u8, 117u8, 75u8,
+ 105u8, 116u8, 115u8,
+ ])
+ },
+ )
+ },
+ lr2s: unsafe {
+ #[allow(unused_unsafe)]
+ ::zerovec::ZeroMap2d::from_parts_unchecked(
+ unsafe {
+ ::zerovec::ZeroVec::from_bytes_unchecked(&[
+ 97u8, 122u8, 0u8, 104u8, 97u8, 0u8, 107u8, 107u8, 0u8, 107u8, 117u8, 0u8,
+ 107u8, 121u8, 0u8, 109u8, 97u8, 110u8, 109u8, 110u8, 0u8, 109u8, 115u8,
+ 0u8, 112u8, 97u8, 0u8, 114u8, 105u8, 102u8, 115u8, 100u8, 0u8, 115u8,
+ 114u8, 0u8, 116u8, 103u8, 0u8, 117u8, 103u8, 0u8, 117u8, 110u8, 114u8,
+ 117u8, 122u8, 0u8, 121u8, 117u8, 101u8, 122u8, 104u8, 0u8,
+ ])
+ },
+ unsafe {
+ ::zerovec::ZeroVec::from_bytes_unchecked(&[
+ 3u8, 0u8, 0u8, 0u8, 5u8, 0u8, 0u8, 0u8, 9u8, 0u8, 0u8, 0u8, 10u8, 0u8, 0u8,
+ 0u8, 12u8, 0u8, 0u8, 0u8, 13u8, 0u8, 0u8, 0u8, 14u8, 0u8, 0u8, 0u8, 15u8,
+ 0u8, 0u8, 0u8, 16u8, 0u8, 0u8, 0u8, 17u8, 0u8, 0u8, 0u8, 18u8, 0u8, 0u8,
+ 0u8, 22u8, 0u8, 0u8, 0u8, 23u8, 0u8, 0u8, 0u8, 25u8, 0u8, 0u8, 0u8, 26u8,
+ 0u8, 0u8, 0u8, 28u8, 0u8, 0u8, 0u8, 29u8, 0u8, 0u8, 0u8, 44u8, 0u8, 0u8,
+ 0u8,
+ ])
+ },
+ unsafe {
+ ::zerovec::ZeroVec::from_bytes_unchecked(&[
+ 73u8, 81u8, 0u8, 73u8, 82u8, 0u8, 82u8, 85u8, 0u8, 67u8, 77u8, 0u8, 83u8,
+ 68u8, 0u8, 65u8, 70u8, 0u8, 67u8, 78u8, 0u8, 73u8, 82u8, 0u8, 77u8, 78u8,
+ 0u8, 76u8, 66u8, 0u8, 67u8, 78u8, 0u8, 84u8, 82u8, 0u8, 71u8, 78u8, 0u8,
+ 67u8, 78u8, 0u8, 67u8, 67u8, 0u8, 80u8, 75u8, 0u8, 78u8, 76u8, 0u8, 73u8,
+ 78u8, 0u8, 77u8, 69u8, 0u8, 82u8, 79u8, 0u8, 82u8, 85u8, 0u8, 84u8, 82u8,
+ 0u8, 80u8, 75u8, 0u8, 75u8, 90u8, 0u8, 77u8, 78u8, 0u8, 78u8, 80u8, 0u8,
+ 65u8, 70u8, 0u8, 67u8, 78u8, 0u8, 67u8, 78u8, 0u8, 65u8, 85u8, 0u8, 66u8,
+ 78u8, 0u8, 71u8, 66u8, 0u8, 71u8, 70u8, 0u8, 72u8, 75u8, 0u8, 73u8, 68u8,
+ 0u8, 77u8, 79u8, 0u8, 80u8, 65u8, 0u8, 80u8, 70u8, 0u8, 80u8, 72u8, 0u8,
+ 83u8, 82u8, 0u8, 84u8, 72u8, 0u8, 84u8, 87u8, 0u8, 85u8, 83u8, 0u8, 86u8,
+ 78u8, 0u8,
+ ])
+ },
+ unsafe {
+ ::zerovec::ZeroVec::from_bytes_unchecked(&[
+ 65u8, 114u8, 97u8, 98u8, 65u8, 114u8, 97u8, 98u8, 67u8, 121u8, 114u8,
+ 108u8, 65u8, 114u8, 97u8, 98u8, 65u8, 114u8, 97u8, 98u8, 65u8, 114u8, 97u8,
+ 98u8, 65u8, 114u8, 97u8, 98u8, 65u8, 114u8, 97u8, 98u8, 65u8, 114u8, 97u8,
+ 98u8, 65u8, 114u8, 97u8, 98u8, 65u8, 114u8, 97u8, 98u8, 76u8, 97u8, 116u8,
+ 110u8, 78u8, 107u8, 111u8, 111u8, 77u8, 111u8, 110u8, 103u8, 65u8, 114u8,
+ 97u8, 98u8, 65u8, 114u8, 97u8, 98u8, 76u8, 97u8, 116u8, 110u8, 68u8, 101u8,
+ 118u8, 97u8, 76u8, 97u8, 116u8, 110u8, 76u8, 97u8, 116u8, 110u8, 76u8,
+ 97u8, 116u8, 110u8, 76u8, 97u8, 116u8, 110u8, 65u8, 114u8, 97u8, 98u8,
+ 67u8, 121u8, 114u8, 108u8, 67u8, 121u8, 114u8, 108u8, 68u8, 101u8, 118u8,
+ 97u8, 65u8, 114u8, 97u8, 98u8, 67u8, 121u8, 114u8, 108u8, 72u8, 97u8,
+ 110u8, 115u8, 72u8, 97u8, 110u8, 116u8, 72u8, 97u8, 110u8, 116u8, 72u8,
+ 97u8, 110u8, 116u8, 72u8, 97u8, 110u8, 116u8, 72u8, 97u8, 110u8, 116u8,
+ 72u8, 97u8, 110u8, 116u8, 72u8, 97u8, 110u8, 116u8, 72u8, 97u8, 110u8,
+ 116u8, 72u8, 97u8, 110u8, 116u8, 72u8, 97u8, 110u8, 116u8, 72u8, 97u8,
+ 110u8, 116u8, 72u8, 97u8, 110u8, 116u8, 72u8, 97u8, 110u8, 116u8, 72u8,
+ 97u8, 110u8, 116u8, 72u8, 97u8, 110u8, 116u8,
+ ])
+ },
+ )
+ },
+ l2r: unsafe {
+ #[allow(unused_unsafe)]
+ ::zerovec::ZeroMap::from_parts_unchecked(
+ unsafe {
+ ::zerovec::ZeroVec::from_bytes_unchecked(&[
+ 97u8, 97u8, 0u8, 97u8, 98u8, 0u8, 97u8, 98u8, 114u8, 97u8, 99u8, 101u8,
+ 97u8, 99u8, 104u8, 97u8, 100u8, 97u8, 97u8, 100u8, 112u8, 97u8, 100u8,
+ 121u8, 97u8, 101u8, 0u8, 97u8, 101u8, 98u8, 97u8, 102u8, 0u8, 97u8, 103u8,
+ 113u8, 97u8, 104u8, 111u8, 97u8, 106u8, 116u8, 97u8, 107u8, 0u8, 97u8,
+ 107u8, 107u8, 97u8, 108u8, 110u8, 97u8, 108u8, 116u8, 97u8, 109u8, 0u8,
+ 97u8, 109u8, 111u8, 97u8, 110u8, 0u8, 97u8, 110u8, 110u8, 97u8, 111u8,
+ 122u8, 97u8, 112u8, 100u8, 97u8, 114u8, 0u8, 97u8, 114u8, 99u8, 97u8,
+ 114u8, 110u8, 97u8, 114u8, 111u8, 97u8, 114u8, 113u8, 97u8, 114u8, 115u8,
+ 97u8, 114u8, 121u8, 97u8, 114u8, 122u8, 97u8, 115u8, 0u8, 97u8, 115u8,
+ 97u8, 97u8, 115u8, 101u8, 97u8, 115u8, 116u8, 97u8, 116u8, 106u8, 97u8,
+ 118u8, 0u8, 97u8, 119u8, 97u8, 97u8, 121u8, 0u8, 97u8, 122u8, 0u8, 98u8,
+ 97u8, 0u8, 98u8, 97u8, 108u8, 98u8, 97u8, 110u8, 98u8, 97u8, 112u8, 98u8,
+ 97u8, 114u8, 98u8, 97u8, 115u8, 98u8, 97u8, 120u8, 98u8, 98u8, 99u8, 98u8,
+ 98u8, 106u8, 98u8, 99u8, 105u8, 98u8, 101u8, 0u8, 98u8, 101u8, 106u8, 98u8,
+ 101u8, 109u8, 98u8, 101u8, 119u8, 98u8, 101u8, 122u8, 98u8, 102u8, 100u8,
+ 98u8, 102u8, 113u8, 98u8, 102u8, 116u8, 98u8, 102u8, 121u8, 98u8, 103u8,
+ 0u8, 98u8, 103u8, 99u8, 98u8, 103u8, 110u8, 98u8, 103u8, 120u8, 98u8,
+ 104u8, 98u8, 98u8, 104u8, 105u8, 98u8, 104u8, 111u8, 98u8, 105u8, 0u8,
+ 98u8, 105u8, 107u8, 98u8, 105u8, 110u8, 98u8, 106u8, 106u8, 98u8, 106u8,
+ 110u8, 98u8, 106u8, 116u8, 98u8, 107u8, 109u8, 98u8, 107u8, 117u8, 98u8,
+ 108u8, 97u8, 98u8, 108u8, 103u8, 98u8, 108u8, 116u8, 98u8, 109u8, 0u8,
+ 98u8, 109u8, 113u8, 98u8, 110u8, 0u8, 98u8, 111u8, 0u8, 98u8, 112u8, 121u8,
+ 98u8, 113u8, 105u8, 98u8, 113u8, 118u8, 98u8, 114u8, 0u8, 98u8, 114u8,
+ 97u8, 98u8, 114u8, 104u8, 98u8, 114u8, 120u8, 98u8, 115u8, 0u8, 98u8,
+ 115u8, 113u8, 98u8, 115u8, 115u8, 98u8, 116u8, 111u8, 98u8, 116u8, 118u8,
+ 98u8, 117u8, 97u8, 98u8, 117u8, 99u8, 98u8, 117u8, 103u8, 98u8, 117u8,
+ 109u8, 98u8, 118u8, 98u8, 98u8, 121u8, 110u8, 98u8, 121u8, 118u8, 98u8,
+ 122u8, 101u8, 99u8, 97u8, 0u8, 99u8, 97u8, 100u8, 99u8, 99u8, 104u8, 99u8,
+ 99u8, 112u8, 99u8, 101u8, 0u8, 99u8, 101u8, 98u8, 99u8, 103u8, 103u8, 99u8,
+ 104u8, 0u8, 99u8, 104u8, 107u8, 99u8, 104u8, 109u8, 99u8, 104u8, 111u8,
+ 99u8, 104u8, 112u8, 99u8, 104u8, 114u8, 99u8, 105u8, 99u8, 99u8, 106u8,
+ 97u8, 99u8, 106u8, 109u8, 99u8, 107u8, 98u8, 99u8, 108u8, 99u8, 99u8,
+ 109u8, 103u8, 99u8, 111u8, 0u8, 99u8, 111u8, 112u8, 99u8, 112u8, 115u8,
+ 99u8, 114u8, 0u8, 99u8, 114u8, 103u8, 99u8, 114u8, 104u8, 99u8, 114u8,
+ 107u8, 99u8, 114u8, 108u8, 99u8, 114u8, 115u8, 99u8, 115u8, 0u8, 99u8,
+ 115u8, 98u8, 99u8, 115u8, 119u8, 99u8, 116u8, 100u8, 99u8, 117u8, 0u8,
+ 99u8, 118u8, 0u8, 99u8, 121u8, 0u8, 100u8, 97u8, 0u8, 100u8, 97u8, 102u8,
+ 100u8, 97u8, 107u8, 100u8, 97u8, 114u8, 100u8, 97u8, 118u8, 100u8, 99u8,
+ 99u8, 100u8, 101u8, 0u8, 100u8, 101u8, 110u8, 100u8, 103u8, 114u8, 100u8,
+ 106u8, 101u8, 100u8, 109u8, 102u8, 100u8, 110u8, 106u8, 100u8, 111u8,
+ 105u8, 100u8, 114u8, 104u8, 100u8, 115u8, 98u8, 100u8, 116u8, 109u8, 100u8,
+ 116u8, 112u8, 100u8, 116u8, 121u8, 100u8, 117u8, 97u8, 100u8, 118u8, 0u8,
+ 100u8, 121u8, 111u8, 100u8, 121u8, 117u8, 100u8, 122u8, 0u8, 101u8, 98u8,
+ 117u8, 101u8, 101u8, 0u8, 101u8, 102u8, 105u8, 101u8, 103u8, 108u8, 101u8,
+ 103u8, 121u8, 101u8, 107u8, 121u8, 101u8, 108u8, 0u8, 101u8, 110u8, 0u8,
+ 101u8, 111u8, 0u8, 101u8, 115u8, 0u8, 101u8, 115u8, 103u8, 101u8, 115u8,
+ 117u8, 101u8, 116u8, 0u8, 101u8, 116u8, 116u8, 101u8, 117u8, 0u8, 101u8,
+ 119u8, 111u8, 101u8, 120u8, 116u8, 102u8, 97u8, 0u8, 102u8, 97u8, 110u8,
+ 102u8, 102u8, 0u8, 102u8, 102u8, 109u8, 102u8, 105u8, 0u8, 102u8, 105u8,
+ 97u8, 102u8, 105u8, 108u8, 102u8, 105u8, 116u8, 102u8, 106u8, 0u8, 102u8,
+ 111u8, 0u8, 102u8, 111u8, 110u8, 102u8, 114u8, 0u8, 102u8, 114u8, 99u8,
+ 102u8, 114u8, 112u8, 102u8, 114u8, 114u8, 102u8, 114u8, 115u8, 102u8,
+ 117u8, 98u8, 102u8, 117u8, 100u8, 102u8, 117u8, 102u8, 102u8, 117u8, 113u8,
+ 102u8, 117u8, 114u8, 102u8, 117u8, 118u8, 102u8, 118u8, 114u8, 102u8,
+ 121u8, 0u8, 103u8, 97u8, 0u8, 103u8, 97u8, 97u8, 103u8, 97u8, 103u8, 103u8,
+ 97u8, 110u8, 103u8, 97u8, 121u8, 103u8, 98u8, 109u8, 103u8, 98u8, 122u8,
+ 103u8, 99u8, 114u8, 103u8, 100u8, 0u8, 103u8, 101u8, 122u8, 103u8, 103u8,
+ 110u8, 103u8, 105u8, 108u8, 103u8, 106u8, 107u8, 103u8, 106u8, 117u8,
+ 103u8, 108u8, 0u8, 103u8, 108u8, 107u8, 103u8, 110u8, 0u8, 103u8, 111u8,
+ 109u8, 103u8, 111u8, 110u8, 103u8, 111u8, 114u8, 103u8, 111u8, 115u8,
+ 103u8, 111u8, 116u8, 103u8, 114u8, 99u8, 103u8, 114u8, 116u8, 103u8, 115u8,
+ 119u8, 103u8, 117u8, 0u8, 103u8, 117u8, 98u8, 103u8, 117u8, 99u8, 103u8,
+ 117u8, 114u8, 103u8, 117u8, 122u8, 103u8, 118u8, 0u8, 103u8, 118u8, 114u8,
+ 103u8, 119u8, 105u8, 104u8, 97u8, 0u8, 104u8, 97u8, 107u8, 104u8, 97u8,
+ 119u8, 104u8, 97u8, 122u8, 104u8, 101u8, 0u8, 104u8, 105u8, 0u8, 104u8,
+ 105u8, 102u8, 104u8, 105u8, 108u8, 104u8, 108u8, 117u8, 104u8, 109u8,
+ 100u8, 104u8, 110u8, 100u8, 104u8, 110u8, 101u8, 104u8, 110u8, 106u8,
+ 104u8, 110u8, 110u8, 104u8, 110u8, 111u8, 104u8, 111u8, 0u8, 104u8, 111u8,
+ 99u8, 104u8, 111u8, 106u8, 104u8, 114u8, 0u8, 104u8, 115u8, 98u8, 104u8,
+ 115u8, 110u8, 104u8, 116u8, 0u8, 104u8, 117u8, 0u8, 104u8, 117u8, 114u8,
+ 104u8, 121u8, 0u8, 104u8, 122u8, 0u8, 105u8, 97u8, 0u8, 105u8, 98u8, 97u8,
+ 105u8, 98u8, 98u8, 105u8, 100u8, 0u8, 105u8, 102u8, 101u8, 105u8, 103u8,
+ 0u8, 105u8, 105u8, 0u8, 105u8, 107u8, 0u8, 105u8, 108u8, 111u8, 105u8,
+ 110u8, 0u8, 105u8, 110u8, 104u8, 105u8, 111u8, 0u8, 105u8, 115u8, 0u8,
+ 105u8, 116u8, 0u8, 105u8, 117u8, 0u8, 105u8, 119u8, 0u8, 105u8, 122u8,
+ 104u8, 106u8, 97u8, 0u8, 106u8, 97u8, 109u8, 106u8, 98u8, 111u8, 106u8,
+ 103u8, 111u8, 106u8, 105u8, 0u8, 106u8, 109u8, 99u8, 106u8, 109u8, 108u8,
+ 106u8, 117u8, 116u8, 106u8, 118u8, 0u8, 106u8, 119u8, 0u8, 107u8, 97u8,
+ 0u8, 107u8, 97u8, 97u8, 107u8, 97u8, 98u8, 107u8, 97u8, 99u8, 107u8, 97u8,
+ 106u8, 107u8, 97u8, 109u8, 107u8, 97u8, 111u8, 107u8, 97u8, 119u8, 107u8,
+ 98u8, 100u8, 107u8, 98u8, 121u8, 107u8, 99u8, 103u8, 107u8, 99u8, 107u8,
+ 107u8, 100u8, 101u8, 107u8, 100u8, 104u8, 107u8, 100u8, 116u8, 107u8,
+ 101u8, 97u8, 107u8, 101u8, 110u8, 107u8, 102u8, 111u8, 107u8, 102u8, 114u8,
+ 107u8, 102u8, 121u8, 107u8, 103u8, 0u8, 107u8, 103u8, 101u8, 107u8, 103u8,
+ 112u8, 107u8, 104u8, 97u8, 107u8, 104u8, 98u8, 107u8, 104u8, 110u8, 107u8,
+ 104u8, 113u8, 107u8, 104u8, 116u8, 107u8, 104u8, 119u8, 107u8, 105u8, 0u8,
+ 107u8, 105u8, 117u8, 107u8, 106u8, 0u8, 107u8, 106u8, 103u8, 107u8, 107u8,
+ 0u8, 107u8, 107u8, 106u8, 107u8, 108u8, 0u8, 107u8, 108u8, 110u8, 107u8,
+ 109u8, 0u8, 107u8, 109u8, 98u8, 107u8, 110u8, 0u8, 107u8, 110u8, 102u8,
+ 107u8, 111u8, 0u8, 107u8, 111u8, 105u8, 107u8, 111u8, 107u8, 107u8, 111u8,
+ 115u8, 107u8, 112u8, 101u8, 107u8, 114u8, 99u8, 107u8, 114u8, 105u8, 107u8,
+ 114u8, 106u8, 107u8, 114u8, 108u8, 107u8, 114u8, 117u8, 107u8, 115u8, 0u8,
+ 107u8, 115u8, 98u8, 107u8, 115u8, 102u8, 107u8, 115u8, 104u8, 107u8, 116u8,
+ 114u8, 107u8, 117u8, 0u8, 107u8, 117u8, 109u8, 107u8, 118u8, 0u8, 107u8,
+ 118u8, 114u8, 107u8, 118u8, 120u8, 107u8, 119u8, 0u8, 107u8, 119u8, 107u8,
+ 107u8, 120u8, 108u8, 107u8, 120u8, 109u8, 107u8, 120u8, 112u8, 107u8,
+ 121u8, 0u8, 107u8, 122u8, 106u8, 107u8, 122u8, 116u8, 108u8, 97u8, 0u8,
+ 108u8, 97u8, 98u8, 108u8, 97u8, 100u8, 108u8, 97u8, 103u8, 108u8, 97u8,
+ 104u8, 108u8, 97u8, 106u8, 108u8, 98u8, 0u8, 108u8, 98u8, 101u8, 108u8,
+ 98u8, 119u8, 108u8, 99u8, 112u8, 108u8, 101u8, 112u8, 108u8, 101u8, 122u8,
+ 108u8, 103u8, 0u8, 108u8, 105u8, 0u8, 108u8, 105u8, 102u8, 108u8, 105u8,
+ 106u8, 108u8, 105u8, 108u8, 108u8, 105u8, 115u8, 108u8, 106u8, 112u8,
+ 108u8, 107u8, 105u8, 108u8, 107u8, 116u8, 108u8, 109u8, 110u8, 108u8,
+ 109u8, 111u8, 108u8, 110u8, 0u8, 108u8, 111u8, 0u8, 108u8, 111u8, 108u8,
+ 108u8, 111u8, 122u8, 108u8, 114u8, 99u8, 108u8, 116u8, 0u8, 108u8, 116u8,
+ 103u8, 108u8, 117u8, 0u8, 108u8, 117u8, 97u8, 108u8, 117u8, 111u8, 108u8,
+ 117u8, 121u8, 108u8, 117u8, 122u8, 108u8, 118u8, 0u8, 108u8, 119u8, 108u8,
+ 108u8, 122u8, 104u8, 108u8, 122u8, 122u8, 109u8, 97u8, 100u8, 109u8, 97u8,
+ 102u8, 109u8, 97u8, 103u8, 109u8, 97u8, 105u8, 109u8, 97u8, 107u8, 109u8,
+ 97u8, 110u8, 109u8, 97u8, 115u8, 109u8, 97u8, 122u8, 109u8, 100u8, 102u8,
+ 109u8, 100u8, 104u8, 109u8, 100u8, 114u8, 109u8, 101u8, 110u8, 109u8,
+ 101u8, 114u8, 109u8, 102u8, 97u8, 109u8, 102u8, 101u8, 109u8, 103u8, 0u8,
+ 109u8, 103u8, 104u8, 109u8, 103u8, 111u8, 109u8, 103u8, 112u8, 109u8,
+ 103u8, 121u8, 109u8, 104u8, 0u8, 109u8, 105u8, 0u8, 109u8, 105u8, 99u8,
+ 109u8, 105u8, 110u8, 109u8, 107u8, 0u8, 109u8, 108u8, 0u8, 109u8, 108u8,
+ 115u8, 109u8, 110u8, 0u8, 109u8, 110u8, 105u8, 109u8, 110u8, 119u8, 109u8,
+ 111u8, 0u8, 109u8, 111u8, 101u8, 109u8, 111u8, 104u8, 109u8, 111u8, 115u8,
+ 109u8, 114u8, 0u8, 109u8, 114u8, 100u8, 109u8, 114u8, 106u8, 109u8, 114u8,
+ 111u8, 109u8, 115u8, 0u8, 109u8, 116u8, 0u8, 109u8, 116u8, 114u8, 109u8,
+ 117u8, 97u8, 109u8, 117u8, 115u8, 109u8, 118u8, 121u8, 109u8, 119u8, 107u8,
+ 109u8, 119u8, 114u8, 109u8, 119u8, 118u8, 109u8, 119u8, 119u8, 109u8,
+ 120u8, 99u8, 109u8, 121u8, 0u8, 109u8, 121u8, 118u8, 109u8, 121u8, 120u8,
+ 109u8, 121u8, 122u8, 109u8, 122u8, 110u8, 110u8, 97u8, 0u8, 110u8, 97u8,
+ 110u8, 110u8, 97u8, 112u8, 110u8, 97u8, 113u8, 110u8, 98u8, 0u8, 110u8,
+ 99u8, 104u8, 110u8, 100u8, 0u8, 110u8, 100u8, 99u8, 110u8, 100u8, 115u8,
+ 110u8, 101u8, 0u8, 110u8, 101u8, 119u8, 110u8, 103u8, 0u8, 110u8, 103u8,
+ 108u8, 110u8, 104u8, 101u8, 110u8, 104u8, 119u8, 110u8, 105u8, 106u8,
+ 110u8, 105u8, 117u8, 110u8, 106u8, 111u8, 110u8, 108u8, 0u8, 110u8, 109u8,
+ 103u8, 110u8, 110u8, 0u8, 110u8, 110u8, 104u8, 110u8, 110u8, 112u8, 110u8,
+ 111u8, 0u8, 110u8, 111u8, 100u8, 110u8, 111u8, 101u8, 110u8, 111u8, 110u8,
+ 110u8, 113u8, 111u8, 110u8, 114u8, 0u8, 110u8, 115u8, 107u8, 110u8, 115u8,
+ 111u8, 110u8, 115u8, 116u8, 110u8, 117u8, 115u8, 110u8, 118u8, 0u8, 110u8,
+ 120u8, 113u8, 110u8, 121u8, 0u8, 110u8, 121u8, 109u8, 110u8, 121u8, 110u8,
+ 110u8, 122u8, 105u8, 111u8, 99u8, 0u8, 111u8, 106u8, 0u8, 111u8, 106u8,
+ 115u8, 111u8, 107u8, 97u8, 111u8, 109u8, 0u8, 111u8, 114u8, 0u8, 111u8,
+ 115u8, 0u8, 111u8, 115u8, 97u8, 111u8, 116u8, 107u8, 111u8, 117u8, 105u8,
+ 112u8, 97u8, 0u8, 112u8, 97u8, 103u8, 112u8, 97u8, 108u8, 112u8, 97u8,
+ 109u8, 112u8, 97u8, 112u8, 112u8, 97u8, 117u8, 112u8, 99u8, 100u8, 112u8,
+ 99u8, 109u8, 112u8, 100u8, 99u8, 112u8, 100u8, 116u8, 112u8, 101u8, 111u8,
+ 112u8, 102u8, 108u8, 112u8, 104u8, 110u8, 112u8, 105u8, 115u8, 112u8,
+ 107u8, 97u8, 112u8, 107u8, 111u8, 112u8, 108u8, 0u8, 112u8, 109u8, 115u8,
+ 112u8, 110u8, 116u8, 112u8, 111u8, 110u8, 112u8, 112u8, 97u8, 112u8, 113u8,
+ 109u8, 112u8, 114u8, 97u8, 112u8, 114u8, 100u8, 112u8, 114u8, 103u8, 112u8,
+ 115u8, 0u8, 112u8, 116u8, 0u8, 112u8, 117u8, 117u8, 113u8, 117u8, 0u8,
+ 113u8, 117u8, 99u8, 113u8, 117u8, 103u8, 114u8, 97u8, 106u8, 114u8, 99u8,
+ 102u8, 114u8, 101u8, 106u8, 114u8, 103u8, 110u8, 114u8, 104u8, 103u8,
+ 114u8, 105u8, 97u8, 114u8, 105u8, 102u8, 114u8, 106u8, 115u8, 114u8, 107u8,
+ 116u8, 114u8, 109u8, 0u8, 114u8, 109u8, 102u8, 114u8, 109u8, 111u8, 114u8,
+ 109u8, 116u8, 114u8, 109u8, 117u8, 114u8, 110u8, 0u8, 114u8, 110u8, 103u8,
+ 114u8, 111u8, 0u8, 114u8, 111u8, 98u8, 114u8, 111u8, 102u8, 114u8, 116u8,
+ 109u8, 114u8, 117u8, 0u8, 114u8, 117u8, 101u8, 114u8, 117u8, 103u8, 114u8,
+ 119u8, 0u8, 114u8, 119u8, 107u8, 114u8, 121u8, 117u8, 115u8, 97u8, 0u8,
+ 115u8, 97u8, 102u8, 115u8, 97u8, 104u8, 115u8, 97u8, 113u8, 115u8, 97u8,
+ 115u8, 115u8, 97u8, 116u8, 115u8, 97u8, 118u8, 115u8, 97u8, 122u8, 115u8,
+ 98u8, 112u8, 115u8, 99u8, 0u8, 115u8, 99u8, 107u8, 115u8, 99u8, 110u8,
+ 115u8, 99u8, 111u8, 115u8, 100u8, 0u8, 115u8, 100u8, 99u8, 115u8, 100u8,
+ 104u8, 115u8, 101u8, 0u8, 115u8, 101u8, 102u8, 115u8, 101u8, 104u8, 115u8,
+ 101u8, 105u8, 115u8, 101u8, 115u8, 115u8, 103u8, 0u8, 115u8, 103u8, 97u8,
+ 115u8, 103u8, 115u8, 115u8, 104u8, 105u8, 115u8, 104u8, 110u8, 115u8,
+ 105u8, 0u8, 115u8, 105u8, 100u8, 115u8, 107u8, 0u8, 115u8, 107u8, 114u8,
+ 115u8, 108u8, 0u8, 115u8, 108u8, 105u8, 115u8, 108u8, 121u8, 115u8, 109u8,
+ 0u8, 115u8, 109u8, 97u8, 115u8, 109u8, 100u8, 115u8, 109u8, 106u8, 115u8,
+ 109u8, 110u8, 115u8, 109u8, 112u8, 115u8, 109u8, 115u8, 115u8, 110u8, 0u8,
+ 115u8, 110u8, 98u8, 115u8, 110u8, 107u8, 115u8, 111u8, 0u8, 115u8, 111u8,
+ 103u8, 115u8, 111u8, 117u8, 115u8, 113u8, 0u8, 115u8, 114u8, 0u8, 115u8,
+ 114u8, 98u8, 115u8, 114u8, 110u8, 115u8, 114u8, 114u8, 115u8, 114u8, 120u8,
+ 115u8, 115u8, 0u8, 115u8, 115u8, 121u8, 115u8, 116u8, 0u8, 115u8, 116u8,
+ 113u8, 115u8, 117u8, 0u8, 115u8, 117u8, 107u8, 115u8, 117u8, 115u8, 115u8,
+ 118u8, 0u8, 115u8, 119u8, 0u8, 115u8, 119u8, 98u8, 115u8, 119u8, 99u8,
+ 115u8, 119u8, 103u8, 115u8, 119u8, 118u8, 115u8, 120u8, 110u8, 115u8,
+ 121u8, 108u8, 115u8, 121u8, 114u8, 115u8, 122u8, 108u8, 116u8, 97u8, 0u8,
+ 116u8, 97u8, 106u8, 116u8, 98u8, 119u8, 116u8, 99u8, 121u8, 116u8, 100u8,
+ 100u8, 116u8, 100u8, 103u8, 116u8, 100u8, 104u8, 116u8, 100u8, 117u8,
+ 116u8, 101u8, 0u8, 116u8, 101u8, 109u8, 116u8, 101u8, 111u8, 116u8, 101u8,
+ 116u8, 116u8, 103u8, 0u8, 116u8, 104u8, 0u8, 116u8, 104u8, 108u8, 116u8,
+ 104u8, 113u8, 116u8, 104u8, 114u8, 116u8, 105u8, 0u8, 116u8, 105u8, 103u8,
+ 116u8, 105u8, 118u8, 116u8, 107u8, 0u8, 116u8, 107u8, 108u8, 116u8, 107u8,
+ 114u8, 116u8, 107u8, 116u8, 116u8, 108u8, 0u8, 116u8, 108u8, 121u8, 116u8,
+ 109u8, 104u8, 116u8, 110u8, 0u8, 116u8, 111u8, 0u8, 116u8, 111u8, 103u8,
+ 116u8, 111u8, 107u8, 116u8, 112u8, 105u8, 116u8, 114u8, 0u8, 116u8, 114u8,
+ 117u8, 116u8, 114u8, 118u8, 116u8, 114u8, 119u8, 116u8, 115u8, 0u8, 116u8,
+ 115u8, 100u8, 116u8, 115u8, 102u8, 116u8, 115u8, 103u8, 116u8, 115u8,
+ 106u8, 116u8, 116u8, 0u8, 116u8, 116u8, 106u8, 116u8, 116u8, 115u8, 116u8,
+ 116u8, 116u8, 116u8, 117u8, 109u8, 116u8, 118u8, 108u8, 116u8, 119u8,
+ 113u8, 116u8, 120u8, 103u8, 116u8, 120u8, 111u8, 116u8, 121u8, 0u8, 116u8,
+ 121u8, 118u8, 116u8, 122u8, 109u8, 117u8, 100u8, 105u8, 117u8, 100u8,
+ 109u8, 117u8, 103u8, 0u8, 117u8, 103u8, 97u8, 117u8, 107u8, 0u8, 117u8,
+ 108u8, 105u8, 117u8, 109u8, 98u8, 117u8, 110u8, 114u8, 117u8, 110u8, 120u8,
+ 117u8, 114u8, 0u8, 117u8, 122u8, 0u8, 118u8, 97u8, 105u8, 118u8, 101u8,
+ 0u8, 118u8, 101u8, 99u8, 118u8, 101u8, 112u8, 118u8, 105u8, 0u8, 118u8,
+ 105u8, 99u8, 118u8, 108u8, 115u8, 118u8, 109u8, 102u8, 118u8, 109u8, 119u8,
+ 118u8, 111u8, 0u8, 118u8, 111u8, 116u8, 118u8, 114u8, 111u8, 118u8, 117u8,
+ 110u8, 119u8, 97u8, 0u8, 119u8, 97u8, 101u8, 119u8, 97u8, 108u8, 119u8,
+ 97u8, 114u8, 119u8, 98u8, 112u8, 119u8, 98u8, 113u8, 119u8, 98u8, 114u8,
+ 119u8, 108u8, 115u8, 119u8, 110u8, 105u8, 119u8, 111u8, 0u8, 119u8, 115u8,
+ 103u8, 119u8, 116u8, 109u8, 119u8, 117u8, 117u8, 120u8, 97u8, 118u8, 120u8,
+ 99u8, 111u8, 120u8, 99u8, 114u8, 120u8, 104u8, 0u8, 120u8, 108u8, 99u8,
+ 120u8, 108u8, 100u8, 120u8, 109u8, 102u8, 120u8, 109u8, 110u8, 120u8,
+ 109u8, 114u8, 120u8, 110u8, 97u8, 120u8, 110u8, 114u8, 120u8, 111u8, 103u8,
+ 120u8, 112u8, 114u8, 120u8, 115u8, 97u8, 120u8, 115u8, 114u8, 121u8, 97u8,
+ 111u8, 121u8, 97u8, 112u8, 121u8, 97u8, 118u8, 121u8, 98u8, 98u8, 121u8,
+ 105u8, 0u8, 121u8, 111u8, 0u8, 121u8, 114u8, 108u8, 121u8, 117u8, 97u8,
+ 121u8, 117u8, 101u8, 122u8, 97u8, 0u8, 122u8, 97u8, 103u8, 122u8, 100u8,
+ 106u8, 122u8, 101u8, 97u8, 122u8, 103u8, 104u8, 122u8, 104u8, 0u8, 122u8,
+ 104u8, 120u8, 122u8, 107u8, 116u8, 122u8, 108u8, 109u8, 122u8, 109u8,
+ 105u8, 122u8, 117u8, 0u8, 122u8, 122u8, 97u8,
+ ])
+ },
+ unsafe {
+ ::zerovec::ZeroVec::from_bytes_unchecked(&[
+ 69u8, 84u8, 0u8, 71u8, 69u8, 0u8, 71u8, 72u8, 0u8, 73u8, 68u8, 0u8, 85u8,
+ 71u8, 0u8, 71u8, 72u8, 0u8, 66u8, 84u8, 0u8, 82u8, 85u8, 0u8, 73u8, 82u8,
+ 0u8, 84u8, 78u8, 0u8, 90u8, 65u8, 0u8, 67u8, 77u8, 0u8, 73u8, 78u8, 0u8,
+ 84u8, 78u8, 0u8, 71u8, 72u8, 0u8, 73u8, 81u8, 0u8, 88u8, 75u8, 0u8, 82u8,
+ 85u8, 0u8, 69u8, 84u8, 0u8, 78u8, 71u8, 0u8, 69u8, 83u8, 0u8, 78u8, 71u8,
+ 0u8, 73u8, 68u8, 0u8, 84u8, 71u8, 0u8, 69u8, 71u8, 0u8, 73u8, 82u8, 0u8,
+ 67u8, 76u8, 0u8, 66u8, 79u8, 0u8, 68u8, 90u8, 0u8, 83u8, 65u8, 0u8, 77u8,
+ 65u8, 0u8, 69u8, 71u8, 0u8, 73u8, 78u8, 0u8, 84u8, 90u8, 0u8, 85u8, 83u8,
+ 0u8, 69u8, 83u8, 0u8, 67u8, 65u8, 0u8, 82u8, 85u8, 0u8, 73u8, 78u8, 0u8,
+ 66u8, 79u8, 0u8, 65u8, 90u8, 0u8, 82u8, 85u8, 0u8, 80u8, 75u8, 0u8, 73u8,
+ 68u8, 0u8, 78u8, 80u8, 0u8, 65u8, 84u8, 0u8, 67u8, 77u8, 0u8, 67u8, 77u8,
+ 0u8, 73u8, 68u8, 0u8, 67u8, 77u8, 0u8, 67u8, 73u8, 0u8, 66u8, 89u8, 0u8,
+ 83u8, 68u8, 0u8, 90u8, 77u8, 0u8, 73u8, 68u8, 0u8, 84u8, 90u8, 0u8, 67u8,
+ 77u8, 0u8, 73u8, 78u8, 0u8, 80u8, 75u8, 0u8, 73u8, 78u8, 0u8, 66u8, 71u8,
+ 0u8, 73u8, 78u8, 0u8, 80u8, 75u8, 0u8, 84u8, 82u8, 0u8, 73u8, 78u8, 0u8,
+ 73u8, 78u8, 0u8, 73u8, 78u8, 0u8, 86u8, 85u8, 0u8, 80u8, 72u8, 0u8, 78u8,
+ 71u8, 0u8, 73u8, 78u8, 0u8, 73u8, 68u8, 0u8, 83u8, 78u8, 0u8, 67u8, 77u8,
+ 0u8, 80u8, 72u8, 0u8, 67u8, 65u8, 0u8, 77u8, 89u8, 0u8, 86u8, 78u8, 0u8,
+ 77u8, 76u8, 0u8, 77u8, 76u8, 0u8, 66u8, 68u8, 0u8, 67u8, 78u8, 0u8, 73u8,
+ 78u8, 0u8, 73u8, 82u8, 0u8, 67u8, 73u8, 0u8, 70u8, 82u8, 0u8, 73u8, 78u8,
+ 0u8, 80u8, 75u8, 0u8, 73u8, 78u8, 0u8, 66u8, 65u8, 0u8, 76u8, 82u8, 0u8,
+ 67u8, 77u8, 0u8, 80u8, 72u8, 0u8, 80u8, 75u8, 0u8, 82u8, 85u8, 0u8, 89u8,
+ 84u8, 0u8, 73u8, 68u8, 0u8, 67u8, 77u8, 0u8, 71u8, 81u8, 0u8, 69u8, 82u8,
+ 0u8, 67u8, 77u8, 0u8, 77u8, 76u8, 0u8, 69u8, 83u8, 0u8, 85u8, 83u8, 0u8,
+ 78u8, 71u8, 0u8, 66u8, 68u8, 0u8, 82u8, 85u8, 0u8, 80u8, 72u8, 0u8, 85u8,
+ 71u8, 0u8, 71u8, 85u8, 0u8, 70u8, 77u8, 0u8, 82u8, 85u8, 0u8, 85u8, 83u8,
+ 0u8, 67u8, 65u8, 0u8, 85u8, 83u8, 0u8, 85u8, 83u8, 0u8, 75u8, 72u8, 0u8,
+ 86u8, 78u8, 0u8, 73u8, 81u8, 0u8, 67u8, 65u8, 0u8, 77u8, 78u8, 0u8, 70u8,
+ 82u8, 0u8, 69u8, 71u8, 0u8, 80u8, 72u8, 0u8, 67u8, 65u8, 0u8, 67u8, 65u8,
+ 0u8, 85u8, 65u8, 0u8, 67u8, 65u8, 0u8, 67u8, 65u8, 0u8, 83u8, 67u8, 0u8,
+ 67u8, 90u8, 0u8, 80u8, 76u8, 0u8, 67u8, 65u8, 0u8, 77u8, 77u8, 0u8, 82u8,
+ 85u8, 0u8, 82u8, 85u8, 0u8, 71u8, 66u8, 0u8, 68u8, 75u8, 0u8, 67u8, 73u8,
+ 0u8, 85u8, 83u8, 0u8, 82u8, 85u8, 0u8, 75u8, 69u8, 0u8, 73u8, 78u8, 0u8,
+ 68u8, 69u8, 0u8, 67u8, 65u8, 0u8, 67u8, 65u8, 0u8, 78u8, 69u8, 0u8, 78u8,
+ 71u8, 0u8, 67u8, 73u8, 0u8, 73u8, 78u8, 0u8, 67u8, 78u8, 0u8, 68u8, 69u8,
+ 0u8, 77u8, 76u8, 0u8, 77u8, 89u8, 0u8, 78u8, 80u8, 0u8, 67u8, 77u8, 0u8,
+ 77u8, 86u8, 0u8, 83u8, 78u8, 0u8, 66u8, 70u8, 0u8, 66u8, 84u8, 0u8, 75u8,
+ 69u8, 0u8, 71u8, 72u8, 0u8, 78u8, 71u8, 0u8, 73u8, 84u8, 0u8, 69u8, 71u8,
+ 0u8, 77u8, 77u8, 0u8, 71u8, 82u8, 0u8, 85u8, 83u8, 0u8, 48u8, 48u8, 49u8,
+ 69u8, 83u8, 0u8, 73u8, 78u8, 0u8, 85u8, 83u8, 0u8, 69u8, 69u8, 0u8, 73u8,
+ 84u8, 0u8, 69u8, 83u8, 0u8, 67u8, 77u8, 0u8, 69u8, 83u8, 0u8, 73u8, 82u8,
+ 0u8, 71u8, 81u8, 0u8, 83u8, 78u8, 0u8, 77u8, 76u8, 0u8, 70u8, 73u8, 0u8,
+ 83u8, 68u8, 0u8, 80u8, 72u8, 0u8, 83u8, 69u8, 0u8, 70u8, 74u8, 0u8, 70u8,
+ 79u8, 0u8, 66u8, 74u8, 0u8, 70u8, 82u8, 0u8, 85u8, 83u8, 0u8, 70u8, 82u8,
+ 0u8, 68u8, 69u8, 0u8, 68u8, 69u8, 0u8, 67u8, 77u8, 0u8, 87u8, 70u8, 0u8,
+ 71u8, 78u8, 0u8, 78u8, 69u8, 0u8, 73u8, 84u8, 0u8, 78u8, 71u8, 0u8, 83u8,
+ 68u8, 0u8, 78u8, 76u8, 0u8, 73u8, 69u8, 0u8, 71u8, 72u8, 0u8, 77u8, 68u8,
+ 0u8, 67u8, 78u8, 0u8, 73u8, 68u8, 0u8, 73u8, 78u8, 0u8, 73u8, 82u8, 0u8,
+ 71u8, 70u8, 0u8, 71u8, 66u8, 0u8, 69u8, 84u8, 0u8, 78u8, 80u8, 0u8, 75u8,
+ 73u8, 0u8, 80u8, 75u8, 0u8, 80u8, 75u8, 0u8, 69u8, 83u8, 0u8, 73u8, 82u8,
+ 0u8, 80u8, 89u8, 0u8, 73u8, 78u8, 0u8, 73u8, 78u8, 0u8, 73u8, 68u8, 0u8,
+ 78u8, 76u8, 0u8, 85u8, 65u8, 0u8, 67u8, 89u8, 0u8, 73u8, 78u8, 0u8, 67u8,
+ 72u8, 0u8, 73u8, 78u8, 0u8, 66u8, 82u8, 0u8, 67u8, 79u8, 0u8, 71u8, 72u8,
+ 0u8, 75u8, 69u8, 0u8, 73u8, 77u8, 0u8, 78u8, 80u8, 0u8, 67u8, 65u8, 0u8,
+ 78u8, 71u8, 0u8, 67u8, 78u8, 0u8, 85u8, 83u8, 0u8, 65u8, 70u8, 0u8, 73u8,
+ 76u8, 0u8, 73u8, 78u8, 0u8, 70u8, 74u8, 0u8, 80u8, 72u8, 0u8, 84u8, 82u8,
+ 0u8, 67u8, 78u8, 0u8, 80u8, 75u8, 0u8, 73u8, 78u8, 0u8, 85u8, 83u8, 0u8,
+ 80u8, 72u8, 0u8, 80u8, 75u8, 0u8, 80u8, 71u8, 0u8, 73u8, 78u8, 0u8, 73u8,
+ 78u8, 0u8, 72u8, 82u8, 0u8, 68u8, 69u8, 0u8, 67u8, 78u8, 0u8, 72u8, 84u8,
+ 0u8, 72u8, 85u8, 0u8, 67u8, 65u8, 0u8, 65u8, 77u8, 0u8, 78u8, 65u8, 0u8,
+ 48u8, 48u8, 49u8, 77u8, 89u8, 0u8, 78u8, 71u8, 0u8, 73u8, 68u8, 0u8, 84u8,
+ 71u8, 0u8, 78u8, 71u8, 0u8, 67u8, 78u8, 0u8, 85u8, 83u8, 0u8, 80u8, 72u8,
+ 0u8, 73u8, 68u8, 0u8, 82u8, 85u8, 0u8, 48u8, 48u8, 49u8, 73u8, 83u8, 0u8,
+ 73u8, 84u8, 0u8, 67u8, 65u8, 0u8, 73u8, 76u8, 0u8, 82u8, 85u8, 0u8, 74u8,
+ 80u8, 0u8, 74u8, 77u8, 0u8, 48u8, 48u8, 49u8, 67u8, 77u8, 0u8, 85u8, 65u8,
+ 0u8, 84u8, 90u8, 0u8, 78u8, 80u8, 0u8, 68u8, 75u8, 0u8, 73u8, 68u8, 0u8,
+ 73u8, 68u8, 0u8, 71u8, 69u8, 0u8, 85u8, 90u8, 0u8, 68u8, 90u8, 0u8, 77u8,
+ 77u8, 0u8, 78u8, 71u8, 0u8, 75u8, 69u8, 0u8, 77u8, 76u8, 0u8, 73u8, 68u8,
+ 0u8, 82u8, 85u8, 0u8, 78u8, 69u8, 0u8, 78u8, 71u8, 0u8, 90u8, 87u8, 0u8,
+ 84u8, 90u8, 0u8, 84u8, 71u8, 0u8, 84u8, 72u8, 0u8, 67u8, 86u8, 0u8, 67u8,
+ 77u8, 0u8, 67u8, 73u8, 0u8, 73u8, 78u8, 0u8, 73u8, 78u8, 0u8, 67u8, 68u8,
+ 0u8, 73u8, 68u8, 0u8, 66u8, 82u8, 0u8, 73u8, 78u8, 0u8, 67u8, 78u8, 0u8,
+ 73u8, 78u8, 0u8, 77u8, 76u8, 0u8, 73u8, 78u8, 0u8, 80u8, 75u8, 0u8, 75u8,
+ 69u8, 0u8, 84u8, 82u8, 0u8, 78u8, 65u8, 0u8, 76u8, 65u8, 0u8, 75u8, 90u8,
+ 0u8, 67u8, 77u8, 0u8, 71u8, 76u8, 0u8, 75u8, 69u8, 0u8, 75u8, 72u8, 0u8,
+ 65u8, 79u8, 0u8, 73u8, 78u8, 0u8, 71u8, 87u8, 0u8, 75u8, 82u8, 0u8, 82u8,
+ 85u8, 0u8, 73u8, 78u8, 0u8, 70u8, 77u8, 0u8, 76u8, 82u8, 0u8, 82u8, 85u8,
+ 0u8, 83u8, 76u8, 0u8, 80u8, 72u8, 0u8, 82u8, 85u8, 0u8, 73u8, 78u8, 0u8,
+ 73u8, 78u8, 0u8, 84u8, 90u8, 0u8, 67u8, 77u8, 0u8, 68u8, 69u8, 0u8, 77u8,
+ 89u8, 0u8, 84u8, 82u8, 0u8, 82u8, 85u8, 0u8, 82u8, 85u8, 0u8, 73u8, 68u8,
+ 0u8, 80u8, 75u8, 0u8, 71u8, 66u8, 0u8, 67u8, 65u8, 0u8, 73u8, 78u8, 0u8,
+ 84u8, 72u8, 0u8, 80u8, 75u8, 0u8, 75u8, 71u8, 0u8, 77u8, 89u8, 0u8, 77u8,
+ 89u8, 0u8, 86u8, 65u8, 0u8, 71u8, 82u8, 0u8, 73u8, 76u8, 0u8, 84u8, 90u8,
+ 0u8, 80u8, 75u8, 0u8, 85u8, 71u8, 0u8, 76u8, 85u8, 0u8, 82u8, 85u8, 0u8,
+ 73u8, 68u8, 0u8, 67u8, 78u8, 0u8, 73u8, 78u8, 0u8, 82u8, 85u8, 0u8, 85u8,
+ 71u8, 0u8, 78u8, 76u8, 0u8, 78u8, 80u8, 0u8, 73u8, 84u8, 0u8, 67u8, 65u8,
+ 0u8, 67u8, 78u8, 0u8, 73u8, 68u8, 0u8, 73u8, 82u8, 0u8, 85u8, 83u8, 0u8,
+ 73u8, 78u8, 0u8, 73u8, 84u8, 0u8, 67u8, 68u8, 0u8, 76u8, 65u8, 0u8, 67u8,
+ 68u8, 0u8, 90u8, 77u8, 0u8, 73u8, 82u8, 0u8, 76u8, 84u8, 0u8, 76u8, 86u8,
+ 0u8, 67u8, 68u8, 0u8, 67u8, 68u8, 0u8, 75u8, 69u8, 0u8, 75u8, 69u8, 0u8,
+ 73u8, 82u8, 0u8, 76u8, 86u8, 0u8, 84u8, 72u8, 0u8, 67u8, 78u8, 0u8, 84u8,
+ 82u8, 0u8, 73u8, 68u8, 0u8, 67u8, 77u8, 0u8, 73u8, 78u8, 0u8, 73u8, 78u8,
+ 0u8, 73u8, 68u8, 0u8, 71u8, 77u8, 0u8, 75u8, 69u8, 0u8, 77u8, 88u8, 0u8,
+ 82u8, 85u8, 0u8, 80u8, 72u8, 0u8, 73u8, 68u8, 0u8, 83u8, 76u8, 0u8, 75u8,
+ 69u8, 0u8, 84u8, 72u8, 0u8, 77u8, 85u8, 0u8, 77u8, 71u8, 0u8, 77u8, 90u8,
+ 0u8, 67u8, 77u8, 0u8, 78u8, 80u8, 0u8, 84u8, 90u8, 0u8, 77u8, 72u8, 0u8,
+ 78u8, 90u8, 0u8, 67u8, 65u8, 0u8, 73u8, 68u8, 0u8, 77u8, 75u8, 0u8, 73u8,
+ 78u8, 0u8, 83u8, 68u8, 0u8, 77u8, 78u8, 0u8, 73u8, 78u8, 0u8, 77u8, 77u8,
+ 0u8, 82u8, 79u8, 0u8, 67u8, 65u8, 0u8, 67u8, 65u8, 0u8, 66u8, 70u8, 0u8,
+ 73u8, 78u8, 0u8, 78u8, 80u8, 0u8, 82u8, 85u8, 0u8, 66u8, 68u8, 0u8, 77u8,
+ 89u8, 0u8, 77u8, 84u8, 0u8, 73u8, 78u8, 0u8, 67u8, 77u8, 0u8, 85u8, 83u8,
+ 0u8, 80u8, 75u8, 0u8, 77u8, 76u8, 0u8, 73u8, 78u8, 0u8, 73u8, 68u8, 0u8,
+ 85u8, 83u8, 0u8, 90u8, 87u8, 0u8, 77u8, 77u8, 0u8, 82u8, 85u8, 0u8, 85u8,
+ 71u8, 0u8, 73u8, 82u8, 0u8, 73u8, 82u8, 0u8, 78u8, 82u8, 0u8, 67u8, 78u8,
+ 0u8, 73u8, 84u8, 0u8, 78u8, 65u8, 0u8, 78u8, 79u8, 0u8, 77u8, 88u8, 0u8,
+ 90u8, 87u8, 0u8, 77u8, 90u8, 0u8, 68u8, 69u8, 0u8, 78u8, 80u8, 0u8, 78u8,
+ 80u8, 0u8, 78u8, 65u8, 0u8, 77u8, 90u8, 0u8, 77u8, 88u8, 0u8, 77u8, 88u8,
+ 0u8, 73u8, 68u8, 0u8, 78u8, 85u8, 0u8, 73u8, 78u8, 0u8, 78u8, 76u8, 0u8,
+ 67u8, 77u8, 0u8, 78u8, 79u8, 0u8, 67u8, 77u8, 0u8, 73u8, 78u8, 0u8, 78u8,
+ 79u8, 0u8, 84u8, 72u8, 0u8, 73u8, 78u8, 0u8, 83u8, 69u8, 0u8, 71u8, 78u8,
+ 0u8, 90u8, 65u8, 0u8, 67u8, 65u8, 0u8, 90u8, 65u8, 0u8, 73u8, 78u8, 0u8,
+ 83u8, 83u8, 0u8, 85u8, 83u8, 0u8, 67u8, 78u8, 0u8, 77u8, 87u8, 0u8, 84u8,
+ 90u8, 0u8, 85u8, 71u8, 0u8, 71u8, 72u8, 0u8, 70u8, 82u8, 0u8, 67u8, 65u8,
+ 0u8, 67u8, 65u8, 0u8, 67u8, 65u8, 0u8, 69u8, 84u8, 0u8, 73u8, 78u8, 0u8,
+ 71u8, 69u8, 0u8, 85u8, 83u8, 0u8, 77u8, 78u8, 0u8, 49u8, 52u8, 51u8, 73u8,
+ 78u8, 0u8, 80u8, 72u8, 0u8, 73u8, 82u8, 0u8, 80u8, 72u8, 0u8, 65u8, 87u8,
+ 0u8, 80u8, 87u8, 0u8, 70u8, 82u8, 0u8, 78u8, 71u8, 0u8, 85u8, 83u8, 0u8,
+ 67u8, 65u8, 0u8, 73u8, 82u8, 0u8, 68u8, 69u8, 0u8, 76u8, 66u8, 0u8, 83u8,
+ 66u8, 0u8, 73u8, 78u8, 0u8, 75u8, 69u8, 0u8, 80u8, 76u8, 0u8, 73u8, 84u8,
+ 0u8, 71u8, 82u8, 0u8, 70u8, 77u8, 0u8, 73u8, 78u8, 0u8, 67u8, 65u8, 0u8,
+ 80u8, 75u8, 0u8, 73u8, 82u8, 0u8, 48u8, 48u8, 49u8, 65u8, 70u8, 0u8, 66u8,
+ 82u8, 0u8, 71u8, 65u8, 0u8, 80u8, 69u8, 0u8, 71u8, 84u8, 0u8, 69u8, 67u8,
+ 0u8, 73u8, 78u8, 0u8, 82u8, 69u8, 0u8, 73u8, 68u8, 0u8, 73u8, 84u8, 0u8,
+ 77u8, 77u8, 0u8, 73u8, 78u8, 0u8, 77u8, 65u8, 0u8, 78u8, 80u8, 0u8, 66u8,
+ 68u8, 0u8, 67u8, 72u8, 0u8, 70u8, 73u8, 0u8, 67u8, 72u8, 0u8, 73u8, 82u8,
+ 0u8, 83u8, 69u8, 0u8, 66u8, 73u8, 0u8, 77u8, 90u8, 0u8, 82u8, 79u8, 0u8,
+ 73u8, 68u8, 0u8, 84u8, 90u8, 0u8, 70u8, 74u8, 0u8, 82u8, 85u8, 0u8, 85u8,
+ 65u8, 0u8, 83u8, 66u8, 0u8, 82u8, 87u8, 0u8, 84u8, 90u8, 0u8, 74u8, 80u8,
+ 0u8, 73u8, 78u8, 0u8, 71u8, 72u8, 0u8, 82u8, 85u8, 0u8, 75u8, 69u8, 0u8,
+ 73u8, 68u8, 0u8, 73u8, 78u8, 0u8, 83u8, 78u8, 0u8, 73u8, 78u8, 0u8, 84u8,
+ 90u8, 0u8, 73u8, 84u8, 0u8, 73u8, 78u8, 0u8, 73u8, 84u8, 0u8, 71u8, 66u8,
+ 0u8, 80u8, 75u8, 0u8, 73u8, 84u8, 0u8, 73u8, 82u8, 0u8, 78u8, 79u8, 0u8,
+ 67u8, 73u8, 0u8, 77u8, 90u8, 0u8, 77u8, 88u8, 0u8, 77u8, 76u8, 0u8, 67u8,
+ 70u8, 0u8, 73u8, 69u8, 0u8, 76u8, 84u8, 0u8, 77u8, 65u8, 0u8, 77u8, 77u8,
+ 0u8, 76u8, 75u8, 0u8, 69u8, 84u8, 0u8, 83u8, 75u8, 0u8, 80u8, 75u8, 0u8,
+ 83u8, 73u8, 0u8, 80u8, 76u8, 0u8, 73u8, 68u8, 0u8, 87u8, 83u8, 0u8, 83u8,
+ 69u8, 0u8, 65u8, 79u8, 0u8, 83u8, 69u8, 0u8, 70u8, 73u8, 0u8, 73u8, 76u8,
+ 0u8, 70u8, 73u8, 0u8, 90u8, 87u8, 0u8, 77u8, 89u8, 0u8, 77u8, 76u8, 0u8,
+ 83u8, 79u8, 0u8, 85u8, 90u8, 0u8, 84u8, 72u8, 0u8, 65u8, 76u8, 0u8, 82u8,
+ 83u8, 0u8, 73u8, 78u8, 0u8, 83u8, 82u8, 0u8, 83u8, 78u8, 0u8, 73u8, 78u8,
+ 0u8, 90u8, 65u8, 0u8, 69u8, 82u8, 0u8, 90u8, 65u8, 0u8, 68u8, 69u8, 0u8,
+ 73u8, 68u8, 0u8, 84u8, 90u8, 0u8, 71u8, 78u8, 0u8, 83u8, 69u8, 0u8, 84u8,
+ 90u8, 0u8, 89u8, 84u8, 0u8, 67u8, 68u8, 0u8, 68u8, 69u8, 0u8, 73u8, 78u8,
+ 0u8, 73u8, 68u8, 0u8, 66u8, 68u8, 0u8, 73u8, 81u8, 0u8, 80u8, 76u8, 0u8,
+ 73u8, 78u8, 0u8, 78u8, 80u8, 0u8, 80u8, 72u8, 0u8, 73u8, 78u8, 0u8, 67u8,
+ 78u8, 0u8, 78u8, 80u8, 0u8, 78u8, 80u8, 0u8, 77u8, 89u8, 0u8, 73u8, 78u8,
+ 0u8, 83u8, 76u8, 0u8, 85u8, 71u8, 0u8, 84u8, 76u8, 0u8, 84u8, 74u8, 0u8,
+ 84u8, 72u8, 0u8, 78u8, 80u8, 0u8, 78u8, 80u8, 0u8, 78u8, 80u8, 0u8, 69u8,
+ 84u8, 0u8, 69u8, 82u8, 0u8, 78u8, 71u8, 0u8, 84u8, 77u8, 0u8, 84u8, 75u8,
+ 0u8, 65u8, 90u8, 0u8, 78u8, 80u8, 0u8, 80u8, 72u8, 0u8, 65u8, 90u8, 0u8,
+ 78u8, 69u8, 0u8, 90u8, 65u8, 0u8, 84u8, 79u8, 0u8, 77u8, 87u8, 0u8, 48u8,
+ 48u8, 49u8, 80u8, 71u8, 0u8, 84u8, 82u8, 0u8, 84u8, 82u8, 0u8, 84u8, 87u8,
+ 0u8, 80u8, 75u8, 0u8, 90u8, 65u8, 0u8, 71u8, 82u8, 0u8, 78u8, 80u8, 0u8,
+ 80u8, 72u8, 0u8, 66u8, 84u8, 0u8, 82u8, 85u8, 0u8, 85u8, 71u8, 0u8, 84u8,
+ 72u8, 0u8, 65u8, 90u8, 0u8, 77u8, 87u8, 0u8, 84u8, 86u8, 0u8, 78u8, 69u8,
+ 0u8, 67u8, 78u8, 0u8, 73u8, 78u8, 0u8, 80u8, 70u8, 0u8, 82u8, 85u8, 0u8,
+ 77u8, 65u8, 0u8, 82u8, 85u8, 0u8, 82u8, 85u8, 0u8, 67u8, 78u8, 0u8, 83u8,
+ 89u8, 0u8, 85u8, 65u8, 0u8, 70u8, 77u8, 0u8, 65u8, 79u8, 0u8, 73u8, 78u8,
+ 0u8, 73u8, 78u8, 0u8, 80u8, 75u8, 0u8, 85u8, 90u8, 0u8, 76u8, 82u8, 0u8,
+ 90u8, 65u8, 0u8, 73u8, 84u8, 0u8, 82u8, 85u8, 0u8, 86u8, 78u8, 0u8, 83u8,
+ 88u8, 0u8, 66u8, 69u8, 0u8, 68u8, 69u8, 0u8, 77u8, 90u8, 0u8, 48u8, 48u8,
+ 49u8, 82u8, 85u8, 0u8, 69u8, 69u8, 0u8, 84u8, 90u8, 0u8, 66u8, 69u8, 0u8,
+ 67u8, 72u8, 0u8, 69u8, 84u8, 0u8, 80u8, 72u8, 0u8, 65u8, 85u8, 0u8, 73u8,
+ 78u8, 0u8, 73u8, 78u8, 0u8, 87u8, 70u8, 0u8, 75u8, 77u8, 0u8, 83u8, 78u8,
+ 0u8, 73u8, 78u8, 0u8, 73u8, 78u8, 0u8, 67u8, 78u8, 0u8, 66u8, 82u8, 0u8,
+ 85u8, 90u8, 0u8, 84u8, 82u8, 0u8, 90u8, 65u8, 0u8, 84u8, 82u8, 0u8, 84u8,
+ 82u8, 0u8, 71u8, 69u8, 0u8, 67u8, 78u8, 0u8, 83u8, 68u8, 0u8, 83u8, 65u8,
+ 0u8, 73u8, 78u8, 0u8, 85u8, 71u8, 0u8, 73u8, 82u8, 0u8, 89u8, 69u8, 0u8,
+ 78u8, 80u8, 0u8, 77u8, 90u8, 0u8, 70u8, 77u8, 0u8, 67u8, 77u8, 0u8, 67u8,
+ 77u8, 0u8, 48u8, 48u8, 49u8, 78u8, 71u8, 0u8, 66u8, 82u8, 0u8, 77u8, 88u8,
+ 0u8, 72u8, 75u8, 0u8, 67u8, 78u8, 0u8, 83u8, 68u8, 0u8, 75u8, 77u8, 0u8,
+ 78u8, 76u8, 0u8, 77u8, 65u8, 0u8, 67u8, 78u8, 0u8, 67u8, 78u8, 0u8, 67u8,
+ 78u8, 0u8, 84u8, 71u8, 0u8, 77u8, 89u8, 0u8, 90u8, 65u8, 0u8, 84u8, 82u8,
+ 0u8,
+ ])
+ },
+ )
+ },
+ ls2r: unsafe {
+ #[allow(unused_unsafe)]
+ ::zerovec::ZeroMap2d::from_parts_unchecked(
+ unsafe {
+ ::zerovec::ZeroVec::from_bytes_unchecked(&[
+ 97u8, 114u8, 99u8, 97u8, 122u8, 0u8, 99u8, 117u8, 0u8, 101u8, 110u8, 0u8,
+ 102u8, 102u8, 0u8, 103u8, 114u8, 99u8, 107u8, 107u8, 0u8, 107u8, 117u8,
+ 0u8, 107u8, 121u8, 0u8, 108u8, 105u8, 102u8, 109u8, 97u8, 110u8, 109u8,
+ 110u8, 0u8, 112u8, 97u8, 0u8, 112u8, 97u8, 108u8, 115u8, 100u8, 0u8, 116u8,
+ 103u8, 0u8, 117u8, 103u8, 0u8, 117u8, 110u8, 114u8, 117u8, 122u8, 0u8,
+ 121u8, 117u8, 101u8, 122u8, 104u8, 0u8,
+ ])
+ },
+ unsafe {
+ ::zerovec::ZeroVec::from_bytes_unchecked(&[
+ 2u8, 0u8, 0u8, 0u8, 3u8, 0u8, 0u8, 0u8, 4u8, 0u8, 0u8, 0u8, 5u8, 0u8, 0u8,
+ 0u8, 6u8, 0u8, 0u8, 0u8, 7u8, 0u8, 0u8, 0u8, 8u8, 0u8, 0u8, 0u8, 10u8, 0u8,
+ 0u8, 0u8, 12u8, 0u8, 0u8, 0u8, 13u8, 0u8, 0u8, 0u8, 14u8, 0u8, 0u8, 0u8,
+ 15u8, 0u8, 0u8, 0u8, 16u8, 0u8, 0u8, 0u8, 17u8, 0u8, 0u8, 0u8, 20u8, 0u8,
+ 0u8, 0u8, 21u8, 0u8, 0u8, 0u8, 22u8, 0u8, 0u8, 0u8, 23u8, 0u8, 0u8, 0u8,
+ 24u8, 0u8, 0u8, 0u8, 25u8, 0u8, 0u8, 0u8, 28u8, 0u8, 0u8, 0u8,
+ ])
+ },
+ unsafe {
+ ::zerovec::ZeroVec::from_bytes_unchecked(&[
+ 78u8, 98u8, 97u8, 116u8, 80u8, 97u8, 108u8, 109u8, 65u8, 114u8, 97u8, 98u8,
+ 71u8, 108u8, 97u8, 103u8, 83u8, 104u8, 97u8, 119u8, 65u8, 100u8, 108u8,
+ 109u8, 76u8, 105u8, 110u8, 98u8, 65u8, 114u8, 97u8, 98u8, 65u8, 114u8,
+ 97u8, 98u8, 89u8, 101u8, 122u8, 105u8, 65u8, 114u8, 97u8, 98u8, 76u8, 97u8,
+ 116u8, 110u8, 76u8, 105u8, 109u8, 98u8, 78u8, 107u8, 111u8, 111u8, 77u8,
+ 111u8, 110u8, 103u8, 65u8, 114u8, 97u8, 98u8, 80u8, 104u8, 108u8, 112u8,
+ 68u8, 101u8, 118u8, 97u8, 75u8, 104u8, 111u8, 106u8, 83u8, 105u8, 110u8,
+ 100u8, 65u8, 114u8, 97u8, 98u8, 67u8, 121u8, 114u8, 108u8, 68u8, 101u8,
+ 118u8, 97u8, 65u8, 114u8, 97u8, 98u8, 72u8, 97u8, 110u8, 115u8, 66u8,
+ 111u8, 112u8, 111u8, 72u8, 97u8, 110u8, 98u8, 72u8, 97u8, 110u8, 116u8,
+ ])
+ },
+ unsafe {
+ ::zerovec::ZeroVec::from_bytes_unchecked(&[
+ 74u8, 79u8, 0u8, 83u8, 89u8, 0u8, 73u8, 82u8, 0u8, 66u8, 71u8, 0u8, 71u8,
+ 66u8, 0u8, 71u8, 78u8, 0u8, 71u8, 82u8, 0u8, 67u8, 78u8, 0u8, 73u8, 81u8,
+ 0u8, 71u8, 69u8, 0u8, 67u8, 78u8, 0u8, 84u8, 82u8, 0u8, 73u8, 78u8, 0u8,
+ 71u8, 78u8, 0u8, 67u8, 78u8, 0u8, 80u8, 75u8, 0u8, 67u8, 78u8, 0u8, 73u8,
+ 78u8, 0u8, 73u8, 78u8, 0u8, 73u8, 78u8, 0u8, 80u8, 75u8, 0u8, 75u8, 90u8,
+ 0u8, 78u8, 80u8, 0u8, 65u8, 70u8, 0u8, 67u8, 78u8, 0u8, 84u8, 87u8, 0u8,
+ 84u8, 87u8, 0u8, 84u8, 87u8, 0u8,
+ ])
+ },
+ )
+ },
+ };
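[Note, not part of the diff] The `l2s` table above is a ZeroMap backed by two parallel byte buffers: each key appears to be a language code packed into 3 NUL-padded ASCII bytes and each value a 4-byte script code, so the first pair decodes to "ab" → "Cyrl". A hypothetical stand-alone decoder for that fixed-width layout (illustration only, not how zerovec is actually driven at runtime):

    // Hypothetical helper: 3 NUL-padded bytes per language key, 4 bytes per script value.
    fn decode_l2s(keys: &[u8], values: &[u8]) -> Vec<(String, String)> {
        keys.chunks_exact(3)
            .zip(values.chunks_exact(4))
            .map(|(k, v)| {
                let lang = std::str::from_utf8(k).unwrap().trim_end_matches('\0').to_owned();
                let script = std::str::from_utf8(v).unwrap().to_owned();
                (lang, script)
            })
            .collect()
    }

    // decode_l2s(&[97, 98, 0, 97, 98, 113], &[67, 121, 114, 108, 67, 121, 114, 108])
    // yields [("ab", "Cyrl"), ("abq", "Cyrl")], matching the start of the buffers above.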
diff --git a/compiler/rustc_baked_icu_data/src/data/fallback/mod.rs b/compiler/rustc_baked_icu_data/src/data/fallback/mod.rs
new file mode 100644
index 000000000..a485a5af6
--- /dev/null
+++ b/compiler/rustc_baked_icu_data/src/data/fallback/mod.rs
@@ -0,0 +1,4 @@
+// @generated
+pub mod likelysubtags_v1;
+pub mod parents_v1;
+pub mod supplement;
diff --git a/compiler/rustc_baked_icu_data/src/data/fallback/parents_v1.rs b/compiler/rustc_baked_icu_data/src/data/fallback/parents_v1.rs
new file mode 100644
index 000000000..f07b4b806
--- /dev/null
+++ b/compiler/rustc_baked_icu_data/src/data/fallback/parents_v1.rs
@@ -0,0 +1,207 @@
+// @generated
+type DataStruct = < :: icu_provider_adapters :: fallback :: provider :: LocaleFallbackParentsV1Marker as :: icu_provider :: DataMarker > :: Yokeable ;
+pub static DATA: litemap::LiteMap<&str, &DataStruct, &[(&str, &DataStruct)]> =
+ litemap::LiteMap::from_sorted_store_unchecked(&[("und", UND)]);
+static UND: &DataStruct = &::icu_provider_adapters::fallback::provider::LocaleFallbackParentsV1 {
+ parents: unsafe {
+ #[allow(unused_unsafe)]
+ ::zerovec::ZeroMap::from_parts_unchecked(
+ unsafe {
+ ::zerovec::VarZeroVec::from_bytes_unchecked(&[
+ 131u8, 0u8, 0u8, 0u8, 0u8, 0u8, 6u8, 0u8, 11u8, 0u8, 16u8, 0u8, 21u8, 0u8,
+ 26u8, 0u8, 31u8, 0u8, 36u8, 0u8, 41u8, 0u8, 46u8, 0u8, 51u8, 0u8, 56u8, 0u8,
+ 61u8, 0u8, 66u8, 0u8, 71u8, 0u8, 76u8, 0u8, 81u8, 0u8, 86u8, 0u8, 91u8, 0u8,
+ 96u8, 0u8, 101u8, 0u8, 106u8, 0u8, 111u8, 0u8, 116u8, 0u8, 121u8, 0u8, 126u8,
+ 0u8, 131u8, 0u8, 136u8, 0u8, 141u8, 0u8, 146u8, 0u8, 151u8, 0u8, 156u8, 0u8,
+ 161u8, 0u8, 166u8, 0u8, 171u8, 0u8, 176u8, 0u8, 181u8, 0u8, 186u8, 0u8, 191u8,
+ 0u8, 196u8, 0u8, 201u8, 0u8, 206u8, 0u8, 211u8, 0u8, 216u8, 0u8, 221u8, 0u8,
+ 226u8, 0u8, 231u8, 0u8, 236u8, 0u8, 241u8, 0u8, 246u8, 0u8, 251u8, 0u8, 0u8,
+ 1u8, 5u8, 1u8, 10u8, 1u8, 15u8, 1u8, 20u8, 1u8, 25u8, 1u8, 30u8, 1u8, 35u8,
+ 1u8, 40u8, 1u8, 45u8, 1u8, 50u8, 1u8, 55u8, 1u8, 60u8, 1u8, 65u8, 1u8, 70u8,
+ 1u8, 75u8, 1u8, 80u8, 1u8, 85u8, 1u8, 90u8, 1u8, 95u8, 1u8, 100u8, 1u8, 105u8,
+ 1u8, 110u8, 1u8, 115u8, 1u8, 120u8, 1u8, 125u8, 1u8, 130u8, 1u8, 135u8, 1u8,
+ 140u8, 1u8, 145u8, 1u8, 150u8, 1u8, 155u8, 1u8, 160u8, 1u8, 165u8, 1u8, 170u8,
+ 1u8, 175u8, 1u8, 180u8, 1u8, 185u8, 1u8, 190u8, 1u8, 195u8, 1u8, 200u8, 1u8,
+ 205u8, 1u8, 210u8, 1u8, 215u8, 1u8, 220u8, 1u8, 225u8, 1u8, 230u8, 1u8, 235u8,
+ 1u8, 240u8, 1u8, 245u8, 1u8, 250u8, 1u8, 255u8, 1u8, 4u8, 2u8, 9u8, 2u8, 14u8,
+ 2u8, 19u8, 2u8, 24u8, 2u8, 29u8, 2u8, 34u8, 2u8, 39u8, 2u8, 44u8, 2u8, 49u8,
+ 2u8, 54u8, 2u8, 59u8, 2u8, 64u8, 2u8, 71u8, 2u8, 73u8, 2u8, 75u8, 2u8, 77u8,
+ 2u8, 82u8, 2u8, 87u8, 2u8, 92u8, 2u8, 97u8, 2u8, 102u8, 2u8, 107u8, 2u8, 112u8,
+ 2u8, 117u8, 2u8, 122u8, 2u8, 127u8, 2u8, 132u8, 2u8, 101u8, 110u8, 45u8, 49u8,
+ 53u8, 48u8, 101u8, 110u8, 45u8, 65u8, 71u8, 101u8, 110u8, 45u8, 65u8, 73u8,
+ 101u8, 110u8, 45u8, 65u8, 84u8, 101u8, 110u8, 45u8, 65u8, 85u8, 101u8, 110u8,
+ 45u8, 66u8, 66u8, 101u8, 110u8, 45u8, 66u8, 69u8, 101u8, 110u8, 45u8, 66u8,
+ 77u8, 101u8, 110u8, 45u8, 66u8, 83u8, 101u8, 110u8, 45u8, 66u8, 87u8, 101u8,
+ 110u8, 45u8, 66u8, 90u8, 101u8, 110u8, 45u8, 67u8, 67u8, 101u8, 110u8, 45u8,
+ 67u8, 72u8, 101u8, 110u8, 45u8, 67u8, 75u8, 101u8, 110u8, 45u8, 67u8, 77u8,
+ 101u8, 110u8, 45u8, 67u8, 88u8, 101u8, 110u8, 45u8, 67u8, 89u8, 101u8, 110u8,
+ 45u8, 68u8, 69u8, 101u8, 110u8, 45u8, 68u8, 71u8, 101u8, 110u8, 45u8, 68u8,
+ 75u8, 101u8, 110u8, 45u8, 68u8, 77u8, 101u8, 110u8, 45u8, 69u8, 82u8, 101u8,
+ 110u8, 45u8, 70u8, 73u8, 101u8, 110u8, 45u8, 70u8, 74u8, 101u8, 110u8, 45u8,
+ 70u8, 75u8, 101u8, 110u8, 45u8, 70u8, 77u8, 101u8, 110u8, 45u8, 71u8, 66u8,
+ 101u8, 110u8, 45u8, 71u8, 68u8, 101u8, 110u8, 45u8, 71u8, 71u8, 101u8, 110u8,
+ 45u8, 71u8, 72u8, 101u8, 110u8, 45u8, 71u8, 73u8, 101u8, 110u8, 45u8, 71u8,
+ 77u8, 101u8, 110u8, 45u8, 71u8, 89u8, 101u8, 110u8, 45u8, 72u8, 75u8, 101u8,
+ 110u8, 45u8, 73u8, 69u8, 101u8, 110u8, 45u8, 73u8, 76u8, 101u8, 110u8, 45u8,
+ 73u8, 77u8, 101u8, 110u8, 45u8, 73u8, 78u8, 101u8, 110u8, 45u8, 73u8, 79u8,
+ 101u8, 110u8, 45u8, 74u8, 69u8, 101u8, 110u8, 45u8, 74u8, 77u8, 101u8, 110u8,
+ 45u8, 75u8, 69u8, 101u8, 110u8, 45u8, 75u8, 73u8, 101u8, 110u8, 45u8, 75u8,
+ 78u8, 101u8, 110u8, 45u8, 75u8, 89u8, 101u8, 110u8, 45u8, 76u8, 67u8, 101u8,
+ 110u8, 45u8, 76u8, 82u8, 101u8, 110u8, 45u8, 76u8, 83u8, 101u8, 110u8, 45u8,
+ 77u8, 71u8, 101u8, 110u8, 45u8, 77u8, 79u8, 101u8, 110u8, 45u8, 77u8, 83u8,
+ 101u8, 110u8, 45u8, 77u8, 84u8, 101u8, 110u8, 45u8, 77u8, 85u8, 101u8, 110u8,
+ 45u8, 77u8, 86u8, 101u8, 110u8, 45u8, 77u8, 87u8, 101u8, 110u8, 45u8, 77u8,
+ 89u8, 101u8, 110u8, 45u8, 78u8, 65u8, 101u8, 110u8, 45u8, 78u8, 70u8, 101u8,
+ 110u8, 45u8, 78u8, 71u8, 101u8, 110u8, 45u8, 78u8, 76u8, 101u8, 110u8, 45u8,
+ 78u8, 82u8, 101u8, 110u8, 45u8, 78u8, 85u8, 101u8, 110u8, 45u8, 78u8, 90u8,
+ 101u8, 110u8, 45u8, 80u8, 71u8, 101u8, 110u8, 45u8, 80u8, 75u8, 101u8, 110u8,
+ 45u8, 80u8, 78u8, 101u8, 110u8, 45u8, 80u8, 87u8, 101u8, 110u8, 45u8, 82u8,
+ 87u8, 101u8, 110u8, 45u8, 83u8, 66u8, 101u8, 110u8, 45u8, 83u8, 67u8, 101u8,
+ 110u8, 45u8, 83u8, 68u8, 101u8, 110u8, 45u8, 83u8, 69u8, 101u8, 110u8, 45u8,
+ 83u8, 71u8, 101u8, 110u8, 45u8, 83u8, 72u8, 101u8, 110u8, 45u8, 83u8, 73u8,
+ 101u8, 110u8, 45u8, 83u8, 76u8, 101u8, 110u8, 45u8, 83u8, 83u8, 101u8, 110u8,
+ 45u8, 83u8, 88u8, 101u8, 110u8, 45u8, 83u8, 90u8, 101u8, 110u8, 45u8, 84u8,
+ 67u8, 101u8, 110u8, 45u8, 84u8, 75u8, 101u8, 110u8, 45u8, 84u8, 79u8, 101u8,
+ 110u8, 45u8, 84u8, 84u8, 101u8, 110u8, 45u8, 84u8, 86u8, 101u8, 110u8, 45u8,
+ 84u8, 90u8, 101u8, 110u8, 45u8, 85u8, 71u8, 101u8, 110u8, 45u8, 86u8, 67u8,
+ 101u8, 110u8, 45u8, 86u8, 71u8, 101u8, 110u8, 45u8, 86u8, 85u8, 101u8, 110u8,
+ 45u8, 87u8, 83u8, 101u8, 110u8, 45u8, 90u8, 65u8, 101u8, 110u8, 45u8, 90u8,
+ 77u8, 101u8, 110u8, 45u8, 90u8, 87u8, 101u8, 115u8, 45u8, 65u8, 82u8, 101u8,
+ 115u8, 45u8, 66u8, 79u8, 101u8, 115u8, 45u8, 66u8, 82u8, 101u8, 115u8, 45u8,
+ 66u8, 90u8, 101u8, 115u8, 45u8, 67u8, 76u8, 101u8, 115u8, 45u8, 67u8, 79u8,
+ 101u8, 115u8, 45u8, 67u8, 82u8, 101u8, 115u8, 45u8, 67u8, 85u8, 101u8, 115u8,
+ 45u8, 68u8, 79u8, 101u8, 115u8, 45u8, 69u8, 67u8, 101u8, 115u8, 45u8, 71u8,
+ 84u8, 101u8, 115u8, 45u8, 72u8, 78u8, 101u8, 115u8, 45u8, 77u8, 88u8, 101u8,
+ 115u8, 45u8, 78u8, 73u8, 101u8, 115u8, 45u8, 80u8, 65u8, 101u8, 115u8, 45u8,
+ 80u8, 69u8, 101u8, 115u8, 45u8, 80u8, 82u8, 101u8, 115u8, 45u8, 80u8, 89u8,
+ 101u8, 115u8, 45u8, 83u8, 86u8, 101u8, 115u8, 45u8, 85u8, 83u8, 101u8, 115u8,
+ 45u8, 85u8, 89u8, 101u8, 115u8, 45u8, 86u8, 69u8, 104u8, 105u8, 45u8, 76u8,
+ 97u8, 116u8, 110u8, 104u8, 116u8, 110u8, 98u8, 110u8, 110u8, 112u8, 116u8,
+ 45u8, 65u8, 79u8, 112u8, 116u8, 45u8, 67u8, 72u8, 112u8, 116u8, 45u8, 67u8,
+ 86u8, 112u8, 116u8, 45u8, 70u8, 82u8, 112u8, 116u8, 45u8, 71u8, 81u8, 112u8,
+ 116u8, 45u8, 71u8, 87u8, 112u8, 116u8, 45u8, 76u8, 85u8, 112u8, 116u8, 45u8,
+ 77u8, 79u8, 112u8, 116u8, 45u8, 77u8, 90u8, 112u8, 116u8, 45u8, 83u8, 84u8,
+ 112u8, 116u8, 45u8, 84u8, 76u8, 122u8, 104u8, 45u8, 72u8, 97u8, 110u8, 116u8,
+ 45u8, 77u8, 79u8,
+ ])
+ },
+ unsafe {
+ ::zerovec::ZeroVec::from_bytes_unchecked(&[
+ 101u8, 110u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 1u8, 48u8, 48u8, 49u8, 101u8,
+ 110u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 1u8, 48u8, 48u8, 49u8, 101u8, 110u8, 0u8,
+ 0u8, 0u8, 0u8, 0u8, 0u8, 1u8, 48u8, 48u8, 49u8, 101u8, 110u8, 0u8, 0u8, 0u8,
+ 0u8, 0u8, 0u8, 1u8, 49u8, 53u8, 48u8, 101u8, 110u8, 0u8, 0u8, 0u8, 0u8, 0u8,
+ 0u8, 1u8, 48u8, 48u8, 49u8, 101u8, 110u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 1u8,
+ 48u8, 48u8, 49u8, 101u8, 110u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 1u8, 49u8, 53u8,
+ 48u8, 101u8, 110u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 1u8, 48u8, 48u8, 49u8, 101u8,
+ 110u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 1u8, 48u8, 48u8, 49u8, 101u8, 110u8, 0u8,
+ 0u8, 0u8, 0u8, 0u8, 0u8, 1u8, 48u8, 48u8, 49u8, 101u8, 110u8, 0u8, 0u8, 0u8,
+ 0u8, 0u8, 0u8, 1u8, 48u8, 48u8, 49u8, 101u8, 110u8, 0u8, 0u8, 0u8, 0u8, 0u8,
+ 0u8, 1u8, 48u8, 48u8, 49u8, 101u8, 110u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 1u8,
+ 49u8, 53u8, 48u8, 101u8, 110u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 1u8, 48u8, 48u8,
+ 49u8, 101u8, 110u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 1u8, 48u8, 48u8, 49u8, 101u8,
+ 110u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 1u8, 48u8, 48u8, 49u8, 101u8, 110u8, 0u8,
+ 0u8, 0u8, 0u8, 0u8, 0u8, 1u8, 48u8, 48u8, 49u8, 101u8, 110u8, 0u8, 0u8, 0u8,
+ 0u8, 0u8, 0u8, 1u8, 49u8, 53u8, 48u8, 101u8, 110u8, 0u8, 0u8, 0u8, 0u8, 0u8,
+ 0u8, 1u8, 48u8, 48u8, 49u8, 101u8, 110u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 1u8,
+ 49u8, 53u8, 48u8, 101u8, 110u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 1u8, 48u8, 48u8,
+ 49u8, 101u8, 110u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 1u8, 48u8, 48u8, 49u8, 101u8,
+ 110u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 1u8, 49u8, 53u8, 48u8, 101u8, 110u8, 0u8,
+ 0u8, 0u8, 0u8, 0u8, 0u8, 1u8, 48u8, 48u8, 49u8, 101u8, 110u8, 0u8, 0u8, 0u8,
+ 0u8, 0u8, 0u8, 1u8, 48u8, 48u8, 49u8, 101u8, 110u8, 0u8, 0u8, 0u8, 0u8, 0u8,
+ 0u8, 1u8, 48u8, 48u8, 49u8, 101u8, 110u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 1u8,
+ 48u8, 48u8, 49u8, 101u8, 110u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 1u8, 48u8, 48u8,
+ 49u8, 101u8, 110u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 1u8, 48u8, 48u8, 49u8, 101u8,
+ 110u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 1u8, 48u8, 48u8, 49u8, 101u8, 110u8, 0u8,
+ 0u8, 0u8, 0u8, 0u8, 0u8, 1u8, 48u8, 48u8, 49u8, 101u8, 110u8, 0u8, 0u8, 0u8,
+ 0u8, 0u8, 0u8, 1u8, 48u8, 48u8, 49u8, 101u8, 110u8, 0u8, 0u8, 0u8, 0u8, 0u8,
+ 0u8, 1u8, 48u8, 48u8, 49u8, 101u8, 110u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 1u8,
+ 48u8, 48u8, 49u8, 101u8, 110u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 1u8, 48u8, 48u8,
+ 49u8, 101u8, 110u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 1u8, 48u8, 48u8, 49u8, 101u8,
+ 110u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 1u8, 48u8, 48u8, 49u8, 101u8, 110u8, 0u8,
+ 0u8, 0u8, 0u8, 0u8, 0u8, 1u8, 48u8, 48u8, 49u8, 101u8, 110u8, 0u8, 0u8, 0u8,
+ 0u8, 0u8, 0u8, 1u8, 48u8, 48u8, 49u8, 101u8, 110u8, 0u8, 0u8, 0u8, 0u8, 0u8,
+ 0u8, 1u8, 48u8, 48u8, 49u8, 101u8, 110u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 1u8,
+ 48u8, 48u8, 49u8, 101u8, 110u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 1u8, 48u8, 48u8,
+ 49u8, 101u8, 110u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 1u8, 48u8, 48u8, 49u8, 101u8,
+ 110u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 1u8, 48u8, 48u8, 49u8, 101u8, 110u8, 0u8,
+ 0u8, 0u8, 0u8, 0u8, 0u8, 1u8, 48u8, 48u8, 49u8, 101u8, 110u8, 0u8, 0u8, 0u8,
+ 0u8, 0u8, 0u8, 1u8, 48u8, 48u8, 49u8, 101u8, 110u8, 0u8, 0u8, 0u8, 0u8, 0u8,
+ 0u8, 1u8, 48u8, 48u8, 49u8, 101u8, 110u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 1u8,
+ 48u8, 48u8, 49u8, 101u8, 110u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 1u8, 48u8, 48u8,
+ 49u8, 101u8, 110u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 1u8, 48u8, 48u8, 49u8, 101u8,
+ 110u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 1u8, 48u8, 48u8, 49u8, 101u8, 110u8, 0u8,
+ 0u8, 0u8, 0u8, 0u8, 0u8, 1u8, 48u8, 48u8, 49u8, 101u8, 110u8, 0u8, 0u8, 0u8,
+ 0u8, 0u8, 0u8, 1u8, 48u8, 48u8, 49u8, 101u8, 110u8, 0u8, 0u8, 0u8, 0u8, 0u8,
+ 0u8, 1u8, 48u8, 48u8, 49u8, 101u8, 110u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 1u8,
+ 48u8, 48u8, 49u8, 101u8, 110u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 1u8, 48u8, 48u8,
+ 49u8, 101u8, 110u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 1u8, 48u8, 48u8, 49u8, 101u8,
+ 110u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 1u8, 48u8, 48u8, 49u8, 101u8, 110u8, 0u8,
+ 0u8, 0u8, 0u8, 0u8, 0u8, 1u8, 48u8, 48u8, 49u8, 101u8, 110u8, 0u8, 0u8, 0u8,
+ 0u8, 0u8, 0u8, 1u8, 49u8, 53u8, 48u8, 101u8, 110u8, 0u8, 0u8, 0u8, 0u8, 0u8,
+ 0u8, 1u8, 48u8, 48u8, 49u8, 101u8, 110u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 1u8,
+ 48u8, 48u8, 49u8, 101u8, 110u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 1u8, 48u8, 48u8,
+ 49u8, 101u8, 110u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 1u8, 48u8, 48u8, 49u8, 101u8,
+ 110u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 1u8, 48u8, 48u8, 49u8, 101u8, 110u8, 0u8,
+ 0u8, 0u8, 0u8, 0u8, 0u8, 1u8, 48u8, 48u8, 49u8, 101u8, 110u8, 0u8, 0u8, 0u8,
+ 0u8, 0u8, 0u8, 1u8, 48u8, 48u8, 49u8, 101u8, 110u8, 0u8, 0u8, 0u8, 0u8, 0u8,
+ 0u8, 1u8, 48u8, 48u8, 49u8, 101u8, 110u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 1u8,
+ 48u8, 48u8, 49u8, 101u8, 110u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 1u8, 48u8, 48u8,
+ 49u8, 101u8, 110u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 1u8, 48u8, 48u8, 49u8, 101u8,
+ 110u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 1u8, 49u8, 53u8, 48u8, 101u8, 110u8, 0u8,
+ 0u8, 0u8, 0u8, 0u8, 0u8, 1u8, 48u8, 48u8, 49u8, 101u8, 110u8, 0u8, 0u8, 0u8,
+ 0u8, 0u8, 0u8, 1u8, 48u8, 48u8, 49u8, 101u8, 110u8, 0u8, 0u8, 0u8, 0u8, 0u8,
+ 0u8, 1u8, 49u8, 53u8, 48u8, 101u8, 110u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 1u8,
+ 48u8, 48u8, 49u8, 101u8, 110u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 1u8, 48u8, 48u8,
+ 49u8, 101u8, 110u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 1u8, 48u8, 48u8, 49u8, 101u8,
+ 110u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 1u8, 48u8, 48u8, 49u8, 101u8, 110u8, 0u8,
+ 0u8, 0u8, 0u8, 0u8, 0u8, 1u8, 48u8, 48u8, 49u8, 101u8, 110u8, 0u8, 0u8, 0u8,
+ 0u8, 0u8, 0u8, 1u8, 48u8, 48u8, 49u8, 101u8, 110u8, 0u8, 0u8, 0u8, 0u8, 0u8,
+ 0u8, 1u8, 48u8, 48u8, 49u8, 101u8, 110u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 1u8,
+ 48u8, 48u8, 49u8, 101u8, 110u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 1u8, 48u8, 48u8,
+ 49u8, 101u8, 110u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 1u8, 48u8, 48u8, 49u8, 101u8,
+ 110u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 1u8, 48u8, 48u8, 49u8, 101u8, 110u8, 0u8,
+ 0u8, 0u8, 0u8, 0u8, 0u8, 1u8, 48u8, 48u8, 49u8, 101u8, 110u8, 0u8, 0u8, 0u8,
+ 0u8, 0u8, 0u8, 1u8, 48u8, 48u8, 49u8, 101u8, 110u8, 0u8, 0u8, 0u8, 0u8, 0u8,
+ 0u8, 1u8, 48u8, 48u8, 49u8, 101u8, 110u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 1u8,
+ 48u8, 48u8, 49u8, 101u8, 110u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 1u8, 48u8, 48u8,
+ 49u8, 101u8, 110u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 1u8, 48u8, 48u8, 49u8, 101u8,
+ 110u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 1u8, 48u8, 48u8, 49u8, 101u8, 115u8, 0u8,
+ 0u8, 0u8, 0u8, 0u8, 0u8, 1u8, 52u8, 49u8, 57u8, 101u8, 115u8, 0u8, 0u8, 0u8,
+ 0u8, 0u8, 0u8, 1u8, 52u8, 49u8, 57u8, 101u8, 115u8, 0u8, 0u8, 0u8, 0u8, 0u8,
+ 0u8, 1u8, 52u8, 49u8, 57u8, 101u8, 115u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 1u8,
+ 52u8, 49u8, 57u8, 101u8, 115u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 1u8, 52u8, 49u8,
+ 57u8, 101u8, 115u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 1u8, 52u8, 49u8, 57u8, 101u8,
+ 115u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 1u8, 52u8, 49u8, 57u8, 101u8, 115u8, 0u8,
+ 0u8, 0u8, 0u8, 0u8, 0u8, 1u8, 52u8, 49u8, 57u8, 101u8, 115u8, 0u8, 0u8, 0u8,
+ 0u8, 0u8, 0u8, 1u8, 52u8, 49u8, 57u8, 101u8, 115u8, 0u8, 0u8, 0u8, 0u8, 0u8,
+ 0u8, 1u8, 52u8, 49u8, 57u8, 101u8, 115u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 1u8,
+ 52u8, 49u8, 57u8, 101u8, 115u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 1u8, 52u8, 49u8,
+ 57u8, 101u8, 115u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 1u8, 52u8, 49u8, 57u8, 101u8,
+ 115u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 1u8, 52u8, 49u8, 57u8, 101u8, 115u8, 0u8,
+ 0u8, 0u8, 0u8, 0u8, 0u8, 1u8, 52u8, 49u8, 57u8, 101u8, 115u8, 0u8, 0u8, 0u8,
+ 0u8, 0u8, 0u8, 1u8, 52u8, 49u8, 57u8, 101u8, 115u8, 0u8, 0u8, 0u8, 0u8, 0u8,
+ 0u8, 1u8, 52u8, 49u8, 57u8, 101u8, 115u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 1u8,
+ 52u8, 49u8, 57u8, 101u8, 115u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 1u8, 52u8, 49u8,
+ 57u8, 101u8, 115u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 1u8, 52u8, 49u8, 57u8, 101u8,
+ 115u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 1u8, 52u8, 49u8, 57u8, 101u8, 115u8, 0u8,
+ 0u8, 0u8, 0u8, 0u8, 0u8, 1u8, 52u8, 49u8, 57u8, 101u8, 110u8, 0u8, 0u8, 0u8,
+ 0u8, 0u8, 0u8, 1u8, 73u8, 78u8, 0u8, 102u8, 114u8, 0u8, 0u8, 0u8, 0u8, 0u8,
+ 0u8, 1u8, 72u8, 84u8, 0u8, 110u8, 111u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8,
+ 0u8, 0u8, 0u8, 110u8, 111u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8,
+ 112u8, 116u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 1u8, 80u8, 84u8, 0u8, 112u8, 116u8,
+ 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 1u8, 80u8, 84u8, 0u8, 112u8, 116u8, 0u8, 0u8,
+ 0u8, 0u8, 0u8, 0u8, 1u8, 80u8, 84u8, 0u8, 112u8, 116u8, 0u8, 0u8, 0u8, 0u8,
+ 0u8, 0u8, 1u8, 80u8, 84u8, 0u8, 112u8, 116u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8,
+ 1u8, 80u8, 84u8, 0u8, 112u8, 116u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 1u8, 80u8,
+ 84u8, 0u8, 112u8, 116u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 1u8, 80u8, 84u8, 0u8,
+ 112u8, 116u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 1u8, 80u8, 84u8, 0u8, 112u8, 116u8,
+ 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 1u8, 80u8, 84u8, 0u8, 112u8, 116u8, 0u8, 0u8,
+ 0u8, 0u8, 0u8, 0u8, 1u8, 80u8, 84u8, 0u8, 112u8, 116u8, 0u8, 0u8, 0u8, 0u8,
+ 0u8, 0u8, 1u8, 80u8, 84u8, 0u8, 122u8, 104u8, 0u8, 1u8, 72u8, 97u8, 110u8,
+ 116u8, 1u8, 72u8, 75u8, 0u8,
+ ])
+ },
+ )
+ },
+};
diff --git a/compiler/rustc_baked_icu_data/src/data/fallback/supplement/co_v1.rs b/compiler/rustc_baked_icu_data/src/data/fallback/supplement/co_v1.rs
new file mode 100644
index 000000000..7df33c12e
--- /dev/null
+++ b/compiler/rustc_baked_icu_data/src/data/fallback/supplement/co_v1.rs
@@ -0,0 +1,41 @@
+// @generated
+type DataStruct = < :: icu_provider_adapters :: fallback :: provider :: CollationFallbackSupplementV1Marker as :: icu_provider :: DataMarker > :: Yokeable ;
+pub static DATA: litemap::LiteMap<&str, &DataStruct, &[(&str, &DataStruct)]> =
+ litemap::LiteMap::from_sorted_store_unchecked(&[("und", UND)]);
+static UND: &DataStruct =
+ &::icu_provider_adapters::fallback::provider::LocaleFallbackSupplementV1 {
+ parents: unsafe {
+ #[allow(unused_unsafe)]
+ ::zerovec::ZeroMap::from_parts_unchecked(
+ unsafe {
+ ::zerovec::VarZeroVec::from_bytes_unchecked(&[
+ 1u8, 0u8, 0u8, 0u8, 0u8, 0u8, 121u8, 117u8, 101u8,
+ ])
+ },
+ unsafe {
+ ::zerovec::ZeroVec::from_bytes_unchecked(&[
+ 122u8, 104u8, 0u8, 1u8, 72u8, 97u8, 110u8, 116u8, 0u8, 0u8, 0u8, 0u8,
+ ])
+ },
+ )
+ },
+ unicode_extension_defaults: unsafe {
+ #[allow(unused_unsafe)]
+ ::zerovec::ZeroMap2d::from_parts_unchecked(
+ unsafe { ::zerovec::ZeroVec::from_bytes_unchecked(&[99u8, 111u8]) },
+ unsafe { ::zerovec::ZeroVec::from_bytes_unchecked(&[2u8, 0u8, 0u8, 0u8]) },
+ unsafe {
+ ::zerovec::VarZeroVec::from_bytes_unchecked(&[
+ 2u8, 0u8, 0u8, 0u8, 0u8, 0u8, 2u8, 0u8, 122u8, 104u8, 122u8, 104u8, 45u8,
+ 72u8, 97u8, 110u8, 116u8,
+ ])
+ },
+ unsafe {
+ ::zerovec::VarZeroVec::from_bytes_unchecked(&[
+ 2u8, 0u8, 0u8, 0u8, 0u8, 0u8, 6u8, 0u8, 112u8, 105u8, 110u8, 121u8, 105u8,
+ 110u8, 115u8, 116u8, 114u8, 111u8, 107u8, 101u8,
+ ])
+ },
+ )
+ },
+ };
diff --git a/compiler/rustc_baked_icu_data/src/data/fallback/supplement/mod.rs b/compiler/rustc_baked_icu_data/src/data/fallback/supplement/mod.rs
new file mode 100644
index 000000000..62957134f
--- /dev/null
+++ b/compiler/rustc_baked_icu_data/src/data/fallback/supplement/mod.rs
@@ -0,0 +1,2 @@
+// @generated
+pub mod co_v1;
diff --git a/compiler/rustc_baked_icu_data/src/data/list/and_v1.rs b/compiler/rustc_baked_icu_data/src/data/list/and_v1.rs
new file mode 100644
index 000000000..9cae549e1
--- /dev/null
+++ b/compiler/rustc_baked_icu_data/src/data/list/and_v1.rs
@@ -0,0 +1,1161 @@
+// @generated
+type DataStruct = <::icu_list::provider::AndListV1Marker as ::icu_provider::DataMarker>::Yokeable;
+pub static DATA: litemap::LiteMap<&str, &DataStruct, &[(&str, &DataStruct)]> =
+ litemap::LiteMap::from_sorted_store_unchecked(&[
+ ("en", EN),
+ ("es", ES),
+ ("fr", FR),
+ ("it", IT),
+ ("ja", JA),
+ ("pt", PT),
+ ("ru", RU),
+ ("tr", TR),
+ ("und", UND),
+ ("zh", ZH_ZH_HANS),
+ ("zh-Hans", ZH_ZH_HANS),
+ ("zh-Hant", ZH_HANT),
+ ]);
+static EN: &DataStruct = &::icu_list::provider::ListFormatterPatternsV1([
+ ::icu_list::provider::ConditionalListJoinerPattern {
+ default: unsafe {
+ ::icu_list::provider::ListJoinerPattern::from_parts_unchecked(", ", 2u8)
+ },
+ special_case: None,
+ },
+ ::icu_list::provider::ConditionalListJoinerPattern {
+ default: unsafe {
+ ::icu_list::provider::ListJoinerPattern::from_parts_unchecked(", ", 2u8)
+ },
+ special_case: None,
+ },
+ ::icu_list::provider::ConditionalListJoinerPattern {
+ default: unsafe {
+ ::icu_list::provider::ListJoinerPattern::from_parts_unchecked(", and ", 6u8)
+ },
+ special_case: None,
+ },
+ ::icu_list::provider::ConditionalListJoinerPattern {
+ default: unsafe {
+ ::icu_list::provider::ListJoinerPattern::from_parts_unchecked(" and ", 5u8)
+ },
+ special_case: None,
+ },
+ ::icu_list::provider::ConditionalListJoinerPattern {
+ default: unsafe {
+ ::icu_list::provider::ListJoinerPattern::from_parts_unchecked(", ", 2u8)
+ },
+ special_case: None,
+ },
+ ::icu_list::provider::ConditionalListJoinerPattern {
+ default: unsafe {
+ ::icu_list::provider::ListJoinerPattern::from_parts_unchecked(", ", 2u8)
+ },
+ special_case: None,
+ },
+ ::icu_list::provider::ConditionalListJoinerPattern {
+ default: unsafe {
+ ::icu_list::provider::ListJoinerPattern::from_parts_unchecked(", & ", 4u8)
+ },
+ special_case: None,
+ },
+ ::icu_list::provider::ConditionalListJoinerPattern {
+ default: unsafe {
+ ::icu_list::provider::ListJoinerPattern::from_parts_unchecked(" & ", 3u8)
+ },
+ special_case: None,
+ },
+ ::icu_list::provider::ConditionalListJoinerPattern {
+ default: unsafe {
+ ::icu_list::provider::ListJoinerPattern::from_parts_unchecked(", ", 2u8)
+ },
+ special_case: None,
+ },
+ ::icu_list::provider::ConditionalListJoinerPattern {
+ default: unsafe {
+ ::icu_list::provider::ListJoinerPattern::from_parts_unchecked(", ", 2u8)
+ },
+ special_case: None,
+ },
+ ::icu_list::provider::ConditionalListJoinerPattern {
+ default: unsafe {
+ ::icu_list::provider::ListJoinerPattern::from_parts_unchecked(", ", 2u8)
+ },
+ special_case: None,
+ },
+ ::icu_list::provider::ConditionalListJoinerPattern {
+ default: unsafe {
+ ::icu_list::provider::ListJoinerPattern::from_parts_unchecked(", ", 2u8)
+ },
+ special_case: None,
+ },
+]);
+static ES: &DataStruct = &::icu_list::provider::ListFormatterPatternsV1([
+ ::icu_list::provider::ConditionalListJoinerPattern {
+ default: unsafe {
+ ::icu_list::provider::ListJoinerPattern::from_parts_unchecked(", ", 2u8)
+ },
+ special_case: None,
+ },
+ ::icu_list::provider::ConditionalListJoinerPattern {
+ default: unsafe {
+ ::icu_list::provider::ListJoinerPattern::from_parts_unchecked(", ", 2u8)
+ },
+ special_case: None,
+ },
+ ::icu_list::provider::ConditionalListJoinerPattern {
+ default: unsafe {
+ ::icu_list::provider::ListJoinerPattern::from_parts_unchecked(" y ", 3u8)
+ },
+ special_case: Some(::icu_list::provider::SpecialCasePattern {
+ condition: unsafe {
+ ::icu_list::provider::StringMatcher::from_dfa_bytes_unchecked(&[
+ 114u8, 117u8, 115u8, 116u8, 45u8, 114u8, 101u8, 103u8, 101u8, 120u8, 45u8,
+ 97u8, 117u8, 116u8, 111u8, 109u8, 97u8, 116u8, 97u8, 45u8, 100u8, 102u8, 97u8,
+ 45u8, 115u8, 112u8, 97u8, 114u8, 115u8, 101u8, 0u8, 0u8, 255u8, 254u8, 0u8,
+ 0u8, 2u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 14u8, 0u8, 0u8, 0u8, 1u8, 0u8,
+ 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8,
+ 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8,
+ 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8,
+ 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8,
+ 0u8, 0u8, 0u8, 1u8, 2u8, 2u8, 2u8, 3u8, 4u8, 4u8, 5u8, 6u8, 7u8, 7u8, 7u8, 7u8,
+ 7u8, 7u8, 7u8, 7u8, 7u8, 7u8, 7u8, 7u8, 7u8, 7u8, 7u8, 7u8, 7u8, 7u8, 7u8, 7u8,
+ 7u8, 7u8, 7u8, 8u8, 9u8, 9u8, 9u8, 10u8, 11u8, 11u8, 12u8, 13u8, 14u8, 14u8,
+ 14u8, 14u8, 14u8, 14u8, 14u8, 14u8, 14u8, 14u8, 14u8, 14u8, 14u8, 14u8, 14u8,
+ 14u8, 14u8, 14u8, 14u8, 14u8, 14u8, 14u8, 15u8, 15u8, 15u8, 15u8, 15u8, 15u8,
+ 15u8, 15u8, 15u8, 15u8, 15u8, 15u8, 15u8, 15u8, 15u8, 15u8, 16u8, 16u8, 16u8,
+ 16u8, 16u8, 16u8, 16u8, 16u8, 16u8, 16u8, 16u8, 16u8, 16u8, 16u8, 16u8, 16u8,
+ 17u8, 17u8, 17u8, 17u8, 17u8, 17u8, 17u8, 17u8, 17u8, 17u8, 17u8, 17u8, 17u8,
+ 17u8, 17u8, 17u8, 17u8, 17u8, 17u8, 17u8, 17u8, 17u8, 17u8, 17u8, 17u8, 17u8,
+ 17u8, 17u8, 17u8, 17u8, 17u8, 17u8, 18u8, 18u8, 19u8, 19u8, 19u8, 19u8, 19u8,
+ 19u8, 19u8, 19u8, 19u8, 19u8, 19u8, 19u8, 19u8, 19u8, 19u8, 19u8, 19u8, 19u8,
+ 19u8, 19u8, 19u8, 19u8, 19u8, 19u8, 19u8, 19u8, 19u8, 19u8, 19u8, 19u8, 20u8,
+ 21u8, 21u8, 21u8, 21u8, 21u8, 21u8, 21u8, 21u8, 21u8, 21u8, 21u8, 21u8, 22u8,
+ 23u8, 23u8, 24u8, 25u8, 25u8, 25u8, 26u8, 27u8, 27u8, 27u8, 27u8, 27u8, 27u8,
+ 27u8, 27u8, 27u8, 27u8, 27u8, 40u8, 1u8, 0u8, 0u8, 1u8, 0u8, 0u8, 0u8, 0u8,
+ 0u8, 0u8, 0u8, 0u8, 1u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 1u8, 128u8,
+ 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 1u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 5u8,
+ 0u8, 5u8, 5u8, 6u8, 6u8, 12u8, 12u8, 13u8, 13u8, 0u8, 0u8, 83u8, 0u8, 0u8, 0u8,
+ 68u8, 0u8, 0u8, 0u8, 83u8, 0u8, 0u8, 0u8, 68u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8,
+ 0u8, 0u8, 2u8, 0u8, 0u8, 27u8, 0u8, 0u8, 18u8, 0u8, 0u8, 0u8, 18u8, 0u8, 0u8,
+ 0u8, 0u8, 3u8, 0u8, 6u8, 6u8, 13u8, 13u8, 0u8, 0u8, 104u8, 0u8, 0u8, 0u8,
+ 104u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 14u8, 0u8, 0u8, 0u8, 2u8, 2u8,
+ 4u8, 7u8, 9u8, 9u8, 11u8, 14u8, 19u8, 19u8, 20u8, 20u8, 21u8, 21u8, 22u8, 22u8,
+ 23u8, 23u8, 24u8, 24u8, 25u8, 25u8, 26u8, 26u8, 0u8, 0u8, 68u8, 0u8, 0u8, 0u8,
+ 68u8, 0u8, 0u8, 0u8, 68u8, 0u8, 0u8, 0u8, 68u8, 0u8, 0u8, 0u8, 68u8, 0u8, 0u8,
+ 0u8, 191u8, 0u8, 0u8, 0u8, 206u8, 0u8, 0u8, 0u8, 221u8, 0u8, 0u8, 0u8, 236u8,
+ 0u8, 0u8, 0u8, 221u8, 0u8, 0u8, 0u8, 251u8, 0u8, 0u8, 0u8, 10u8, 1u8, 0u8, 0u8,
+ 25u8, 1u8, 0u8, 0u8, 18u8, 0u8, 0u8, 0u8, 0u8, 2u8, 0u8, 15u8, 17u8, 0u8, 0u8,
+ 68u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 2u8, 0u8, 17u8, 17u8, 0u8, 0u8,
+ 191u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 2u8, 0u8, 15u8, 17u8, 0u8, 0u8,
+ 191u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 2u8, 0u8, 15u8, 16u8, 0u8, 0u8,
+ 191u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 2u8, 0u8, 16u8, 17u8, 0u8, 0u8,
+ 221u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 2u8, 0u8, 15u8, 17u8, 0u8, 0u8,
+ 221u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 2u8, 0u8, 15u8, 15u8, 0u8, 0u8,
+ 221u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 4u8, 0u8, 0u8, 0u8, 0u8, 0u8,
+ 0u8, 0u8, 35u8, 0u8, 0u8, 0u8, 35u8, 0u8, 0u8, 0u8, 35u8, 0u8, 0u8, 0u8, 35u8,
+ 0u8, 0u8, 0u8, 35u8, 0u8, 0u8, 0u8, 9u8, 0u8, 0u8, 0u8, 18u8, 0u8, 0u8, 0u8,
+ 18u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 35u8, 0u8, 0u8,
+ 0u8, 35u8, 0u8, 0u8, 0u8,
+ ])
+ },
+ pattern: unsafe {
+ ::icu_list::provider::ListJoinerPattern::from_parts_unchecked(" e ", 3u8)
+ },
+ }),
+ },
+ ::icu_list::provider::ConditionalListJoinerPattern {
+ default: unsafe {
+ ::icu_list::provider::ListJoinerPattern::from_parts_unchecked(" y ", 3u8)
+ },
+ special_case: Some(::icu_list::provider::SpecialCasePattern {
+ condition: unsafe {
+ ::icu_list::provider::StringMatcher::from_dfa_bytes_unchecked(&[
+ 114u8, 117u8, 115u8, 116u8, 45u8, 114u8, 101u8, 103u8, 101u8, 120u8, 45u8,
+ 97u8, 117u8, 116u8, 111u8, 109u8, 97u8, 116u8, 97u8, 45u8, 100u8, 102u8, 97u8,
+ 45u8, 115u8, 112u8, 97u8, 114u8, 115u8, 101u8, 0u8, 0u8, 255u8, 254u8, 0u8,
+ 0u8, 2u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 14u8, 0u8, 0u8, 0u8, 1u8, 0u8,
+ 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8,
+ 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8,
+ 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8,
+ 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8,
+ 0u8, 0u8, 0u8, 1u8, 2u8, 2u8, 2u8, 3u8, 4u8, 4u8, 5u8, 6u8, 7u8, 7u8, 7u8, 7u8,
+ 7u8, 7u8, 7u8, 7u8, 7u8, 7u8, 7u8, 7u8, 7u8, 7u8, 7u8, 7u8, 7u8, 7u8, 7u8, 7u8,
+ 7u8, 7u8, 7u8, 8u8, 9u8, 9u8, 9u8, 10u8, 11u8, 11u8, 12u8, 13u8, 14u8, 14u8,
+ 14u8, 14u8, 14u8, 14u8, 14u8, 14u8, 14u8, 14u8, 14u8, 14u8, 14u8, 14u8, 14u8,
+ 14u8, 14u8, 14u8, 14u8, 14u8, 14u8, 14u8, 15u8, 15u8, 15u8, 15u8, 15u8, 15u8,
+ 15u8, 15u8, 15u8, 15u8, 15u8, 15u8, 15u8, 15u8, 15u8, 15u8, 16u8, 16u8, 16u8,
+ 16u8, 16u8, 16u8, 16u8, 16u8, 16u8, 16u8, 16u8, 16u8, 16u8, 16u8, 16u8, 16u8,
+ 17u8, 17u8, 17u8, 17u8, 17u8, 17u8, 17u8, 17u8, 17u8, 17u8, 17u8, 17u8, 17u8,
+ 17u8, 17u8, 17u8, 17u8, 17u8, 17u8, 17u8, 17u8, 17u8, 17u8, 17u8, 17u8, 17u8,
+ 17u8, 17u8, 17u8, 17u8, 17u8, 17u8, 18u8, 18u8, 19u8, 19u8, 19u8, 19u8, 19u8,
+ 19u8, 19u8, 19u8, 19u8, 19u8, 19u8, 19u8, 19u8, 19u8, 19u8, 19u8, 19u8, 19u8,
+ 19u8, 19u8, 19u8, 19u8, 19u8, 19u8, 19u8, 19u8, 19u8, 19u8, 19u8, 19u8, 20u8,
+ 21u8, 21u8, 21u8, 21u8, 21u8, 21u8, 21u8, 21u8, 21u8, 21u8, 21u8, 21u8, 22u8,
+ 23u8, 23u8, 24u8, 25u8, 25u8, 25u8, 26u8, 27u8, 27u8, 27u8, 27u8, 27u8, 27u8,
+ 27u8, 27u8, 27u8, 27u8, 27u8, 40u8, 1u8, 0u8, 0u8, 1u8, 0u8, 0u8, 0u8, 0u8,
+ 0u8, 0u8, 0u8, 0u8, 1u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 1u8, 128u8,
+ 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 1u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 5u8,
+ 0u8, 5u8, 5u8, 6u8, 6u8, 12u8, 12u8, 13u8, 13u8, 0u8, 0u8, 83u8, 0u8, 0u8, 0u8,
+ 68u8, 0u8, 0u8, 0u8, 83u8, 0u8, 0u8, 0u8, 68u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8,
+ 0u8, 0u8, 2u8, 0u8, 0u8, 27u8, 0u8, 0u8, 18u8, 0u8, 0u8, 0u8, 18u8, 0u8, 0u8,
+ 0u8, 0u8, 3u8, 0u8, 6u8, 6u8, 13u8, 13u8, 0u8, 0u8, 104u8, 0u8, 0u8, 0u8,
+ 104u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 14u8, 0u8, 0u8, 0u8, 2u8, 2u8,
+ 4u8, 7u8, 9u8, 9u8, 11u8, 14u8, 19u8, 19u8, 20u8, 20u8, 21u8, 21u8, 22u8, 22u8,
+ 23u8, 23u8, 24u8, 24u8, 25u8, 25u8, 26u8, 26u8, 0u8, 0u8, 68u8, 0u8, 0u8, 0u8,
+ 68u8, 0u8, 0u8, 0u8, 68u8, 0u8, 0u8, 0u8, 68u8, 0u8, 0u8, 0u8, 68u8, 0u8, 0u8,
+ 0u8, 191u8, 0u8, 0u8, 0u8, 206u8, 0u8, 0u8, 0u8, 221u8, 0u8, 0u8, 0u8, 236u8,
+ 0u8, 0u8, 0u8, 221u8, 0u8, 0u8, 0u8, 251u8, 0u8, 0u8, 0u8, 10u8, 1u8, 0u8, 0u8,
+ 25u8, 1u8, 0u8, 0u8, 18u8, 0u8, 0u8, 0u8, 0u8, 2u8, 0u8, 15u8, 17u8, 0u8, 0u8,
+ 68u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 2u8, 0u8, 17u8, 17u8, 0u8, 0u8,
+ 191u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 2u8, 0u8, 15u8, 17u8, 0u8, 0u8,
+ 191u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 2u8, 0u8, 15u8, 16u8, 0u8, 0u8,
+ 191u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 2u8, 0u8, 16u8, 17u8, 0u8, 0u8,
+ 221u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 2u8, 0u8, 15u8, 17u8, 0u8, 0u8,
+ 221u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 2u8, 0u8, 15u8, 15u8, 0u8, 0u8,
+ 221u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 4u8, 0u8, 0u8, 0u8, 0u8, 0u8,
+ 0u8, 0u8, 35u8, 0u8, 0u8, 0u8, 35u8, 0u8, 0u8, 0u8, 35u8, 0u8, 0u8, 0u8, 35u8,
+ 0u8, 0u8, 0u8, 35u8, 0u8, 0u8, 0u8, 9u8, 0u8, 0u8, 0u8, 18u8, 0u8, 0u8, 0u8,
+ 18u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 35u8, 0u8, 0u8,
+ 0u8, 35u8, 0u8, 0u8, 0u8,
+ ])
+ },
+ pattern: unsafe {
+ ::icu_list::provider::ListJoinerPattern::from_parts_unchecked(" e ", 3u8)
+ },
+ }),
+ },
+ ::icu_list::provider::ConditionalListJoinerPattern {
+ default: unsafe {
+ ::icu_list::provider::ListJoinerPattern::from_parts_unchecked(", ", 2u8)
+ },
+ special_case: None,
+ },
+ ::icu_list::provider::ConditionalListJoinerPattern {
+ default: unsafe {
+ ::icu_list::provider::ListJoinerPattern::from_parts_unchecked(", ", 2u8)
+ },
+ special_case: None,
+ },
+ ::icu_list::provider::ConditionalListJoinerPattern {
+ default: unsafe {
+ ::icu_list::provider::ListJoinerPattern::from_parts_unchecked(" y ", 3u8)
+ },
+ special_case: Some(::icu_list::provider::SpecialCasePattern {
+ condition: unsafe {
+ ::icu_list::provider::StringMatcher::from_dfa_bytes_unchecked(&[
+ 114u8, 117u8, 115u8, 116u8, 45u8, 114u8, 101u8, 103u8, 101u8, 120u8, 45u8,
+ 97u8, 117u8, 116u8, 111u8, 109u8, 97u8, 116u8, 97u8, 45u8, 100u8, 102u8, 97u8,
+ 45u8, 115u8, 112u8, 97u8, 114u8, 115u8, 101u8, 0u8, 0u8, 255u8, 254u8, 0u8,
+ 0u8, 2u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 14u8, 0u8, 0u8, 0u8, 1u8, 0u8,
+ 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8,
+ 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8,
+ 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8,
+ 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8,
+ 0u8, 0u8, 0u8, 1u8, 2u8, 2u8, 2u8, 3u8, 4u8, 4u8, 5u8, 6u8, 7u8, 7u8, 7u8, 7u8,
+ 7u8, 7u8, 7u8, 7u8, 7u8, 7u8, 7u8, 7u8, 7u8, 7u8, 7u8, 7u8, 7u8, 7u8, 7u8, 7u8,
+ 7u8, 7u8, 7u8, 8u8, 9u8, 9u8, 9u8, 10u8, 11u8, 11u8, 12u8, 13u8, 14u8, 14u8,
+ 14u8, 14u8, 14u8, 14u8, 14u8, 14u8, 14u8, 14u8, 14u8, 14u8, 14u8, 14u8, 14u8,
+ 14u8, 14u8, 14u8, 14u8, 14u8, 14u8, 14u8, 15u8, 15u8, 15u8, 15u8, 15u8, 15u8,
+ 15u8, 15u8, 15u8, 15u8, 15u8, 15u8, 15u8, 15u8, 15u8, 15u8, 16u8, 16u8, 16u8,
+ 16u8, 16u8, 16u8, 16u8, 16u8, 16u8, 16u8, 16u8, 16u8, 16u8, 16u8, 16u8, 16u8,
+ 17u8, 17u8, 17u8, 17u8, 17u8, 17u8, 17u8, 17u8, 17u8, 17u8, 17u8, 17u8, 17u8,
+ 17u8, 17u8, 17u8, 17u8, 17u8, 17u8, 17u8, 17u8, 17u8, 17u8, 17u8, 17u8, 17u8,
+ 17u8, 17u8, 17u8, 17u8, 17u8, 17u8, 18u8, 18u8, 19u8, 19u8, 19u8, 19u8, 19u8,
+ 19u8, 19u8, 19u8, 19u8, 19u8, 19u8, 19u8, 19u8, 19u8, 19u8, 19u8, 19u8, 19u8,
+ 19u8, 19u8, 19u8, 19u8, 19u8, 19u8, 19u8, 19u8, 19u8, 19u8, 19u8, 19u8, 20u8,
+ 21u8, 21u8, 21u8, 21u8, 21u8, 21u8, 21u8, 21u8, 21u8, 21u8, 21u8, 21u8, 22u8,
+ 23u8, 23u8, 24u8, 25u8, 25u8, 25u8, 26u8, 27u8, 27u8, 27u8, 27u8, 27u8, 27u8,
+ 27u8, 27u8, 27u8, 27u8, 27u8, 40u8, 1u8, 0u8, 0u8, 1u8, 0u8, 0u8, 0u8, 0u8,
+ 0u8, 0u8, 0u8, 0u8, 1u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 1u8, 128u8,
+ 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 1u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 5u8,
+ 0u8, 5u8, 5u8, 6u8, 6u8, 12u8, 12u8, 13u8, 13u8, 0u8, 0u8, 83u8, 0u8, 0u8, 0u8,
+ 68u8, 0u8, 0u8, 0u8, 83u8, 0u8, 0u8, 0u8, 68u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8,
+ 0u8, 0u8, 2u8, 0u8, 0u8, 27u8, 0u8, 0u8, 18u8, 0u8, 0u8, 0u8, 18u8, 0u8, 0u8,
+ 0u8, 0u8, 3u8, 0u8, 6u8, 6u8, 13u8, 13u8, 0u8, 0u8, 104u8, 0u8, 0u8, 0u8,
+ 104u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 14u8, 0u8, 0u8, 0u8, 2u8, 2u8,
+ 4u8, 7u8, 9u8, 9u8, 11u8, 14u8, 19u8, 19u8, 20u8, 20u8, 21u8, 21u8, 22u8, 22u8,
+ 23u8, 23u8, 24u8, 24u8, 25u8, 25u8, 26u8, 26u8, 0u8, 0u8, 68u8, 0u8, 0u8, 0u8,
+ 68u8, 0u8, 0u8, 0u8, 68u8, 0u8, 0u8, 0u8, 68u8, 0u8, 0u8, 0u8, 68u8, 0u8, 0u8,
+ 0u8, 191u8, 0u8, 0u8, 0u8, 206u8, 0u8, 0u8, 0u8, 221u8, 0u8, 0u8, 0u8, 236u8,
+ 0u8, 0u8, 0u8, 221u8, 0u8, 0u8, 0u8, 251u8, 0u8, 0u8, 0u8, 10u8, 1u8, 0u8, 0u8,
+ 25u8, 1u8, 0u8, 0u8, 18u8, 0u8, 0u8, 0u8, 0u8, 2u8, 0u8, 15u8, 17u8, 0u8, 0u8,
+ 68u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 2u8, 0u8, 17u8, 17u8, 0u8, 0u8,
+ 191u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 2u8, 0u8, 15u8, 17u8, 0u8, 0u8,
+ 191u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 2u8, 0u8, 15u8, 16u8, 0u8, 0u8,
+ 191u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 2u8, 0u8, 16u8, 17u8, 0u8, 0u8,
+ 221u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 2u8, 0u8, 15u8, 17u8, 0u8, 0u8,
+ 221u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 2u8, 0u8, 15u8, 15u8, 0u8, 0u8,
+ 221u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 4u8, 0u8, 0u8, 0u8, 0u8, 0u8,
+ 0u8, 0u8, 35u8, 0u8, 0u8, 0u8, 35u8, 0u8, 0u8, 0u8, 35u8, 0u8, 0u8, 0u8, 35u8,
+ 0u8, 0u8, 0u8, 35u8, 0u8, 0u8, 0u8, 9u8, 0u8, 0u8, 0u8, 18u8, 0u8, 0u8, 0u8,
+ 18u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 35u8, 0u8, 0u8,
+ 0u8, 35u8, 0u8, 0u8, 0u8,
+ ])
+ },
+ pattern: unsafe {
+ ::icu_list::provider::ListJoinerPattern::from_parts_unchecked(" e ", 3u8)
+ },
+ }),
+ },
+ ::icu_list::provider::ConditionalListJoinerPattern {
+ default: unsafe {
+ ::icu_list::provider::ListJoinerPattern::from_parts_unchecked(" y ", 3u8)
+ },
+ special_case: Some(::icu_list::provider::SpecialCasePattern {
+ condition: unsafe {
+ ::icu_list::provider::StringMatcher::from_dfa_bytes_unchecked(&[
+ 114u8, 117u8, 115u8, 116u8, 45u8, 114u8, 101u8, 103u8, 101u8, 120u8, 45u8,
+ 97u8, 117u8, 116u8, 111u8, 109u8, 97u8, 116u8, 97u8, 45u8, 100u8, 102u8, 97u8,
+ 45u8, 115u8, 112u8, 97u8, 114u8, 115u8, 101u8, 0u8, 0u8, 255u8, 254u8, 0u8,
+ 0u8, 2u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 14u8, 0u8, 0u8, 0u8, 1u8, 0u8,
+ 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8,
+ 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8,
+ 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8,
+ 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8,
+ 0u8, 0u8, 0u8, 1u8, 2u8, 2u8, 2u8, 3u8, 4u8, 4u8, 5u8, 6u8, 7u8, 7u8, 7u8, 7u8,
+ 7u8, 7u8, 7u8, 7u8, 7u8, 7u8, 7u8, 7u8, 7u8, 7u8, 7u8, 7u8, 7u8, 7u8, 7u8, 7u8,
+ 7u8, 7u8, 7u8, 8u8, 9u8, 9u8, 9u8, 10u8, 11u8, 11u8, 12u8, 13u8, 14u8, 14u8,
+ 14u8, 14u8, 14u8, 14u8, 14u8, 14u8, 14u8, 14u8, 14u8, 14u8, 14u8, 14u8, 14u8,
+ 14u8, 14u8, 14u8, 14u8, 14u8, 14u8, 14u8, 15u8, 15u8, 15u8, 15u8, 15u8, 15u8,
+ 15u8, 15u8, 15u8, 15u8, 15u8, 15u8, 15u8, 15u8, 15u8, 15u8, 16u8, 16u8, 16u8,
+ 16u8, 16u8, 16u8, 16u8, 16u8, 16u8, 16u8, 16u8, 16u8, 16u8, 16u8, 16u8, 16u8,
+ 17u8, 17u8, 17u8, 17u8, 17u8, 17u8, 17u8, 17u8, 17u8, 17u8, 17u8, 17u8, 17u8,
+ 17u8, 17u8, 17u8, 17u8, 17u8, 17u8, 17u8, 17u8, 17u8, 17u8, 17u8, 17u8, 17u8,
+ 17u8, 17u8, 17u8, 17u8, 17u8, 17u8, 18u8, 18u8, 19u8, 19u8, 19u8, 19u8, 19u8,
+ 19u8, 19u8, 19u8, 19u8, 19u8, 19u8, 19u8, 19u8, 19u8, 19u8, 19u8, 19u8, 19u8,
+ 19u8, 19u8, 19u8, 19u8, 19u8, 19u8, 19u8, 19u8, 19u8, 19u8, 19u8, 19u8, 20u8,
+ 21u8, 21u8, 21u8, 21u8, 21u8, 21u8, 21u8, 21u8, 21u8, 21u8, 21u8, 21u8, 22u8,
+ 23u8, 23u8, 24u8, 25u8, 25u8, 25u8, 26u8, 27u8, 27u8, 27u8, 27u8, 27u8, 27u8,
+ 27u8, 27u8, 27u8, 27u8, 27u8, 40u8, 1u8, 0u8, 0u8, 1u8, 0u8, 0u8, 0u8, 0u8,
+ 0u8, 0u8, 0u8, 0u8, 1u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 1u8, 128u8,
+ 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 1u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 5u8,
+ 0u8, 5u8, 5u8, 6u8, 6u8, 12u8, 12u8, 13u8, 13u8, 0u8, 0u8, 83u8, 0u8, 0u8, 0u8,
+ 68u8, 0u8, 0u8, 0u8, 83u8, 0u8, 0u8, 0u8, 68u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8,
+ 0u8, 0u8, 2u8, 0u8, 0u8, 27u8, 0u8, 0u8, 18u8, 0u8, 0u8, 0u8, 18u8, 0u8, 0u8,
+ 0u8, 0u8, 3u8, 0u8, 6u8, 6u8, 13u8, 13u8, 0u8, 0u8, 104u8, 0u8, 0u8, 0u8,
+ 104u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 14u8, 0u8, 0u8, 0u8, 2u8, 2u8,
+ 4u8, 7u8, 9u8, 9u8, 11u8, 14u8, 19u8, 19u8, 20u8, 20u8, 21u8, 21u8, 22u8, 22u8,
+ 23u8, 23u8, 24u8, 24u8, 25u8, 25u8, 26u8, 26u8, 0u8, 0u8, 68u8, 0u8, 0u8, 0u8,
+ 68u8, 0u8, 0u8, 0u8, 68u8, 0u8, 0u8, 0u8, 68u8, 0u8, 0u8, 0u8, 68u8, 0u8, 0u8,
+ 0u8, 191u8, 0u8, 0u8, 0u8, 206u8, 0u8, 0u8, 0u8, 221u8, 0u8, 0u8, 0u8, 236u8,
+ 0u8, 0u8, 0u8, 221u8, 0u8, 0u8, 0u8, 251u8, 0u8, 0u8, 0u8, 10u8, 1u8, 0u8, 0u8,
+ 25u8, 1u8, 0u8, 0u8, 18u8, 0u8, 0u8, 0u8, 0u8, 2u8, 0u8, 15u8, 17u8, 0u8, 0u8,
+ 68u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 2u8, 0u8, 17u8, 17u8, 0u8, 0u8,
+ 191u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 2u8, 0u8, 15u8, 17u8, 0u8, 0u8,
+ 191u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 2u8, 0u8, 15u8, 16u8, 0u8, 0u8,
+ 191u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 2u8, 0u8, 16u8, 17u8, 0u8, 0u8,
+ 221u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 2u8, 0u8, 15u8, 17u8, 0u8, 0u8,
+ 221u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 2u8, 0u8, 15u8, 15u8, 0u8, 0u8,
+ 221u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 4u8, 0u8, 0u8, 0u8, 0u8, 0u8,
+ 0u8, 0u8, 35u8, 0u8, 0u8, 0u8, 35u8, 0u8, 0u8, 0u8, 35u8, 0u8, 0u8, 0u8, 35u8,
+ 0u8, 0u8, 0u8, 35u8, 0u8, 0u8, 0u8, 9u8, 0u8, 0u8, 0u8, 18u8, 0u8, 0u8, 0u8,
+ 18u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 35u8, 0u8, 0u8,
+ 0u8, 35u8, 0u8, 0u8, 0u8,
+ ])
+ },
+ pattern: unsafe {
+ ::icu_list::provider::ListJoinerPattern::from_parts_unchecked(" e ", 3u8)
+ },
+ }),
+ },
+ ::icu_list::provider::ConditionalListJoinerPattern {
+ default: unsafe {
+ ::icu_list::provider::ListJoinerPattern::from_parts_unchecked(", ", 2u8)
+ },
+ special_case: None,
+ },
+ ::icu_list::provider::ConditionalListJoinerPattern {
+ default: unsafe {
+ ::icu_list::provider::ListJoinerPattern::from_parts_unchecked(", ", 2u8)
+ },
+ special_case: None,
+ },
+ ::icu_list::provider::ConditionalListJoinerPattern {
+ default: unsafe {
+ ::icu_list::provider::ListJoinerPattern::from_parts_unchecked(" y ", 3u8)
+ },
+ special_case: Some(::icu_list::provider::SpecialCasePattern {
+ condition: unsafe {
+ ::icu_list::provider::StringMatcher::from_dfa_bytes_unchecked(&[
+ 114u8, 117u8, 115u8, 116u8, 45u8, 114u8, 101u8, 103u8, 101u8, 120u8, 45u8,
+ 97u8, 117u8, 116u8, 111u8, 109u8, 97u8, 116u8, 97u8, 45u8, 100u8, 102u8, 97u8,
+ 45u8, 115u8, 112u8, 97u8, 114u8, 115u8, 101u8, 0u8, 0u8, 255u8, 254u8, 0u8,
+ 0u8, 2u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 14u8, 0u8, 0u8, 0u8, 1u8, 0u8,
+ 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8,
+ 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8,
+ 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8,
+ 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8,
+ 0u8, 0u8, 0u8, 1u8, 2u8, 2u8, 2u8, 3u8, 4u8, 4u8, 5u8, 6u8, 7u8, 7u8, 7u8, 7u8,
+ 7u8, 7u8, 7u8, 7u8, 7u8, 7u8, 7u8, 7u8, 7u8, 7u8, 7u8, 7u8, 7u8, 7u8, 7u8, 7u8,
+ 7u8, 7u8, 7u8, 8u8, 9u8, 9u8, 9u8, 10u8, 11u8, 11u8, 12u8, 13u8, 14u8, 14u8,
+ 14u8, 14u8, 14u8, 14u8, 14u8, 14u8, 14u8, 14u8, 14u8, 14u8, 14u8, 14u8, 14u8,
+ 14u8, 14u8, 14u8, 14u8, 14u8, 14u8, 14u8, 15u8, 15u8, 15u8, 15u8, 15u8, 15u8,
+ 15u8, 15u8, 15u8, 15u8, 15u8, 15u8, 15u8, 15u8, 15u8, 15u8, 16u8, 16u8, 16u8,
+ 16u8, 16u8, 16u8, 16u8, 16u8, 16u8, 16u8, 16u8, 16u8, 16u8, 16u8, 16u8, 16u8,
+ 17u8, 17u8, 17u8, 17u8, 17u8, 17u8, 17u8, 17u8, 17u8, 17u8, 17u8, 17u8, 17u8,
+ 17u8, 17u8, 17u8, 17u8, 17u8, 17u8, 17u8, 17u8, 17u8, 17u8, 17u8, 17u8, 17u8,
+ 17u8, 17u8, 17u8, 17u8, 17u8, 17u8, 18u8, 18u8, 19u8, 19u8, 19u8, 19u8, 19u8,
+ 19u8, 19u8, 19u8, 19u8, 19u8, 19u8, 19u8, 19u8, 19u8, 19u8, 19u8, 19u8, 19u8,
+ 19u8, 19u8, 19u8, 19u8, 19u8, 19u8, 19u8, 19u8, 19u8, 19u8, 19u8, 19u8, 20u8,
+ 21u8, 21u8, 21u8, 21u8, 21u8, 21u8, 21u8, 21u8, 21u8, 21u8, 21u8, 21u8, 22u8,
+ 23u8, 23u8, 24u8, 25u8, 25u8, 25u8, 26u8, 27u8, 27u8, 27u8, 27u8, 27u8, 27u8,
+ 27u8, 27u8, 27u8, 27u8, 27u8, 40u8, 1u8, 0u8, 0u8, 1u8, 0u8, 0u8, 0u8, 0u8,
+ 0u8, 0u8, 0u8, 0u8, 1u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 1u8, 128u8,
+ 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 1u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 5u8,
+ 0u8, 5u8, 5u8, 6u8, 6u8, 12u8, 12u8, 13u8, 13u8, 0u8, 0u8, 83u8, 0u8, 0u8, 0u8,
+ 68u8, 0u8, 0u8, 0u8, 83u8, 0u8, 0u8, 0u8, 68u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8,
+ 0u8, 0u8, 2u8, 0u8, 0u8, 27u8, 0u8, 0u8, 18u8, 0u8, 0u8, 0u8, 18u8, 0u8, 0u8,
+ 0u8, 0u8, 3u8, 0u8, 6u8, 6u8, 13u8, 13u8, 0u8, 0u8, 104u8, 0u8, 0u8, 0u8,
+ 104u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 14u8, 0u8, 0u8, 0u8, 2u8, 2u8,
+ 4u8, 7u8, 9u8, 9u8, 11u8, 14u8, 19u8, 19u8, 20u8, 20u8, 21u8, 21u8, 22u8, 22u8,
+ 23u8, 23u8, 24u8, 24u8, 25u8, 25u8, 26u8, 26u8, 0u8, 0u8, 68u8, 0u8, 0u8, 0u8,
+ 68u8, 0u8, 0u8, 0u8, 68u8, 0u8, 0u8, 0u8, 68u8, 0u8, 0u8, 0u8, 68u8, 0u8, 0u8,
+ 0u8, 191u8, 0u8, 0u8, 0u8, 206u8, 0u8, 0u8, 0u8, 221u8, 0u8, 0u8, 0u8, 236u8,
+ 0u8, 0u8, 0u8, 221u8, 0u8, 0u8, 0u8, 251u8, 0u8, 0u8, 0u8, 10u8, 1u8, 0u8, 0u8,
+ 25u8, 1u8, 0u8, 0u8, 18u8, 0u8, 0u8, 0u8, 0u8, 2u8, 0u8, 15u8, 17u8, 0u8, 0u8,
+ 68u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 2u8, 0u8, 17u8, 17u8, 0u8, 0u8,
+ 191u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 2u8, 0u8, 15u8, 17u8, 0u8, 0u8,
+ 191u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 2u8, 0u8, 15u8, 16u8, 0u8, 0u8,
+ 191u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 2u8, 0u8, 16u8, 17u8, 0u8, 0u8,
+ 221u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 2u8, 0u8, 15u8, 17u8, 0u8, 0u8,
+ 221u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 2u8, 0u8, 15u8, 15u8, 0u8, 0u8,
+ 221u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 4u8, 0u8, 0u8, 0u8, 0u8, 0u8,
+ 0u8, 0u8, 35u8, 0u8, 0u8, 0u8, 35u8, 0u8, 0u8, 0u8, 35u8, 0u8, 0u8, 0u8, 35u8,
+ 0u8, 0u8, 0u8, 35u8, 0u8, 0u8, 0u8, 9u8, 0u8, 0u8, 0u8, 18u8, 0u8, 0u8, 0u8,
+ 18u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 35u8, 0u8, 0u8,
+ 0u8, 35u8, 0u8, 0u8, 0u8,
+ ])
+ },
+ pattern: unsafe {
+ ::icu_list::provider::ListJoinerPattern::from_parts_unchecked(" e ", 3u8)
+ },
+ }),
+ },
+ ::icu_list::provider::ConditionalListJoinerPattern {
+ default: unsafe {
+ ::icu_list::provider::ListJoinerPattern::from_parts_unchecked(" y ", 3u8)
+ },
+ special_case: Some(::icu_list::provider::SpecialCasePattern {
+ condition: unsafe {
+ ::icu_list::provider::StringMatcher::from_dfa_bytes_unchecked(&[
+ 114u8, 117u8, 115u8, 116u8, 45u8, 114u8, 101u8, 103u8, 101u8, 120u8, 45u8,
+ 97u8, 117u8, 116u8, 111u8, 109u8, 97u8, 116u8, 97u8, 45u8, 100u8, 102u8, 97u8,
+ 45u8, 115u8, 112u8, 97u8, 114u8, 115u8, 101u8, 0u8, 0u8, 255u8, 254u8, 0u8,
+ 0u8, 2u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 14u8, 0u8, 0u8, 0u8, 1u8, 0u8,
+ 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8,
+ 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8,
+ 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8,
+ 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8,
+ 0u8, 0u8, 0u8, 1u8, 2u8, 2u8, 2u8, 3u8, 4u8, 4u8, 5u8, 6u8, 7u8, 7u8, 7u8, 7u8,
+ 7u8, 7u8, 7u8, 7u8, 7u8, 7u8, 7u8, 7u8, 7u8, 7u8, 7u8, 7u8, 7u8, 7u8, 7u8, 7u8,
+ 7u8, 7u8, 7u8, 8u8, 9u8, 9u8, 9u8, 10u8, 11u8, 11u8, 12u8, 13u8, 14u8, 14u8,
+ 14u8, 14u8, 14u8, 14u8, 14u8, 14u8, 14u8, 14u8, 14u8, 14u8, 14u8, 14u8, 14u8,
+ 14u8, 14u8, 14u8, 14u8, 14u8, 14u8, 14u8, 15u8, 15u8, 15u8, 15u8, 15u8, 15u8,
+ 15u8, 15u8, 15u8, 15u8, 15u8, 15u8, 15u8, 15u8, 15u8, 15u8, 16u8, 16u8, 16u8,
+ 16u8, 16u8, 16u8, 16u8, 16u8, 16u8, 16u8, 16u8, 16u8, 16u8, 16u8, 16u8, 16u8,
+ 17u8, 17u8, 17u8, 17u8, 17u8, 17u8, 17u8, 17u8, 17u8, 17u8, 17u8, 17u8, 17u8,
+ 17u8, 17u8, 17u8, 17u8, 17u8, 17u8, 17u8, 17u8, 17u8, 17u8, 17u8, 17u8, 17u8,
+ 17u8, 17u8, 17u8, 17u8, 17u8, 17u8, 18u8, 18u8, 19u8, 19u8, 19u8, 19u8, 19u8,
+ 19u8, 19u8, 19u8, 19u8, 19u8, 19u8, 19u8, 19u8, 19u8, 19u8, 19u8, 19u8, 19u8,
+ 19u8, 19u8, 19u8, 19u8, 19u8, 19u8, 19u8, 19u8, 19u8, 19u8, 19u8, 19u8, 20u8,
+ 21u8, 21u8, 21u8, 21u8, 21u8, 21u8, 21u8, 21u8, 21u8, 21u8, 21u8, 21u8, 22u8,
+ 23u8, 23u8, 24u8, 25u8, 25u8, 25u8, 26u8, 27u8, 27u8, 27u8, 27u8, 27u8, 27u8,
+ 27u8, 27u8, 27u8, 27u8, 27u8, 40u8, 1u8, 0u8, 0u8, 1u8, 0u8, 0u8, 0u8, 0u8,
+ 0u8, 0u8, 0u8, 0u8, 1u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 1u8, 128u8,
+ 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 1u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 5u8,
+ 0u8, 5u8, 5u8, 6u8, 6u8, 12u8, 12u8, 13u8, 13u8, 0u8, 0u8, 83u8, 0u8, 0u8, 0u8,
+ 68u8, 0u8, 0u8, 0u8, 83u8, 0u8, 0u8, 0u8, 68u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8,
+ 0u8, 0u8, 2u8, 0u8, 0u8, 27u8, 0u8, 0u8, 18u8, 0u8, 0u8, 0u8, 18u8, 0u8, 0u8,
+ 0u8, 0u8, 3u8, 0u8, 6u8, 6u8, 13u8, 13u8, 0u8, 0u8, 104u8, 0u8, 0u8, 0u8,
+ 104u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 14u8, 0u8, 0u8, 0u8, 2u8, 2u8,
+ 4u8, 7u8, 9u8, 9u8, 11u8, 14u8, 19u8, 19u8, 20u8, 20u8, 21u8, 21u8, 22u8, 22u8,
+ 23u8, 23u8, 24u8, 24u8, 25u8, 25u8, 26u8, 26u8, 0u8, 0u8, 68u8, 0u8, 0u8, 0u8,
+ 68u8, 0u8, 0u8, 0u8, 68u8, 0u8, 0u8, 0u8, 68u8, 0u8, 0u8, 0u8, 68u8, 0u8, 0u8,
+ 0u8, 191u8, 0u8, 0u8, 0u8, 206u8, 0u8, 0u8, 0u8, 221u8, 0u8, 0u8, 0u8, 236u8,
+ 0u8, 0u8, 0u8, 221u8, 0u8, 0u8, 0u8, 251u8, 0u8, 0u8, 0u8, 10u8, 1u8, 0u8, 0u8,
+ 25u8, 1u8, 0u8, 0u8, 18u8, 0u8, 0u8, 0u8, 0u8, 2u8, 0u8, 15u8, 17u8, 0u8, 0u8,
+ 68u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 2u8, 0u8, 17u8, 17u8, 0u8, 0u8,
+ 191u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 2u8, 0u8, 15u8, 17u8, 0u8, 0u8,
+ 191u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 2u8, 0u8, 15u8, 16u8, 0u8, 0u8,
+ 191u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 2u8, 0u8, 16u8, 17u8, 0u8, 0u8,
+ 221u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 2u8, 0u8, 15u8, 17u8, 0u8, 0u8,
+ 221u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 2u8, 0u8, 15u8, 15u8, 0u8, 0u8,
+ 221u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 4u8, 0u8, 0u8, 0u8, 0u8, 0u8,
+ 0u8, 0u8, 35u8, 0u8, 0u8, 0u8, 35u8, 0u8, 0u8, 0u8, 35u8, 0u8, 0u8, 0u8, 35u8,
+ 0u8, 0u8, 0u8, 35u8, 0u8, 0u8, 0u8, 9u8, 0u8, 0u8, 0u8, 18u8, 0u8, 0u8, 0u8,
+ 18u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 35u8, 0u8, 0u8,
+ 0u8, 35u8, 0u8, 0u8, 0u8,
+ ])
+ },
+ pattern: unsafe {
+ ::icu_list::provider::ListJoinerPattern::from_parts_unchecked(" e ", 3u8)
+ },
+ }),
+ },
+]);
+static FR: &DataStruct = &::icu_list::provider::ListFormatterPatternsV1([
+ ::icu_list::provider::ConditionalListJoinerPattern {
+ default: unsafe {
+ ::icu_list::provider::ListJoinerPattern::from_parts_unchecked(", ", 2u8)
+ },
+ special_case: None,
+ },
+ ::icu_list::provider::ConditionalListJoinerPattern {
+ default: unsafe {
+ ::icu_list::provider::ListJoinerPattern::from_parts_unchecked(", ", 2u8)
+ },
+ special_case: None,
+ },
+ ::icu_list::provider::ConditionalListJoinerPattern {
+ default: unsafe {
+ ::icu_list::provider::ListJoinerPattern::from_parts_unchecked(" et ", 4u8)
+ },
+ special_case: None,
+ },
+ ::icu_list::provider::ConditionalListJoinerPattern {
+ default: unsafe {
+ ::icu_list::provider::ListJoinerPattern::from_parts_unchecked(" et ", 4u8)
+ },
+ special_case: None,
+ },
+ ::icu_list::provider::ConditionalListJoinerPattern {
+ default: unsafe {
+ ::icu_list::provider::ListJoinerPattern::from_parts_unchecked(", ", 2u8)
+ },
+ special_case: None,
+ },
+ ::icu_list::provider::ConditionalListJoinerPattern {
+ default: unsafe {
+ ::icu_list::provider::ListJoinerPattern::from_parts_unchecked(", ", 2u8)
+ },
+ special_case: None,
+ },
+ ::icu_list::provider::ConditionalListJoinerPattern {
+ default: unsafe {
+ ::icu_list::provider::ListJoinerPattern::from_parts_unchecked(" et ", 4u8)
+ },
+ special_case: None,
+ },
+ ::icu_list::provider::ConditionalListJoinerPattern {
+ default: unsafe {
+ ::icu_list::provider::ListJoinerPattern::from_parts_unchecked(" et ", 4u8)
+ },
+ special_case: None,
+ },
+ ::icu_list::provider::ConditionalListJoinerPattern {
+ default: unsafe {
+ ::icu_list::provider::ListJoinerPattern::from_parts_unchecked(", ", 2u8)
+ },
+ special_case: None,
+ },
+ ::icu_list::provider::ConditionalListJoinerPattern {
+ default: unsafe {
+ ::icu_list::provider::ListJoinerPattern::from_parts_unchecked(", ", 2u8)
+ },
+ special_case: None,
+ },
+ ::icu_list::provider::ConditionalListJoinerPattern {
+ default: unsafe {
+ ::icu_list::provider::ListJoinerPattern::from_parts_unchecked(", ", 2u8)
+ },
+ special_case: None,
+ },
+ ::icu_list::provider::ConditionalListJoinerPattern {
+ default: unsafe {
+ ::icu_list::provider::ListJoinerPattern::from_parts_unchecked(", ", 2u8)
+ },
+ special_case: None,
+ },
+]);
+static IT: &DataStruct = &::icu_list::provider::ListFormatterPatternsV1([
+ ::icu_list::provider::ConditionalListJoinerPattern {
+ default: unsafe {
+ ::icu_list::provider::ListJoinerPattern::from_parts_unchecked(", ", 2u8)
+ },
+ special_case: None,
+ },
+ ::icu_list::provider::ConditionalListJoinerPattern {
+ default: unsafe {
+ ::icu_list::provider::ListJoinerPattern::from_parts_unchecked(", ", 2u8)
+ },
+ special_case: None,
+ },
+ ::icu_list::provider::ConditionalListJoinerPattern {
+ default: unsafe {
+ ::icu_list::provider::ListJoinerPattern::from_parts_unchecked(" e ", 3u8)
+ },
+ special_case: None,
+ },
+ ::icu_list::provider::ConditionalListJoinerPattern {
+ default: unsafe {
+ ::icu_list::provider::ListJoinerPattern::from_parts_unchecked(" e ", 3u8)
+ },
+ special_case: None,
+ },
+ ::icu_list::provider::ConditionalListJoinerPattern {
+ default: unsafe {
+ ::icu_list::provider::ListJoinerPattern::from_parts_unchecked(", ", 2u8)
+ },
+ special_case: None,
+ },
+ ::icu_list::provider::ConditionalListJoinerPattern {
+ default: unsafe {
+ ::icu_list::provider::ListJoinerPattern::from_parts_unchecked(", ", 2u8)
+ },
+ special_case: None,
+ },
+ ::icu_list::provider::ConditionalListJoinerPattern {
+ default: unsafe {
+ ::icu_list::provider::ListJoinerPattern::from_parts_unchecked(" e ", 3u8)
+ },
+ special_case: None,
+ },
+ ::icu_list::provider::ConditionalListJoinerPattern {
+ default: unsafe {
+ ::icu_list::provider::ListJoinerPattern::from_parts_unchecked(" e ", 3u8)
+ },
+ special_case: None,
+ },
+ ::icu_list::provider::ConditionalListJoinerPattern {
+ default: unsafe {
+ ::icu_list::provider::ListJoinerPattern::from_parts_unchecked(", ", 2u8)
+ },
+ special_case: None,
+ },
+ ::icu_list::provider::ConditionalListJoinerPattern {
+ default: unsafe {
+ ::icu_list::provider::ListJoinerPattern::from_parts_unchecked(", ", 2u8)
+ },
+ special_case: None,
+ },
+ ::icu_list::provider::ConditionalListJoinerPattern {
+ default: unsafe {
+ ::icu_list::provider::ListJoinerPattern::from_parts_unchecked(" e ", 3u8)
+ },
+ special_case: None,
+ },
+ ::icu_list::provider::ConditionalListJoinerPattern {
+ default: unsafe {
+ ::icu_list::provider::ListJoinerPattern::from_parts_unchecked(" e ", 3u8)
+ },
+ special_case: None,
+ },
+]);
+static JA: &DataStruct = &::icu_list::provider::ListFormatterPatternsV1([
+ ::icu_list::provider::ConditionalListJoinerPattern {
+ default: unsafe {
+ ::icu_list::provider::ListJoinerPattern::from_parts_unchecked("、", 3u8)
+ },
+ special_case: None,
+ },
+ ::icu_list::provider::ConditionalListJoinerPattern {
+ default: unsafe {
+ ::icu_list::provider::ListJoinerPattern::from_parts_unchecked("、", 3u8)
+ },
+ special_case: None,
+ },
+ ::icu_list::provider::ConditionalListJoinerPattern {
+ default: unsafe {
+ ::icu_list::provider::ListJoinerPattern::from_parts_unchecked("、", 3u8)
+ },
+ special_case: None,
+ },
+ ::icu_list::provider::ConditionalListJoinerPattern {
+ default: unsafe {
+ ::icu_list::provider::ListJoinerPattern::from_parts_unchecked("、", 3u8)
+ },
+ special_case: None,
+ },
+ ::icu_list::provider::ConditionalListJoinerPattern {
+ default: unsafe {
+ ::icu_list::provider::ListJoinerPattern::from_parts_unchecked("、", 3u8)
+ },
+ special_case: None,
+ },
+ ::icu_list::provider::ConditionalListJoinerPattern {
+ default: unsafe {
+ ::icu_list::provider::ListJoinerPattern::from_parts_unchecked("、", 3u8)
+ },
+ special_case: None,
+ },
+ ::icu_list::provider::ConditionalListJoinerPattern {
+ default: unsafe {
+ ::icu_list::provider::ListJoinerPattern::from_parts_unchecked("、", 3u8)
+ },
+ special_case: None,
+ },
+ ::icu_list::provider::ConditionalListJoinerPattern {
+ default: unsafe {
+ ::icu_list::provider::ListJoinerPattern::from_parts_unchecked("、", 3u8)
+ },
+ special_case: None,
+ },
+ ::icu_list::provider::ConditionalListJoinerPattern {
+ default: unsafe {
+ ::icu_list::provider::ListJoinerPattern::from_parts_unchecked("、", 3u8)
+ },
+ special_case: None,
+ },
+ ::icu_list::provider::ConditionalListJoinerPattern {
+ default: unsafe {
+ ::icu_list::provider::ListJoinerPattern::from_parts_unchecked("、", 3u8)
+ },
+ special_case: None,
+ },
+ ::icu_list::provider::ConditionalListJoinerPattern {
+ default: unsafe {
+ ::icu_list::provider::ListJoinerPattern::from_parts_unchecked("、", 3u8)
+ },
+ special_case: None,
+ },
+ ::icu_list::provider::ConditionalListJoinerPattern {
+ default: unsafe {
+ ::icu_list::provider::ListJoinerPattern::from_parts_unchecked("、", 3u8)
+ },
+ special_case: None,
+ },
+]);
+static PT: &DataStruct = &::icu_list::provider::ListFormatterPatternsV1([
+ ::icu_list::provider::ConditionalListJoinerPattern {
+ default: unsafe {
+ ::icu_list::provider::ListJoinerPattern::from_parts_unchecked(", ", 2u8)
+ },
+ special_case: None,
+ },
+ ::icu_list::provider::ConditionalListJoinerPattern {
+ default: unsafe {
+ ::icu_list::provider::ListJoinerPattern::from_parts_unchecked(", ", 2u8)
+ },
+ special_case: None,
+ },
+ ::icu_list::provider::ConditionalListJoinerPattern {
+ default: unsafe {
+ ::icu_list::provider::ListJoinerPattern::from_parts_unchecked(" e ", 3u8)
+ },
+ special_case: None,
+ },
+ ::icu_list::provider::ConditionalListJoinerPattern {
+ default: unsafe {
+ ::icu_list::provider::ListJoinerPattern::from_parts_unchecked(" e ", 3u8)
+ },
+ special_case: None,
+ },
+ ::icu_list::provider::ConditionalListJoinerPattern {
+ default: unsafe {
+ ::icu_list::provider::ListJoinerPattern::from_parts_unchecked(", ", 2u8)
+ },
+ special_case: None,
+ },
+ ::icu_list::provider::ConditionalListJoinerPattern {
+ default: unsafe {
+ ::icu_list::provider::ListJoinerPattern::from_parts_unchecked(", ", 2u8)
+ },
+ special_case: None,
+ },
+ ::icu_list::provider::ConditionalListJoinerPattern {
+ default: unsafe {
+ ::icu_list::provider::ListJoinerPattern::from_parts_unchecked(" e ", 3u8)
+ },
+ special_case: None,
+ },
+ ::icu_list::provider::ConditionalListJoinerPattern {
+ default: unsafe {
+ ::icu_list::provider::ListJoinerPattern::from_parts_unchecked(" e ", 3u8)
+ },
+ special_case: None,
+ },
+ ::icu_list::provider::ConditionalListJoinerPattern {
+ default: unsafe {
+ ::icu_list::provider::ListJoinerPattern::from_parts_unchecked(", ", 2u8)
+ },
+ special_case: None,
+ },
+ ::icu_list::provider::ConditionalListJoinerPattern {
+ default: unsafe {
+ ::icu_list::provider::ListJoinerPattern::from_parts_unchecked(", ", 2u8)
+ },
+ special_case: None,
+ },
+ ::icu_list::provider::ConditionalListJoinerPattern {
+ default: unsafe {
+ ::icu_list::provider::ListJoinerPattern::from_parts_unchecked(", ", 2u8)
+ },
+ special_case: None,
+ },
+ ::icu_list::provider::ConditionalListJoinerPattern {
+ default: unsafe {
+ ::icu_list::provider::ListJoinerPattern::from_parts_unchecked(", ", 2u8)
+ },
+ special_case: None,
+ },
+]);
+static RU: &DataStruct = &::icu_list::provider::ListFormatterPatternsV1([
+ ::icu_list::provider::ConditionalListJoinerPattern {
+ default: unsafe {
+ ::icu_list::provider::ListJoinerPattern::from_parts_unchecked(", ", 2u8)
+ },
+ special_case: None,
+ },
+ ::icu_list::provider::ConditionalListJoinerPattern {
+ default: unsafe {
+ ::icu_list::provider::ListJoinerPattern::from_parts_unchecked(", ", 2u8)
+ },
+ special_case: None,
+ },
+ ::icu_list::provider::ConditionalListJoinerPattern {
+ default: unsafe {
+ ::icu_list::provider::ListJoinerPattern::from_parts_unchecked(" и ", 4u8)
+ },
+ special_case: None,
+ },
+ ::icu_list::provider::ConditionalListJoinerPattern {
+ default: unsafe {
+ ::icu_list::provider::ListJoinerPattern::from_parts_unchecked(" и ", 4u8)
+ },
+ special_case: None,
+ },
+ ::icu_list::provider::ConditionalListJoinerPattern {
+ default: unsafe {
+ ::icu_list::provider::ListJoinerPattern::from_parts_unchecked(", ", 2u8)
+ },
+ special_case: None,
+ },
+ ::icu_list::provider::ConditionalListJoinerPattern {
+ default: unsafe {
+ ::icu_list::provider::ListJoinerPattern::from_parts_unchecked(", ", 2u8)
+ },
+ special_case: None,
+ },
+ ::icu_list::provider::ConditionalListJoinerPattern {
+ default: unsafe {
+ ::icu_list::provider::ListJoinerPattern::from_parts_unchecked(" и ", 4u8)
+ },
+ special_case: None,
+ },
+ ::icu_list::provider::ConditionalListJoinerPattern {
+ default: unsafe {
+ ::icu_list::provider::ListJoinerPattern::from_parts_unchecked(" и ", 4u8)
+ },
+ special_case: None,
+ },
+ ::icu_list::provider::ConditionalListJoinerPattern {
+ default: unsafe {
+ ::icu_list::provider::ListJoinerPattern::from_parts_unchecked(", ", 2u8)
+ },
+ special_case: None,
+ },
+ ::icu_list::provider::ConditionalListJoinerPattern {
+ default: unsafe {
+ ::icu_list::provider::ListJoinerPattern::from_parts_unchecked(", ", 2u8)
+ },
+ special_case: None,
+ },
+ ::icu_list::provider::ConditionalListJoinerPattern {
+ default: unsafe {
+ ::icu_list::provider::ListJoinerPattern::from_parts_unchecked(", ", 2u8)
+ },
+ special_case: None,
+ },
+ ::icu_list::provider::ConditionalListJoinerPattern {
+ default: unsafe {
+ ::icu_list::provider::ListJoinerPattern::from_parts_unchecked(", ", 2u8)
+ },
+ special_case: None,
+ },
+]);
+static TR: &DataStruct = &::icu_list::provider::ListFormatterPatternsV1([
+ ::icu_list::provider::ConditionalListJoinerPattern {
+ default: unsafe {
+ ::icu_list::provider::ListJoinerPattern::from_parts_unchecked(", ", 2u8)
+ },
+ special_case: None,
+ },
+ ::icu_list::provider::ConditionalListJoinerPattern {
+ default: unsafe {
+ ::icu_list::provider::ListJoinerPattern::from_parts_unchecked(", ", 2u8)
+ },
+ special_case: None,
+ },
+ ::icu_list::provider::ConditionalListJoinerPattern {
+ default: unsafe {
+ ::icu_list::provider::ListJoinerPattern::from_parts_unchecked(" ve ", 4u8)
+ },
+ special_case: None,
+ },
+ ::icu_list::provider::ConditionalListJoinerPattern {
+ default: unsafe {
+ ::icu_list::provider::ListJoinerPattern::from_parts_unchecked(" ve ", 4u8)
+ },
+ special_case: None,
+ },
+ ::icu_list::provider::ConditionalListJoinerPattern {
+ default: unsafe {
+ ::icu_list::provider::ListJoinerPattern::from_parts_unchecked(", ", 2u8)
+ },
+ special_case: None,
+ },
+ ::icu_list::provider::ConditionalListJoinerPattern {
+ default: unsafe {
+ ::icu_list::provider::ListJoinerPattern::from_parts_unchecked(", ", 2u8)
+ },
+ special_case: None,
+ },
+ ::icu_list::provider::ConditionalListJoinerPattern {
+ default: unsafe {
+ ::icu_list::provider::ListJoinerPattern::from_parts_unchecked(" ve ", 4u8)
+ },
+ special_case: None,
+ },
+ ::icu_list::provider::ConditionalListJoinerPattern {
+ default: unsafe {
+ ::icu_list::provider::ListJoinerPattern::from_parts_unchecked(" ve ", 4u8)
+ },
+ special_case: None,
+ },
+ ::icu_list::provider::ConditionalListJoinerPattern {
+ default: unsafe {
+ ::icu_list::provider::ListJoinerPattern::from_parts_unchecked(", ", 2u8)
+ },
+ special_case: None,
+ },
+ ::icu_list::provider::ConditionalListJoinerPattern {
+ default: unsafe {
+ ::icu_list::provider::ListJoinerPattern::from_parts_unchecked(", ", 2u8)
+ },
+ special_case: None,
+ },
+ ::icu_list::provider::ConditionalListJoinerPattern {
+ default: unsafe {
+ ::icu_list::provider::ListJoinerPattern::from_parts_unchecked(", ", 2u8)
+ },
+ special_case: None,
+ },
+ ::icu_list::provider::ConditionalListJoinerPattern {
+ default: unsafe {
+ ::icu_list::provider::ListJoinerPattern::from_parts_unchecked(", ", 2u8)
+ },
+ special_case: None,
+ },
+]);
+static UND: &DataStruct = &::icu_list::provider::ListFormatterPatternsV1([
+ ::icu_list::provider::ConditionalListJoinerPattern {
+ default: unsafe {
+ ::icu_list::provider::ListJoinerPattern::from_parts_unchecked(", ", 2u8)
+ },
+ special_case: None,
+ },
+ ::icu_list::provider::ConditionalListJoinerPattern {
+ default: unsafe {
+ ::icu_list::provider::ListJoinerPattern::from_parts_unchecked(", ", 2u8)
+ },
+ special_case: None,
+ },
+ ::icu_list::provider::ConditionalListJoinerPattern {
+ default: unsafe {
+ ::icu_list::provider::ListJoinerPattern::from_parts_unchecked(", ", 2u8)
+ },
+ special_case: None,
+ },
+ ::icu_list::provider::ConditionalListJoinerPattern {
+ default: unsafe {
+ ::icu_list::provider::ListJoinerPattern::from_parts_unchecked(", ", 2u8)
+ },
+ special_case: None,
+ },
+ ::icu_list::provider::ConditionalListJoinerPattern {
+ default: unsafe {
+ ::icu_list::provider::ListJoinerPattern::from_parts_unchecked(", ", 2u8)
+ },
+ special_case: None,
+ },
+ ::icu_list::provider::ConditionalListJoinerPattern {
+ default: unsafe {
+ ::icu_list::provider::ListJoinerPattern::from_parts_unchecked(", ", 2u8)
+ },
+ special_case: None,
+ },
+ ::icu_list::provider::ConditionalListJoinerPattern {
+ default: unsafe {
+ ::icu_list::provider::ListJoinerPattern::from_parts_unchecked(", ", 2u8)
+ },
+ special_case: None,
+ },
+ ::icu_list::provider::ConditionalListJoinerPattern {
+ default: unsafe {
+ ::icu_list::provider::ListJoinerPattern::from_parts_unchecked(", ", 2u8)
+ },
+ special_case: None,
+ },
+ ::icu_list::provider::ConditionalListJoinerPattern {
+ default: unsafe {
+ ::icu_list::provider::ListJoinerPattern::from_parts_unchecked(", ", 2u8)
+ },
+ special_case: None,
+ },
+ ::icu_list::provider::ConditionalListJoinerPattern {
+ default: unsafe {
+ ::icu_list::provider::ListJoinerPattern::from_parts_unchecked(", ", 2u8)
+ },
+ special_case: None,
+ },
+ ::icu_list::provider::ConditionalListJoinerPattern {
+ default: unsafe {
+ ::icu_list::provider::ListJoinerPattern::from_parts_unchecked(", ", 2u8)
+ },
+ special_case: None,
+ },
+ ::icu_list::provider::ConditionalListJoinerPattern {
+ default: unsafe {
+ ::icu_list::provider::ListJoinerPattern::from_parts_unchecked(", ", 2u8)
+ },
+ special_case: None,
+ },
+]);
+static ZH_HANT: &DataStruct = &::icu_list::provider::ListFormatterPatternsV1([
+ ::icu_list::provider::ConditionalListJoinerPattern {
+ default: unsafe {
+ ::icu_list::provider::ListJoinerPattern::from_parts_unchecked("、", 3u8)
+ },
+ special_case: None,
+ },
+ ::icu_list::provider::ConditionalListJoinerPattern {
+ default: unsafe {
+ ::icu_list::provider::ListJoinerPattern::from_parts_unchecked("、", 3u8)
+ },
+ special_case: None,
+ },
+ ::icu_list::provider::ConditionalListJoinerPattern {
+ default: unsafe {
+ ::icu_list::provider::ListJoinerPattern::from_parts_unchecked("和", 3u8)
+ },
+ special_case: None,
+ },
+ ::icu_list::provider::ConditionalListJoinerPattern {
+ default: unsafe {
+ ::icu_list::provider::ListJoinerPattern::from_parts_unchecked("和", 3u8)
+ },
+ special_case: None,
+ },
+ ::icu_list::provider::ConditionalListJoinerPattern {
+ default: unsafe {
+ ::icu_list::provider::ListJoinerPattern::from_parts_unchecked("、", 3u8)
+ },
+ special_case: None,
+ },
+ ::icu_list::provider::ConditionalListJoinerPattern {
+ default: unsafe {
+ ::icu_list::provider::ListJoinerPattern::from_parts_unchecked("、", 3u8)
+ },
+ special_case: None,
+ },
+ ::icu_list::provider::ConditionalListJoinerPattern {
+ default: unsafe {
+ ::icu_list::provider::ListJoinerPattern::from_parts_unchecked("和", 3u8)
+ },
+ special_case: None,
+ },
+ ::icu_list::provider::ConditionalListJoinerPattern {
+ default: unsafe {
+ ::icu_list::provider::ListJoinerPattern::from_parts_unchecked("和", 3u8)
+ },
+ special_case: None,
+ },
+ ::icu_list::provider::ConditionalListJoinerPattern {
+ default: unsafe {
+ ::icu_list::provider::ListJoinerPattern::from_parts_unchecked("、", 3u8)
+ },
+ special_case: None,
+ },
+ ::icu_list::provider::ConditionalListJoinerPattern {
+ default: unsafe {
+ ::icu_list::provider::ListJoinerPattern::from_parts_unchecked("、", 3u8)
+ },
+ special_case: None,
+ },
+ ::icu_list::provider::ConditionalListJoinerPattern {
+ default: unsafe {
+ ::icu_list::provider::ListJoinerPattern::from_parts_unchecked("和", 3u8)
+ },
+ special_case: None,
+ },
+ ::icu_list::provider::ConditionalListJoinerPattern {
+ default: unsafe {
+ ::icu_list::provider::ListJoinerPattern::from_parts_unchecked("和", 3u8)
+ },
+ special_case: None,
+ },
+]);
+static ZH_ZH_HANS: &DataStruct = &::icu_list::provider::ListFormatterPatternsV1([
+ ::icu_list::provider::ConditionalListJoinerPattern {
+ default: unsafe {
+ ::icu_list::provider::ListJoinerPattern::from_parts_unchecked("、", 3u8)
+ },
+ special_case: None,
+ },
+ ::icu_list::provider::ConditionalListJoinerPattern {
+ default: unsafe {
+ ::icu_list::provider::ListJoinerPattern::from_parts_unchecked("、", 3u8)
+ },
+ special_case: None,
+ },
+ ::icu_list::provider::ConditionalListJoinerPattern {
+ default: unsafe {
+ ::icu_list::provider::ListJoinerPattern::from_parts_unchecked("和", 3u8)
+ },
+ special_case: None,
+ },
+ ::icu_list::provider::ConditionalListJoinerPattern {
+ default: unsafe {
+ ::icu_list::provider::ListJoinerPattern::from_parts_unchecked("和", 3u8)
+ },
+ special_case: None,
+ },
+ ::icu_list::provider::ConditionalListJoinerPattern {
+ default: unsafe {
+ ::icu_list::provider::ListJoinerPattern::from_parts_unchecked("、", 3u8)
+ },
+ special_case: None,
+ },
+ ::icu_list::provider::ConditionalListJoinerPattern {
+ default: unsafe {
+ ::icu_list::provider::ListJoinerPattern::from_parts_unchecked("、", 3u8)
+ },
+ special_case: None,
+ },
+ ::icu_list::provider::ConditionalListJoinerPattern {
+ default: unsafe {
+ ::icu_list::provider::ListJoinerPattern::from_parts_unchecked("和", 3u8)
+ },
+ special_case: None,
+ },
+ ::icu_list::provider::ConditionalListJoinerPattern {
+ default: unsafe {
+ ::icu_list::provider::ListJoinerPattern::from_parts_unchecked("和", 3u8)
+ },
+ special_case: None,
+ },
+ ::icu_list::provider::ConditionalListJoinerPattern {
+ default: unsafe {
+ ::icu_list::provider::ListJoinerPattern::from_parts_unchecked("、", 3u8)
+ },
+ special_case: None,
+ },
+ ::icu_list::provider::ConditionalListJoinerPattern {
+ default: unsafe {
+ ::icu_list::provider::ListJoinerPattern::from_parts_unchecked("、", 3u8)
+ },
+ special_case: None,
+ },
+ ::icu_list::provider::ConditionalListJoinerPattern {
+ default: unsafe {
+ ::icu_list::provider::ListJoinerPattern::from_parts_unchecked("、", 3u8)
+ },
+ special_case: None,
+ },
+ ::icu_list::provider::ConditionalListJoinerPattern {
+ default: unsafe {
+ ::icu_list::provider::ListJoinerPattern::from_parts_unchecked("、", 3u8)
+ },
+ special_case: None,
+ },
+]);
diff --git a/compiler/rustc_baked_icu_data/src/data/list/mod.rs b/compiler/rustc_baked_icu_data/src/data/list/mod.rs
new file mode 100644
index 000000000..931822513
--- /dev/null
+++ b/compiler/rustc_baked_icu_data/src/data/list/mod.rs
@@ -0,0 +1,2 @@
+// @generated
+pub mod and_v1;
diff --git a/compiler/rustc_baked_icu_data/src/data/mod.rs b/compiler/rustc_baked_icu_data/src/data/mod.rs
new file mode 100644
index 000000000..a6a71c79c
--- /dev/null
+++ b/compiler/rustc_baked_icu_data/src/data/mod.rs
@@ -0,0 +1,90 @@
+// @generated
+mod fallback;
+mod list;
+/// This data provider was programmatically generated by [`icu_datagen`](
+/// https://unicode-org.github.io/icu4x-docs/doc/icu_datagen/enum.Out.html#variant.Module).
+#[non_exhaustive]
+pub struct BakedDataProvider;
+use ::icu_provider::prelude::*;
+impl DataProvider<::icu_list::provider::AndListV1Marker> for BakedDataProvider {
+ fn load(
+ &self,
+ req: DataRequest,
+ ) -> Result<DataResponse<::icu_list::provider::AndListV1Marker>, DataError> {
+ Ok(DataResponse {
+ metadata: Default::default(),
+ payload: Some(DataPayload::from_owned(zerofrom::ZeroFrom::zero_from(
+ *list::and_v1::DATA
+ .get_by(|k| req.locale.strict_cmp(k.as_bytes()).reverse())
+ .ok_or_else(|| {
+ DataErrorKind::MissingLocale
+ .with_req(::icu_list::provider::AndListV1Marker::KEY, req)
+ })?,
+ ))),
+ })
+ }
+}
+impl DataProvider<::icu_provider_adapters::fallback::provider::CollationFallbackSupplementV1Marker>
+ for BakedDataProvider
+{
+ fn load(
+ &self,
+ req: DataRequest,
+ ) -> Result<
+ DataResponse<
+ ::icu_provider_adapters::fallback::provider::CollationFallbackSupplementV1Marker,
+ >,
+ DataError,
+ > {
+ Ok(DataResponse {
+ metadata: Default::default(),
+ payload: Some(DataPayload::from_owned(zerofrom::ZeroFrom::zero_from(
+ *fallback::supplement::co_v1::DATA.get_by(|k| req.locale.strict_cmp(k.as_bytes()).reverse()).ok_or_else(|| {
+ DataErrorKind::MissingLocale.with_req(::icu_provider_adapters::fallback::provider::CollationFallbackSupplementV1Marker::KEY, req)
+ })?,
+ ))),
+ })
+ }
+}
+impl DataProvider<::icu_provider_adapters::fallback::provider::LocaleFallbackLikelySubtagsV1Marker>
+ for BakedDataProvider
+{
+ fn load(
+ &self,
+ req: DataRequest,
+ ) -> Result<
+ DataResponse<
+ ::icu_provider_adapters::fallback::provider::LocaleFallbackLikelySubtagsV1Marker,
+ >,
+ DataError,
+ > {
+ Ok(DataResponse {
+ metadata: Default::default(),
+ payload: Some(DataPayload::from_owned(zerofrom::ZeroFrom::zero_from(
+ *fallback::likelysubtags_v1::DATA.get_by(|k| req.locale.strict_cmp(k.as_bytes()).reverse()).ok_or_else(|| {
+ DataErrorKind::MissingLocale.with_req(::icu_provider_adapters::fallback::provider::LocaleFallbackLikelySubtagsV1Marker::KEY, req)
+ })?,
+ ))),
+ })
+ }
+}
+impl DataProvider<::icu_provider_adapters::fallback::provider::LocaleFallbackParentsV1Marker>
+ for BakedDataProvider
+{
+ fn load(
+ &self,
+ req: DataRequest,
+ ) -> Result<
+ DataResponse<::icu_provider_adapters::fallback::provider::LocaleFallbackParentsV1Marker>,
+ DataError,
+ > {
+ Ok(DataResponse {
+ metadata: Default::default(),
+ payload: Some(DataPayload::from_owned(zerofrom::ZeroFrom::zero_from(
+ *fallback::parents_v1::DATA.get_by(|k| req.locale.strict_cmp(k.as_bytes()).reverse()).ok_or_else(|| {
+ DataErrorKind::MissingLocale.with_req(::icu_provider_adapters::fallback::provider::LocaleFallbackParentsV1Marker::KEY, req)
+ })?,
+ ))),
+ })
+ }
+}
diff --git a/compiler/rustc_baked_icu_data/src/lib.rs b/compiler/rustc_baked_icu_data/src/lib.rs
new file mode 100644
index 000000000..4651e03f7
--- /dev/null
+++ b/compiler/rustc_baked_icu_data/src/lib.rs
@@ -0,0 +1,46 @@
+//! This crate contains pre-baked ICU4X data, generated by `icu4x-datagen`. The tool
+//! fetches locale data from CLDR and transforms them into const code in statics that
+//! ICU4X can load, via databake. `lib.rs` in this crate is manually written, but all
+//! other code is generated.
+//!
+//! This crate can be regenerated when there's a new CLDR version, though that is unlikely
+//! to result in changes in most cases (currently this only covers list formatting data, which
+//! is rather stable). It may need to be regenerated when updating ICU4X versions, especially
+//! across major versions, in case it fails to compile after an update.
+//!
+//! It must be regenerated when adding new locales to Rust, or if Rust's usage of ICU4X
+//! grows to need more kinds of data.
+//!
+//! To regenerate the data, run this command:
+//!
+//! ```text
+//! icu4x-datagen -W --pretty --fingerprint --use-separate-crates \
+//! --format mod -l en es fr it ja pt ru tr zh zh-Hans zh-Hant \
+//! -k list/and@1 fallback/likelysubtags@1 fallback/parents@1 fallback/supplement/co@1 \
+//! --cldr-tag latest --icuexport-tag latest -o src/data
+//! ```
+#![allow(elided_lifetimes_in_paths)]
+
+mod data {
+ include!("data/mod.rs");
+ include!("data/any.rs");
+}
+
+pub use data::BakedDataProvider;
+
+pub const fn baked_data_provider() -> BakedDataProvider {
+ data::BakedDataProvider
+}
+
+pub mod supported_locales {
+ pub const EN: icu_locid::Locale = icu_locid::locale!("en");
+ pub const ES: icu_locid::Locale = icu_locid::locale!("es");
+ pub const FR: icu_locid::Locale = icu_locid::locale!("fr");
+ pub const IT: icu_locid::Locale = icu_locid::locale!("it");
+ pub const JA: icu_locid::Locale = icu_locid::locale!("ja");
+ pub const PT: icu_locid::Locale = icu_locid::locale!("pt");
+ pub const RU: icu_locid::Locale = icu_locid::locale!("ru");
+ pub const TR: icu_locid::Locale = icu_locid::locale!("tr");
+ pub const ZH_HANS: icu_locid::Locale = icu_locid::locale!("zh-Hans");
+ pub const ZH_HANT: icu_locid::Locale = icu_locid::locale!("zh-Hant");
+}
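
As a rough orientation for the `DataProvider` impls and the `baked_data_provider()` entry point added above, here is a minimal sketch (not taken from this patch) of how downstream code could request the baked "and"-list data; it assumes the icu_provider 1.x request API (`DataRequest`, `DataResponse::take_payload`) that the generated `mod.rs` above already relies on.

```rust
// Sketch only (not part of the patch): pull the baked "and"-list patterns
// out of `BakedDataProvider` via the `DataProvider` impl shown above.
// Assumes the icu_provider 1.x API used in this patch (`DataRequest`,
// `DataPayload`, `DataLocale: From<Locale>`).
use icu_list::provider::AndListV1Marker;
use icu_provider::prelude::*;

fn load_and_list_patterns() -> Result<DataPayload<AndListV1Marker>, DataError> {
    let provider = rustc_baked_icu_data::baked_data_provider();
    // "ja" is one of the locales baked in by the datagen invocation above.
    let locale: DataLocale = icu_locid::locale!("ja").into();
    let response = provider.load(DataRequest {
        locale: &locale,
        metadata: Default::default(),
    })?;
    response.take_payload()
}
```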
diff --git a/compiler/rustc_borrowck/src/borrow_set.rs b/compiler/rustc_borrowck/src/borrow_set.rs
index 41279588e..5bb92a358 100644
--- a/compiler/rustc_borrowck/src/borrow_set.rs
+++ b/compiler/rustc_borrowck/src/borrow_set.rs
@@ -1,3 +1,5 @@
+#![deny(rustc::untranslatable_diagnostic)]
+#![deny(rustc::diagnostic_outside_of_impl)]
use crate::nll::ToRegionVid;
use crate::path_utils::allow_two_phase_borrow;
use crate::place_ext::PlaceExt;
@@ -196,7 +198,7 @@ impl<'a, 'tcx> Visitor<'tcx> for GatherBorrows<'a, 'tcx> {
rvalue: &mir::Rvalue<'tcx>,
location: mir::Location,
) {
- if let mir::Rvalue::Ref(region, kind, ref borrowed_place) = *rvalue {
+ if let &mir::Rvalue::Ref(region, kind, borrowed_place) = rvalue {
if borrowed_place.ignore_borrow(self.tcx, self.body, &self.locals_state_at_exit) {
debug!("ignoring_borrow of {:?}", borrowed_place);
return;
@@ -209,7 +211,7 @@ impl<'a, 'tcx> Visitor<'tcx> for GatherBorrows<'a, 'tcx> {
region,
reserve_location: location,
activation_location: TwoPhaseActivation::NotTwoPhase,
- borrowed_place: *borrowed_place,
+ borrowed_place,
assigned_place: *assigned_place,
};
let (idx, _) = self.location_map.insert_full(location, borrow);
@@ -271,14 +273,14 @@ impl<'a, 'tcx> Visitor<'tcx> for GatherBorrows<'a, 'tcx> {
}
fn visit_rvalue(&mut self, rvalue: &mir::Rvalue<'tcx>, location: mir::Location) {
- if let mir::Rvalue::Ref(region, kind, ref place) = *rvalue {
+ if let &mir::Rvalue::Ref(region, kind, place) = rvalue {
// double-check that we already registered a BorrowData for this
let borrow_data = &self.location_map[&location];
assert_eq!(borrow_data.reserve_location, location);
assert_eq!(borrow_data.kind, kind);
assert_eq!(borrow_data.region, region.to_region_vid());
- assert_eq!(borrow_data.borrowed_place, *place);
+ assert_eq!(borrow_data.borrowed_place, place);
}
self.super_rvalue(rvalue, location)
diff --git a/compiler/rustc_borrowck/src/borrowck_errors.rs b/compiler/rustc_borrowck/src/borrowck_errors.rs
index 08ea00d71..01be37912 100644
--- a/compiler/rustc_borrowck/src/borrowck_errors.rs
+++ b/compiler/rustc_borrowck/src/borrowck_errors.rs
@@ -8,9 +8,18 @@ impl<'cx, 'tcx> crate::MirBorrowckCtxt<'cx, 'tcx> {
pub(crate) fn cannot_move_when_borrowed(
&self,
span: Span,
- desc: &str,
+ borrow_span: Span,
+ place: &str,
+ borrow_place: &str,
+ value_place: &str,
) -> DiagnosticBuilder<'cx, ErrorGuaranteed> {
- struct_span_err!(self, span, E0505, "cannot move out of {} because it is borrowed", desc,)
+ self.infcx.tcx.sess.create_err(crate::session_diagnostics::MoveBorrow {
+ place,
+ span,
+ borrow_place,
+ value_place,
+ borrow_span,
+ })
}
pub(crate) fn cannot_use_when_mutably_borrowed(
diff --git a/compiler/rustc_borrowck/src/constraint_generation.rs b/compiler/rustc_borrowck/src/constraint_generation.rs
index f185e402f..11b31c3f1 100644
--- a/compiler/rustc_borrowck/src/constraint_generation.rs
+++ b/compiler/rustc_borrowck/src/constraint_generation.rs
@@ -1,3 +1,5 @@
+#![deny(rustc::untranslatable_diagnostic)]
+#![deny(rustc::diagnostic_outside_of_impl)]
use rustc_infer::infer::InferCtxt;
use rustc_middle::mir::visit::TyContext;
use rustc_middle::mir::visit::Visitor;
diff --git a/compiler/rustc_borrowck/src/constraints/graph.rs b/compiler/rustc_borrowck/src/constraints/graph.rs
index 609fbc2bc..385f15317 100644
--- a/compiler/rustc_borrowck/src/constraints/graph.rs
+++ b/compiler/rustc_borrowck/src/constraints/graph.rs
@@ -163,6 +163,7 @@ impl<'s, 'tcx, D: ConstraintGraphDirecton> Iterator for Edges<'s, 'tcx, D> {
span: DUMMY_SP,
category: ConstraintCategory::Internal,
variance_info: VarianceDiagInfo::default(),
+ from_closure: false,
})
} else {
None
diff --git a/compiler/rustc_borrowck/src/constraints/mod.rs b/compiler/rustc_borrowck/src/constraints/mod.rs
index df0412813..84a93e5f7 100644
--- a/compiler/rustc_borrowck/src/constraints/mod.rs
+++ b/compiler/rustc_borrowck/src/constraints/mod.rs
@@ -1,3 +1,6 @@
+#![deny(rustc::untranslatable_diagnostic)]
+#![deny(rustc::diagnostic_outside_of_impl)]
+
use rustc_data_structures::graph::scc::Sccs;
use rustc_index::vec::IndexVec;
use rustc_middle::mir::ConstraintCategory;
@@ -96,6 +99,9 @@ pub struct OutlivesConstraint<'tcx> {
/// Variance diagnostic information
pub variance_info: VarianceDiagInfo<'tcx>,
+
+ /// If this constraint is promoted from closure requirements.
+ pub from_closure: bool,
}
impl<'tcx> fmt::Debug for OutlivesConstraint<'tcx> {
diff --git a/compiler/rustc_borrowck/src/consumers.rs b/compiler/rustc_borrowck/src/consumers.rs
index b162095f8..86da767f3 100644
--- a/compiler/rustc_borrowck/src/consumers.rs
+++ b/compiler/rustc_borrowck/src/consumers.rs
@@ -1,3 +1,5 @@
+#![deny(rustc::untranslatable_diagnostic)]
+#![deny(rustc::diagnostic_outside_of_impl)]
//! This file provides API for compiler consumers.
use rustc_hir::def_id::LocalDefId;
diff --git a/compiler/rustc_borrowck/src/dataflow.rs b/compiler/rustc_borrowck/src/dataflow.rs
index 9f7a4d499..f825b1d8f 100644
--- a/compiler/rustc_borrowck/src/dataflow.rs
+++ b/compiler/rustc_borrowck/src/dataflow.rs
@@ -1,3 +1,5 @@
+#![deny(rustc::untranslatable_diagnostic)]
+#![deny(rustc::diagnostic_outside_of_impl)]
use rustc_data_structures::fx::FxHashMap;
use rustc_index::bit_set::BitSet;
use rustc_middle::mir::{self, BasicBlock, Body, Location, Place};
@@ -356,9 +358,9 @@ impl<'tcx> rustc_mir_dataflow::GenKillAnalysis<'tcx> for Borrows<'_, 'tcx> {
stmt: &mir::Statement<'tcx>,
location: Location,
) {
- match stmt.kind {
- mir::StatementKind::Assign(box (lhs, ref rhs)) => {
- if let mir::Rvalue::Ref(_, _, place) = *rhs {
+ match &stmt.kind {
+ mir::StatementKind::Assign(box (lhs, rhs)) => {
+ if let mir::Rvalue::Ref(_, _, place) = rhs {
if place.ignore_borrow(
self.tcx,
self.body,
@@ -375,13 +377,13 @@ impl<'tcx> rustc_mir_dataflow::GenKillAnalysis<'tcx> for Borrows<'_, 'tcx> {
// Make sure there are no remaining borrows for variables
// that are assigned over.
- self.kill_borrows_on_place(trans, lhs);
+ self.kill_borrows_on_place(trans, *lhs);
}
mir::StatementKind::StorageDead(local) => {
// Make sure there are no remaining borrows for locals that
// are gone out of scope.
- self.kill_borrows_on_place(trans, Place::from(local));
+ self.kill_borrows_on_place(trans, Place::from(*local));
}
mir::StatementKind::FakeRead(..)
diff --git a/compiler/rustc_borrowck/src/def_use.rs b/compiler/rustc_borrowck/src/def_use.rs
index a5c0d7742..8e62a0198 100644
--- a/compiler/rustc_borrowck/src/def_use.rs
+++ b/compiler/rustc_borrowck/src/def_use.rs
@@ -1,3 +1,5 @@
+#![deny(rustc::untranslatable_diagnostic)]
+#![deny(rustc::diagnostic_outside_of_impl)]
use rustc_middle::mir::visit::{
MutatingUseContext, NonMutatingUseContext, NonUseContext, PlaceContext,
};
diff --git a/compiler/rustc_borrowck/src/diagnostics/bound_region_errors.rs b/compiler/rustc_borrowck/src/diagnostics/bound_region_errors.rs
index 02071ed6b..1550958ab 100644
--- a/compiler/rustc_borrowck/src/diagnostics/bound_region_errors.rs
+++ b/compiler/rustc_borrowck/src/diagnostics/bound_region_errors.rs
@@ -1,3 +1,6 @@
+#![deny(rustc::untranslatable_diagnostic)]
+#![deny(rustc::diagnostic_outside_of_impl)]
+
use rustc_errors::{DiagnosticBuilder, ErrorGuaranteed};
use rustc_infer::infer::canonical::Canonical;
use rustc_infer::infer::error_reporting::nice_region_error::NiceRegionError;
@@ -5,14 +8,14 @@ use rustc_infer::infer::region_constraints::Constraint;
use rustc_infer::infer::region_constraints::RegionConstraintData;
use rustc_infer::infer::RegionVariableOrigin;
use rustc_infer::infer::{InferCtxt, RegionResolutionError, SubregionOrigin, TyCtxtInferExt as _};
-use rustc_infer::traits::{Normalized, ObligationCause, TraitEngine, TraitEngineExt};
+use rustc_infer::traits::ObligationCause;
use rustc_middle::ty::error::TypeError;
use rustc_middle::ty::RegionVid;
use rustc_middle::ty::UniverseIndex;
use rustc_middle::ty::{self, Ty, TyCtxt, TypeFoldable};
use rustc_span::Span;
use rustc_trait_selection::traits::query::type_op;
-use rustc_trait_selection::traits::{SelectionContext, TraitEngineExt as _};
+use rustc_trait_selection::traits::ObligationCtxt;
use rustc_traits::{type_op_ascribe_user_type_with_span, type_op_prove_predicate_with_cause};
use std::fmt;
@@ -158,6 +161,7 @@ trait TypeOpInfo<'tcx> {
error_region: Option<ty::Region<'tcx>>,
) -> Option<DiagnosticBuilder<'tcx, ErrorGuaranteed>>;
+ #[instrument(level = "debug", skip(self, mbcx))]
fn report_error(
&self,
mbcx: &mut MirBorrowckCtxt<'_, 'tcx>,
@@ -167,6 +171,7 @@ trait TypeOpInfo<'tcx> {
) {
let tcx = mbcx.infcx.tcx;
let base_universe = self.base_universe();
+ debug!(?base_universe);
let Some(adjusted_universe) =
placeholder.universe.as_u32().checked_sub(base_universe.as_u32())
@@ -238,11 +243,11 @@ impl<'tcx> TypeOpInfo<'tcx> for PredicateQuery<'tcx> {
placeholder_region: ty::Region<'tcx>,
error_region: Option<ty::Region<'tcx>>,
) -> Option<DiagnosticBuilder<'tcx, ErrorGuaranteed>> {
- let (ref infcx, key, _) =
+ let (infcx, key, _) =
mbcx.infcx.tcx.infer_ctxt().build_with_canonical(cause.span, &self.canonical_query);
- let mut fulfill_cx = <dyn TraitEngine<'_>>::new(infcx.tcx);
- type_op_prove_predicate_with_cause(infcx, &mut *fulfill_cx, key, cause);
- try_extract_error_from_fulfill_cx(fulfill_cx, infcx, placeholder_region, error_region)
+ let ocx = ObligationCtxt::new(&infcx);
+ type_op_prove_predicate_with_cause(&ocx, key, cause);
+ try_extract_error_from_fulfill_cx(&ocx, placeholder_region, error_region)
}
}
@@ -279,24 +284,20 @@ where
placeholder_region: ty::Region<'tcx>,
error_region: Option<ty::Region<'tcx>>,
) -> Option<DiagnosticBuilder<'tcx, ErrorGuaranteed>> {
- let (ref infcx, key, _) =
+ let (infcx, key, _) =
mbcx.infcx.tcx.infer_ctxt().build_with_canonical(cause.span, &self.canonical_query);
- let mut fulfill_cx = <dyn TraitEngine<'_>>::new(infcx.tcx);
-
- let mut selcx = SelectionContext::new(infcx);
+ let ocx = ObligationCtxt::new(&infcx);
// FIXME(lqd): Unify and de-duplicate the following with the actual
// `rustc_traits::type_op::type_op_normalize` query to allow the span we need in the
// `ObligationCause`. The normalization results are currently different between
- // `AtExt::normalize` used in the query and `normalize` called below: the former fails
- // to normalize the `nll/relate_tys/impl-fn-ignore-binder-via-bottom.rs` test. Check
- // after #85499 lands to see if its fixes have erased this difference.
+ // `QueryNormalizeExt::query_normalize` used in the query and `normalize` called below:
+ // the former fails to normalize the `nll/relate_tys/impl-fn-ignore-binder-via-bottom.rs` test.
+ // Check after #85499 lands to see if its fixes have erased this difference.
let (param_env, value) = key.into_parts();
- let Normalized { value: _, obligations } =
- rustc_trait_selection::traits::normalize(&mut selcx, param_env, cause, value.value);
- fulfill_cx.register_predicate_obligations(infcx, obligations);
+ let _ = ocx.normalize(&cause, param_env, value.value);
- try_extract_error_from_fulfill_cx(fulfill_cx, infcx, placeholder_region, error_region)
+ try_extract_error_from_fulfill_cx(&ocx, placeholder_region, error_region)
}
}
@@ -327,11 +328,11 @@ impl<'tcx> TypeOpInfo<'tcx> for AscribeUserTypeQuery<'tcx> {
placeholder_region: ty::Region<'tcx>,
error_region: Option<ty::Region<'tcx>>,
) -> Option<DiagnosticBuilder<'tcx, ErrorGuaranteed>> {
- let (ref infcx, key, _) =
+ let (infcx, key, _) =
mbcx.infcx.tcx.infer_ctxt().build_with_canonical(cause.span, &self.canonical_query);
- let mut fulfill_cx = <dyn TraitEngine<'_>>::new(infcx.tcx);
- type_op_ascribe_user_type_with_span(infcx, &mut *fulfill_cx, key, Some(cause.span)).ok()?;
- try_extract_error_from_fulfill_cx(fulfill_cx, infcx, placeholder_region, error_region)
+ let ocx = ObligationCtxt::new(&infcx);
+ type_op_ascribe_user_type_with_span(&ocx, key, Some(cause.span)).ok()?;
+ try_extract_error_from_fulfill_cx(&ocx, placeholder_region, error_region)
}
}
@@ -372,28 +373,28 @@ impl<'tcx> TypeOpInfo<'tcx> for crate::type_check::InstantiateOpaqueType<'tcx> {
}
}
-#[instrument(skip(fulfill_cx, infcx), level = "debug")]
+#[instrument(skip(ocx), level = "debug")]
fn try_extract_error_from_fulfill_cx<'tcx>(
- mut fulfill_cx: Box<dyn TraitEngine<'tcx> + 'tcx>,
- infcx: &InferCtxt<'tcx>,
+ ocx: &ObligationCtxt<'_, 'tcx>,
placeholder_region: ty::Region<'tcx>,
error_region: Option<ty::Region<'tcx>>,
) -> Option<DiagnosticBuilder<'tcx, ErrorGuaranteed>> {
// We generally shouldn't have errors here because the query was
// already run, but there's no point using `delay_span_bug`
// when we're going to emit an error here anyway.
- let _errors = fulfill_cx.select_all_or_error(infcx);
- let region_constraints = infcx.with_region_constraints(|r| r.clone());
+ let _errors = ocx.select_all_or_error();
+ let region_constraints = ocx.infcx.with_region_constraints(|r| r.clone());
try_extract_error_from_region_constraints(
- infcx,
+ ocx.infcx,
placeholder_region,
error_region,
&region_constraints,
- |vid| infcx.region_var_origin(vid),
- |vid| infcx.universe_of_region(infcx.tcx.mk_region(ty::ReVar(vid))),
+ |vid| ocx.infcx.region_var_origin(vid),
+ |vid| ocx.infcx.universe_of_region(ocx.infcx.tcx.mk_region(ty::ReVar(vid))),
)
}
+#[instrument(level = "debug", skip(infcx, region_var_origin, universe_of_region))]
fn try_extract_error_from_region_constraints<'tcx>(
infcx: &InferCtxt<'tcx>,
placeholder_region: ty::Region<'tcx>,
diff --git a/compiler/rustc_borrowck/src/diagnostics/conflict_errors.rs b/compiler/rustc_borrowck/src/diagnostics/conflict_errors.rs
index 583bc2e28..5e3745f17 100644
--- a/compiler/rustc_borrowck/src/diagnostics/conflict_errors.rs
+++ b/compiler/rustc_borrowck/src/diagnostics/conflict_errors.rs
@@ -7,7 +7,7 @@ use rustc_errors::{
};
use rustc_hir as hir;
use rustc_hir::intravisit::{walk_block, walk_expr, Visitor};
-use rustc_hir::{AsyncGeneratorKind, GeneratorKind};
+use rustc_hir::{AsyncGeneratorKind, GeneratorKind, LangItem};
use rustc_infer::infer::TyCtxtInferExt;
use rustc_infer::traits::ObligationCause;
use rustc_middle::mir::tcx::PlaceTy;
@@ -23,7 +23,6 @@ use rustc_span::hygiene::DesugaringKind;
use rustc_span::symbol::sym;
use rustc_span::{BytePos, Span, Symbol};
use rustc_trait_selection::infer::InferCtxtExt;
-use rustc_trait_selection::traits::TraitEngineExt as _;
use crate::borrow_set::TwoPhaseActivation;
use crate::borrowck_errors;
@@ -168,10 +167,11 @@ impl<'cx, 'tcx> MirBorrowckCtxt<'cx, 'tcx> {
);
}
- self.add_moved_or_invoked_closure_note(location, used_place, &mut err);
+ let closure = self.add_moved_or_invoked_closure_note(location, used_place, &mut err);
let mut is_loop_move = false;
let mut in_pattern = false;
+ let mut seen_spans = FxHashSet::default();
for move_site in &move_site_vec {
let move_out = self.move_data.moves[(*move_site).moi];
@@ -192,43 +192,28 @@ impl<'cx, 'tcx> MirBorrowckCtxt<'cx, 'tcx> {
is_loop_move = true;
}
- self.explain_captures(
- &mut err,
- span,
- move_span,
- move_spans,
- *moved_place,
- partially_str,
- loop_message,
- move_msg,
- is_loop_move,
- maybe_reinitialized_locations.is_empty(),
- );
-
- if let (UseSpans::PatUse(span), []) =
- (move_spans, &maybe_reinitialized_locations[..])
- {
- if maybe_reinitialized_locations.is_empty() {
- err.span_suggestion_verbose(
- span.shrink_to_lo(),
- &format!(
- "borrow this field in the pattern to avoid moving {}",
- self.describe_place(moved_place.as_ref())
- .map(|n| format!("`{}`", n))
- .unwrap_or_else(|| "the value".to_string())
- ),
- "ref ",
- Applicability::MachineApplicable,
- );
- in_pattern = true;
+ if !seen_spans.contains(&move_span) {
+ if !closure {
+ self.suggest_ref_or_clone(mpi, move_span, &mut err, &mut in_pattern);
}
+
+ self.explain_captures(
+ &mut err,
+ span,
+ move_span,
+ move_spans,
+ *moved_place,
+ partially_str,
+ loop_message,
+ move_msg,
+ is_loop_move,
+ maybe_reinitialized_locations.is_empty(),
+ );
}
+ seen_spans.insert(move_span);
}
- use_spans.var_span_label_path_only(
- &mut err,
- format!("{} occurs due to use{}", desired_action.as_noun(), use_spans.describe()),
- );
+ use_spans.var_path_only_subdiag(&mut err, desired_action);
if !is_loop_move {
err.span_label(
@@ -280,7 +265,7 @@ impl<'cx, 'tcx> MirBorrowckCtxt<'cx, 'tcx> {
DescribePlaceOpt { including_downcast: true, including_tuple_field: true },
);
let note_msg = match opt_name {
- Some(ref name) => format!("`{}`", name),
+ Some(name) => format!("`{}`", name),
None => "value".to_owned(),
};
if self.suggest_borrow_fn_like(&mut err, ty, &move_site_vec, &note_msg) {
@@ -321,6 +306,160 @@ impl<'cx, 'tcx> MirBorrowckCtxt<'cx, 'tcx> {
}
}
+ fn suggest_ref_or_clone(
+ &mut self,
+ mpi: MovePathIndex,
+ move_span: Span,
+ err: &mut DiagnosticBuilder<'_, ErrorGuaranteed>,
+ in_pattern: &mut bool,
+ ) {
+ struct ExpressionFinder<'hir> {
+ expr_span: Span,
+ expr: Option<&'hir hir::Expr<'hir>>,
+ pat: Option<&'hir hir::Pat<'hir>>,
+ parent_pat: Option<&'hir hir::Pat<'hir>>,
+ }
+ impl<'hir> Visitor<'hir> for ExpressionFinder<'hir> {
+ fn visit_expr(&mut self, e: &'hir hir::Expr<'hir>) {
+ if e.span == self.expr_span {
+ self.expr = Some(e);
+ }
+ hir::intravisit::walk_expr(self, e);
+ }
+ fn visit_pat(&mut self, p: &'hir hir::Pat<'hir>) {
+ if p.span == self.expr_span {
+ self.pat = Some(p);
+ }
+ if let hir::PatKind::Binding(hir::BindingAnnotation::NONE, _, i, sub) = p.kind {
+ if i.span == self.expr_span || p.span == self.expr_span {
+ self.pat = Some(p);
+ }
+ // Check if we are in a situation of `ident @ ident` where we want to suggest
+ // `ref ident @ ref ident` or `ref ident @ Struct { ref ident }`.
+ if let Some(subpat) = sub && self.pat.is_none() {
+ self.visit_pat(subpat);
+ if self.pat.is_some() {
+ self.parent_pat = Some(p);
+ }
+ return;
+ }
+ }
+ hir::intravisit::walk_pat(self, p);
+ }
+ }
+ let hir = self.infcx.tcx.hir();
+ if let Some(hir::Node::Item(hir::Item {
+ kind: hir::ItemKind::Fn(_, _, body_id),
+ ..
+ })) = hir.find(hir.local_def_id_to_hir_id(self.mir_def_id()))
+ && let Some(hir::Node::Expr(expr)) = hir.find(body_id.hir_id)
+ {
+ let place = &self.move_data.move_paths[mpi].place;
+ let span = place.as_local()
+ .map(|local| self.body.local_decls[local].source_info.span);
+ let mut finder = ExpressionFinder {
+ expr_span: move_span,
+ expr: None,
+ pat: None,
+ parent_pat: None,
+ };
+ finder.visit_expr(expr);
+ if let Some(span) = span && let Some(expr) = finder.expr {
+ for (_, expr) in hir.parent_iter(expr.hir_id) {
+ if let hir::Node::Expr(expr) = expr {
+ if expr.span.contains(span) {
+ // If the let binding occurs within the same loop, then that
+ // loop isn't relevant, like in the following, the outermost `loop`
+ // doesn't play into `x` being moved.
+ // ```
+ // loop {
+ // let x = String::new();
+ // loop {
+ // foo(x);
+ // }
+ // }
+ // ```
+ break;
+ }
+ if let hir::ExprKind::Loop(.., loop_span) = expr.kind {
+ err.span_label(loop_span, "inside of this loop");
+ }
+ }
+ }
+ let typeck = self.infcx.tcx.typeck(self.mir_def_id());
+ let hir_id = hir.get_parent_node(expr.hir_id);
+ if let Some(parent) = hir.find(hir_id) {
+ let (def_id, args, offset) = if let hir::Node::Expr(parent_expr) = parent
+ && let hir::ExprKind::MethodCall(_, _, args, _) = parent_expr.kind
+ && let Some(def_id) = typeck.type_dependent_def_id(parent_expr.hir_id)
+ {
+ (def_id.as_local(), args, 1)
+ } else if let hir::Node::Expr(parent_expr) = parent
+ && let hir::ExprKind::Call(call, args) = parent_expr.kind
+ && let ty::FnDef(def_id, _) = typeck.node_type(call.hir_id).kind()
+ {
+ (def_id.as_local(), args, 0)
+ } else {
+ (None, &[][..], 0)
+ };
+ if let Some(def_id) = def_id
+ && let Some(node) = hir.find(hir.local_def_id_to_hir_id(def_id))
+ && let Some(fn_sig) = node.fn_sig()
+ && let Some(ident) = node.ident()
+ && let Some(pos) = args.iter().position(|arg| arg.hir_id == expr.hir_id)
+ && let Some(arg) = fn_sig.decl.inputs.get(pos + offset)
+ {
+ let mut span: MultiSpan = arg.span.into();
+ span.push_span_label(
+ arg.span,
+ "this parameter takes ownership of the value".to_string(),
+ );
+ let descr = match node.fn_kind() {
+ Some(hir::intravisit::FnKind::ItemFn(..)) | None => "function",
+ Some(hir::intravisit::FnKind::Method(..)) => "method",
+ Some(hir::intravisit::FnKind::Closure) => "closure",
+ };
+ span.push_span_label(
+ ident.span,
+ format!("in this {descr}"),
+ );
+ err.span_note(
+ span,
+ format!(
+ "consider changing this parameter type in {descr} `{ident}` to \
+ borrow instead if owning the value isn't necessary",
+ ),
+ );
+ }
+ let place = &self.move_data.move_paths[mpi].place;
+ let ty = place.ty(self.body, self.infcx.tcx).ty;
+ if let hir::Node::Expr(parent_expr) = parent
+ && let hir::ExprKind::Call(call_expr, _) = parent_expr.kind
+ && let hir::ExprKind::Path(
+ hir::QPath::LangItem(LangItem::IntoIterIntoIter, _, _)
+ ) = call_expr.kind
+ {
+ // Do not suggest `.clone()` in a `for` loop, we already suggest borrowing.
+ } else {
+ self.suggest_cloning(err, ty, move_span);
+ }
+ }
+ }
+ if let Some(pat) = finder.pat {
+ *in_pattern = true;
+ let mut sugg = vec![(pat.span.shrink_to_lo(), "ref ".to_string())];
+ if let Some(pat) = finder.parent_pat {
+ sugg.insert(0, (pat.span.shrink_to_lo(), "ref ".to_string()));
+ }
+ err.multipart_suggestion_verbose(
+ "borrow this binding in the pattern to avoid moving the value",
+ sugg,
+ Applicability::MachineApplicable,
+ );
+ }
+ }
+ }
+
fn report_use_of_uninitialized(
&self,
mpi: MovePathIndex,
@@ -405,10 +544,7 @@ impl<'cx, 'tcx> MirBorrowckCtxt<'cx, 'tcx> {
let used = desired_action.as_general_verb_in_past_tense();
let mut err =
struct_span_err!(self, span, E0381, "{used} binding {desc}{isnt_initialized}");
- use_spans.var_span_label_path_only(
- &mut err,
- format!("{} occurs due to use{}", desired_action.as_noun(), use_spans.describe()),
- );
+ use_spans.var_path_only_subdiag(&mut err, desired_action);
if let InitializationRequiringAction::PartialAssignment
| InitializationRequiringAction::Assignment = desired_action
@@ -496,12 +632,7 @@ impl<'cx, 'tcx> MirBorrowckCtxt<'cx, 'tcx> {
// but the type has region variables, so erase those.
tcx.infer_ctxt()
.build()
- .type_implements_trait(
- default_trait,
- tcx.erase_regions(ty),
- ty::List::empty(),
- param_env,
- )
+ .type_implements_trait(default_trait, [tcx.erase_regions(ty)], param_env)
.must_apply_modulo_regions()
};
@@ -535,15 +666,17 @@ impl<'cx, 'tcx> MirBorrowckCtxt<'cx, 'tcx> {
let tcx = self.infcx.tcx;
// Find out if the predicates show that the type is a Fn or FnMut
- let find_fn_kind_from_did =
- |predicates: ty::EarlyBinder<&[(ty::Predicate<'tcx>, Span)]>, substs| {
- predicates.0.iter().find_map(|(pred, _)| {
+ let find_fn_kind_from_did = |predicates: ty::EarlyBinder<
+ &[(ty::Predicate<'tcx>, Span)],
+ >,
+ substs| {
+ predicates.0.iter().find_map(|(pred, _)| {
let pred = if let Some(substs) = substs {
predicates.rebind(*pred).subst(tcx, substs).kind().skip_binder()
} else {
pred.kind().skip_binder()
};
- if let ty::PredicateKind::Trait(pred) = pred && pred.self_ty() == ty {
+ if let ty::PredicateKind::Clause(ty::Clause::Trait(pred)) = pred && pred.self_ty() == ty {
if Some(pred.def_id()) == tcx.lang_items().fn_trait() {
return Some(hir::Mutability::Not);
} else if Some(pred.def_id()) == tcx.lang_items().fn_mut_trait() {
@@ -552,7 +685,7 @@ impl<'cx, 'tcx> MirBorrowckCtxt<'cx, 'tcx> {
}
None
})
- };
+ };
// If the type is opaque/param/closure, and it is Fn or FnMut, let's suggest (mutably)
// borrowing the type, since `&mut F: FnMut` iff `F: FnMut` and similarly for `Fn`.
@@ -583,25 +716,41 @@ impl<'cx, 'tcx> MirBorrowckCtxt<'cx, 'tcx> {
let moved_place = &self.move_data.move_paths[move_out.path].place;
let move_spans = self.move_spans(moved_place.as_ref(), move_out.source);
let move_span = move_spans.args_or_use();
- let suggestion = if borrow_level == hir::Mutability::Mut {
- "&mut ".to_string()
- } else {
- "&".to_string()
- };
+ let suggestion = borrow_level.ref_prefix_str().to_owned();
(move_span.shrink_to_lo(), suggestion)
})
.collect();
err.multipart_suggestion_verbose(
- &format!(
- "consider {}borrowing {value_name}",
- if borrow_level == hir::Mutability::Mut { "mutably " } else { "" }
- ),
+ format!("consider {}borrowing {value_name}", borrow_level.mutably_str()),
sugg,
Applicability::MaybeIncorrect,
);
true
}
+ fn suggest_cloning(&self, err: &mut Diagnostic, ty: Ty<'tcx>, span: Span) {
+ let tcx = self.infcx.tcx;
+ // Try to find predicates on *generic params* that would allow copying `ty`
+ let infcx = tcx.infer_ctxt().build();
+
+ if let Some(clone_trait_def) = tcx.lang_items().clone_trait()
+ && infcx
+ .type_implements_trait(
+ clone_trait_def,
+ [tcx.erase_regions(ty)],
+ self.param_env,
+ )
+ .must_apply_modulo_regions()
+ {
+ err.span_suggestion_verbose(
+ span.shrink_to_hi(),
+ "consider cloning the value if the performance cost is acceptable",
+ ".clone()".to_string(),
+ Applicability::MachineApplicable,
+ );
+ }
+ }
+
fn suggest_adding_copy_bounds(&self, err: &mut Diagnostic, ty: Ty<'tcx>, span: Span) {
let tcx = self.infcx.tcx;
let generics = tcx.generics_of(self.mir_def_id());
@@ -613,36 +762,34 @@ impl<'cx, 'tcx> MirBorrowckCtxt<'cx, 'tcx> {
else { return; };
// Try to find predicates on *generic params* that would allow copying `ty`
let infcx = tcx.infer_ctxt().build();
- let mut fulfill_cx = <dyn rustc_infer::traits::TraitEngine<'_>>::new(infcx.tcx);
-
- let copy_did = infcx.tcx.lang_items().copy_trait().unwrap();
+ let copy_did = infcx.tcx.require_lang_item(LangItem::Copy, Some(span));
let cause = ObligationCause::new(
span,
self.mir_hir_id(),
rustc_infer::traits::ObligationCauseCode::MiscObligation,
);
- fulfill_cx.register_bound(
+ let errors = rustc_trait_selection::traits::fully_solve_bound(
&infcx,
+ cause,
self.param_env,
// Erase any region vids from the type, which may not be resolved
infcx.tcx.erase_regions(ty),
copy_did,
- cause,
);
- // Select all, including ambiguous predicates
- let errors = fulfill_cx.select_all_or_error(&infcx);
// Only emit suggestion if all required predicates are on generic
let predicates: Result<Vec<_>, _> = errors
.into_iter()
.map(|err| match err.obligation.predicate.kind().skip_binder() {
- PredicateKind::Trait(predicate) => match predicate.self_ty().kind() {
- ty::Param(param_ty) => Ok((
- generics.type_param(param_ty, tcx),
- predicate.trait_ref.print_only_trait_path().to_string(),
- )),
- _ => Err(()),
- },
+ PredicateKind::Clause(ty::Clause::Trait(predicate)) => {
+ match predicate.self_ty().kind() {
+ ty::Param(param_ty) => Ok((
+ generics.type_param(param_ty, tcx),
+ predicate.trait_ref.print_only_trait_path().to_string(),
+ )),
+ _ => Err(()),
+ }
+ }
_ => Err(()),
})
.collect();
@@ -678,16 +825,16 @@ impl<'cx, 'tcx> MirBorrowckCtxt<'cx, 'tcx> {
let move_spans = self.move_spans(place.as_ref(), location);
let span = move_spans.args_or_use();
- let mut err =
- self.cannot_move_when_borrowed(span, &self.describe_any_place(place.as_ref()));
- err.span_label(borrow_span, format!("borrow of {} occurs here", borrow_msg));
- err.span_label(span, format!("move out of {} occurs here", value_msg));
-
- borrow_spans.var_span_label_path_only(
- &mut err,
- format!("borrow occurs due to use{}", borrow_spans.describe()),
+ let mut err = self.cannot_move_when_borrowed(
+ span,
+ borrow_span,
+ &self.describe_any_place(place.as_ref()),
+ &borrow_msg,
+ &value_msg,
);
+ borrow_spans.var_path_only_subdiag(&mut err, crate::InitializationRequiringAction::Borrow);
+
move_spans.var_span_label(
&mut err,
format!("move occurs due to use{}", move_spans.describe()),
@@ -729,16 +876,15 @@ impl<'cx, 'tcx> MirBorrowckCtxt<'cx, 'tcx> {
borrow_span,
&self.describe_any_place(borrow.borrowed_place.as_ref()),
);
-
- borrow_spans.var_span_label(
- &mut err,
- {
- let place = &borrow.borrowed_place;
- let desc_place = self.describe_any_place(place.as_ref());
- format!("borrow occurs due to use of {}{}", desc_place, borrow_spans.describe())
- },
- "mutable",
- );
+ borrow_spans.var_subdiag(&mut err, Some(borrow.kind), |kind, var_span| {
+ use crate::session_diagnostics::CaptureVarCause::*;
+ let place = &borrow.borrowed_place;
+ let desc_place = self.describe_any_place(place.as_ref());
+ match kind {
+ Some(_) => BorrowUsePlaceGenerator { place: desc_place, var_span },
+ None => BorrowUsePlaceClosure { place: desc_place, var_span },
+ }
+ });
self.explain_why_borrow_contains_point(location, borrow, None)
.add_explanation_to_diagnostic(
@@ -1271,7 +1417,7 @@ impl<'cx, 'tcx> MirBorrowckCtxt<'cx, 'tcx> {
// then just use the normal error. The closure isn't escaping
// and `move` will not help here.
(
- Some(ref name),
+ Some(name),
BorrowExplanation::MustBeValidFor {
category:
category @ (ConstraintCategory::Return(_)
@@ -1292,7 +1438,7 @@ impl<'cx, 'tcx> MirBorrowckCtxt<'cx, 'tcx> {
&format!("`{}`", name),
),
(
- ref name,
+ name,
BorrowExplanation::MustBeValidFor {
category: ConstraintCategory::Assignment,
from_closure: false,
@@ -1304,7 +1450,7 @@ impl<'cx, 'tcx> MirBorrowckCtxt<'cx, 'tcx> {
span,
..
},
- ) => self.report_escaping_data(borrow_span, name, upvar_span, upvar_name, span),
+ ) => self.report_escaping_data(borrow_span, &name, upvar_span, upvar_name, span),
(Some(name), explanation) => self.report_local_value_does_not_live_long_enough(
location,
&name,
@@ -1556,7 +1702,7 @@ impl<'cx, 'tcx> MirBorrowckCtxt<'cx, 'tcx> {
}
let mut err = self.temporary_value_borrowed_for_too_long(proper_span);
- err.span_label(proper_span, "creates a temporary which is freed while still in use");
+ err.span_label(proper_span, "creates a temporary value which is freed while still in use");
err.span_label(drop_span, "temporary value is freed at the end of this statement");
match explanation {
@@ -1719,7 +1865,6 @@ impl<'cx, 'tcx> MirBorrowckCtxt<'cx, 'tcx> {
err.span_label(borrow_span, note);
let tcx = self.infcx.tcx;
- let ty_params = ty::List::empty();
let return_ty = self.regioncx.universal_regions().unnormalized_output_ty;
let return_ty = tcx.erase_regions(return_ty);
@@ -1728,7 +1873,7 @@ impl<'cx, 'tcx> MirBorrowckCtxt<'cx, 'tcx> {
if let Some(iter_trait) = tcx.get_diagnostic_item(sym::Iterator)
&& self
.infcx
- .type_implements_trait(iter_trait, return_ty, ty_params, self.param_env)
+ .type_implements_trait(iter_trait, [return_ty], self.param_env)
.must_apply_modulo_regions()
{
err.span_suggestion_hidden(
@@ -2307,7 +2452,7 @@ impl<'cx, 'tcx> MirBorrowckCtxt<'cx, 'tcx> {
// and it'll make sense.
let location = borrow.reserve_location;
debug!("annotate_argument_and_return_for_borrow: location={:?}", location);
- if let Some(&Statement { kind: StatementKind::Assign(box (ref reservation, _)), .. }) =
+ if let Some(Statement { kind: StatementKind::Assign(box (reservation, _)), .. }) =
&self.body[location.block].statements.get(location.statement_index)
{
debug!("annotate_argument_and_return_for_borrow: reservation={:?}", reservation);
@@ -2335,8 +2480,8 @@ impl<'cx, 'tcx> MirBorrowckCtxt<'cx, 'tcx> {
// Check if our `target` was captured by a closure.
if let Rvalue::Aggregate(
box AggregateKind::Closure(def_id, substs),
- ref operands,
- ) = *rvalue
+ operands,
+ ) = rvalue
{
for operand in operands {
let (Operand::Copy(assigned_from) | Operand::Move(assigned_from)) = operand else {
@@ -2360,7 +2505,7 @@ impl<'cx, 'tcx> MirBorrowckCtxt<'cx, 'tcx> {
// into a place then we should annotate the closure in
// case it ends up being assigned into the return place.
annotated_closure =
- self.annotate_fn_sig(def_id, substs.as_closure().sig());
+ self.annotate_fn_sig(*def_id, substs.as_closure().sig());
debug!(
"annotate_argument_and_return_for_borrow: \
annotated_closure={:?} assigned_from_local={:?} \
@@ -2527,7 +2672,7 @@ impl<'cx, 'tcx> MirBorrowckCtxt<'cx, 'tcx> {
if let hir::TyKind::Rptr(lifetime, _) = &fn_decl.inputs[index].kind {
// With access to the lifetime, we can get
// the span of it.
- arguments.push((*argument, lifetime.span));
+ arguments.push((*argument, lifetime.ident.span));
} else {
bug!("ty type is a ref but hir type is not");
}
@@ -2546,7 +2691,7 @@ impl<'cx, 'tcx> MirBorrowckCtxt<'cx, 'tcx> {
let mut return_span = fn_decl.output.span();
if let hir::FnRetTy::Return(ty) = &fn_decl.output {
if let hir::TyKind::Rptr(lifetime, _) = ty.kind {
- return_span = lifetime.span;
+ return_span = lifetime.ident.span;
}
}
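
The `conflict_errors.rs` changes above wire up `suggest_ref_or_clone` and `suggest_cloning`. For orientation, a small illustrative program (not from the patch) showing the move-error shape those suggestions target:

```rust
// Illustrative only: `consume` takes ownership of its parameter, so using
// `s` again afterwards is a use-after-move. The new diagnostics above point
// at the parameter ("this parameter takes ownership of the value") and, when
// the type is `Clone`, suggest `.clone()` at the call site.
fn consume(s: String) {
    println!("{s}");
}

fn main() {
    let s = String::new();
    consume(s);
    // Passing `s.clone()`, or changing `consume` to take `&str`, would
    // avoid the move.
    println!("{s}"); // error[E0382]: borrow of moved value: `s`
}
```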
diff --git a/compiler/rustc_borrowck/src/diagnostics/explain_borrow.rs b/compiler/rustc_borrowck/src/diagnostics/explain_borrow.rs
index 582d683dd..304683618 100644
--- a/compiler/rustc_borrowck/src/diagnostics/explain_borrow.rs
+++ b/compiler/rustc_borrowck/src/diagnostics/explain_borrow.rs
@@ -469,8 +469,8 @@ impl<'cx, 'tcx> MirBorrowckCtxt<'cx, 'tcx> {
} else if self.was_captured_by_trait_object(borrow) {
LaterUseKind::TraitCapture
} else if location.statement_index == block.statements.len() {
- if let TerminatorKind::Call { ref func, from_hir_call: true, .. } =
- block.terminator().kind
+ if let TerminatorKind::Call { func, from_hir_call: true, .. } =
+ &block.terminator().kind
{
// Just point to the function, to reduce the chance of overlapping spans.
let function_span = match func {
@@ -515,19 +515,16 @@ impl<'cx, 'tcx> MirBorrowckCtxt<'cx, 'tcx> {
// will only ever have one item at any given time, but by using a vector, we can pop from
// it which simplifies the termination logic.
let mut queue = vec![location];
- let mut target = if let Some(&Statement {
- kind: StatementKind::Assign(box (ref place, _)),
- ..
- }) = stmt
- {
- if let Some(local) = place.as_local() {
- local
+ let mut target =
+ if let Some(Statement { kind: StatementKind::Assign(box (place, _)), .. }) = stmt {
+ if let Some(local) = place.as_local() {
+ local
+ } else {
+ return false;
+ }
} else {
return false;
- }
- } else {
- return false;
- };
+ };
debug!("was_captured_by_trait: target={:?} queue={:?}", target, queue);
while let Some(current_location) = queue.pop() {
diff --git a/compiler/rustc_borrowck/src/diagnostics/find_all_local_uses.rs b/compiler/rustc_borrowck/src/diagnostics/find_all_local_uses.rs
index b3edc35dc..498e98343 100644
--- a/compiler/rustc_borrowck/src/diagnostics/find_all_local_uses.rs
+++ b/compiler/rustc_borrowck/src/diagnostics/find_all_local_uses.rs
@@ -1,3 +1,6 @@
+#![deny(rustc::untranslatable_diagnostic)]
+#![deny(rustc::diagnostic_outside_of_impl)]
+
use std::collections::BTreeSet;
use rustc_middle::mir::visit::{PlaceContext, Visitor};
diff --git a/compiler/rustc_borrowck/src/diagnostics/find_use.rs b/compiler/rustc_borrowck/src/diagnostics/find_use.rs
index b5a3081e5..15f42e26c 100644
--- a/compiler/rustc_borrowck/src/diagnostics/find_use.rs
+++ b/compiler/rustc_borrowck/src/diagnostics/find_use.rs
@@ -1,3 +1,6 @@
+#![deny(rustc::untranslatable_diagnostic)]
+#![deny(rustc::diagnostic_outside_of_impl)]
+
use std::collections::VecDeque;
use std::rc::Rc;
diff --git a/compiler/rustc_borrowck/src/diagnostics/mod.rs b/compiler/rustc_borrowck/src/diagnostics/mod.rs
index 534d9ecae..4e2271a30 100644
--- a/compiler/rustc_borrowck/src/diagnostics/mod.rs
+++ b/compiler/rustc_borrowck/src/diagnostics/mod.rs
@@ -70,7 +70,7 @@ impl<'cx, 'tcx> MirBorrowckCtxt<'cx, 'tcx> {
location: Location,
place: PlaceRef<'tcx>,
diag: &mut Diagnostic,
- ) {
+ ) -> bool {
debug!("add_moved_or_invoked_closure_note: location={:?} place={:?}", location, place);
let mut target = place.local_or_deref_local();
for stmt in &self.body[location.block].statements[location.statement_index..] {
@@ -78,7 +78,7 @@ impl<'cx, 'tcx> MirBorrowckCtxt<'cx, 'tcx> {
if let StatementKind::Assign(box (into, Rvalue::Use(from))) = &stmt.kind {
debug!("add_fnonce_closure_note: into={:?} from={:?}", into, from);
match from {
- Operand::Copy(ref place) | Operand::Move(ref place)
+ Operand::Copy(place) | Operand::Move(place)
if target == place.local_or_deref_local() =>
{
target = into.local_or_deref_local()
@@ -101,12 +101,12 @@ impl<'cx, 'tcx> MirBorrowckCtxt<'cx, 'tcx> {
debug!("add_moved_or_invoked_closure_note: id={:?}", id);
if Some(self.infcx.tcx.parent(id)) == self.infcx.tcx.lang_items().fn_once_trait() {
let closure = match args.first() {
- Some(Operand::Copy(ref place)) | Some(Operand::Move(ref place))
+ Some(Operand::Copy(place) | Operand::Move(place))
if target == place.local_or_deref_local() =>
{
place.local_or_deref_local().unwrap()
}
- _ => return,
+ _ => return false,
};
debug!("add_moved_or_invoked_closure_note: closure={:?}", closure);
@@ -125,7 +125,7 @@ impl<'cx, 'tcx> MirBorrowckCtxt<'cx, 'tcx> {
ty::place_to_string_for_capture(self.infcx.tcx, hir_place)
),
);
- return;
+ return true;
}
}
}
@@ -149,9 +149,11 @@ impl<'cx, 'tcx> MirBorrowckCtxt<'cx, 'tcx> {
ty::place_to_string_for_capture(self.infcx.tcx, hir_place)
),
);
+ return true;
}
}
}
+ false
}
/// End-user visible description of `place` if one can be found.
@@ -350,7 +352,7 @@ impl<'cx, 'tcx> MirBorrowckCtxt<'cx, 'tcx> {
} else {
def.non_enum_variant()
};
- if !including_tuple_field.0 && variant.ctor_kind == CtorKind::Fn {
+ if !including_tuple_field.0 && variant.ctor_kind() == Some(CtorKind::Fn) {
return None;
}
Some(variant.fields[field.index()].name.to_string())
@@ -437,9 +439,9 @@ impl<'cx, 'tcx> MirBorrowckCtxt<'cx, 'tcx> {
if !is_terminator {
continue;
} else if let Some(Terminator {
- kind: TerminatorKind::Call { ref func, from_hir_call: false, .. },
+ kind: TerminatorKind::Call { func, from_hir_call: false, .. },
..
- }) = bbd.terminator
+ }) = &bbd.terminator
{
if let Some(source) =
BorrowedContentSource::from_call(func.ty(self.body, tcx), tcx)
@@ -588,22 +590,45 @@ impl UseSpans<'_> {
}
}
- // Add a span label to the arguments of the closure, if it exists.
+ /// Add a span label to the arguments of the closure, if it exists.
pub(super) fn args_span_label(self, err: &mut Diagnostic, message: impl Into<String>) {
if let UseSpans::ClosureUse { args_span, .. } = self {
err.span_label(args_span, message);
}
}
- // Add a span label to the use of the captured variable, if it exists.
- // only adds label to the `path_span`
- pub(super) fn var_span_label_path_only(self, err: &mut Diagnostic, message: impl Into<String>) {
- if let UseSpans::ClosureUse { path_span, .. } = self {
- err.span_label(path_span, message);
+ /// Add a span label to the use of the captured variable, if it exists.
+ /// only adds label to the `path_span`
+ pub(super) fn var_path_only_subdiag(
+ self,
+ err: &mut Diagnostic,
+ action: crate::InitializationRequiringAction,
+ ) {
+ use crate::session_diagnostics::CaptureVarPathUseCause::*;
+ use crate::InitializationRequiringAction::*;
+ if let UseSpans::ClosureUse { generator_kind, path_span, .. } = self {
+ match generator_kind {
+ Some(_) => {
+ err.subdiagnostic(match action {
+ Borrow => BorrowInGenerator { path_span },
+ MatchOn | Use => UseInGenerator { path_span },
+ Assignment => AssignInGenerator { path_span },
+ PartialAssignment => AssignPartInGenerator { path_span },
+ });
+ }
+ None => {
+ err.subdiagnostic(match action {
+ Borrow => BorrowInClosure { path_span },
+ MatchOn | Use => UseInClosure { path_span },
+ Assignment => AssignInClosure { path_span },
+ PartialAssignment => AssignPartInClosure { path_span },
+ });
+ }
+ }
}
}
- // Add a span label to the use of the captured variable, if it exists.
+ /// Add a span label to the use of the captured variable, if it exists.
pub(super) fn var_span_label(
self,
err: &mut Diagnostic,
@@ -623,6 +648,35 @@ impl UseSpans<'_> {
}
}
+ /// Add a subdiagnostic to the use of the captured variable, if it exists.
+ pub(super) fn var_subdiag(
+ self,
+ err: &mut Diagnostic,
+ kind: Option<rustc_middle::mir::BorrowKind>,
+ f: impl Fn(Option<GeneratorKind>, Span) -> crate::session_diagnostics::CaptureVarCause,
+ ) {
+ use crate::session_diagnostics::CaptureVarKind::*;
+ if let UseSpans::ClosureUse { generator_kind, capture_kind_span, path_span, .. } = self {
+ if capture_kind_span != path_span {
+ err.subdiagnostic(match kind {
+ Some(kd) => match kd {
+ rustc_middle::mir::BorrowKind::Shared
+ | rustc_middle::mir::BorrowKind::Shallow
+ | rustc_middle::mir::BorrowKind::Unique => {
+ Immute { kind_span: capture_kind_span }
+ }
+
+ rustc_middle::mir::BorrowKind::Mut { .. } => {
+ Mut { kind_span: capture_kind_span }
+ }
+ },
+ None => Move { kind_span: capture_kind_span },
+ });
+ };
+ err.subdiagnostic(f(generator_kind, path_span));
+ }
+ }
+
/// Returns `false` if this place is not used in a closure.
pub(super) fn for_closure(&self) -> bool {
match *self {
@@ -757,33 +811,30 @@ impl<'cx, 'tcx> MirBorrowckCtxt<'cx, 'tcx> {
};
debug!("move_spans: moved_place={:?} location={:?} stmt={:?}", moved_place, location, stmt);
- if let StatementKind::Assign(box (_, Rvalue::Aggregate(ref kind, ref places))) = stmt.kind {
- match **kind {
- AggregateKind::Closure(def_id, _) | AggregateKind::Generator(def_id, _, _) => {
- debug!("move_spans: def_id={:?} places={:?}", def_id, places);
- if let Some((args_span, generator_kind, capture_kind_span, path_span)) =
- self.closure_span(def_id, moved_place, places)
- {
- return ClosureUse {
- generator_kind,
- args_span,
- capture_kind_span,
- path_span,
- };
- }
- }
- _ => {}
+ if let StatementKind::Assign(box (_, Rvalue::Aggregate(kind, places))) = &stmt.kind
+ && let AggregateKind::Closure(def_id, _) | AggregateKind::Generator(def_id, _, _) = **kind
+ {
+ debug!("move_spans: def_id={:?} places={:?}", def_id, places);
+ if let Some((args_span, generator_kind, capture_kind_span, path_span)) =
+ self.closure_span(def_id, moved_place, places)
+ {
+ return ClosureUse {
+ generator_kind,
+ args_span,
+ capture_kind_span,
+ path_span,
+ };
}
}
// StatementKind::FakeRead only contains a def_id if they are introduced as a result
// of pattern matching within a closure.
- if let StatementKind::FakeRead(box (cause, ref place)) = stmt.kind {
+ if let StatementKind::FakeRead(box (cause, place)) = stmt.kind {
match cause {
FakeReadCause::ForMatchedPlace(Some(closure_def_id))
| FakeReadCause::ForLet(Some(closure_def_id)) => {
debug!("move_spans: def_id={:?} place={:?}", closure_def_id, place);
- let places = &[Operand::Move(*place)];
+ let places = &[Operand::Move(place)];
if let Some((args_span, generator_kind, capture_kind_span, path_span)) =
self.closure_span(closure_def_id, moved_place, places)
{
@@ -870,7 +921,7 @@ impl<'cx, 'tcx> MirBorrowckCtxt<'cx, 'tcx> {
debug!("borrow_spans: use_span={:?} location={:?}", use_span, location);
let target = match self.body[location.block].statements.get(location.statement_index) {
- Some(&Statement { kind: StatementKind::Assign(box (ref place, _)), .. }) => {
+ Some(Statement { kind: StatementKind::Assign(box (place, _)), .. }) => {
if let Some(local) = place.as_local() {
local
} else {
@@ -886,9 +937,7 @@ impl<'cx, 'tcx> MirBorrowckCtxt<'cx, 'tcx> {
}
for stmt in &self.body[location.block].statements[location.statement_index + 1..] {
- if let StatementKind::Assign(box (_, Rvalue::Aggregate(ref kind, ref places))) =
- stmt.kind
- {
+ if let StatementKind::Assign(box (_, Rvalue::Aggregate(kind, places))) = &stmt.kind {
let (&def_id, is_generator) = match kind {
box AggregateKind::Closure(def_id, _) => (def_id, false),
box AggregateKind::Generator(def_id, _, _) => (def_id, true),
diff --git a/compiler/rustc_borrowck/src/diagnostics/mutability_errors.rs b/compiler/rustc_borrowck/src/diagnostics/mutability_errors.rs
index 8ad40c0aa..3319a8068 100644
--- a/compiler/rustc_borrowck/src/diagnostics/mutability_errors.rs
+++ b/compiler/rustc_borrowck/src/diagnostics/mutability_errors.rs
@@ -1,6 +1,4 @@
-use rustc_errors::{
- Applicability, Diagnostic, DiagnosticBuilder, EmissionGuarantee, ErrorGuaranteed,
-};
+use rustc_errors::{Applicability, Diagnostic};
use rustc_hir as hir;
use rustc_hir::intravisit::Visitor;
use rustc_hir::Node;
@@ -221,8 +219,8 @@ impl<'a, 'tcx> MirBorrowckCtxt<'a, 'tcx> {
PlaceRef {
local,
projection:
- &[
- ref proj_base @ ..,
+ [
+ proj_base @ ..,
ProjectionElem::Deref,
ProjectionElem::Field(field, _),
ProjectionElem::Deref,
@@ -233,7 +231,7 @@ impl<'a, 'tcx> MirBorrowckCtxt<'a, 'tcx> {
if let Some(span) = get_mut_span_in_struct_field(
self.infcx.tcx,
Place::ty_from(local, proj_base, self.body, self.infcx.tcx).ty,
- field,
+ *field,
) {
err.span_suggestion_verbose(
span,
@@ -391,13 +389,13 @@ impl<'a, 'tcx> MirBorrowckCtxt<'a, 'tcx> {
// diagnostic: if the span starts with a mutable borrow of
// a local variable, then just suggest the user remove it.
PlaceRef { local: _, projection: [] }
- if {
- if let Ok(snippet) = self.infcx.tcx.sess.source_map().span_to_snippet(span) {
- snippet.starts_with("&mut ")
- } else {
- false
- }
- } =>
+ if self
+ .infcx
+ .tcx
+ .sess
+ .source_map()
+ .span_to_snippet(span)
+ .map_or(false, |snippet| snippet.starts_with("&mut ")) =>
{
err.span_label(span, format!("cannot {ACT}", ACT = act));
err.span_suggestion(
@@ -629,25 +627,20 @@ impl<'a, 'tcx> MirBorrowckCtxt<'a, 'tcx> {
self.buffer_error(err);
}
- fn suggest_map_index_mut_alternatives(
- &self,
- ty: Ty<'_>,
- err: &mut DiagnosticBuilder<'_, ErrorGuaranteed>,
- span: Span,
- ) {
+ fn suggest_map_index_mut_alternatives(&self, ty: Ty<'tcx>, err: &mut Diagnostic, span: Span) {
let Some(adt) = ty.ty_adt_def() else { return };
let did = adt.did();
if self.infcx.tcx.is_diagnostic_item(sym::HashMap, did)
|| self.infcx.tcx.is_diagnostic_item(sym::BTreeMap, did)
{
- struct V<'a, 'b, 'tcx, G: EmissionGuarantee> {
+ struct V<'a, 'tcx> {
assign_span: Span,
- err: &'a mut DiagnosticBuilder<'b, G>,
+ err: &'a mut Diagnostic,
ty: Ty<'tcx>,
suggested: bool,
}
- impl<'a, 'b: 'a, 'hir, 'tcx, G: EmissionGuarantee> Visitor<'hir> for V<'a, 'b, 'tcx, G> {
- fn visit_stmt(&mut self, stmt: &'hir hir::Stmt<'hir>) {
+ impl<'a, 'tcx> Visitor<'tcx> for V<'a, 'tcx> {
+ fn visit_stmt(&mut self, stmt: &'tcx hir::Stmt<'tcx>) {
hir::intravisit::walk_stmt(self, stmt);
let expr = match stmt.kind {
hir::StmtKind::Semi(expr) | hir::StmtKind::Expr(expr) => expr,
@@ -705,7 +698,7 @@ impl<'a, 'tcx> MirBorrowckCtxt<'a, 'tcx> {
),
(rv.span.shrink_to_hi(), ")".to_string()),
],
- ].into_iter(),
+ ],
Applicability::MachineApplicable,
);
self.suggested = true;
@@ -1218,7 +1211,7 @@ fn get_mut_span_in_struct_field<'tcx>(
&& let hir::Node::Field(field) = node
&& let hir::TyKind::Rptr(lt, hir::MutTy { mutbl: hir::Mutability::Not, ty }) = field.ty.kind
{
- return Some(lt.span.between(ty.span));
+ return Some(lt.ident.span.between(ty.span));
}
None
diff --git a/compiler/rustc_borrowck/src/diagnostics/region_errors.rs b/compiler/rustc_borrowck/src/diagnostics/region_errors.rs
index 15230718d..9bc2e79e2 100644
--- a/compiler/rustc_borrowck/src/diagnostics/region_errors.rs
+++ b/compiler/rustc_borrowck/src/diagnostics/region_errors.rs
@@ -2,7 +2,7 @@
#![deny(rustc::diagnostic_outside_of_impl)]
//! Error reporting machinery for lifetime errors.
-use rustc_data_structures::fx::FxHashSet;
+use rustc_data_structures::fx::FxIndexSet;
use rustc_errors::{Applicability, Diagnostic, DiagnosticBuilder, ErrorGuaranteed, MultiSpan};
use rustc_hir::def_id::DefId;
use rustc_hir::intravisit::Visitor;
@@ -21,7 +21,7 @@ use rustc_middle::ty::subst::InternalSubsts;
use rustc_middle::ty::Region;
use rustc_middle::ty::TypeVisitor;
use rustc_middle::ty::{self, RegionVid, Ty};
-use rustc_span::symbol::{kw, sym, Ident};
+use rustc_span::symbol::{kw, Ident};
use rustc_span::Span;
use crate::borrowck_errors;
@@ -181,7 +181,7 @@ impl<'a, 'tcx> MirBorrowckCtxt<'a, 'tcx> {
// Try to convert the lower-bound region into something named we can print for the user.
let lower_bound_region = self.to_error_region(type_test.lower_bound);
- let type_test_span = type_test.locations.span(&self.body);
+ let type_test_span = type_test.span;
if let Some(lower_bound_region) = lower_bound_region {
let generic_ty = type_test.generic_kind.to_ty(self.infcx.tcx);
@@ -276,7 +276,7 @@ impl<'a, 'tcx> MirBorrowckCtxt<'a, 'tcx> {
fn get_impl_ident_and_self_ty_from_trait(
&self,
def_id: DefId,
- trait_objects: &FxHashSet<DefId>,
+ trait_objects: &FxIndexSet<DefId>,
) -> Option<(Ident, &'tcx hir::Ty<'tcx>)> {
let tcx = self.infcx.tcx;
match tcx.hir().get_if_local(def_id) {
@@ -514,9 +514,7 @@ impl<'a, 'tcx> MirBorrowckCtxt<'a, 'tcx> {
span: *span,
ty_err: match output_ty.kind() {
ty::Closure(_, _) => FnMutReturnTypeErr::ReturnClosure { span: *span },
- ty::Adt(def, _)
- if self.infcx.tcx.is_diagnostic_item(sym::gen_future, def.did()) =>
- {
+ ty::Generator(def, ..) if self.infcx.tcx.generator_is_async(*def) => {
FnMutReturnTypeErr::ReturnAsyncBlock { span: *span }
}
_ => FnMutReturnTypeErr::ReturnRef { span: *span },
@@ -830,7 +828,7 @@ impl<'a, 'tcx> MirBorrowckCtxt<'a, 'tcx> {
};
debug!(?param);
- let mut visitor = TraitObjectVisitor(FxHashSet::default());
+ let mut visitor = TraitObjectVisitor(FxIndexSet::default());
visitor.visit_ty(param.param_ty);
let Some((ident, self_ty)) =
@@ -843,7 +841,7 @@ impl<'a, 'tcx> MirBorrowckCtxt<'a, 'tcx> {
fn suggest_constrain_dyn_trait_in_impl(
&self,
err: &mut Diagnostic,
- found_dids: &FxHashSet<DefId>,
+ found_dids: &FxIndexSet<DefId>,
ident: Ident,
self_ty: &hir::Ty<'_>,
) -> bool {
@@ -923,14 +921,18 @@ impl<'a, 'tcx> MirBorrowckCtxt<'a, 'tcx> {
}
}
hir::ExprKind::Block(blk, _) => {
- if let Some(ref expr) = blk.expr {
+ if let Some(expr) = blk.expr {
// only when the block is a closure
if let hir::ExprKind::Closure(hir::Closure {
capture_clause: hir::CaptureBy::Ref,
+ body,
..
}) = expr.kind
{
- closure_span = Some(expr.span.shrink_to_lo());
+ let body = map.body(*body);
+ if !matches!(body.generator_kind, Some(hir::GeneratorKind::Async(..))) {
+ closure_span = Some(expr.span.shrink_to_lo());
+ }
}
}
}
diff --git a/compiler/rustc_borrowck/src/diagnostics/region_name.rs b/compiler/rustc_borrowck/src/diagnostics/region_name.rs
index c044dbaba..171e62d91 100644
--- a/compiler/rustc_borrowck/src/diagnostics/region_name.rs
+++ b/compiler/rustc_borrowck/src/diagnostics/region_name.rs
@@ -254,7 +254,7 @@ impl<'tcx> MirBorrowckCtxt<'_, 'tcx> {
.or_else(|| self.give_name_if_anonymous_region_appears_in_impl_signature(fr))
.or_else(|| self.give_name_if_anonymous_region_appears_in_arg_position_impl_trait(fr));
- if let Some(ref value) = value {
+ if let Some(value) = &value {
self.region_names.try_borrow_mut().unwrap().insert(fr, value.clone());
}
@@ -355,7 +355,7 @@ impl<'tcx> MirBorrowckCtxt<'_, 'tcx> {
})
}
- ty::BoundRegionKind::BrAnon(_) => None,
+ ty::BoundRegionKind::BrAnon(..) => None,
},
ty::ReLateBound(..) | ty::ReVar(..) | ty::RePlaceholder(..) | ty::ReErased => None,
@@ -576,30 +576,10 @@ impl<'tcx> MirBorrowckCtxt<'_, 'tcx> {
let args = last_segment.args.as_ref()?;
let lifetime =
self.try_match_adt_and_generic_args(substs, needle_fr, args, search_stack)?;
- match lifetime.name {
- hir::LifetimeName::Param(_, hir::ParamName::Plain(_) | hir::ParamName::Error)
- | hir::LifetimeName::Error
- | hir::LifetimeName::Static => {
- let lifetime_span = lifetime.span;
- Some(RegionNameHighlight::MatchedAdtAndSegment(lifetime_span))
- }
-
- hir::LifetimeName::Param(_, hir::ParamName::Fresh)
- | hir::LifetimeName::ImplicitObjectLifetimeDefault
- | hir::LifetimeName::Infer => {
- // In this case, the user left off the lifetime; so
- // they wrote something like:
- //
- // ```
- // x: Foo<T>
- // ```
- //
- // where the fully elaborated form is `Foo<'_, '1,
- // T>`. We don't consider this a match; instead we let
- // the "fully elaborated" type fallback above handle
- // it.
- None
- }
+ if lifetime.is_anonymous() {
+ None
+ } else {
+ Some(RegionNameHighlight::MatchedAdtAndSegment(lifetime.ident.span))
}
}
@@ -959,8 +939,8 @@ impl<'tcx> MirBorrowckCtxt<'_, 'tcx> {
{
predicates.iter().any(|pred| {
match pred.kind().skip_binder() {
- ty::PredicateKind::Trait(data) if data.self_ty() == ty => {}
- ty::PredicateKind::Projection(data) if data.projection_ty.self_ty() == ty => {}
+ ty::PredicateKind::Clause(ty::Clause::Trait(data)) if data.self_ty() == ty => {}
+ ty::PredicateKind::Clause(ty::Clause::Projection(data)) if data.projection_ty.self_ty() == ty => {}
_ => return false,
}
tcx.any_free_region_meets(pred, |r| {
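
The hunk above collapses an exhaustive match over lifetime-name variants into a single `lifetime.is_anonymous()` check. A minimal, self-contained sketch of that refactor follows; the types and the `highlight` helper are invented stand-ins for illustration, not rustc's actual `hir::Lifetime` API.

    #[allow(dead_code)] // not every variant is exercised below
    enum LifetimeName {
        Named(&'static str),
        Static,
        Anonymous,
        Inferred,
    }

    struct Lifetime {
        name: LifetimeName,
        span: (u32, u32), // stand-in for a real Span
    }

    impl Lifetime {
        // One predicate instead of every caller enumerating the elided variants.
        fn is_anonymous(&self) -> bool {
            matches!(self.name, LifetimeName::Anonymous | LifetimeName::Inferred)
        }
    }

    fn highlight(lt: &Lifetime) -> Option<(u32, u32)> {
        // Mirrors the simplified logic: elided lifetimes are not a match;
        // otherwise blame the lifetime's own span.
        if lt.is_anonymous() { None } else { Some(lt.span) }
    }

    fn main() {
        let named = Lifetime { name: LifetimeName::Named("'a"), span: (10, 12) };
        let elided = Lifetime { name: LifetimeName::Anonymous, span: (20, 20) };
        assert_eq!(highlight(&named), Some((10, 12)));
        assert_eq!(highlight(&elided), None);
    }
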
diff --git a/compiler/rustc_borrowck/src/diagnostics/var_name.rs b/compiler/rustc_borrowck/src/diagnostics/var_name.rs
index 9ba29f04b..b385f95b6 100644
--- a/compiler/rustc_borrowck/src/diagnostics/var_name.rs
+++ b/compiler/rustc_borrowck/src/diagnostics/var_name.rs
@@ -1,3 +1,6 @@
+#![deny(rustc::untranslatable_diagnostic)]
+#![deny(rustc::diagnostic_outside_of_impl)]
+
use crate::Upvar;
use crate::{nll::ToRegionVid, region_infer::RegionInferenceContext};
use rustc_index::vec::{Idx, IndexVec};
diff --git a/compiler/rustc_borrowck/src/facts.rs b/compiler/rustc_borrowck/src/facts.rs
index 22134d5a7..51ed27c16 100644
--- a/compiler/rustc_borrowck/src/facts.rs
+++ b/compiler/rustc_borrowck/src/facts.rs
@@ -1,3 +1,5 @@
+#![deny(rustc::untranslatable_diagnostic)]
+#![deny(rustc::diagnostic_outside_of_impl)]
use crate::location::{LocationIndex, LocationTable};
use crate::BorrowIndex;
use polonius_engine::AllFacts as PoloniusFacts;
diff --git a/compiler/rustc_borrowck/src/invalidation.rs b/compiler/rustc_borrowck/src/invalidation.rs
index 3157f861d..f66a7ab3c 100644
--- a/compiler/rustc_borrowck/src/invalidation.rs
+++ b/compiler/rustc_borrowck/src/invalidation.rs
@@ -1,3 +1,5 @@
+#![deny(rustc::untranslatable_diagnostic)]
+#![deny(rustc::diagnostic_outside_of_impl)]
use rustc_data_structures::graph::dominators::Dominators;
use rustc_middle::mir::visit::Visitor;
use rustc_middle::mir::{self, BasicBlock, Body, Location, NonDivergingIntrinsic, Place, Rvalue};
@@ -67,9 +69,9 @@ impl<'cx, 'tcx> Visitor<'tcx> for InvalidationGenerator<'cx, 'tcx> {
self.consume_operand(location, op);
}
StatementKind::Intrinsic(box NonDivergingIntrinsic::CopyNonOverlapping(mir::CopyNonOverlapping {
- ref src,
- ref dst,
- ref count,
+ src,
+ dst,
+ count,
})) => {
self.consume_operand(location, src);
self.consume_operand(location, dst);
@@ -104,7 +106,7 @@ impl<'cx, 'tcx> Visitor<'tcx> for InvalidationGenerator<'cx, 'tcx> {
self.check_activations(location);
match &terminator.kind {
- TerminatorKind::SwitchInt { ref discr, switch_ty: _, targets: _ } => {
+ TerminatorKind::SwitchInt { discr, switch_ty: _, targets: _ } => {
self.consume_operand(location, discr);
}
TerminatorKind::Drop { place: drop_place, target: _, unwind: _ } => {
@@ -117,7 +119,7 @@ impl<'cx, 'tcx> Visitor<'tcx> for InvalidationGenerator<'cx, 'tcx> {
}
TerminatorKind::DropAndReplace {
place: drop_place,
- value: ref new_value,
+ value: new_value,
target: _,
unwind: _,
} => {
@@ -125,8 +127,8 @@ impl<'cx, 'tcx> Visitor<'tcx> for InvalidationGenerator<'cx, 'tcx> {
self.consume_operand(location, new_value);
}
TerminatorKind::Call {
- ref func,
- ref args,
+ func,
+ args,
destination,
target: _,
cleanup: _,
@@ -139,15 +141,15 @@ impl<'cx, 'tcx> Visitor<'tcx> for InvalidationGenerator<'cx, 'tcx> {
}
self.mutate_place(location, *destination, Deep);
}
- TerminatorKind::Assert { ref cond, expected: _, ref msg, target: _, cleanup: _ } => {
+ TerminatorKind::Assert { cond, expected: _, msg, target: _, cleanup: _ } => {
self.consume_operand(location, cond);
use rustc_middle::mir::AssertKind;
- if let AssertKind::BoundsCheck { ref len, ref index } = *msg {
+ if let AssertKind::BoundsCheck { len, index } = msg {
self.consume_operand(location, len);
self.consume_operand(location, index);
}
}
- TerminatorKind::Yield { ref value, resume, resume_arg, drop: _ } => {
+ TerminatorKind::Yield { value, resume, resume_arg, drop: _ } => {
self.consume_operand(location, value);
// Invalidate all borrows of local places
@@ -173,25 +175,25 @@ impl<'cx, 'tcx> Visitor<'tcx> for InvalidationGenerator<'cx, 'tcx> {
}
TerminatorKind::InlineAsm {
template: _,
- ref operands,
+ operands,
options: _,
line_spans: _,
destination: _,
cleanup: _,
} => {
for op in operands {
- match *op {
- InlineAsmOperand::In { reg: _, ref value } => {
+ match op {
+ InlineAsmOperand::In { reg: _, value } => {
self.consume_operand(location, value);
}
InlineAsmOperand::Out { reg: _, late: _, place, .. } => {
- if let Some(place) = place {
+ if let &Some(place) = place {
self.mutate_place(location, place, Shallow(None));
}
}
- InlineAsmOperand::InOut { reg: _, late: _, ref in_value, out_place } => {
+ InlineAsmOperand::InOut { reg: _, late: _, in_value, out_place } => {
self.consume_operand(location, in_value);
- if let Some(out_place) = out_place {
+ if let &Some(out_place) = out_place {
self.mutate_place(location, out_place, Shallow(None));
}
}
@@ -250,8 +252,8 @@ impl<'cx, 'tcx> InvalidationGenerator<'cx, 'tcx> {
// Simulates consumption of an rvalue
fn consume_rvalue(&mut self, location: Location, rvalue: &Rvalue<'tcx>) {
- match *rvalue {
- Rvalue::Ref(_ /*rgn*/, bk, place) => {
+ match rvalue {
+ &Rvalue::Ref(_ /*rgn*/, bk, place) => {
let access_kind = match bk {
BorrowKind::Shallow => {
(Shallow(Some(ArtificialField::ShallowBorrow)), Read(ReadKind::Borrow(bk)))
@@ -270,7 +272,7 @@ impl<'cx, 'tcx> InvalidationGenerator<'cx, 'tcx> {
self.access_place(location, place, access_kind, LocalMutationIsAllowed::No);
}
- Rvalue::AddressOf(mutability, place) => {
+ &Rvalue::AddressOf(mutability, place) => {
let access_kind = match mutability {
Mutability::Mut => (
Deep,
@@ -286,20 +288,19 @@ impl<'cx, 'tcx> InvalidationGenerator<'cx, 'tcx> {
Rvalue::ThreadLocalRef(_) => {}
- Rvalue::Use(ref operand)
- | Rvalue::Repeat(ref operand, _)
- | Rvalue::UnaryOp(_ /*un_op*/, ref operand)
- | Rvalue::Cast(_ /*cast_kind*/, ref operand, _ /*ty*/)
- | Rvalue::ShallowInitBox(ref operand, _ /*ty*/) => {
- self.consume_operand(location, operand)
- }
- Rvalue::CopyForDeref(ref place) => {
- let op = &Operand::Copy(*place);
+ Rvalue::Use(operand)
+ | Rvalue::Repeat(operand, _)
+ | Rvalue::UnaryOp(_ /*un_op*/, operand)
+ | Rvalue::Cast(_ /*cast_kind*/, operand, _ /*ty*/)
+ | Rvalue::ShallowInitBox(operand, _ /*ty*/) => self.consume_operand(location, operand),
+
+ &Rvalue::CopyForDeref(place) => {
+ let op = &Operand::Copy(place);
self.consume_operand(location, op);
}
- Rvalue::Len(place) | Rvalue::Discriminant(place) => {
- let af = match *rvalue {
+ &(Rvalue::Len(place) | Rvalue::Discriminant(place)) => {
+ let af = match rvalue {
Rvalue::Len(..) => Some(ArtificialField::ArrayLength),
Rvalue::Discriminant(..) => None,
_ => unreachable!(),
@@ -312,15 +313,15 @@ impl<'cx, 'tcx> InvalidationGenerator<'cx, 'tcx> {
);
}
- Rvalue::BinaryOp(_bin_op, box (ref operand1, ref operand2))
- | Rvalue::CheckedBinaryOp(_bin_op, box (ref operand1, ref operand2)) => {
+ Rvalue::BinaryOp(_bin_op, box (operand1, operand2))
+ | Rvalue::CheckedBinaryOp(_bin_op, box (operand1, operand2)) => {
self.consume_operand(location, operand1);
self.consume_operand(location, operand2);
}
Rvalue::NullaryOp(_op, _ty) => {}
- Rvalue::Aggregate(_, ref operands) => {
+ Rvalue::Aggregate(_, operands) => {
for operand in operands {
self.consume_operand(location, operand);
}
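
Most hunks in this file drop `ref` bindings by matching on the reference itself (or a leading `&` pattern for `Copy` payloads) instead of dereferencing the scrutinee. A small stand-alone sketch of that pattern change, with made-up types rather than the real MIR `Rvalue`/`Operand`:

    #[derive(Clone, Copy, Debug)]
    struct Place(u32);

    enum Operand {
        Copy(Place),
        Constant(i64),
    }

    enum Rvalue {
        Ref(Place),
        Use(Operand),
    }

    fn consume_operand(op: &Operand) {
        // Match ergonomics: bindings are references, no `ref` keyword needed.
        match op {
            Operand::Copy(place) => println!("copy of {place:?}"),
            Operand::Constant(c) => println!("const {c}"),
        }
    }

    fn consume_rvalue(rvalue: &Rvalue) {
        // Old style: `match *rvalue { Rvalue::Use(ref operand) => ... }`.
        // New style: match on the reference; a leading `&` pattern copies out
        // `Copy` payloads like `Place` by value.
        match rvalue {
            &Rvalue::Ref(place) => println!("borrow of {place:?}"),
            Rvalue::Use(operand) => consume_operand(operand),
        }
    }

    fn main() {
        consume_rvalue(&Rvalue::Ref(Place(3)));
        consume_rvalue(&Rvalue::Use(Operand::Copy(Place(1))));
        consume_rvalue(&Rvalue::Use(Operand::Constant(7)));
    }
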
diff --git a/compiler/rustc_borrowck/src/lib.rs b/compiler/rustc_borrowck/src/lib.rs
index abfe253d4..74b4e4a0c 100644
--- a/compiler/rustc_borrowck/src/lib.rs
+++ b/compiler/rustc_borrowck/src/lib.rs
@@ -18,7 +18,8 @@ extern crate tracing;
use rustc_data_structures::fx::{FxHashMap, FxHashSet};
use rustc_data_structures::graph::dominators::Dominators;
-use rustc_errors::{Diagnostic, DiagnosticBuilder, ErrorGuaranteed};
+use rustc_data_structures::vec_map::VecMap;
+use rustc_errors::{Diagnostic, DiagnosticBuilder};
use rustc_hir as hir;
use rustc_hir::def_id::LocalDefId;
use rustc_index::bit_set::ChunkedBitSet;
@@ -82,7 +83,7 @@ mod type_check;
mod universal_regions;
mod used_muts;
-// A public API provided for the Rust compiler consumers.
+/// A public API provided for the Rust compiler consumers.
pub mod consumers;
use borrow_set::{BorrowData, BorrowSet};
@@ -129,6 +130,19 @@ fn mir_borrowck<'tcx>(
) -> &'tcx BorrowCheckResult<'tcx> {
let (input_body, promoted) = tcx.mir_promoted(def);
debug!("run query mir_borrowck: {}", tcx.def_path_str(def.did.to_def_id()));
+
+ if input_body.borrow().should_skip() {
+ debug!("Skipping borrowck because of injected body");
+ // Let's make up a borrowck result! Fun times!
+ let result = BorrowCheckResult {
+ concrete_opaque_types: VecMap::new(),
+ closure_requirements: None,
+ used_mut_upvars: SmallVec::new(),
+ tainted_by_errors: None,
+ };
+ return tcx.arena.alloc(result);
+ }
+
let hir_owner = tcx.hir().local_def_id_to_hir_id(def.did).owner;
let infcx =
@@ -178,13 +192,13 @@ fn do_mir_borrowck<'tcx>(
}
}
- let mut errors = error::BorrowckErrors::new();
+ let mut errors = error::BorrowckErrors::new(infcx.tcx);
// Gather the upvars of a closure, if any.
let tables = tcx.typeck_opt_const_arg(def);
- if let Some(ErrorGuaranteed { .. }) = tables.tainted_by_errors {
- infcx.set_tainted_by_errors();
- errors.set_tainted_by_errors();
+ if let Some(e) = tables.tainted_by_errors {
+ infcx.set_tainted_by_errors(e);
+ errors.set_tainted_by_errors(e);
}
let upvars: Vec<_> = tables
.closure_min_captures_flattened(def.did)
@@ -564,12 +578,12 @@ impl<'cx, 'tcx> rustc_mir_dataflow::ResultsVisitor<'cx, 'tcx> for MirBorrowckCtx
self.check_activations(location, span, flow_state);
match &stmt.kind {
- StatementKind::Assign(box (lhs, ref rhs)) => {
+ StatementKind::Assign(box (lhs, rhs)) => {
self.consume_rvalue(location, (rhs, span), flow_state);
self.mutate_place(location, (*lhs, span), Shallow(None), flow_state);
}
- StatementKind::FakeRead(box (_, ref place)) => {
+ StatementKind::FakeRead(box (_, place)) => {
// Read for match doesn't access any memory and is used to
// assert that a place is safe and live. So we don't have to
// do any checks here.
@@ -587,7 +601,7 @@ impl<'cx, 'tcx> rustc_mir_dataflow::ResultsVisitor<'cx, 'tcx> for MirBorrowckCtx
flow_state,
);
}
- StatementKind::Intrinsic(box ref kind) => match kind {
+ StatementKind::Intrinsic(box kind) => match kind {
NonDivergingIntrinsic::Assume(op) => self.consume_operand(location, (op, span), flow_state),
NonDivergingIntrinsic::CopyNonOverlapping(..) => span_bug!(
span,
@@ -629,8 +643,8 @@ impl<'cx, 'tcx> rustc_mir_dataflow::ResultsVisitor<'cx, 'tcx> for MirBorrowckCtx
self.check_activations(loc, span, flow_state);
- match term.kind {
- TerminatorKind::SwitchInt { ref discr, switch_ty: _, targets: _ } => {
+ match &term.kind {
+ TerminatorKind::SwitchInt { discr, switch_ty: _, targets: _ } => {
self.consume_operand(loc, (discr, span), flow_state);
}
TerminatorKind::Drop { place, target: _, unwind: _ } => {
@@ -642,7 +656,7 @@ impl<'cx, 'tcx> rustc_mir_dataflow::ResultsVisitor<'cx, 'tcx> for MirBorrowckCtx
self.access_place(
loc,
- (place, span),
+ (*place, span),
(AccessDepth::Drop, Write(WriteKind::StorageDeadOrDrop)),
LocalMutationIsAllowed::Yes,
flow_state,
@@ -650,16 +664,16 @@ impl<'cx, 'tcx> rustc_mir_dataflow::ResultsVisitor<'cx, 'tcx> for MirBorrowckCtx
}
TerminatorKind::DropAndReplace {
place: drop_place,
- value: ref new_value,
+ value: new_value,
target: _,
unwind: _,
} => {
- self.mutate_place(loc, (drop_place, span), Deep, flow_state);
+ self.mutate_place(loc, (*drop_place, span), Deep, flow_state);
self.consume_operand(loc, (new_value, span), flow_state);
}
TerminatorKind::Call {
- ref func,
- ref args,
+ func,
+ args,
destination,
target: _,
cleanup: _,
@@ -670,43 +684,43 @@ impl<'cx, 'tcx> rustc_mir_dataflow::ResultsVisitor<'cx, 'tcx> for MirBorrowckCtx
for arg in args {
self.consume_operand(loc, (arg, span), flow_state);
}
- self.mutate_place(loc, (destination, span), Deep, flow_state);
+ self.mutate_place(loc, (*destination, span), Deep, flow_state);
}
- TerminatorKind::Assert { ref cond, expected: _, ref msg, target: _, cleanup: _ } => {
+ TerminatorKind::Assert { cond, expected: _, msg, target: _, cleanup: _ } => {
self.consume_operand(loc, (cond, span), flow_state);
use rustc_middle::mir::AssertKind;
- if let AssertKind::BoundsCheck { ref len, ref index } = *msg {
+ if let AssertKind::BoundsCheck { len, index } = msg {
self.consume_operand(loc, (len, span), flow_state);
self.consume_operand(loc, (index, span), flow_state);
}
}
- TerminatorKind::Yield { ref value, resume: _, resume_arg, drop: _ } => {
+ TerminatorKind::Yield { value, resume: _, resume_arg, drop: _ } => {
self.consume_operand(loc, (value, span), flow_state);
- self.mutate_place(loc, (resume_arg, span), Deep, flow_state);
+ self.mutate_place(loc, (*resume_arg, span), Deep, flow_state);
}
TerminatorKind::InlineAsm {
template: _,
- ref operands,
+ operands,
options: _,
line_spans: _,
destination: _,
cleanup: _,
} => {
for op in operands {
- match *op {
- InlineAsmOperand::In { reg: _, ref value } => {
+ match op {
+ InlineAsmOperand::In { reg: _, value } => {
self.consume_operand(loc, (value, span), flow_state);
}
InlineAsmOperand::Out { reg: _, late: _, place, .. } => {
if let Some(place) = place {
- self.mutate_place(loc, (place, span), Shallow(None), flow_state);
+ self.mutate_place(loc, (*place, span), Shallow(None), flow_state);
}
}
- InlineAsmOperand::InOut { reg: _, late: _, ref in_value, out_place } => {
+ InlineAsmOperand::InOut { reg: _, late: _, in_value, out_place } => {
self.consume_operand(loc, (in_value, span), flow_state);
- if let Some(out_place) = out_place {
+ if let &Some(out_place) = out_place {
self.mutate_place(
loc,
(out_place, span),
@@ -1150,8 +1164,8 @@ impl<'cx, 'tcx> MirBorrowckCtxt<'cx, 'tcx> {
(rvalue, span): (&'cx Rvalue<'tcx>, Span),
flow_state: &Flows<'cx, 'tcx>,
) {
- match *rvalue {
- Rvalue::Ref(_ /*rgn*/, bk, place) => {
+ match rvalue {
+ &Rvalue::Ref(_ /*rgn*/, bk, place) => {
let access_kind = match bk {
BorrowKind::Shallow => {
(Shallow(Some(ArtificialField::ShallowBorrow)), Read(ReadKind::Borrow(bk)))
@@ -1189,7 +1203,7 @@ impl<'cx, 'tcx> MirBorrowckCtxt<'cx, 'tcx> {
);
}
- Rvalue::AddressOf(mutability, place) => {
+ &Rvalue::AddressOf(mutability, place) => {
let access_kind = match mutability {
Mutability::Mut => (
Deep,
@@ -1218,14 +1232,15 @@ impl<'cx, 'tcx> MirBorrowckCtxt<'cx, 'tcx> {
Rvalue::ThreadLocalRef(_) => {}
- Rvalue::Use(ref operand)
- | Rvalue::Repeat(ref operand, _)
- | Rvalue::UnaryOp(_ /*un_op*/, ref operand)
- | Rvalue::Cast(_ /*cast_kind*/, ref operand, _ /*ty*/)
- | Rvalue::ShallowInitBox(ref operand, _ /*ty*/) => {
+ Rvalue::Use(operand)
+ | Rvalue::Repeat(operand, _)
+ | Rvalue::UnaryOp(_ /*un_op*/, operand)
+ | Rvalue::Cast(_ /*cast_kind*/, operand, _ /*ty*/)
+ | Rvalue::ShallowInitBox(operand, _ /*ty*/) => {
self.consume_operand(location, (operand, span), flow_state)
}
- Rvalue::CopyForDeref(place) => {
+
+ &Rvalue::CopyForDeref(place) => {
self.access_place(
location,
(place, span),
@@ -1243,7 +1258,7 @@ impl<'cx, 'tcx> MirBorrowckCtxt<'cx, 'tcx> {
);
}
- Rvalue::Len(place) | Rvalue::Discriminant(place) => {
+ &(Rvalue::Len(place) | Rvalue::Discriminant(place)) => {
let af = match *rvalue {
Rvalue::Len(..) => Some(ArtificialField::ArrayLength),
Rvalue::Discriminant(..) => None,
@@ -1264,8 +1279,8 @@ impl<'cx, 'tcx> MirBorrowckCtxt<'cx, 'tcx> {
);
}
- Rvalue::BinaryOp(_bin_op, box (ref operand1, ref operand2))
- | Rvalue::CheckedBinaryOp(_bin_op, box (ref operand1, ref operand2)) => {
+ Rvalue::BinaryOp(_bin_op, box (operand1, operand2))
+ | Rvalue::CheckedBinaryOp(_bin_op, box (operand1, operand2)) => {
self.consume_operand(location, (operand1, span), flow_state);
self.consume_operand(location, (operand2, span), flow_state);
}
@@ -1274,7 +1289,7 @@ impl<'cx, 'tcx> MirBorrowckCtxt<'cx, 'tcx> {
// nullary ops take no dynamic input; no borrowck effect.
}
- Rvalue::Aggregate(ref aggregate_kind, ref operands) => {
+ Rvalue::Aggregate(aggregate_kind, operands) => {
// We need to report back the list of mutable upvars that were
// moved into the closure and subsequently used by the closure,
// in order to populate our used_mut set.
@@ -2246,6 +2261,7 @@ mod error {
use super::*;
pub struct BorrowckErrors<'tcx> {
+ tcx: TyCtxt<'tcx>,
/// This field keeps track of move errors that are to be reported for given move indices.
///
/// There are situations where many errors can be reported for a single move out (see #53807)
@@ -2268,19 +2284,24 @@ mod error {
tainted_by_errors: Option<ErrorGuaranteed>,
}
- impl BorrowckErrors<'_> {
- pub fn new() -> Self {
+ impl<'tcx> BorrowckErrors<'tcx> {
+ pub fn new(tcx: TyCtxt<'tcx>) -> Self {
BorrowckErrors {
+ tcx,
buffered_move_errors: BTreeMap::new(),
buffered: Default::default(),
tainted_by_errors: None,
}
}
- // FIXME(eddyb) this is a suboptimal API because `tainted_by_errors` is
- // set before any emission actually happens (weakening the guarantee).
pub fn buffer_error(&mut self, t: DiagnosticBuilder<'_, ErrorGuaranteed>) {
- self.tainted_by_errors = Some(ErrorGuaranteed::unchecked_claim_error_was_emitted());
+ if let None = self.tainted_by_errors {
+ self.tainted_by_errors = Some(
+ self.tcx
+ .sess
+ .delay_span_bug(t.span.clone(), "diagnostic buffered but not emitted"),
+ )
+ }
t.buffer(&mut self.buffered);
}
@@ -2288,8 +2309,8 @@ mod error {
t.buffer(&mut self.buffered);
}
- pub fn set_tainted_by_errors(&mut self) {
- self.tainted_by_errors = Some(ErrorGuaranteed::unchecked_claim_error_was_emitted());
+ pub fn set_tainted_by_errors(&mut self, e: ErrorGuaranteed) {
+ self.tainted_by_errors = Some(e);
}
}
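
The `BorrowckErrors` change above replaces `unchecked_claim_error_was_emitted()` with a real guarantee obtained from `delay_span_bug`, so "tainted by errors" is backed by proof that a diagnostic will surface. A self-contained sketch of that token-passing pattern; all names here (`Guaranteed`, `Reporter`, `Errors`) are invented, and a unit struct stands in for rustc's `ErrorGuaranteed`, which only the diagnostics machinery can create.

    #[derive(Clone, Copy, Debug)]
    struct Guaranteed; // stand-in for an opaque proof token

    #[derive(Default)]
    struct Reporter {
        delayed: Vec<String>,
    }

    impl Reporter {
        // Comparable in spirit to `delay_span_bug`: promise that an error will
        // surface later and hand back proof of that promise.
        fn delay_bug(&mut self, msg: &str) -> Guaranteed {
            self.delayed.push(msg.to_string());
            Guaranteed
        }
    }

    #[derive(Default)]
    struct Errors {
        buffered: Vec<String>,
        tainted_by_errors: Option<Guaranteed>,
    }

    impl Errors {
        fn buffer_error(&mut self, reporter: &mut Reporter, diag: String) {
            // Record the guarantee once, on the first buffered diagnostic.
            if self.tainted_by_errors.is_none() {
                self.tainted_by_errors =
                    Some(reporter.delay_bug("diagnostic buffered but not emitted"));
            }
            self.buffered.push(diag);
        }

        fn set_tainted_by_errors(&mut self, e: Guaranteed) {
            self.tainted_by_errors = Some(e);
        }
    }

    fn main() {
        let mut reporter = Reporter::default();
        let mut errors = Errors::default();
        errors.buffer_error(&mut reporter, "cannot move out of `x`".into());
        assert!(errors.tainted_by_errors.is_some());
        assert_eq!(errors.buffered.len(), 1);

        // Callers that already hold a token just pass it along.
        let e = reporter.delay_bug("typeck results were tainted");
        errors.set_tainted_by_errors(e);
        assert_eq!(reporter.delayed.len(), 2);
    }
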
diff --git a/compiler/rustc_borrowck/src/location.rs b/compiler/rustc_borrowck/src/location.rs
index 877944d3d..9fa7e218b 100644
--- a/compiler/rustc_borrowck/src/location.rs
+++ b/compiler/rustc_borrowck/src/location.rs
@@ -1,3 +1,5 @@
+#![deny(rustc::untranslatable_diagnostic)]
+#![deny(rustc::diagnostic_outside_of_impl)]
use rustc_index::vec::{Idx, IndexVec};
use rustc_middle::mir::{BasicBlock, Body, Location};
diff --git a/compiler/rustc_borrowck/src/member_constraints.rs b/compiler/rustc_borrowck/src/member_constraints.rs
index 43253a2aa..b5e00f471 100644
--- a/compiler/rustc_borrowck/src/member_constraints.rs
+++ b/compiler/rustc_borrowck/src/member_constraints.rs
@@ -1,3 +1,5 @@
+#![deny(rustc::untranslatable_diagnostic)]
+#![deny(rustc::diagnostic_outside_of_impl)]
use rustc_data_structures::captures::Captures;
use rustc_data_structures::fx::FxHashMap;
use rustc_index::vec::IndexVec;
@@ -9,6 +11,7 @@ use std::ops::Index;
/// Compactly stores a set of `R0 member of [R1...Rn]` constraints,
/// indexed by the region `R0`.
+#[derive(Debug)]
pub(crate) struct MemberConstraintSet<'tcx, R>
where
R: Copy + Eq,
@@ -29,6 +32,7 @@ where
}
/// Represents a `R0 member of [R1..Rn]` constraint
+#[derive(Debug)]
pub(crate) struct NllMemberConstraint<'tcx> {
next_constraint: Option<NllMemberConstraintIndex>,
diff --git a/compiler/rustc_borrowck/src/nll.rs b/compiler/rustc_borrowck/src/nll.rs
index 08fdd28eb..e379e6470 100644
--- a/compiler/rustc_borrowck/src/nll.rs
+++ b/compiler/rustc_borrowck/src/nll.rs
@@ -1,3 +1,5 @@
+#![deny(rustc::untranslatable_diagnostic)]
+#![deny(rustc::diagnostic_outside_of_impl)]
//! The entry point of the NLL borrow checker.
use rustc_data_structures::vec_map::VecMap;
@@ -71,7 +73,7 @@ pub(crate) fn replace_regions_in_mir<'tcx>(
// Replace all remaining regions with fresh inference variables.
renumber::renumber_mir(infcx, body, promoted);
- dump_mir(infcx.tcx, None, "renumber", &0, body, |_, _| Ok(()));
+ dump_mir(infcx.tcx, false, "renumber", &0, body, |_, _| Ok(()));
universal_regions
}
@@ -242,7 +244,6 @@ pub(crate) fn compute_regions<'cx, 'tcx>(
mut liveness_constraints,
outlives_constraints,
member_constraints,
- closure_bounds_mapping,
universe_causes,
type_tests,
} = constraints;
@@ -264,7 +265,6 @@ pub(crate) fn compute_regions<'cx, 'tcx>(
universal_region_relations,
outlives_constraints,
member_constraints,
- closure_bounds_mapping,
universe_causes,
type_tests,
liveness_constraints,
@@ -303,7 +303,10 @@ pub(crate) fn compute_regions<'cx, 'tcx>(
if !nll_errors.is_empty() {
// Suppress unhelpful extra errors in `infer_opaque_types`.
- infcx.set_tainted_by_errors();
+ infcx.set_tainted_by_errors(infcx.tcx.sess.delay_span_bug(
+ body.span,
+ "`compute_regions` tainted `infcx` with errors but did not emit any errors",
+ ));
}
let remapped_opaque_tys = regioncx.infer_opaque_types(&infcx, opaque_type_values);
@@ -328,7 +331,7 @@ pub(super) fn dump_mir_results<'tcx>(
return;
}
- dump_mir(infcx.tcx, None, "nll", &0, body, |pass_where, out| {
+ dump_mir(infcx.tcx, false, "nll", &0, body, |pass_where, out| {
match pass_where {
// Before the CFG, dump out the values for each region variable.
PassWhere::BeforeCFG => {
@@ -355,15 +358,13 @@ pub(super) fn dump_mir_results<'tcx>(
// Also dump the inference graph constraints as a graphviz file.
let _: io::Result<()> = try {
- let mut file =
- create_dump_file(infcx.tcx, "regioncx.all.dot", None, "nll", &0, body.source)?;
+ let mut file = create_dump_file(infcx.tcx, "regioncx.all.dot", false, "nll", &0, body)?;
regioncx.dump_graphviz_raw_constraints(&mut file)?;
};
// Also dump the inference graph constraints as a graphviz file.
let _: io::Result<()> = try {
- let mut file =
- create_dump_file(infcx.tcx, "regioncx.scc.dot", None, "nll", &0, body.source)?;
+ let mut file = create_dump_file(infcx.tcx, "regioncx.scc.dot", false, "nll", &0, body)?;
regioncx.dump_graphviz_scc_constraints(&mut file)?;
};
}
diff --git a/compiler/rustc_borrowck/src/path_utils.rs b/compiler/rustc_borrowck/src/path_utils.rs
index b2c8dfc82..f8a99a269 100644
--- a/compiler/rustc_borrowck/src/path_utils.rs
+++ b/compiler/rustc_borrowck/src/path_utils.rs
@@ -1,3 +1,5 @@
+#![deny(rustc::untranslatable_diagnostic)]
+#![deny(rustc::diagnostic_outside_of_impl)]
use crate::borrow_set::{BorrowData, BorrowSet, TwoPhaseActivation};
use crate::places_conflict;
use crate::AccessDepth;
diff --git a/compiler/rustc_borrowck/src/place_ext.rs b/compiler/rustc_borrowck/src/place_ext.rs
index 93d202e49..9f6b1fdfc 100644
--- a/compiler/rustc_borrowck/src/place_ext.rs
+++ b/compiler/rustc_borrowck/src/place_ext.rs
@@ -1,3 +1,5 @@
+#![deny(rustc::untranslatable_diagnostic)]
+#![deny(rustc::diagnostic_outside_of_impl)]
use crate::borrow_set::LocalsStateAtExit;
use rustc_hir as hir;
use rustc_middle::mir::ProjectionElem;
diff --git a/compiler/rustc_borrowck/src/places_conflict.rs b/compiler/rustc_borrowck/src/places_conflict.rs
index 0e71efd6f..89ac0dfa4 100644
--- a/compiler/rustc_borrowck/src/places_conflict.rs
+++ b/compiler/rustc_borrowck/src/places_conflict.rs
@@ -1,3 +1,5 @@
+#![deny(rustc::untranslatable_diagnostic)]
+#![deny(rustc::diagnostic_outside_of_impl)]
use crate::ArtificialField;
use crate::Overlap;
use crate::{AccessDepth, Deep, Shallow};
@@ -318,16 +320,10 @@ fn place_projection_conflict<'tcx>(
debug!("place_element_conflict: DISJOINT-OR-EQ-DEREF");
Overlap::EqualOrDisjoint
}
- (ProjectionElem::OpaqueCast(v1), ProjectionElem::OpaqueCast(v2)) => {
- if v1 == v2 {
- // same type - recur.
- debug!("place_element_conflict: DISJOINT-OR-EQ-OPAQUE");
- Overlap::EqualOrDisjoint
- } else {
- // Different types. Disjoint!
- debug!("place_element_conflict: DISJOINT-OPAQUE");
- Overlap::Disjoint
- }
+ (ProjectionElem::OpaqueCast(_), ProjectionElem::OpaqueCast(_)) => {
+ // casts to other types may always conflict irrespective of the type being cast to.
+ debug!("place_element_conflict: DISJOINT-OR-EQ-OPAQUE");
+ Overlap::EqualOrDisjoint
}
(ProjectionElem::Field(f1, _), ProjectionElem::Field(f2, _)) => {
if f1 == f2 {
diff --git a/compiler/rustc_borrowck/src/prefixes.rs b/compiler/rustc_borrowck/src/prefixes.rs
index 2b50cbac9..6f2813498 100644
--- a/compiler/rustc_borrowck/src/prefixes.rs
+++ b/compiler/rustc_borrowck/src/prefixes.rs
@@ -1,3 +1,5 @@
+#![deny(rustc::untranslatable_diagnostic)]
+#![deny(rustc::diagnostic_outside_of_impl)]
//! From the NLL RFC: "The deep [aka 'supporting'] prefixes for an
//! place are formed by stripping away fields and derefs, except that
//! we stop when we reach the deref of a shared reference. [...] "
diff --git a/compiler/rustc_borrowck/src/region_infer/dump_mir.rs b/compiler/rustc_borrowck/src/region_infer/dump_mir.rs
index fe5193102..6524b594e 100644
--- a/compiler/rustc_borrowck/src/region_infer/dump_mir.rs
+++ b/compiler/rustc_borrowck/src/region_infer/dump_mir.rs
@@ -1,3 +1,5 @@
+#![deny(rustc::untranslatable_diagnostic)]
+#![deny(rustc::diagnostic_outside_of_impl)]
//! As part of generating the regions, if you enable `-Zdump-mir=nll`,
//! we will generate an annotated copy of the MIR that includes the
//! state of region inference. This code handles emitting the region
@@ -74,8 +76,7 @@ impl<'tcx> RegionInferenceContext<'tcx> {
let mut constraints: Vec<_> = self.constraints.outlives().iter().collect();
constraints.sort_by_key(|c| (c.sup, c.sub));
for constraint in &constraints {
- let OutlivesConstraint { sup, sub, locations, category, span, variance_info: _ } =
- constraint;
+ let OutlivesConstraint { sup, sub, locations, category, span, .. } = constraint;
let (name, arg) = match locations {
Locations::All(span) => {
("All", tcx.sess.source_map().span_to_embeddable_string(*span))
diff --git a/compiler/rustc_borrowck/src/region_infer/graphviz.rs b/compiler/rustc_borrowck/src/region_infer/graphviz.rs
index f31ccd74c..2e15586e0 100644
--- a/compiler/rustc_borrowck/src/region_infer/graphviz.rs
+++ b/compiler/rustc_borrowck/src/region_infer/graphviz.rs
@@ -1,3 +1,5 @@
+#![deny(rustc::untranslatable_diagnostic)]
+#![deny(rustc::diagnostic_outside_of_impl)]
//! This module provides linkage between RegionInferenceContext and
//! `rustc_graphviz` traits, specialized to attaching borrowck analysis
//! data to rendered labels.
diff --git a/compiler/rustc_borrowck/src/region_infer/mod.rs b/compiler/rustc_borrowck/src/region_infer/mod.rs
index 8b63294fb..90e2b6b69 100644
--- a/compiler/rustc_borrowck/src/region_infer/mod.rs
+++ b/compiler/rustc_borrowck/src/region_infer/mod.rs
@@ -6,10 +6,9 @@ use rustc_data_structures::frozen::Frozen;
use rustc_data_structures::fx::{FxHashMap, FxHashSet};
use rustc_data_structures::graph::scc::Sccs;
use rustc_errors::Diagnostic;
-use rustc_hir::def_id::{DefId, CRATE_DEF_ID};
+use rustc_hir::def_id::CRATE_DEF_ID;
use rustc_hir::CRATE_HIR_ID;
use rustc_index::vec::IndexVec;
-use rustc_infer::infer::canonical::QueryOutlivesConstraint;
use rustc_infer::infer::outlives::test_type_match;
use rustc_infer::infer::region_constraints::{GenericKind, VarInfos, VerifyBound, VerifyIfEq};
use rustc_infer::infer::{InferCtxt, NllRegionVariableOrigin, RegionVariableOrigin};
@@ -19,9 +18,7 @@ use rustc_middle::mir::{
};
use rustc_middle::traits::ObligationCause;
use rustc_middle::traits::ObligationCauseCode;
-use rustc_middle::ty::{
- self, subst::SubstsRef, RegionVid, Ty, TyCtxt, TypeFoldable, TypeVisitable,
-};
+use rustc_middle::ty::{self, RegionVid, Ty, TyCtxt, TypeFoldable, TypeVisitable};
use rustc_span::Span;
use crate::{
@@ -89,10 +86,6 @@ pub struct RegionInferenceContext<'tcx> {
/// `member_region_scc`.
member_constraints_applied: Vec<AppliedMemberConstraint>,
- /// Map closure bounds to a `Span` that should be used for error reporting.
- closure_bounds_mapping:
- FxHashMap<Location, FxHashMap<(RegionVid, RegionVid), (ConstraintCategory<'tcx>, Span)>>,
-
/// Map universe indexes to information on why we created it.
universe_causes: FxHashMap<ty::UniverseIndex, UniverseInfo<'tcx>>,
@@ -135,6 +128,7 @@ pub struct RegionInferenceContext<'tcx> {
/// adds a new lower bound to the SCC it is analyzing: so you wind up
/// with `'R: 'O` where `'R` is the pick-region and `'O` is the
/// minimal viable option.
+#[derive(Debug)]
pub(crate) struct AppliedMemberConstraint {
/// The SCC that was affected. (The "member region".)
///
@@ -221,8 +215,8 @@ pub struct TypeTest<'tcx> {
/// The region `'x` that the type must outlive.
pub lower_bound: RegionVid,
- /// Where did this constraint arise and why?
- pub locations: Locations,
+ /// The span to blame.
+ pub span: Span,
/// A test which, if met by the region `'x`, proves that this type
/// constraint is satisfied.
@@ -265,10 +259,6 @@ impl<'tcx> RegionInferenceContext<'tcx> {
universal_region_relations: Frozen<UniversalRegionRelations<'tcx>>,
outlives_constraints: OutlivesConstraintSet<'tcx>,
member_constraints_in: MemberConstraintSet<'tcx, RegionVid>,
- closure_bounds_mapping: FxHashMap<
- Location,
- FxHashMap<(RegionVid, RegionVid), (ConstraintCategory<'tcx>, Span)>,
- >,
universe_causes: FxHashMap<ty::UniverseIndex, UniverseInfo<'tcx>>,
type_tests: Vec<TypeTest<'tcx>>,
liveness_constraints: LivenessValues<RegionVid>,
@@ -310,7 +300,6 @@ impl<'tcx> RegionInferenceContext<'tcx> {
rev_scc_graph: None,
member_constraints,
member_constraints_applied: Vec::new(),
- closure_bounds_mapping,
universe_causes,
scc_universes,
scc_representatives,
@@ -882,13 +871,13 @@ impl<'tcx> RegionInferenceContext<'tcx> {
if deduplicate_errors.insert((
erased_generic_kind,
type_test.lower_bound,
- type_test.locations,
+ type_test.span,
)) {
debug!(
"check_type_test: reporting error for erased_generic_kind={:?}, \
lower_bound_region={:?}, \
- type_test.locations={:?}",
- erased_generic_kind, type_test.lower_bound, type_test.locations,
+ type_test.span={:?}",
+ erased_generic_kind, type_test.lower_bound, type_test.span,
);
errors_buffer.push(RegionErrorKind::TypeTestError { type_test: type_test.clone() });
@@ -931,7 +920,7 @@ impl<'tcx> RegionInferenceContext<'tcx> {
) -> bool {
let tcx = infcx.tcx;
- let TypeTest { generic_kind, lower_bound, locations, verify_bound: _ } = type_test;
+ let TypeTest { generic_kind, lower_bound, span: _, verify_bound: _ } = type_test;
let generic_ty = generic_kind.to_ty(tcx);
let Some(subject) = self.try_promote_type_test_subject(infcx, generic_ty) else {
@@ -959,7 +948,7 @@ impl<'tcx> RegionInferenceContext<'tcx> {
propagated_outlives_requirements.push(ClosureOutlivesRequirement {
subject,
outlived_free_region: static_r,
- blame_span: locations.span(body),
+ blame_span: type_test.span,
category: ConstraintCategory::Boring,
});
@@ -1011,7 +1000,7 @@ impl<'tcx> RegionInferenceContext<'tcx> {
let requirement = ClosureOutlivesRequirement {
subject,
outlived_free_region: upper_bound,
- blame_span: locations.span(body),
+ blame_span: type_test.span,
category: ConstraintCategory::Boring,
};
debug!("try_promote_type_test: pushing {:#?}", requirement);
@@ -1716,6 +1705,7 @@ impl<'tcx> RegionInferenceContext<'tcx> {
});
}
+ #[instrument(level = "debug", skip(self, infcx, errors_buffer))]
fn check_member_constraints(
&self,
infcx: &InferCtxt<'tcx>,
@@ -1723,22 +1713,21 @@ impl<'tcx> RegionInferenceContext<'tcx> {
) {
let member_constraints = self.member_constraints.clone();
for m_c_i in member_constraints.all_indices() {
- debug!("check_member_constraint(m_c_i={:?})", m_c_i);
+ debug!(?m_c_i);
let m_c = &member_constraints[m_c_i];
let member_region_vid = m_c.member_region_vid;
debug!(
- "check_member_constraint: member_region_vid={:?} with value {}",
- member_region_vid,
- self.region_value_str(member_region_vid),
+ ?member_region_vid,
+ value = ?self.region_value_str(member_region_vid),
);
let choice_regions = member_constraints.choice_regions(m_c_i);
- debug!("check_member_constraint: choice_regions={:?}", choice_regions);
+ debug!(?choice_regions);
// Did the member region wind up equal to any of the option regions?
if let Some(o) =
choice_regions.iter().find(|&&o_r| self.eval_equal(o_r, m_c.member_region_vid))
{
- debug!("check_member_constraint: evaluated as equal to {:?}", o);
+ debug!("evaluated as equal to {:?}", o);
continue;
}
@@ -1804,18 +1793,6 @@ impl<'tcx> RegionInferenceContext<'tcx> {
}
}
- pub(crate) fn retrieve_closure_constraint_info(
- &self,
- constraint: OutlivesConstraint<'tcx>,
- ) -> Option<(ConstraintCategory<'tcx>, Span)> {
- match constraint.locations {
- Locations::All(_) => None,
- Locations::Single(loc) => {
- self.closure_bounds_mapping[&loc].get(&(constraint.sup, constraint.sub)).copied()
- }
- }
- }
-
/// Finds a good `ObligationCause` to blame for the fact that `fr1` outlives `fr2`.
pub(crate) fn find_outlives_blame_span(
&self,
@@ -1921,6 +1898,7 @@ impl<'tcx> RegionInferenceContext<'tcx> {
span: p_c.definition_span,
category: ConstraintCategory::OpaqueType,
variance_info: ty::VarianceDiagInfo::default(),
+ from_closure: false,
};
handle_constraint(constraint);
}
@@ -2066,31 +2044,12 @@ impl<'tcx> RegionInferenceContext<'tcx> {
// Classify each of the constraints along the path.
let mut categorized_path: Vec<BlameConstraint<'tcx>> = path
.iter()
- .map(|constraint| {
- let (category, span, from_closure, cause_code) =
- if constraint.category == ConstraintCategory::ClosureBounds {
- if let Some((category, span)) =
- self.retrieve_closure_constraint_info(*constraint)
- {
- (category, span, true, ObligationCauseCode::MiscObligation)
- } else {
- (
- constraint.category,
- constraint.span,
- false,
- ObligationCauseCode::MiscObligation,
- )
- }
- } else {
- (constraint.category, constraint.span, false, cause_code.clone())
- };
- BlameConstraint {
- category,
- from_closure,
- cause: ObligationCause::new(span, CRATE_HIR_ID, cause_code),
- variance_info: constraint.variance_info,
- outlives_constraint: *constraint,
- }
+ .map(|constraint| BlameConstraint {
+ category: constraint.category,
+ from_closure: constraint.from_closure,
+ cause: ObligationCause::new(constraint.span, CRATE_HIR_ID, cause_code.clone()),
+ variance_info: constraint.variance_info,
+ outlives_constraint: *constraint,
})
.collect();
debug!("categorized_path={:#?}", categorized_path);
@@ -2274,92 +2233,6 @@ impl<'tcx> RegionDefinition<'tcx> {
}
}
-pub trait ClosureRegionRequirementsExt<'tcx> {
- fn apply_requirements(
- &self,
- tcx: TyCtxt<'tcx>,
- closure_def_id: DefId,
- closure_substs: SubstsRef<'tcx>,
- ) -> Vec<QueryOutlivesConstraint<'tcx>>;
-}
-
-impl<'tcx> ClosureRegionRequirementsExt<'tcx> for ClosureRegionRequirements<'tcx> {
- /// Given an instance T of the closure type, this method
- /// instantiates the "extra" requirements that we computed for the
- /// closure into the inference context. This has the effect of
- /// adding new outlives obligations to existing variables.
- ///
- /// As described on `ClosureRegionRequirements`, the extra
- /// requirements are expressed in terms of regionvids that index
- /// into the free regions that appear on the closure type. So, to
- /// do this, we first copy those regions out from the type T into
- /// a vector. Then we can just index into that vector to extract
- /// out the corresponding region from T and apply the
- /// requirements.
- fn apply_requirements(
- &self,
- tcx: TyCtxt<'tcx>,
- closure_def_id: DefId,
- closure_substs: SubstsRef<'tcx>,
- ) -> Vec<QueryOutlivesConstraint<'tcx>> {
- debug!(
- "apply_requirements(closure_def_id={:?}, closure_substs={:?})",
- closure_def_id, closure_substs
- );
-
- // Extract the values of the free regions in `closure_substs`
- // into a vector. These are the regions that we will be
- // relating to one another.
- let closure_mapping = &UniversalRegions::closure_mapping(
- tcx,
- closure_substs,
- self.num_external_vids,
- tcx.typeck_root_def_id(closure_def_id),
- );
- debug!("apply_requirements: closure_mapping={:?}", closure_mapping);
-
- // Create the predicates.
- self.outlives_requirements
- .iter()
- .map(|outlives_requirement| {
- let outlived_region = closure_mapping[outlives_requirement.outlived_free_region];
-
- match outlives_requirement.subject {
- ClosureOutlivesSubject::Region(region) => {
- let region = closure_mapping[region];
- debug!(
- "apply_requirements: region={:?} \
- outlived_region={:?} \
- outlives_requirement={:?}",
- region, outlived_region, outlives_requirement,
- );
- (
- ty::Binder::dummy(ty::OutlivesPredicate(
- region.into(),
- outlived_region,
- )),
- ConstraintCategory::BoringNoLocation,
- )
- }
-
- ClosureOutlivesSubject::Ty(ty) => {
- debug!(
- "apply_requirements: ty={:?} \
- outlived_region={:?} \
- outlives_requirement={:?}",
- ty, outlived_region, outlives_requirement,
- );
- (
- ty::Binder::dummy(ty::OutlivesPredicate(ty.into(), outlived_region)),
- ConstraintCategory::BoringNoLocation,
- )
- }
- }
- })
- .collect()
- }
-}
-
#[derive(Clone, Debug)]
pub struct BlameConstraint<'tcx> {
pub category: ConstraintCategory<'tcx>,
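
The logging changes in this file (`#[instrument(...)]`, `debug!(?m_c_i)`) move from format strings to tracing's structured fields. A short stand-alone sketch of that style, assuming the `tracing` and `tracing-subscriber` crates as dependencies; the function and `RegionVid` type here are invented for illustration.

    use tracing::{debug, instrument};

    #[derive(Debug)]
    struct RegionVid(u32);

    #[instrument(level = "debug", skip(choice_regions))]
    fn check_member_constraint(member_region_vid: RegionVid, choice_regions: &[RegionVid]) {
        // `?x` records field `x` with its Debug impl; named fields can hold
        // arbitrary expressions, as in `value = ?self.region_value_str(..)` above.
        debug!(?choice_regions);
        debug!(value = ?choice_regions.len(), "evaluated choice regions");
    }

    fn main() {
        tracing_subscriber::fmt()
            .with_max_level(tracing::Level::DEBUG)
            .init();
        check_member_constraint(RegionVid(3), &[RegionVid(1), RegionVid(2)]);
    }
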
diff --git a/compiler/rustc_borrowck/src/region_infer/opaque_types.rs b/compiler/rustc_borrowck/src/region_infer/opaque_types.rs
index 465f353aa..516a08077 100644
--- a/compiler/rustc_borrowck/src/region_infer/opaque_types.rs
+++ b/compiler/rustc_borrowck/src/region_infer/opaque_types.rs
@@ -1,18 +1,16 @@
-use rustc_data_structures::fx::FxHashMap;
+use rustc_data_structures::fx::{FxHashMap, FxIndexSet};
use rustc_data_structures::vec_map::VecMap;
use rustc_hir::def_id::LocalDefId;
use rustc_hir::OpaqueTyOrigin;
use rustc_infer::infer::TyCtxtInferExt as _;
use rustc_infer::infer::{DefiningAnchor, InferCtxt};
-use rustc_infer::traits::{Obligation, ObligationCause, TraitEngine};
+use rustc_infer::traits::{Obligation, ObligationCause};
use rustc_middle::ty::subst::{GenericArgKind, InternalSubsts};
use rustc_middle::ty::visit::TypeVisitable;
-use rustc_middle::ty::{
- self, OpaqueHiddenType, OpaqueTypeKey, ToPredicate, Ty, TyCtxt, TypeFoldable,
-};
+use rustc_middle::ty::{self, OpaqueHiddenType, OpaqueTypeKey, Ty, TyCtxt, TypeFoldable};
use rustc_span::Span;
use rustc_trait_selection::traits::error_reporting::TypeErrCtxtExt as _;
-use rustc_trait_selection::traits::TraitEngineExt as _;
+use rustc_trait_selection::traits::ObligationCtxt;
use super::RegionInferenceContext;
@@ -63,17 +61,21 @@ impl<'tcx> RegionInferenceContext<'tcx> {
opaque_ty_decls: VecMap<OpaqueTypeKey<'tcx>, (OpaqueHiddenType<'tcx>, OpaqueTyOrigin)>,
) -> VecMap<LocalDefId, OpaqueHiddenType<'tcx>> {
let mut result: VecMap<LocalDefId, OpaqueHiddenType<'tcx>> = VecMap::new();
+
+ let member_constraints: FxHashMap<_, _> = self
+ .member_constraints
+ .all_indices()
+ .map(|ci| (self.member_constraints[ci].key, ci))
+ .collect();
+ debug!(?member_constraints);
+
for (opaque_type_key, (concrete_type, origin)) in opaque_ty_decls {
let substs = opaque_type_key.substs;
debug!(?concrete_type, ?substs);
let mut subst_regions = vec![self.universal_regions.fr_static];
- let universal_substs = infcx.tcx.fold_regions(substs, |region, _| {
- if let ty::RePlaceholder(..) = region.kind() {
- // Higher kinded regions don't need remapping; they don't refer to anything outside of the substs.
- return region;
- }
- let vid = self.to_region_vid(region);
+
+ let to_universal_region = |vid, subst_regions: &mut Vec<_>| {
trace!(?vid);
let scc = self.constraint_sccs.scc(vid);
trace!(?scc);
@@ -94,10 +96,33 @@ impl<'tcx> RegionInferenceContext<'tcx> {
infcx.tcx.lifetimes.re_static
}
}
+ };
+
+ // Start by inserting universal regions from the member_constraint choice regions.
+ // This will ensure they get precedence when folding the regions in the concrete type.
+ if let Some(&ci) = member_constraints.get(&opaque_type_key) {
+ for &vid in self.member_constraints.choice_regions(ci) {
+ to_universal_region(vid, &mut subst_regions);
+ }
+ }
+ debug!(?subst_regions);
+
+ // Next, insert universal regions from substs, so we can translate regions that appear
+ // in them but are not subject to member constraints, for instance closure substs.
+ let universal_substs = infcx.tcx.fold_regions(substs, |region, _| {
+ if let ty::RePlaceholder(..) = region.kind() {
+ // Higher kinded regions don't need remapping; they don't refer to anything outside of the substs.
+ return region;
+ }
+ let vid = self.to_region_vid(region);
+ to_universal_region(vid, &mut subst_regions)
});
+ debug!(?universal_substs);
+ debug!(?subst_regions);
- subst_regions.sort();
- subst_regions.dedup();
+ // Deduplicate the set of regions while keeping the chosen order.
+ let subst_regions = subst_regions.into_iter().collect::<FxIndexSet<_>>();
+ debug!(?subst_regions);
let universal_concrete_type =
infcx.tcx.fold_regions(concrete_type, |region, _| match *region {
@@ -108,8 +133,7 @@ impl<'tcx> RegionInferenceContext<'tcx> {
.unwrap_or(infcx.tcx.lifetimes.re_erased),
_ => region,
});
-
- debug!(?universal_concrete_type, ?universal_substs);
+ debug!(?universal_concrete_type);
let opaque_type_key =
OpaqueTypeKey { def_id: opaque_type_key.def_id, substs: universal_substs };
@@ -221,12 +245,12 @@ impl<'tcx> InferCtxtExt<'tcx> for InferCtxt<'tcx> {
instantiated_ty: OpaqueHiddenType<'tcx>,
origin: OpaqueTyOrigin,
) -> Ty<'tcx> {
- if self.is_tainted_by_errors() {
- return self.tcx.ty_error();
+ if let Some(e) = self.tainted_by_errors() {
+ return self.tcx.ty_error_with_guaranteed(e);
}
let definition_ty = instantiated_ty
- .remap_generic_params_to_declaration_params(opaque_type_key, self.tcx, false, origin)
+ .remap_generic_params_to_declaration_params(opaque_type_key, self.tcx, false)
.ty;
if !check_opaque_type_parameter_valid(
@@ -252,48 +276,45 @@ impl<'tcx> InferCtxtExt<'tcx> for InferCtxt<'tcx> {
// type-alias-impl-trait/issue-67844-nested-opaque.rs
let infcx =
self.tcx.infer_ctxt().with_opaque_type_inference(DefiningAnchor::Bubble).build();
+ let ocx = ObligationCtxt::new(&infcx);
// Require the hidden type to be well-formed with only the generics of the opaque type.
// Defining use functions may have more bounds than the opaque type, which is ok, as long as the
// hidden type is well formed even without those bounds.
- let predicate = ty::Binder::dummy(ty::PredicateKind::WellFormed(definition_ty.into()))
- .to_predicate(infcx.tcx);
- let mut fulfillment_cx = <dyn TraitEngine<'tcx>>::new(infcx.tcx);
+ let predicate = ty::Binder::dummy(ty::PredicateKind::WellFormed(definition_ty.into()));
let id_substs = InternalSubsts::identity_for_item(self.tcx, def_id.to_def_id());
// Require that the hidden type actually fulfills all the bounds of the opaque type, even without
// the bounds that the function supplies.
let opaque_ty = self.tcx.mk_opaque(def_id.to_def_id(), id_substs);
- match infcx
- .at(&ObligationCause::misc(instantiated_ty.span, body_id), param_env)
- .eq(opaque_ty, definition_ty)
- {
- Ok(infer_ok) => {
- for obligation in infer_ok.obligations {
- fulfillment_cx.register_predicate_obligation(&infcx, obligation);
- }
- }
- Err(err) => {
- infcx
- .err_ctxt()
- .report_mismatched_types(
- &ObligationCause::misc(instantiated_ty.span, body_id),
- opaque_ty,
- definition_ty,
- err,
- )
- .emit();
- }
+ if let Err(err) = ocx.eq(
+ &ObligationCause::misc(instantiated_ty.span, body_id),
+ param_env,
+ opaque_ty,
+ definition_ty,
+ ) {
+ infcx
+ .err_ctxt()
+ .report_mismatched_types(
+ &ObligationCause::misc(instantiated_ty.span, body_id),
+ opaque_ty,
+ definition_ty,
+ err,
+ )
+ .emit();
}
- fulfillment_cx.register_predicate_obligation(
- &infcx,
- Obligation::misc(instantiated_ty.span, body_id, param_env, predicate),
- );
+ ocx.register_obligation(Obligation::misc(
+ infcx.tcx,
+ instantiated_ty.span,
+ body_id,
+ param_env,
+ predicate,
+ ));
// Check that all obligations are satisfied by the implementation's
// version.
- let errors = fulfillment_cx.select_all_or_error(&infcx);
+ let errors = ocx.select_all_or_error();
// This is still required for many(half of the tests in ui/type-alias-impl-trait)
// tests to pass
@@ -302,8 +323,8 @@ impl<'tcx> InferCtxtExt<'tcx> for InferCtxt<'tcx> {
if errors.is_empty() {
definition_ty
} else {
- infcx.err_ctxt().report_fulfillment_errors(&errors, None, false);
- self.tcx.ty_error()
+ let reported = infcx.err_ctxt().report_fulfillment_errors(&errors, None);
+ self.tcx.ty_error_with_guaranteed(reported)
}
}
}
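
The hunk above swaps `sort(); dedup();` for collecting into an insertion-ordered set, so the regions inserted first (the member-constraint choice regions) keep precedence when folding the concrete type. A std-only sketch of the same idea, using a `HashSet` for membership plus a `Vec` for order in place of `FxIndexSet`:

    use std::collections::HashSet;

    /// Deduplicate while keeping the first occurrence of each element in place.
    fn dedup_preserving_order<T: Copy + Eq + std::hash::Hash>(items: &[T]) -> Vec<T> {
        let mut seen = HashSet::new();
        items.iter().copied().filter(|x| seen.insert(*x)).collect()
    }

    fn main() {
        // 7 was inserted first, so it must stay ahead of 1 and 3.
        let regions = [7, 1, 7, 3, 1];
        assert_eq!(dedup_preserving_order(&regions), vec![7, 1, 3]);
        // `sort(); dedup();` would instead yield [1, 3, 7], losing that precedence.
    }
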
diff --git a/compiler/rustc_borrowck/src/region_infer/reverse_sccs.rs b/compiler/rustc_borrowck/src/region_infer/reverse_sccs.rs
index 1e6798eee..167f66460 100644
--- a/compiler/rustc_borrowck/src/region_infer/reverse_sccs.rs
+++ b/compiler/rustc_borrowck/src/region_infer/reverse_sccs.rs
@@ -1,3 +1,5 @@
+#![deny(rustc::untranslatable_diagnostic)]
+#![deny(rustc::diagnostic_outside_of_impl)]
use crate::constraints::ConstraintSccIndex;
use crate::RegionInferenceContext;
use itertools::Itertools;
diff --git a/compiler/rustc_borrowck/src/region_infer/values.rs b/compiler/rustc_borrowck/src/region_infer/values.rs
index de20a4bb4..7498ddccf 100644
--- a/compiler/rustc_borrowck/src/region_infer/values.rs
+++ b/compiler/rustc_borrowck/src/region_infer/values.rs
@@ -1,3 +1,5 @@
+#![deny(rustc::untranslatable_diagnostic)]
+#![deny(rustc::diagnostic_outside_of_impl)]
use rustc_data_structures::fx::FxIndexSet;
use rustc_index::bit_set::SparseBitMatrix;
use rustc_index::interval::IntervalSet;
diff --git a/compiler/rustc_borrowck/src/renumber.rs b/compiler/rustc_borrowck/src/renumber.rs
index f30237690..084754830 100644
--- a/compiler/rustc_borrowck/src/renumber.rs
+++ b/compiler/rustc_borrowck/src/renumber.rs
@@ -1,3 +1,5 @@
+#![deny(rustc::untranslatable_diagnostic)]
+#![deny(rustc::diagnostic_outside_of_impl)]
use rustc_index::vec::IndexVec;
use rustc_infer::infer::{InferCtxt, NllRegionVariableOrigin};
use rustc_middle::mir::visit::{MutVisitor, TyContext};
diff --git a/compiler/rustc_borrowck/src/session_diagnostics.rs b/compiler/rustc_borrowck/src/session_diagnostics.rs
index cff3089c3..577332c07 100644
--- a/compiler/rustc_borrowck/src/session_diagnostics.rs
+++ b/compiler/rustc_borrowck/src/session_diagnostics.rs
@@ -49,7 +49,7 @@ pub(crate) struct GenericDoesNotLiveLongEnough {
#[derive(LintDiagnostic)]
#[diag(borrowck_var_does_not_need_mut)]
pub(crate) struct VarNeedNotMut {
- #[suggestion_short(applicability = "machine-applicable", code = "")]
+ #[suggestion(style = "short", applicability = "machine-applicable", code = "")]
pub span: Span,
}
#[derive(Diagnostic)]
@@ -148,3 +148,95 @@ pub(crate) enum RequireStaticErr {
multi_span: MultiSpan,
},
}
+
+#[derive(Subdiagnostic)]
+pub(crate) enum CaptureVarPathUseCause {
+ #[label(borrowck_borrow_due_to_use_generator)]
+ BorrowInGenerator {
+ #[primary_span]
+ path_span: Span,
+ },
+ #[label(borrowck_use_due_to_use_generator)]
+ UseInGenerator {
+ #[primary_span]
+ path_span: Span,
+ },
+ #[label(borrowck_assign_due_to_use_generator)]
+ AssignInGenerator {
+ #[primary_span]
+ path_span: Span,
+ },
+ #[label(borrowck_assign_part_due_to_use_generator)]
+ AssignPartInGenerator {
+ #[primary_span]
+ path_span: Span,
+ },
+ #[label(borrowck_borrow_due_to_use_closure)]
+ BorrowInClosure {
+ #[primary_span]
+ path_span: Span,
+ },
+ #[label(borrowck_use_due_to_use_closure)]
+ UseInClosure {
+ #[primary_span]
+ path_span: Span,
+ },
+ #[label(borrowck_assign_due_to_use_closure)]
+ AssignInClosure {
+ #[primary_span]
+ path_span: Span,
+ },
+ #[label(borrowck_assign_part_due_to_use_closure)]
+ AssignPartInClosure {
+ #[primary_span]
+ path_span: Span,
+ },
+}
+
+#[derive(Subdiagnostic)]
+pub(crate) enum CaptureVarKind {
+ #[label(borrowck_capture_immute)]
+ Immute {
+ #[primary_span]
+ kind_span: Span,
+ },
+ #[label(borrowck_capture_mut)]
+ Mut {
+ #[primary_span]
+ kind_span: Span,
+ },
+ #[label(borrowck_capture_move)]
+ Move {
+ #[primary_span]
+ kind_span: Span,
+ },
+}
+
+#[derive(Subdiagnostic)]
+pub(crate) enum CaptureVarCause {
+ #[label(borrowck_var_borrow_by_use_place_in_generator)]
+ BorrowUsePlaceGenerator {
+ place: String,
+ #[primary_span]
+ var_span: Span,
+ },
+ #[label(borrowck_var_borrow_by_use_place_in_closure)]
+ BorrowUsePlaceClosure {
+ place: String,
+ #[primary_span]
+ var_span: Span,
+ },
+}
+
+#[derive(Diagnostic)]
+#[diag(borrowck_cannot_move_when_borrowed, code = "E0505")]
+pub(crate) struct MoveBorrow<'a> {
+ pub place: &'a str,
+ pub borrow_place: &'a str,
+ pub value_place: &'a str,
+ #[primary_span]
+ #[label(move_label)]
+ pub span: Span,
+ #[label]
+ pub borrow_span: Span,
+}
diff --git a/compiler/rustc_borrowck/src/type_check/canonical.rs b/compiler/rustc_borrowck/src/type_check/canonical.rs
index a581726a1..3617bf58b 100644
--- a/compiler/rustc_borrowck/src/type_check/canonical.rs
+++ b/compiler/rustc_borrowck/src/type_check/canonical.rs
@@ -88,12 +88,11 @@ impl<'a, 'tcx> TypeChecker<'a, 'tcx> {
category: ConstraintCategory<'tcx>,
) {
self.prove_predicate(
- ty::Binder::dummy(ty::PredicateKind::Trait(ty::TraitPredicate {
+ ty::Binder::dummy(ty::PredicateKind::Clause(ty::Clause::Trait(ty::TraitPredicate {
trait_ref,
constness: ty::BoundConstness::NotConst,
polarity: ty::ImplPolarity::Positive,
- }))
- .to_predicate(self.tcx()),
+ }))),
locations,
category,
);
@@ -122,14 +121,11 @@ impl<'a, 'tcx> TypeChecker<'a, 'tcx> {
pub(super) fn prove_predicates(
&mut self,
- predicates: impl IntoIterator<Item = impl ToPredicate<'tcx>>,
+ predicates: impl IntoIterator<Item = impl ToPredicate<'tcx> + std::fmt::Debug>,
locations: Locations,
category: ConstraintCategory<'tcx>,
) {
for predicate in predicates {
- let predicate = predicate.to_predicate(self.tcx());
- debug!("prove_predicates(predicate={:?}, locations={:?})", predicate, locations,);
-
self.prove_predicate(predicate, locations, category);
}
}
@@ -137,11 +133,12 @@ impl<'a, 'tcx> TypeChecker<'a, 'tcx> {
#[instrument(skip(self), level = "debug")]
pub(super) fn prove_predicate(
&mut self,
- predicate: ty::Predicate<'tcx>,
+ predicate: impl ToPredicate<'tcx> + std::fmt::Debug,
locations: Locations,
category: ConstraintCategory<'tcx>,
) {
let param_env = self.param_env;
+ let predicate = predicate.to_predicate(self.tcx());
self.fully_perform_op(
locations,
category,
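
The `prove_predicate` change above widens the parameter to `impl ToPredicate<'tcx> + Debug` and performs the conversion inside the callee, so callers stop converting by hand and `#[instrument]` still sees the original value. A minimal stand-alone sketch of that API shape; `Predicate`, `prove_predicate`, and `prove_predicates` here are invented analogues, with `Into` standing in for `ToPredicate`.

    use std::fmt::Debug;

    #[derive(Debug)]
    struct Predicate(String);

    impl From<&str> for Predicate {
        fn from(s: &str) -> Self {
            Predicate(s.to_string())
        }
    }

    // The callee converts exactly once and can still log the caller's original
    // value, which is what the extra `Debug` bound buys.
    fn prove_predicate(predicate: impl Into<Predicate> + Debug) -> Predicate {
        println!("proving {predicate:?}");
        predicate.into()
    }

    fn prove_predicates(
        predicates: impl IntoIterator<Item = impl Into<Predicate> + Debug>,
    ) -> Vec<Predicate> {
        predicates.into_iter().map(|p| prove_predicate(p)).collect()
    }

    fn main() {
        let proved = prove_predicates(["T: Copy", "T: 'static"]);
        assert_eq!(proved.len(), 2);
        assert_eq!(proved[0].0, "T: Copy");
    }
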
diff --git a/compiler/rustc_borrowck/src/type_check/constraint_conversion.rs b/compiler/rustc_borrowck/src/type_check/constraint_conversion.rs
index d5bfc2f52..ce7f857e2 100644
--- a/compiler/rustc_borrowck/src/type_check/constraint_conversion.rs
+++ b/compiler/rustc_borrowck/src/type_check/constraint_conversion.rs
@@ -1,10 +1,10 @@
-use rustc_infer::infer::canonical::QueryOutlivesConstraint;
+use rustc_hir::def_id::DefId;
use rustc_infer::infer::canonical::QueryRegionConstraints;
use rustc_infer::infer::outlives::env::RegionBoundPairs;
use rustc_infer::infer::outlives::obligations::{TypeOutlives, TypeOutlivesDelegate};
use rustc_infer::infer::region_constraints::{GenericKind, VerifyBound};
use rustc_infer::infer::{self, InferCtxt, SubregionOrigin};
-use rustc_middle::mir::ConstraintCategory;
+use rustc_middle::mir::{ClosureOutlivesSubject, ClosureRegionRequirements, ConstraintCategory};
use rustc_middle::ty::subst::GenericArgKind;
use rustc_middle::ty::TypeFoldable;
use rustc_middle::ty::{self, TyCtxt};
@@ -38,6 +38,7 @@ pub(crate) struct ConstraintConversion<'a, 'tcx> {
locations: Locations,
span: Span,
category: ConstraintCategory<'tcx>,
+ from_closure: bool,
constraints: &'a mut MirTypeckRegionConstraints<'tcx>,
}
@@ -64,6 +65,7 @@ impl<'a, 'tcx> ConstraintConversion<'a, 'tcx> {
span,
category,
constraints,
+ from_closure: false,
}
}
@@ -81,12 +83,62 @@ impl<'a, 'tcx> ConstraintConversion<'a, 'tcx> {
}
self.constraints.member_constraints = tmp;
- for query_constraint in outlives {
- self.convert(query_constraint);
+ for (predicate, constraint_category) in outlives {
+ // At the moment, we never generate any "higher-ranked"
+ // region constraints like `for<'a> 'a: 'b`. At some point
+ // when we move to universes, we will, and this assertion
+ // will start to fail.
+ let predicate = predicate.no_bound_vars().unwrap_or_else(|| {
+ bug!("query_constraint {:?} contained bound vars", predicate,);
+ });
+
+ self.convert(predicate, *constraint_category);
}
}
- fn convert(&mut self, query_constraint: &QueryOutlivesConstraint<'tcx>) {
+ /// Given an instance of the closure type, this method instantiates the "extra" requirements
+ /// that we computed for the closure. This has the effect of adding new outlives obligations
+ /// to existing region variables in `closure_substs`.
+ #[instrument(skip(self), level = "debug")]
+ pub fn apply_closure_requirements(
+ &mut self,
+ closure_requirements: &ClosureRegionRequirements<'tcx>,
+ closure_def_id: DefId,
+ closure_substs: ty::SubstsRef<'tcx>,
+ ) {
+ // Extract the values of the free regions in `closure_substs`
+ // into a vector. These are the regions that we will be
+ // relating to one another.
+ let closure_mapping = &UniversalRegions::closure_mapping(
+ self.tcx,
+ closure_substs,
+ closure_requirements.num_external_vids,
+ closure_def_id.expect_local(),
+ );
+ debug!(?closure_mapping);
+
+ // Create the predicates.
+ let backup = (self.category, self.span, self.from_closure);
+ self.from_closure = true;
+ for outlives_requirement in &closure_requirements.outlives_requirements {
+ let outlived_region = closure_mapping[outlives_requirement.outlived_free_region];
+ let subject = match outlives_requirement.subject {
+ ClosureOutlivesSubject::Region(re) => closure_mapping[re].into(),
+ ClosureOutlivesSubject::Ty(ty) => ty.into(),
+ };
+
+ self.category = outlives_requirement.category;
+ self.span = outlives_requirement.blame_span;
+ self.convert(ty::OutlivesPredicate(subject, outlived_region), self.category);
+ }
+ (self.category, self.span, self.from_closure) = backup;
+ }
+
+ fn convert(
+ &mut self,
+ predicate: ty::OutlivesPredicate<ty::GenericArg<'tcx>, ty::Region<'tcx>>,
+ constraint_category: ConstraintCategory<'tcx>,
+ ) {
debug!("generate: constraints at: {:#?}", self.locations);
// Extract out various useful fields we'll need below.
@@ -94,17 +146,7 @@ impl<'a, 'tcx> ConstraintConversion<'a, 'tcx> {
tcx, region_bound_pairs, implicit_region_bound, param_env, ..
} = *self;
- // At the moment, we never generate any "higher-ranked"
- // region constraints like `for<'a> 'a: 'b`. At some point
- // when we move to universes, we will, and this assertion
- // will start to fail.
- let ty::OutlivesPredicate(k1, r2) =
- query_constraint.0.no_bound_vars().unwrap_or_else(|| {
- bug!("query_constraint {:?} contained bound vars", query_constraint,);
- });
-
- let constraint_category = query_constraint.1;
-
+ let ty::OutlivesPredicate(k1, r2) = predicate;
match k1.unpack() {
GenericArgKind::Lifetime(r1) => {
let r1_vid = self.to_region_vid(r1);
@@ -127,10 +169,7 @@ impl<'a, 'tcx> ConstraintConversion<'a, 'tcx> {
.type_must_outlive(origin, t1, r2, constraint_category);
}
- GenericArgKind::Const(_) => {
- // Consts cannot outlive one another, so we
- // don't need to handle any relations here.
- }
+ GenericArgKind::Const(_) => unreachable!(),
}
}
@@ -160,7 +199,7 @@ impl<'a, 'tcx> ConstraintConversion<'a, 'tcx> {
verify_bound: VerifyBound<'tcx>,
) -> TypeTest<'tcx> {
let lower_bound = self.to_region_vid(region);
- TypeTest { generic_kind, lower_bound, locations: self.locations, verify_bound }
+ TypeTest { generic_kind, lower_bound, span: self.span, verify_bound }
}
fn to_region_vid(&mut self, r: ty::Region<'tcx>) -> ty::RegionVid {
@@ -188,6 +227,7 @@ impl<'a, 'tcx> ConstraintConversion<'a, 'tcx> {
sub,
sup,
variance_info: ty::VarianceDiagInfo::default(),
+ from_closure: self.from_closure,
});
}
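
Several hunks above (here and in region_infer/mod.rs) drop the `closure_bounds_mapping` side table and instead record provenance directly on each outlives constraint via a `from_closure` flag and a blame span. A self-contained before/after sketch of that data-layout choice; the `Location`, `Span`, `OldState`, and `Constraint` types are invented for illustration.

    use std::collections::HashMap;

    #[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
    struct Location(u32);

    #[derive(Clone, Copy, Debug, PartialEq)]
    struct Span(u32, u32);

    // Before: provenance of closure-derived constraints lived in a side table
    // keyed by location and was consulted lazily while assigning blame.
    struct OldState {
        closure_bounds_mapping: HashMap<Location, Span>,
    }

    // After: every constraint is self-describing.
    #[derive(Debug)]
    struct Constraint {
        span: Span,
        from_closure: bool,
    }

    fn blame(constraint: &Constraint) -> (Span, bool) {
        // No lookup, no fallback path: just read the constraint itself.
        (constraint.span, constraint.from_closure)
    }

    fn main() {
        let c = Constraint { span: Span(100, 120), from_closure: true };
        assert_eq!(blame(&c), (Span(100, 120), true));

        // The old layout, shown only for contrast with what the diff removes.
        let mut old = OldState { closure_bounds_mapping: HashMap::new() };
        old.closure_bounds_mapping.insert(Location(7), Span(100, 120));
        assert_eq!(old.closure_bounds_mapping.get(&Location(7)), Some(&Span(100, 120)));
    }
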
diff --git a/compiler/rustc_borrowck/src/type_check/free_region_relations.rs b/compiler/rustc_borrowck/src/type_check/free_region_relations.rs
index 029095926..14cfc3613 100644
--- a/compiler/rustc_borrowck/src/type_check/free_region_relations.rs
+++ b/compiler/rustc_borrowck/src/type_check/free_region_relations.rs
@@ -247,12 +247,13 @@ impl<'tcx> UniversalRegionRelationsBuilder<'_, 'tcx> {
.and(type_op::normalize::Normalize::new(ty))
.fully_perform(self.infcx)
.unwrap_or_else(|_| {
- self.infcx
+ let reported = self
+ .infcx
.tcx
.sess
.delay_span_bug(span, &format!("failed to normalize {:?}", ty));
TypeOpOutput {
- output: self.infcx.tcx.ty_error(),
+ output: self.infcx.tcx.ty_error_with_guaranteed(reported),
constraints: None,
error_info: None,
}
diff --git a/compiler/rustc_borrowck/src/type_check/input_output.rs b/compiler/rustc_borrowck/src/type_check/input_output.rs
index a66ddd27d..62c6f9581 100644
--- a/compiler/rustc_borrowck/src/type_check/input_output.rs
+++ b/compiler/rustc_borrowck/src/type_check/input_output.rs
@@ -42,8 +42,8 @@ impl<'a, 'tcx> TypeChecker<'a, 'tcx> {
user_provided_sig = None;
} else {
let typeck_results = self.tcx().typeck(mir_def_id);
- user_provided_sig = typeck_results.user_provided_sigs.get(&mir_def_id.to_def_id()).map(
- |user_provided_poly_sig| {
+ user_provided_sig =
+ typeck_results.user_provided_sigs.get(&mir_def_id).map(|user_provided_poly_sig| {
// Instantiate the canonicalized variables from
// user-provided signature (e.g., the `_` in the code
// above) with fresh variables.
@@ -60,8 +60,7 @@ impl<'a, 'tcx> TypeChecker<'a, 'tcx> {
LateBoundRegionConversionTime::FnCall,
poly_sig,
)
- },
- );
+ });
}
debug!(?normalized_input_tys, ?body.local_decls);
diff --git a/compiler/rustc_borrowck/src/type_check/liveness/polonius.rs b/compiler/rustc_borrowck/src/type_check/liveness/polonius.rs
index bc76a465e..b344ab46a 100644
--- a/compiler/rustc_borrowck/src/type_check/liveness/polonius.rs
+++ b/compiler/rustc_borrowck/src/type_check/liveness/polonius.rs
@@ -121,8 +121,8 @@ pub(super) fn populate_access_facts<'a, 'tcx>(
}
}
-// For every potentially drop()-touched region `region` in `local`'s type
-// (`kind`), emit a Polonius `use_of_var_derefs_origin(local, origin)` fact.
+/// For every potentially drop()-touched region `region` in `local`'s type
+/// (`kind`), emit a Polonius `use_of_var_derefs_origin(local, origin)` fact.
pub(super) fn add_drop_of_var_derefs_origin<'tcx>(
typeck: &mut TypeChecker<'_, 'tcx>,
local: Local,
diff --git a/compiler/rustc_borrowck/src/type_check/mod.rs b/compiler/rustc_borrowck/src/type_check/mod.rs
index 3c1c3ab45..6d4ec6b72 100644
--- a/compiler/rustc_borrowck/src/type_check/mod.rs
+++ b/compiler/rustc_borrowck/src/type_check/mod.rs
@@ -1,3 +1,5 @@
+#![deny(rustc::untranslatable_diagnostic)]
+#![deny(rustc::diagnostic_outside_of_impl)]
//! This pass type-checks the MIR to ensure it is not broken.
use std::rc::Rc;
@@ -27,12 +29,11 @@ use rustc_middle::mir::AssertKind;
use rustc_middle::mir::*;
use rustc_middle::ty::adjustment::PointerCast;
use rustc_middle::ty::cast::CastTy;
-use rustc_middle::ty::subst::{GenericArgKind, SubstsRef, UserSubsts};
+use rustc_middle::ty::subst::{SubstsRef, UserSubsts};
use rustc_middle::ty::visit::TypeVisitable;
use rustc_middle::ty::{
self, Binder, CanonicalUserTypeAnnotation, CanonicalUserTypeAnnotations, Dynamic,
- OpaqueHiddenType, OpaqueTypeKey, RegionVid, ToPredicate, Ty, TyCtxt, UserType,
- UserTypeAnnotationIndex,
+ OpaqueHiddenType, OpaqueTypeKey, RegionVid, Ty, TyCtxt, UserType, UserTypeAnnotationIndex,
};
use rustc_span::def_id::CRATE_DEF_ID;
use rustc_span::{Span, DUMMY_SP};
@@ -61,7 +62,7 @@ use crate::{
region_infer::values::{
LivenessValues, PlaceholderIndex, PlaceholderIndices, RegionValueElements,
},
- region_infer::{ClosureRegionRequirementsExt, TypeTest},
+ region_infer::TypeTest,
type_check::free_region_relations::{CreateResult, UniversalRegionRelations},
universal_regions::{DefiningTy, UniversalRegions},
Upvar,
@@ -144,7 +145,6 @@ pub(crate) fn type_check<'mir, 'tcx>(
liveness_constraints: LivenessValues::new(elements.clone()),
outlives_constraints: OutlivesConstraintSet::default(),
member_constraints: MemberConstraintSet::default(),
- closure_bounds_mapping: Default::default(),
type_tests: Vec::default(),
universe_causes: FxHashMap::default(),
};
@@ -234,11 +234,11 @@ pub(crate) fn type_check<'mir, 'tcx>(
let mut hidden_type = infcx.resolve_vars_if_possible(decl.hidden_type);
trace!("finalized opaque type {:?} to {:#?}", opaque_type_key, hidden_type.ty.kind());
if hidden_type.has_non_region_infer() {
- infcx.tcx.sess.delay_span_bug(
+ let reported = infcx.tcx.sess.delay_span_bug(
decl.hidden_type.span,
&format!("could not resolve {:#?}", hidden_type.ty.kind()),
);
- hidden_type.ty = infcx.tcx.ty_error();
+ hidden_type.ty = infcx.tcx.ty_error_with_guaranteed(reported);
}
(opaque_type_key, (hidden_type, decl.origin))
@@ -547,10 +547,7 @@ impl<'a, 'b, 'tcx> TypeVerifier<'a, 'b, 'tcx> {
if let PlaceContext::NonMutatingUse(NonMutatingUseContext::Copy) = context {
let tcx = self.tcx();
- let trait_ref = ty::TraitRef {
- def_id: tcx.require_lang_item(LangItem::Copy, Some(self.last_span)),
- substs: tcx.mk_substs_trait(place_ty.ty, &[]),
- };
+ let trait_ref = tcx.at(self.last_span).mk_trait_ref(LangItem::Copy, [place_ty.ty]);
// To have a `Copy` operand, the type `T` of the
// value must be `Copy`. Note that we prove that `T: Copy`,
@@ -584,8 +581,6 @@ impl<'a, 'b, 'tcx> TypeVerifier<'a, 'b, 'tcx> {
// modify their locations.
let all_facts = &mut None;
let mut constraints = Default::default();
- let mut type_tests = Default::default();
- let mut closure_bounds = Default::default();
let mut liveness_constraints =
LivenessValues::new(Rc::new(RegionValueElements::new(&promoted_body)));
// Don't try to add borrow_region facts for the promoted MIR
@@ -596,11 +591,6 @@ impl<'a, 'b, 'tcx> TypeVerifier<'a, 'b, 'tcx> {
&mut this.cx.borrowck_context.constraints.outlives_constraints,
&mut constraints,
);
- mem::swap(&mut this.cx.borrowck_context.constraints.type_tests, &mut type_tests);
- mem::swap(
- &mut this.cx.borrowck_context.constraints.closure_bounds_mapping,
- &mut closure_bounds,
- );
mem::swap(
&mut this.cx.borrowck_context.constraints.liveness_constraints,
&mut liveness_constraints,
@@ -621,13 +611,6 @@ impl<'a, 'b, 'tcx> TypeVerifier<'a, 'b, 'tcx> {
swap_constraints(self);
let locations = location.to_locations();
-
- // Use location of promoted const in collected constraints
- for type_test in type_tests.iter() {
- let mut type_test = type_test.clone();
- type_test.locations = locations;
- self.cx.borrowck_context.constraints.type_tests.push(type_test)
- }
for constraint in constraints.outlives().iter() {
let mut constraint = constraint.clone();
constraint.locations = locations;
@@ -653,18 +636,6 @@ impl<'a, 'b, 'tcx> TypeVerifier<'a, 'b, 'tcx> {
.add_element(region, location);
}
}
-
- if !closure_bounds.is_empty() {
- let combined_bounds_mapping =
- closure_bounds.into_iter().flat_map(|(_, value)| value).collect();
- let existing = self
- .cx
- .borrowck_context
- .constraints
- .closure_bounds_mapping
- .insert(location, combined_bounds_mapping);
- assert!(existing.is_none(), "Multiple promoteds/closures at the same location.");
- }
}
fn sanitize_projection(
@@ -941,9 +912,6 @@ pub(crate) struct MirTypeckRegionConstraints<'tcx> {
pub(crate) member_constraints: MemberConstraintSet<'tcx, RegionVid>,
- pub(crate) closure_bounds_mapping:
- FxHashMap<Location, FxHashMap<(RegionVid, RegionVid), (ConstraintCategory<'tcx>, Span)>>,
-
pub(crate) universe_causes: FxHashMap<ty::UniverseIndex, UniverseInfo<'tcx>>,
pub(crate) type_tests: Vec<TypeTest<'tcx>>,
@@ -1097,8 +1065,7 @@ impl<'a, 'tcx> TypeChecker<'a, 'tcx> {
}
self.prove_predicate(
- ty::Binder::dummy(ty::PredicateKind::WellFormed(inferred_ty.into()))
- .to_predicate(self.tcx()),
+ ty::Binder::dummy(ty::PredicateKind::WellFormed(inferred_ty.into())),
Locations::All(span),
ConstraintCategory::TypeAnnotation,
);
@@ -1222,8 +1189,8 @@ impl<'a, 'tcx> TypeChecker<'a, 'tcx> {
fn check_stmt(&mut self, body: &Body<'tcx>, stmt: &Statement<'tcx>, location: Location) {
let tcx = self.tcx();
debug!("stmt kind: {:?}", stmt.kind);
- match stmt.kind {
- StatementKind::Assign(box (ref place, ref rv)) => {
+ match &stmt.kind {
+ StatementKind::Assign(box (place, rv)) => {
// Assignments to temporaries are not "interesting";
// they are not caused by the user, but rather artifacts
// of lowering. Assignments to other sorts of places *are* interesting
@@ -1303,10 +1270,8 @@ impl<'a, 'tcx> TypeChecker<'a, 'tcx> {
self.check_rvalue(body, rv, location);
if !self.unsized_feature_enabled() {
- let trait_ref = ty::TraitRef {
- def_id: tcx.require_lang_item(LangItem::Sized, Some(self.last_span)),
- substs: tcx.mk_substs_trait(place_ty, &[]),
- };
+ let trait_ref =
+ tcx.at(self.last_span).mk_trait_ref(LangItem::Sized, [place_ty]);
self.prove_trait_ref(
trait_ref,
location.to_locations(),
@@ -1314,11 +1279,11 @@ impl<'a, 'tcx> TypeChecker<'a, 'tcx> {
);
}
}
- StatementKind::AscribeUserType(box (ref place, ref projection), variance) => {
+ StatementKind::AscribeUserType(box (place, projection), variance) => {
let place_ty = place.ty(body, tcx).ty;
if let Err(terr) = self.relate_type_and_user_type(
place_ty,
- variance,
+ *variance,
projection,
Locations::All(stmt.source_info.span),
ConstraintCategory::TypeAnnotation,
@@ -1335,7 +1300,7 @@ impl<'a, 'tcx> TypeChecker<'a, 'tcx> {
);
}
}
- StatementKind::Intrinsic(box ref kind) => match kind {
+ StatementKind::Intrinsic(box kind) => match kind {
NonDivergingIntrinsic::Assume(op) => self.check_operand(op, location),
NonDivergingIntrinsic::CopyNonOverlapping(..) => span_bug!(
stmt.source_info.span,
@@ -1363,7 +1328,7 @@ impl<'a, 'tcx> TypeChecker<'a, 'tcx> {
) {
let tcx = self.tcx();
debug!("terminator kind: {:?}", term.kind);
- match term.kind {
+ match &term.kind {
TerminatorKind::Goto { .. }
| TerminatorKind::Resume
| TerminatorKind::Abort
@@ -1377,7 +1342,7 @@ impl<'a, 'tcx> TypeChecker<'a, 'tcx> {
// no checks needed for these
}
- TerminatorKind::DropAndReplace { ref place, ref value, target: _, unwind: _ } => {
+ TerminatorKind::DropAndReplace { place, value, target: _, unwind: _ } => {
let place_ty = place.ty(body, tcx).ty;
let rv_ty = value.ty(body, tcx);
@@ -1395,13 +1360,13 @@ impl<'a, 'tcx> TypeChecker<'a, 'tcx> {
);
}
}
- TerminatorKind::SwitchInt { ref discr, switch_ty, .. } => {
+ TerminatorKind::SwitchInt { discr, switch_ty, .. } => {
self.check_operand(discr, term_location);
let discr_ty = discr.ty(body, tcx);
if let Err(terr) = self.sub_types(
discr_ty,
- switch_ty,
+ *switch_ty,
term_location.to_locations(),
ConstraintCategory::Assignment,
) {
@@ -1419,14 +1384,7 @@ impl<'a, 'tcx> TypeChecker<'a, 'tcx> {
}
// FIXME: check the values
}
- TerminatorKind::Call {
- ref func,
- ref args,
- ref destination,
- from_hir_call,
- target,
- ..
- } => {
+ TerminatorKind::Call { func, args, destination, from_hir_call, target, .. } => {
self.check_operand(func, term_location);
for arg in args {
self.check_operand(arg, term_location);
@@ -1466,7 +1424,7 @@ impl<'a, 'tcx> TypeChecker<'a, 'tcx> {
ConstraintCategory::Boring,
);
let sig = self.normalize(sig, term_location);
- self.check_call_dest(body, term, &sig, *destination, target, term_location);
+ self.check_call_dest(body, term, &sig, *destination, *target, term_location);
// The ordinary liveness rules will ensure that all
// regions in the type of the callee are live here. We
@@ -1484,9 +1442,9 @@ impl<'a, 'tcx> TypeChecker<'a, 'tcx> {
.add_element(region_vid, term_location);
}
- self.check_call_inputs(body, term, &sig, args, term_location, from_hir_call);
+ self.check_call_inputs(body, term, &sig, args, term_location, *from_hir_call);
}
- TerminatorKind::Assert { ref cond, ref msg, .. } => {
+ TerminatorKind::Assert { cond, msg, .. } => {
self.check_operand(cond, term_location);
let cond_ty = cond.ty(body, tcx);
@@ -1494,7 +1452,7 @@ impl<'a, 'tcx> TypeChecker<'a, 'tcx> {
span_mirbug!(self, term, "bad Assert ({:?}, not bool", cond_ty);
}
- if let AssertKind::BoundsCheck { ref len, ref index } = *msg {
+ if let AssertKind::BoundsCheck { len, index } = msg {
if len.ty(body, tcx) != tcx.types.usize {
span_mirbug!(self, len, "bounds-check length non-usize {:?}", len)
}
@@ -1503,7 +1461,7 @@ impl<'a, 'tcx> TypeChecker<'a, 'tcx> {
}
}
}
- TerminatorKind::Yield { ref value, .. } => {
+ TerminatorKind::Yield { value, .. } => {
self.check_operand(value, term_location);
let value_ty = value.ty(body, tcx);
@@ -1594,10 +1552,7 @@ impl<'a, 'tcx> TypeChecker<'a, 'tcx> {
}
}
None => {
- if !self
- .tcx()
- .conservative_is_privately_uninhabited(self.param_env.and(sig.output()))
- {
+ if !sig.output().is_privately_uninhabited(self.tcx(), self.param_env) {
span_mirbug!(self, term, "call to converging function {:?} w/o dest", sig);
}
}
@@ -1873,6 +1828,7 @@ impl<'a, 'tcx> TypeChecker<'a, 'tcx> {
#[instrument(skip(self, body), level = "debug")]
fn check_rvalue(&mut self, body: &Body<'tcx>, rvalue: &Rvalue<'tcx>, location: Location) {
let tcx = self.tcx();
+ let span = body.source_info(location).span;
match rvalue {
Rvalue::Aggregate(ak, ops) => {
@@ -1896,12 +1852,8 @@ impl<'a, 'tcx> TypeChecker<'a, 'tcx> {
}
Operand::Move(place) => {
// Make sure that repeated elements implement `Copy`.
- let span = body.source_info(location).span;
let ty = place.ty(body, tcx).ty;
- let trait_ref = ty::TraitRef::new(
- tcx.require_lang_item(LangItem::Copy, Some(span)),
- tcx.mk_substs_trait(ty, &[]),
- );
+ let trait_ref = tcx.at(span).mk_trait_ref(LangItem::Copy, [ty]);
self.prove_trait_ref(
trait_ref,
@@ -1914,10 +1866,7 @@ impl<'a, 'tcx> TypeChecker<'a, 'tcx> {
}
&Rvalue::NullaryOp(NullOp::SizeOf | NullOp::AlignOf, ty) => {
- let trait_ref = ty::TraitRef {
- def_id: tcx.require_lang_item(LangItem::Sized, Some(self.last_span)),
- substs: tcx.mk_substs_trait(ty, &[]),
- };
+ let trait_ref = tcx.at(span).mk_trait_ref(LangItem::Sized, [ty]);
self.prove_trait_ref(
trait_ref,
@@ -1929,10 +1878,7 @@ impl<'a, 'tcx> TypeChecker<'a, 'tcx> {
Rvalue::ShallowInitBox(operand, ty) => {
self.check_operand(operand, location);
- let trait_ref = ty::TraitRef {
- def_id: tcx.require_lang_item(LangItem::Sized, Some(self.last_span)),
- substs: tcx.mk_substs_trait(*ty, &[]),
- };
+ let trait_ref = tcx.at(span).mk_trait_ref(LangItem::Sized, [*ty]);
self.prove_trait_ref(
trait_ref,
@@ -2029,11 +1975,9 @@ impl<'a, 'tcx> TypeChecker<'a, 'tcx> {
CastKind::Pointer(PointerCast::Unsize) => {
let &ty = ty;
- let trait_ref = ty::TraitRef {
- def_id: tcx
- .require_lang_item(LangItem::CoerceUnsized, Some(self.last_span)),
- substs: tcx.mk_substs_trait(op.ty(body, tcx), &[ty.into()]),
- };
+ let trait_ref = tcx
+ .at(span)
+ .mk_trait_ref(LangItem::CoerceUnsized, [op.ty(body, tcx), ty]);
self.prove_trait_ref(
trait_ref,
@@ -2062,8 +2006,8 @@ impl<'a, 'tcx> TypeChecker<'a, 'tcx> {
);
let outlives_predicate =
- tcx.mk_predicate(Binder::dummy(ty::PredicateKind::TypeOutlives(
- ty::OutlivesPredicate(self_ty, *region),
+ tcx.mk_predicate(Binder::dummy(ty::PredicateKind::Clause(
+ ty::Clause::TypeOutlives(ty::OutlivesPredicate(self_ty, *region)),
)));
self.prove_predicate(
outlives_predicate,
@@ -2562,6 +2506,7 @@ impl<'a, 'tcx> TypeChecker<'a, 'tcx> {
span: location.to_locations().span(body),
category,
variance_info: ty::VarianceDiagInfo::default(),
+ from_closure: false,
});
match mutbl {
@@ -2636,7 +2581,6 @@ impl<'a, 'tcx> TypeChecker<'a, 'tcx> {
}
// For closures, we have some **extra requirements** we
- //
// have to check. In particular, in their upvars and
// signatures, closures often reference various regions
// from the surrounding function -- we call those the
@@ -2679,62 +2623,22 @@ impl<'a, 'tcx> TypeChecker<'a, 'tcx> {
substs: SubstsRef<'tcx>,
location: Location,
) -> ty::InstantiatedPredicates<'tcx> {
- if let Some(ref closure_region_requirements) = tcx.mir_borrowck(def_id).closure_requirements
- {
- let closure_constraints = QueryRegionConstraints {
- outlives: closure_region_requirements.apply_requirements(
- tcx,
- def_id.to_def_id(),
- substs,
- ),
-
- // Presently, closures never propagate member
- // constraints to their parents -- they are enforced
- // locally. This is largely a non-issue as member
- // constraints only come from `-> impl Trait` and
- // friends which don't appear (thus far...) in
- // closures.
- member_constraints: vec![],
- };
-
- let bounds_mapping = closure_constraints
- .outlives
- .iter()
- .enumerate()
- .filter_map(|(idx, constraint)| {
- let ty::OutlivesPredicate(k1, r2) =
- constraint.0.no_bound_vars().unwrap_or_else(|| {
- bug!("query_constraint {:?} contained bound vars", constraint,);
- });
-
- match k1.unpack() {
- GenericArgKind::Lifetime(r1) => {
- // constraint is r1: r2
- let r1_vid = self.borrowck_context.universal_regions.to_region_vid(r1);
- let r2_vid = self.borrowck_context.universal_regions.to_region_vid(r2);
- let outlives_requirements =
- &closure_region_requirements.outlives_requirements[idx];
- Some((
- (r1_vid, r2_vid),
- (outlives_requirements.category, outlives_requirements.blame_span),
- ))
- }
- GenericArgKind::Type(_) | GenericArgKind::Const(_) => None,
- }
- })
- .collect();
-
- let existing = self
- .borrowck_context
- .constraints
- .closure_bounds_mapping
- .insert(location, bounds_mapping);
- assert!(existing.is_none(), "Multiple closures at the same location.");
-
- self.push_region_constraints(
+ if let Some(closure_requirements) = &tcx.mir_borrowck(def_id).closure_requirements {
+ constraint_conversion::ConstraintConversion::new(
+ self.infcx,
+ self.borrowck_context.universal_regions,
+ self.region_bound_pairs,
+ self.implicit_region_bound,
+ self.param_env,
location.to_locations(),
- ConstraintCategory::ClosureBounds,
- &closure_constraints,
+            DUMMY_SP, // irrelevant; will be overridden.
+ ConstraintCategory::Boring, // same as above.
+ &mut self.borrowck_context.constraints,
+ )
+ .apply_closure_requirements(
+ &closure_requirements,
+ def_id.to_def_id(),
+ substs,
);
}
diff --git a/compiler/rustc_borrowck/src/type_check/relate_tys.rs b/compiler/rustc_borrowck/src/type_check/relate_tys.rs
index 4f2dc263b..b2702eafd 100644
--- a/compiler/rustc_borrowck/src/type_check/relate_tys.rs
+++ b/compiler/rustc_borrowck/src/type_check/relate_tys.rs
@@ -2,9 +2,8 @@ use rustc_infer::infer::nll_relate::{NormalizationStrategy, TypeRelating, TypeRe
use rustc_infer::infer::NllRegionVariableOrigin;
use rustc_infer::traits::PredicateObligations;
use rustc_middle::mir::ConstraintCategory;
-use rustc_middle::ty::error::TypeError;
use rustc_middle::ty::relate::TypeRelation;
-use rustc_middle::ty::{self, Const, Ty};
+use rustc_middle::ty::{self, Ty};
use rustc_span::Span;
use rustc_trait_selection::traits::query::Fallible;
@@ -136,17 +135,11 @@ impl<'tcx> TypeRelatingDelegate<'tcx> for NllTypeRelatingDelegate<'_, '_, 'tcx>
span: self.locations.span(self.type_checker.body),
category: self.category,
variance_info: info,
+ from_closure: false,
},
);
}
- // We don't have to worry about the equality of consts during borrow checking
- // as consts always have a static lifetime.
- // FIXME(oli-obk): is this really true? We can at least have HKL and with
- // inline consts we may have further lifetimes that may be unsound to treat as
- // 'static.
- fn const_equate(&mut self, _a: Const<'tcx>, _b: Const<'tcx>) {}
-
fn normalization() -> NormalizationStrategy {
NormalizationStrategy::Eager
}
@@ -155,10 +148,7 @@ impl<'tcx> TypeRelatingDelegate<'tcx> for NllTypeRelatingDelegate<'_, '_, 'tcx>
true
}
- fn register_opaque_type_obligations(
- &mut self,
- obligations: PredicateObligations<'tcx>,
- ) -> Result<(), TypeError<'tcx>> {
+ fn register_obligations(&mut self, obligations: PredicateObligations<'tcx>) {
self.type_checker
.fully_perform_op(
self.locations,
@@ -171,6 +161,5 @@ impl<'tcx> TypeRelatingDelegate<'tcx> for NllTypeRelatingDelegate<'_, '_, 'tcx>
},
)
.unwrap();
- Ok(())
}
}
diff --git a/compiler/rustc_borrowck/src/universal_regions.rs b/compiler/rustc_borrowck/src/universal_regions.rs
index 2beb5e0ab..a4a0c5b90 100644
--- a/compiler/rustc_borrowck/src/universal_regions.rs
+++ b/compiler/rustc_borrowck/src/universal_regions.rs
@@ -22,7 +22,9 @@ use rustc_hir::{BodyOwnerKind, HirId};
use rustc_index::vec::{Idx, IndexVec};
use rustc_infer::infer::{InferCtxt, NllRegionVariableOrigin};
use rustc_middle::ty::fold::TypeFoldable;
-use rustc_middle::ty::{self, InlineConstSubsts, InlineConstSubstsParts, RegionVid, Ty, TyCtxt};
+use rustc_middle::ty::{
+ self, DefIdTree, InlineConstSubsts, InlineConstSubstsParts, RegionVid, Ty, TyCtxt,
+};
use rustc_middle::ty::{InternalSubsts, SubstsRef};
use std::iter;
@@ -241,7 +243,7 @@ impl<'tcx> UniversalRegions<'tcx> {
tcx: TyCtxt<'tcx>,
closure_substs: SubstsRef<'tcx>,
expected_num_vars: usize,
- typeck_root_def_id: DefId,
+ closure_def_id: LocalDefId,
) -> IndexVec<RegionVid, ty::Region<'tcx>> {
let mut region_mapping = IndexVec::with_capacity(expected_num_vars);
region_mapping.push(tcx.lifetimes.re_static);
@@ -249,7 +251,7 @@ impl<'tcx> UniversalRegions<'tcx> {
region_mapping.push(fr);
});
- for_each_late_bound_region_defined_on(tcx, typeck_root_def_id, |r| {
+ for_each_late_bound_region_in_recursive_scope(tcx, tcx.local_parent(closure_def_id), |r| {
region_mapping.push(r);
});
@@ -339,9 +341,8 @@ impl<'tcx> UniversalRegions<'tcx> {
// tests, and the resulting print-outs include def-ids
// and other things that are not stable across tests!
// So we just include the region-vid. Annoying.
- let typeck_root_def_id = tcx.typeck_root_def_id(def_id);
- for_each_late_bound_region_defined_on(tcx, typeck_root_def_id, |r| {
- err.note(&format!("late-bound region is {:?}", self.to_region_vid(r),));
+ for_each_late_bound_region_in_recursive_scope(tcx, def_id.expect_local(), |r| {
+ err.note(&format!("late-bound region is {:?}", self.to_region_vid(r)));
});
}
DefiningTy::Generator(def_id, substs, _) => {
@@ -354,9 +355,8 @@ impl<'tcx> UniversalRegions<'tcx> {
// FIXME: As above, we'd like to print out the region
// `r` but doing so is not stable across architectures
// and so forth.
- let typeck_root_def_id = tcx.typeck_root_def_id(def_id);
- for_each_late_bound_region_defined_on(tcx, typeck_root_def_id, |r| {
- err.note(&format!("late-bound region is {:?}", self.to_region_vid(r),));
+ for_each_late_bound_region_in_recursive_scope(tcx, def_id.expect_local(), |r| {
+ err.note(&format!("late-bound region is {:?}", self.to_region_vid(r)));
});
}
DefiningTy::FnDef(def_id, substs) => {
@@ -421,13 +421,24 @@ impl<'cx, 'tcx> UniversalRegionsBuilder<'cx, 'tcx> {
first_extern_index
} else {
// If this is a closure, generator, or inline-const, then the late-bound regions from the enclosing
- // function are actually external regions to us. For example, here, 'a is not local
+            // function, and from any enclosing closures, are actually external regions to us. For example, here, 'a is not local
// to the closure c (although it is local to the fn foo):
// fn foo<'a>() {
// let c = || { let x: &'a u32 = ...; }
// }
- self.infcx
- .replace_late_bound_regions_with_nll_infer_vars(self.mir_def.did, &mut indices);
+ for_each_late_bound_region_in_recursive_scope(
+ self.infcx.tcx,
+ self.infcx.tcx.local_parent(self.mir_def.did),
+ |r| {
+ debug!(?r);
+ if !indices.indices.contains_key(&r) {
+ let region_vid = self.infcx.next_nll_region_var(FR);
+ debug!(?region_vid);
+ indices.insert_late_bound_region(r, region_vid.to_region_vid());
+ }
+ },
+ );
+
// Any regions created during the execution of `defining_ty` or during the above
// late-bound region replacement are all considered 'extern' regions
self.infcx.num_region_vars()
@@ -444,12 +455,16 @@ impl<'cx, 'tcx> UniversalRegionsBuilder<'cx, 'tcx> {
bound_inputs_and_output,
&mut indices,
);
- // Converse of above, if this is a function then the late-bound regions declared on its
- // signature are local to the fn.
- if self.mir_def.did.to_def_id() == typeck_root_def_id {
- self.infcx
- .replace_late_bound_regions_with_nll_infer_vars(self.mir_def.did, &mut indices);
- }
+ // Converse of above, if this is a function/closure then the late-bound regions declared on its
+ // signature are local.
+ for_each_late_bound_region_in_item(self.infcx.tcx, self.mir_def.did, |r| {
+ debug!(?r);
+ if !indices.indices.contains_key(&r) {
+ let region_vid = self.infcx.next_nll_region_var(FR);
+ debug!(?region_vid);
+ indices.insert_late_bound_region(r, region_vid.to_region_vid());
+ }
+ });
let (unnormalized_output_ty, mut unnormalized_input_tys) =
inputs_and_output.split_last().unwrap();
@@ -572,9 +587,9 @@ impl<'cx, 'tcx> UniversalRegionsBuilder<'cx, 'tcx> {
let typeck_root_def_id = tcx.typeck_root_def_id(self.mir_def.did.to_def_id());
let identity_substs = InternalSubsts::identity_for_item(tcx, typeck_root_def_id);
let fr_substs = match defining_ty {
- DefiningTy::Closure(_, ref substs)
- | DefiningTy::Generator(_, ref substs, _)
- | DefiningTy::InlineConst(_, ref substs) => {
+ DefiningTy::Closure(_, substs)
+ | DefiningTy::Generator(_, substs, _)
+ | DefiningTy::InlineConst(_, substs) => {
// In the case of closures, we rely on the fact that
// the first N elements in the ClosureSubsts are
// inherited from the `typeck_root_def_id`.
@@ -692,7 +707,13 @@ trait InferCtxtExt<'tcx> {
where
T: TypeFoldable<'tcx>;
- fn replace_late_bound_regions_with_nll_infer_vars(
+ fn replace_late_bound_regions_with_nll_infer_vars_in_recursive_scope(
+ &self,
+ mir_def_id: LocalDefId,
+ indices: &mut UniversalRegionIndices<'tcx>,
+ );
+
+ fn replace_late_bound_regions_with_nll_infer_vars_in_item(
&self,
mir_def_id: LocalDefId,
indices: &mut UniversalRegionIndices<'tcx>,
@@ -746,13 +767,28 @@ impl<'tcx> InferCtxtExt<'tcx> for InferCtxt<'tcx> {
/// set of late-bound regions and checks for any that we have not yet seen, adding them to the
/// inputs vector.
#[instrument(skip(self, indices))]
- fn replace_late_bound_regions_with_nll_infer_vars(
+ fn replace_late_bound_regions_with_nll_infer_vars_in_recursive_scope(
+ &self,
+ mir_def_id: LocalDefId,
+ indices: &mut UniversalRegionIndices<'tcx>,
+ ) {
+ for_each_late_bound_region_in_recursive_scope(self.tcx, mir_def_id, |r| {
+ debug!(?r);
+ if !indices.indices.contains_key(&r) {
+ let region_vid = self.next_nll_region_var(FR);
+ debug!(?region_vid);
+ indices.insert_late_bound_region(r, region_vid.to_region_vid());
+ }
+ });
+ }
+
+ #[instrument(skip(self, indices))]
+ fn replace_late_bound_regions_with_nll_infer_vars_in_item(
&self,
mir_def_id: LocalDefId,
indices: &mut UniversalRegionIndices<'tcx>,
) {
- let typeck_root_def_id = self.tcx.typeck_root_def_id(mir_def_id.to_def_id());
- for_each_late_bound_region_defined_on(self.tcx, typeck_root_def_id, |r| {
+ for_each_late_bound_region_in_item(self.tcx, mir_def_id, |r| {
debug!(?r);
if !indices.indices.contains_key(&r) {
let region_vid = self.next_nll_region_var(FR);
@@ -803,21 +839,44 @@ impl<'tcx> UniversalRegionIndices<'tcx> {
}
}
-/// Iterates over the late-bound regions defined on fn_def_id and
-/// invokes `f` with the liberated form of each one.
-fn for_each_late_bound_region_defined_on<'tcx>(
+/// Iterates over the late-bound regions defined on `mir_def_id` and all of its
+/// parents, up to the typeck root, and invokes `f` with the liberated form
+/// of each one.
+fn for_each_late_bound_region_in_recursive_scope<'tcx>(
tcx: TyCtxt<'tcx>,
- fn_def_id: DefId,
+ mut mir_def_id: LocalDefId,
mut f: impl FnMut(ty::Region<'tcx>),
) {
- if let Some(late_bounds) = tcx.is_late_bound_map(fn_def_id.expect_local()) {
- for &region_def_id in late_bounds.iter() {
- let name = tcx.item_name(region_def_id.to_def_id());
- let liberated_region = tcx.mk_region(ty::ReFree(ty::FreeRegion {
- scope: fn_def_id,
- bound_region: ty::BoundRegionKind::BrNamed(region_def_id.to_def_id(), name),
- }));
- f(liberated_region);
+ let typeck_root_def_id = tcx.typeck_root_def_id(mir_def_id.to_def_id());
+
+ // Walk up the tree, collecting late-bound regions until we hit the typeck root
+ loop {
+ for_each_late_bound_region_in_item(tcx, mir_def_id, &mut f);
+
+ if mir_def_id.to_def_id() == typeck_root_def_id {
+ break;
+ } else {
+ mir_def_id = tcx.local_parent(mir_def_id);
}
}
}
+
+/// Iterates over the late-bound regions defined on `mir_def_id` itself (without
+/// walking up to its parents) and invokes `f` with the liberated form of each one.
+fn for_each_late_bound_region_in_item<'tcx>(
+ tcx: TyCtxt<'tcx>,
+ mir_def_id: LocalDefId,
+ mut f: impl FnMut(ty::Region<'tcx>),
+) {
+ if !tcx.def_kind(mir_def_id).is_fn_like() {
+ return;
+ }
+
+ for bound_var in tcx.late_bound_vars(tcx.hir().local_def_id_to_hir_id(mir_def_id)) {
+ let ty::BoundVariableKind::Region(bound_region) = bound_var else { continue; };
+ let liberated_region = tcx
+ .mk_region(ty::ReFree(ty::FreeRegion { scope: mir_def_id.to_def_id(), bound_region }));
+ f(liberated_region);
+ }
+}
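As a sketch of why the recursive-scope walk matters (illustrative, names invented): late-bound regions of every enclosing item up to the typeck root are external to a nested closure, while `for_each_late_bound_region_in_item` yields only the regions bound by the item itself.

    fn foo<'a>(x: &'a u32) {
        // `'a` is late-bound on `foo`. When borrow-checking `inner`, it is
        // found by walking the recursive scope (inner -> outer -> foo), not by
        // inspecting `inner` on its own.
        let outer = || {
            let inner = || {
                let _: &'a u32 = x;
            };
            inner();
        };
        outer();
    }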
diff --git a/compiler/rustc_borrowck/src/used_muts.rs b/compiler/rustc_borrowck/src/used_muts.rs
index 8833753b1..e297b1230 100644
--- a/compiler/rustc_borrowck/src/used_muts.rs
+++ b/compiler/rustc_borrowck/src/used_muts.rs
@@ -1,3 +1,5 @@
+#![deny(rustc::untranslatable_diagnostic)]
+#![deny(rustc::diagnostic_outside_of_impl)]
use rustc_data_structures::fx::FxHashSet;
use rustc_middle::mir::visit::{PlaceContext, Visitor};
use rustc_middle::mir::{
diff --git a/compiler/rustc_builtin_macros/Cargo.toml b/compiler/rustc_builtin_macros/Cargo.toml
index 6469d0d7b..467fa932a 100644
--- a/compiler/rustc_builtin_macros/Cargo.toml
+++ b/compiler/rustc_builtin_macros/Cargo.toml
@@ -23,5 +23,5 @@ rustc_session = { path = "../rustc_session" }
rustc_span = { path = "../rustc_span" }
rustc_target = { path = "../rustc_target" }
smallvec = { version = "1.8.1", features = ["union", "may_dangle"] }
-thin-vec = "0.2.8"
+thin-vec = "0.2.9"
tracing = "0.1"
diff --git a/compiler/rustc_builtin_macros/src/alloc_error_handler.rs b/compiler/rustc_builtin_macros/src/alloc_error_handler.rs
new file mode 100644
index 000000000..95e38e4b0
--- /dev/null
+++ b/compiler/rustc_builtin_macros/src/alloc_error_handler.rs
@@ -0,0 +1,93 @@
+use crate::util::check_builtin_macro_attribute;
+
+use rustc_ast::ptr::P;
+use rustc_ast::{self as ast, FnHeader, FnSig, Generics, StmtKind};
+use rustc_ast::{Fn, ItemKind, Stmt, TyKind, Unsafe};
+use rustc_expand::base::{Annotatable, ExtCtxt};
+use rustc_span::symbol::{kw, sym, Ident};
+use rustc_span::Span;
+use thin_vec::thin_vec;
+
+pub fn expand(
+ ecx: &mut ExtCtxt<'_>,
+ _span: Span,
+ meta_item: &ast::MetaItem,
+ item: Annotatable,
+) -> Vec<Annotatable> {
+ check_builtin_macro_attribute(ecx, meta_item, sym::alloc_error_handler);
+
+ let orig_item = item.clone();
+
+ // Allow using `#[alloc_error_handler]` on an item statement
+ // FIXME - if we get deref patterns, use them to reduce duplication here
+ let (item, is_stmt, sig_span) =
+ if let Annotatable::Item(item) = &item
+ && let ItemKind::Fn(fn_kind) = &item.kind
+ {
+ (item, false, ecx.with_def_site_ctxt(fn_kind.sig.span))
+ } else if let Annotatable::Stmt(stmt) = &item
+ && let StmtKind::Item(item) = &stmt.kind
+ && let ItemKind::Fn(fn_kind) = &item.kind
+ {
+ (item, true, ecx.with_def_site_ctxt(fn_kind.sig.span))
+ } else {
+ ecx.sess.parse_sess.span_diagnostic.span_err(item.span(), "alloc_error_handler must be a function");
+ return vec![orig_item.clone()];
+ };
+
+    // Generate the new items using the span of the original item.
+ let span = ecx.with_def_site_ctxt(item.span);
+
+    // Generate the item statement for the handler shim.
+ let stmts = vec![generate_handler(ecx, item.ident, span, sig_span)];
+
+    // Generate an anonymous constant serving as container for the handler shim.
+ let const_ty = ecx.ty(sig_span, TyKind::Tup(Vec::new()));
+ let const_body = ecx.expr_block(ecx.block(span, stmts));
+ let const_item = ecx.item_const(span, Ident::new(kw::Underscore, span), const_ty, const_body);
+ let const_item = if is_stmt {
+ Annotatable::Stmt(P(ecx.stmt_item(span, const_item)))
+ } else {
+ Annotatable::Item(const_item)
+ };
+
+ // Return the original item and the new methods.
+ vec![orig_item, const_item]
+}
+
+// #[rustc_std_internal_symbol]
+// unsafe fn __rg_oom(size: usize, align: usize) -> ! {
+// handler(core::alloc::Layout::from_size_align_unchecked(size, align))
+// }
+fn generate_handler(cx: &ExtCtxt<'_>, handler: Ident, span: Span, sig_span: Span) -> Stmt {
+ let usize = cx.path_ident(span, Ident::new(sym::usize, span));
+ let ty_usize = cx.ty_path(usize);
+ let size = Ident::from_str_and_span("size", span);
+ let align = Ident::from_str_and_span("align", span);
+
+ let layout_new = cx.std_path(&[sym::alloc, sym::Layout, sym::from_size_align_unchecked]);
+ let layout_new = cx.expr_path(cx.path(span, layout_new));
+ let layout =
+ cx.expr_call(span, layout_new, vec![cx.expr_ident(span, size), cx.expr_ident(span, align)]);
+
+ let call = cx.expr_call_ident(sig_span, handler, vec![layout]);
+
+ let never = ast::FnRetTy::Ty(cx.ty(span, TyKind::Never));
+ let params = vec![cx.param(span, size, ty_usize.clone()), cx.param(span, align, ty_usize)];
+ let decl = cx.fn_decl(params, never);
+ let header = FnHeader { unsafety: Unsafe::Yes(span), ..FnHeader::default() };
+    let sig = FnSig { decl, header, span };
+
+ let body = Some(cx.block_expr(call));
+ let kind = ItemKind::Fn(Box::new(Fn {
+ defaultness: ast::Defaultness::Final,
+ sig,
+ generics: Generics::default(),
+ body,
+ }));
+
+ let attrs = thin_vec![cx.attr_word(sym::rustc_std_internal_symbol, span)];
+
+ let item = cx.item(span, Ident::from_str_and_span("__rg_oom", span), attrs, kind);
+ cx.stmt_item(sig_span, item)
+}
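A hedged usage sketch for the attribute this new builtin macro implements (crate-level attributes belong at the crate root; a global allocator and panic handler are assumed to exist elsewhere, and `handle_oom` is an invented name):

    #![feature(alloc_error_handler)]
    #![no_std]

    #[alloc_error_handler]
    fn handle_oom(layout: core::alloc::Layout) -> ! {
        panic!("allocation of {} bytes failed", layout.size())
    }

The expansion keeps the original function and, as the comment above `generate_handler` shows, adds a `#[rustc_std_internal_symbol]` shim `__rg_oom(size, align)` that rebuilds the `Layout` with `from_size_align_unchecked` and forwards it to the user's handler.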
diff --git a/compiler/rustc_builtin_macros/src/asm.rs b/compiler/rustc_builtin_macros/src/asm.rs
index a1051d990..900c44274 100644
--- a/compiler/rustc_builtin_macros/src/asm.rs
+++ b/compiler/rustc_builtin_macros/src/asm.rs
@@ -172,7 +172,11 @@ pub fn parse_asm_args<'a>(
// If it can't possibly expand to a string, provide diagnostics here to include other
// things it could have been.
match template.kind {
- ast::ExprKind::Lit(ast::Lit { kind: ast::LitKind::Str(..), .. }) => {}
+ ast::ExprKind::Lit(token_lit)
+ if matches!(
+ token_lit.kind,
+ token::LitKind::Str | token::LitKind::StrRaw(_)
+ ) => {}
ast::ExprKind::MacCall(..) => {}
_ => {
let errstr = if is_global_asm {
@@ -560,7 +564,7 @@ fn expand_preparsed_asm(ecx: &mut ExtCtxt<'_>, args: AsmArgs) -> Option<ast::Inl
let template_snippet = ecx.source_map().span_to_snippet(template_sp).ok();
template_strs.push((
template_str,
- template_snippet.as_ref().map(|s| Symbol::intern(s)),
+ template_snippet.as_deref().map(Symbol::intern),
template_sp,
));
let template_str = template_str.as_str();
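For reference, a small sketch of the literal kinds the tightened template check matches (illustrative; assumes an architecture where `nop` is a valid instruction, and `nops` is a made-up function name):

    use std::arch::asm;

    fn nops() {
        unsafe {
            asm!("nop");  // token::LitKind::Str
            asm!(r"nop"); // token::LitKind::StrRaw(_)
        }
    }

Other literal kinds still fall through to the tailored diagnostic below; the arm now matches on `token::LitKind` because `ExprKind::Lit` carries an unparsed token literal after this series of changes.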
diff --git a/compiler/rustc_builtin_macros/src/assert.rs b/compiler/rustc_builtin_macros/src/assert.rs
index 119724b50..8555c3593 100644
--- a/compiler/rustc_builtin_macros/src/assert.rs
+++ b/compiler/rustc_builtin_macros/src/assert.rs
@@ -4,7 +4,7 @@ use crate::edition_panic::use_panic_2021;
use rustc_ast::ptr::P;
use rustc_ast::token;
use rustc_ast::tokenstream::{DelimSpan, TokenStream};
-use rustc_ast::{Expr, ExprKind, MacArgs, MacCall, MacDelimiter, Path, PathSegment, UnOp};
+use rustc_ast::{DelimArgs, Expr, ExprKind, MacCall, MacDelimiter, Path, PathSegment, UnOp};
use rustc_ast_pretty::pprust;
use rustc_errors::{Applicability, PResult};
use rustc_expand::base::{DummyResult, ExtCtxt, MacEager, MacResult};
@@ -54,11 +54,11 @@ pub fn expand_assert<'cx>(
call_site_span,
ExprKind::MacCall(P(MacCall {
path: panic_path(),
- args: P(MacArgs::Delimited(
- DelimSpan::from_single(call_site_span),
- MacDelimiter::Parenthesis,
+ args: P(DelimArgs {
+ dspan: DelimSpan::from_single(call_site_span),
+ delim: MacDelimiter::Parenthesis,
tokens,
- )),
+ }),
prior_type_ascription: None,
})),
);
diff --git a/compiler/rustc_builtin_macros/src/assert/context.rs b/compiler/rustc_builtin_macros/src/assert/context.rs
index bb6839360..93b07801e 100644
--- a/compiler/rustc_builtin_macros/src/assert/context.rs
+++ b/compiler/rustc_builtin_macros/src/assert/context.rs
@@ -1,10 +1,9 @@
use rustc_ast::{
- attr,
ptr::P,
token,
tokenstream::{DelimSpan, TokenStream, TokenTree},
- BinOpKind, BorrowKind, Expr, ExprKind, ItemKind, MacArgs, MacCall, MacDelimiter, Mutability,
- Path, PathSegment, Stmt, StructRest, UnOp, UseTree, UseTreeKind, DUMMY_NODE_ID,
+ BinOpKind, BorrowKind, DelimArgs, Expr, ExprKind, ItemKind, MacCall, MacDelimiter, MethodCall,
+ Mutability, Path, PathSegment, Stmt, StructRest, UnOp, UseTree, UseTreeKind, DUMMY_NODE_ID,
};
use rustc_ast_pretty::pprust;
use rustc_data_structures::fx::FxHashSet;
@@ -107,7 +106,7 @@ impl<'cx, 'a> Context<'cx, 'a> {
(
UseTree {
prefix: this.cx.path(this.span, vec![Ident::with_dummy_span(sym)]),
- kind: UseTreeKind::Simple(None, DUMMY_NODE_ID, DUMMY_NODE_ID),
+ kind: UseTreeKind::Simple(None),
span: this.span,
},
DUMMY_NODE_ID,
@@ -118,10 +117,7 @@ impl<'cx, 'a> Context<'cx, 'a> {
self.cx.item(
self.span,
Ident::empty(),
- thin_vec![self.cx.attribute(attr::mk_list_item(
- Ident::new(sym::allow, self.span),
- vec![attr::mk_nested_word_item(Ident::new(sym::unused_imports, self.span))],
- ))],
+ thin_vec![self.cx.attr_nested_word(sym::allow, sym::unused_imports, self.span)],
ItemKind::Use(UseTree {
prefix: self.cx.path(self.span, self.cx.std_path(&[sym::asserting])),
kind: UseTreeKind::Nested(vec![
@@ -181,11 +177,11 @@ impl<'cx, 'a> Context<'cx, 'a> {
self.span,
ExprKind::MacCall(P(MacCall {
path: panic_path,
- args: P(MacArgs::Delimited(
- DelimSpan::from_single(self.span),
- MacDelimiter::Parenthesis,
- initial.into_iter().chain(captures).collect::<TokenStream>(),
- )),
+ args: P(DelimArgs {
+ dspan: DelimSpan::from_single(self.span),
+ delim: MacDelimiter::Parenthesis,
+ tokens: initial.into_iter().chain(captures).collect::<TokenStream>(),
+ }),
prior_type_ascription: None,
})),
)
@@ -195,19 +191,19 @@ impl<'cx, 'a> Context<'cx, 'a> {
///
/// See [Self::manage_initial_capture] and [Self::manage_try_capture]
fn manage_cond_expr(&mut self, expr: &mut P<Expr>) {
- match (*expr).kind {
- ExprKind::AddrOf(_, mutability, ref mut local_expr) => {
+ match &mut expr.kind {
+ ExprKind::AddrOf(_, mutability, local_expr) => {
self.with_is_consumed_management(
matches!(mutability, Mutability::Mut),
|this| this.manage_cond_expr(local_expr)
);
}
- ExprKind::Array(ref mut local_exprs) => {
+ ExprKind::Array(local_exprs) => {
for local_expr in local_exprs {
self.manage_cond_expr(local_expr);
}
}
- ExprKind::Binary(ref op, ref mut lhs, ref mut rhs) => {
+ ExprKind::Binary(op, lhs, rhs) => {
self.with_is_consumed_management(
matches!(
op.node,
@@ -230,56 +226,56 @@ impl<'cx, 'a> Context<'cx, 'a> {
}
);
}
- ExprKind::Call(_, ref mut local_exprs) => {
+ ExprKind::Call(_, local_exprs) => {
for local_expr in local_exprs {
self.manage_cond_expr(local_expr);
}
}
- ExprKind::Cast(ref mut local_expr, _) => {
+ ExprKind::Cast(local_expr, _) => {
self.manage_cond_expr(local_expr);
}
- ExprKind::Index(ref mut prefix, ref mut suffix) => {
+ ExprKind::Index(prefix, suffix) => {
self.manage_cond_expr(prefix);
self.manage_cond_expr(suffix);
}
- ExprKind::MethodCall(_, _,ref mut local_exprs, _) => {
- for local_expr in local_exprs.iter_mut() {
- self.manage_cond_expr(local_expr);
+ ExprKind::MethodCall(call) => {
+ for arg in &mut call.args {
+ self.manage_cond_expr(arg);
}
}
- ExprKind::Path(_, Path { ref segments, .. }) if let &[ref path_segment] = &segments[..] => {
+ ExprKind::Path(_, Path { segments, .. }) if let [path_segment] = &segments[..] => {
let path_ident = path_segment.ident;
self.manage_initial_capture(expr, path_ident);
}
- ExprKind::Paren(ref mut local_expr) => {
+ ExprKind::Paren(local_expr) => {
self.manage_cond_expr(local_expr);
}
- ExprKind::Range(ref mut prefix, ref mut suffix, _) => {
- if let Some(ref mut elem) = prefix {
+ ExprKind::Range(prefix, suffix, _) => {
+ if let Some(elem) = prefix {
self.manage_cond_expr(elem);
}
- if let Some(ref mut elem) = suffix {
+ if let Some(elem) = suffix {
self.manage_cond_expr(elem);
}
}
- ExprKind::Repeat(ref mut local_expr, ref mut elem) => {
+ ExprKind::Repeat(local_expr, elem) => {
self.manage_cond_expr(local_expr);
self.manage_cond_expr(&mut elem.value);
}
- ExprKind::Struct(ref mut elem) => {
+ ExprKind::Struct(elem) => {
for field in &mut elem.fields {
self.manage_cond_expr(&mut field.expr);
}
- if let StructRest::Base(ref mut local_expr) = elem.rest {
+ if let StructRest::Base(local_expr) = &mut elem.rest {
self.manage_cond_expr(local_expr);
}
}
- ExprKind::Tup(ref mut local_exprs) => {
+ ExprKind::Tup(local_exprs) => {
for local_expr in local_exprs {
self.manage_cond_expr(local_expr);
}
}
- ExprKind::Unary(un_op, ref mut local_expr) => {
+ ExprKind::Unary(un_op, local_expr) => {
self.with_is_consumed_management(
matches!(un_op, UnOp::Neg | UnOp::Not),
|this| this.manage_cond_expr(local_expr)
@@ -296,17 +292,18 @@ impl<'cx, 'a> Context<'cx, 'a> {
| ExprKind::Block(_, _)
| ExprKind::Box(_)
| ExprKind::Break(_, _)
- | ExprKind::Closure(_, _, _, _, _, _, _)
+ | ExprKind::Closure(_)
| ExprKind::ConstBlock(_)
| ExprKind::Continue(_)
| ExprKind::Err
| ExprKind::Field(_, _)
| ExprKind::ForLoop(_, _, _, _)
| ExprKind::If(_, _, _)
+ | ExprKind::IncludedBytes(..)
| ExprKind::InlineAsm(_)
| ExprKind::Let(_, _, _)
| ExprKind::Lit(_)
- | ExprKind::Loop(_, _)
+ | ExprKind::Loop(_, _, _)
| ExprKind::MacCall(_)
| ExprKind::Match(_, _)
| ExprKind::Path(_, _)
@@ -441,12 +438,12 @@ fn expr_addr_of_mut(cx: &ExtCtxt<'_>, sp: Span, e: P<Expr>) -> P<Expr> {
fn expr_method_call(
cx: &ExtCtxt<'_>,
- path: PathSegment,
+ seg: PathSegment,
receiver: P<Expr>,
args: Vec<P<Expr>>,
span: Span,
) -> P<Expr> {
- cx.expr(span, ExprKind::MethodCall(path, receiver, args, span))
+ cx.expr(span, ExprKind::MethodCall(Box::new(MethodCall { seg, receiver, args, span })))
}
fn expr_paren(cx: &ExtCtxt<'_>, sp: Span, e: P<Expr>) -> P<Expr> {
diff --git a/compiler/rustc_builtin_macros/src/cfg_accessible.rs b/compiler/rustc_builtin_macros/src/cfg_accessible.rs
index cb5359dd1..4e4cafc71 100644
--- a/compiler/rustc_builtin_macros/src/cfg_accessible.rs
+++ b/compiler/rustc_builtin_macros/src/cfg_accessible.rs
@@ -34,12 +34,13 @@ impl MultiItemModifier for Expander {
span: Span,
meta_item: &ast::MetaItem,
item: Annotatable,
+ _is_derive_const: bool,
) -> ExpandResult<Vec<Annotatable>, Annotatable> {
let template = AttributeTemplate { list: Some("path"), ..Default::default() };
- let attr = &ecx.attribute(meta_item.clone());
- validate_attr::check_builtin_attribute(
+ validate_attr::check_builtin_meta_item(
&ecx.sess.parse_sess,
- attr,
+ &meta_item,
+ ast::AttrStyle::Outer,
sym::cfg_accessible,
template,
);
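A quick reminder of what the expander being adjusted here does (illustrative; unstable `cfg_accessible` feature, invented constant name): the annotated item is kept only when the given path resolves and is accessible.

    #![feature(cfg_accessible)]

    #[cfg_accessible(core::num::NonZeroU8)]
    const NONZERO_U8_IS_AVAILABLE: bool = true;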
diff --git a/compiler/rustc_builtin_macros/src/concat.rs b/compiler/rustc_builtin_macros/src/concat.rs
index 41f4e8c23..e2d71825d 100644
--- a/compiler/rustc_builtin_macros/src/concat.rs
+++ b/compiler/rustc_builtin_macros/src/concat.rs
@@ -1,6 +1,7 @@
use rustc_ast as ast;
use rustc_ast::tokenstream::TokenStream;
use rustc_expand::base::{self, DummyResult};
+use rustc_session::errors::report_lit_error;
use rustc_span::symbol::Symbol;
use std::string::String;
@@ -18,31 +19,34 @@ pub fn expand_concat(
let mut has_errors = false;
for e in es {
match e.kind {
- ast::ExprKind::Lit(ref lit) => match lit.kind {
- ast::LitKind::Str(ref s, _) | ast::LitKind::Float(ref s, _) => {
+ ast::ExprKind::Lit(token_lit) => match ast::LitKind::from_token_lit(token_lit) {
+ Ok(ast::LitKind::Str(s, _) | ast::LitKind::Float(s, _)) => {
accumulator.push_str(s.as_str());
}
- ast::LitKind::Char(c) => {
+ Ok(ast::LitKind::Char(c)) => {
accumulator.push(c);
}
- ast::LitKind::Int(
- i,
- ast::LitIntType::Unsigned(_)
- | ast::LitIntType::Signed(_)
- | ast::LitIntType::Unsuffixed,
- ) => {
+ Ok(ast::LitKind::Int(i, _)) => {
accumulator.push_str(&i.to_string());
}
- ast::LitKind::Bool(b) => {
+ Ok(ast::LitKind::Bool(b)) => {
accumulator.push_str(&b.to_string());
}
- ast::LitKind::Byte(..) | ast::LitKind::ByteStr(..) => {
+ Ok(ast::LitKind::Byte(..) | ast::LitKind::ByteStr(..)) => {
cx.span_err(e.span, "cannot concatenate a byte string literal");
+ has_errors = true;
+ }
+ Ok(ast::LitKind::Err) => {
+ has_errors = true;
}
- ast::LitKind::Err => {
+ Err(err) => {
+ report_lit_error(&cx.sess.parse_sess, err, token_lit, e.span);
has_errors = true;
}
},
+ ast::ExprKind::IncludedBytes(..) => {
+ cx.span_err(e.span, "cannot concatenate a byte string literal")
+ }
ast::ExprKind::Err => {
has_errors = true;
}
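A few illustrative inputs for the rewritten literal handling (sketch only; the suffix on `2u8` is just to show that any integer suffix is accepted):

    // string, char, integer, bool and float literals are stringified:
    let s: &str = concat!("x", 'y', 1, 2u8, true, 3.5); // "xy12true3.5"

    // byte and byte-string literals are rejected with
    // "cannot concatenate a byte string literal", and the new
    // `ExprKind::IncludedBytes` arm gives `include_bytes!(..)` arguments the
    // same error.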
diff --git a/compiler/rustc_builtin_macros/src/concat_bytes.rs b/compiler/rustc_builtin_macros/src/concat_bytes.rs
index 66e86bf21..d1124145d 100644
--- a/compiler/rustc_builtin_macros/src/concat_bytes.rs
+++ b/compiler/rustc_builtin_macros/src/concat_bytes.rs
@@ -2,18 +2,22 @@ use rustc_ast as ast;
use rustc_ast::{ptr::P, tokenstream::TokenStream};
use rustc_errors::Applicability;
use rustc_expand::base::{self, DummyResult};
+use rustc_session::errors::report_lit_error;
+use rustc_span::Span;
/// Emits errors for literal expressions that are invalid inside and outside of an array.
-fn invalid_type_err(cx: &mut base::ExtCtxt<'_>, expr: &P<rustc_ast::Expr>, is_nested: bool) {
- let ast::ExprKind::Lit(lit) = &expr.kind else {
- unreachable!();
- };
- match lit.kind {
- ast::LitKind::Char(_) => {
- let mut err = cx.struct_span_err(expr.span, "cannot concatenate character literals");
- if let Ok(snippet) = cx.sess.source_map().span_to_snippet(expr.span) {
+fn invalid_type_err(
+ cx: &mut base::ExtCtxt<'_>,
+ token_lit: ast::token::Lit,
+ span: Span,
+ is_nested: bool,
+) {
+ match ast::LitKind::from_token_lit(token_lit) {
+ Ok(ast::LitKind::Char(_)) => {
+ let mut err = cx.struct_span_err(span, "cannot concatenate character literals");
+ if let Ok(snippet) = cx.sess.source_map().span_to_snippet(span) {
err.span_suggestion(
- expr.span,
+ span,
"try using a byte character",
format!("b{}", snippet),
Applicability::MachineApplicable,
@@ -21,13 +25,13 @@ fn invalid_type_err(cx: &mut base::ExtCtxt<'_>, expr: &P<rustc_ast::Expr>, is_ne
.emit();
}
}
- ast::LitKind::Str(_, _) => {
- let mut err = cx.struct_span_err(expr.span, "cannot concatenate string literals");
+ Ok(ast::LitKind::Str(_, _)) => {
+ let mut err = cx.struct_span_err(span, "cannot concatenate string literals");
// suggestion would be invalid if we are nested
if !is_nested {
- if let Ok(snippet) = cx.sess.source_map().span_to_snippet(expr.span) {
+ if let Ok(snippet) = cx.sess.source_map().span_to_snippet(span) {
err.span_suggestion(
- expr.span,
+ span,
"try using a byte string",
format!("b{}", snippet),
Applicability::MachineApplicable,
@@ -36,18 +40,18 @@ fn invalid_type_err(cx: &mut base::ExtCtxt<'_>, expr: &P<rustc_ast::Expr>, is_ne
}
err.emit();
}
- ast::LitKind::Float(_, _) => {
- cx.span_err(expr.span, "cannot concatenate float literals");
+ Ok(ast::LitKind::Float(_, _)) => {
+ cx.span_err(span, "cannot concatenate float literals");
}
- ast::LitKind::Bool(_) => {
- cx.span_err(expr.span, "cannot concatenate boolean literals");
+ Ok(ast::LitKind::Bool(_)) => {
+ cx.span_err(span, "cannot concatenate boolean literals");
}
- ast::LitKind::Err => {}
- ast::LitKind::Int(_, _) if !is_nested => {
- let mut err = cx.struct_span_err(expr.span, "cannot concatenate numeric literals");
- if let Ok(snippet) = cx.sess.source_map().span_to_snippet(expr.span) {
+ Ok(ast::LitKind::Err) => {}
+ Ok(ast::LitKind::Int(_, _)) if !is_nested => {
+ let mut err = cx.struct_span_err(span, "cannot concatenate numeric literals");
+ if let Ok(snippet) = cx.sess.source_map().span_to_snippet(span) {
err.span_suggestion(
- expr.span,
+ span,
"try wrapping the number in an array",
format!("[{}]", snippet),
Applicability::MachineApplicable,
@@ -55,17 +59,20 @@ fn invalid_type_err(cx: &mut base::ExtCtxt<'_>, expr: &P<rustc_ast::Expr>, is_ne
}
err.emit();
}
- ast::LitKind::Int(
+ Ok(ast::LitKind::Int(
val,
ast::LitIntType::Unsuffixed | ast::LitIntType::Unsigned(ast::UintTy::U8),
- ) => {
+ )) => {
assert!(val > u8::MAX.into()); // must be an error
- cx.span_err(expr.span, "numeric literal is out of bounds");
+ cx.span_err(span, "numeric literal is out of bounds");
+ }
+ Ok(ast::LitKind::Int(_, _)) => {
+ cx.span_err(span, "numeric literal is not a `u8`");
}
- ast::LitKind::Int(_, _) => {
- cx.span_err(expr.span, "numeric literal is not a `u8`");
+ Ok(ast::LitKind::ByteStr(_) | ast::LitKind::Byte(_)) => unreachable!(),
+ Err(err) => {
+ report_lit_error(&cx.sess.parse_sess, err, token_lit, span);
}
- _ => unreachable!(),
}
}
@@ -83,14 +90,14 @@ fn handle_array_element(
*has_errors = true;
None
}
- ast::ExprKind::Lit(ref lit) => match lit.kind {
- ast::LitKind::Int(
+ ast::ExprKind::Lit(token_lit) => match ast::LitKind::from_token_lit(token_lit) {
+ Ok(ast::LitKind::Int(
val,
ast::LitIntType::Unsuffixed | ast::LitIntType::Unsigned(ast::UintTy::U8),
- ) if val <= u8::MAX.into() => Some(val as u8),
+ )) if val <= u8::MAX.into() => Some(val as u8),
- ast::LitKind::Byte(val) => Some(val),
- ast::LitKind::ByteStr(_) => {
+ Ok(ast::LitKind::Byte(val)) => Some(val),
+ Ok(ast::LitKind::ByteStr(_)) => {
if !*has_errors {
cx.struct_span_err(expr.span, "cannot concatenate doubly nested array")
.note("byte strings are treated as arrays of bytes")
@@ -102,12 +109,22 @@ fn handle_array_element(
}
_ => {
if !*has_errors {
- invalid_type_err(cx, expr, true);
+ invalid_type_err(cx, token_lit, expr.span, true);
}
*has_errors = true;
None
}
},
+ ast::ExprKind::IncludedBytes(..) => {
+ if !*has_errors {
+ cx.struct_span_err(expr.span, "cannot concatenate doubly nested array")
+ .note("byte strings are treated as arrays of bytes")
+ .help("try flattening the array")
+ .emit();
+ }
+ *has_errors = true;
+ None
+ }
_ => {
missing_literals.push(expr.span);
None
@@ -127,8 +144,8 @@ pub fn expand_concat_bytes(
let mut missing_literals = vec![];
let mut has_errors = false;
for e in es {
- match e.kind {
- ast::ExprKind::Array(ref exprs) => {
+ match &e.kind {
+ ast::ExprKind::Array(exprs) => {
for expr in exprs {
if let Some(elem) =
handle_array_element(cx, &mut has_errors, &mut missing_literals, expr)
@@ -137,10 +154,10 @@ pub fn expand_concat_bytes(
}
}
}
- ast::ExprKind::Repeat(ref expr, ref count) => {
- if let ast::ExprKind::Lit(ast::Lit {
- kind: ast::LitKind::Int(count_val, _), ..
- }) = count.value.kind
+ ast::ExprKind::Repeat(expr, count) => {
+ if let ast::ExprKind::Lit(token_lit) = count.value.kind
+ && let Ok(ast::LitKind::Int(count_val, _)) =
+ ast::LitKind::from_token_lit(token_lit)
{
if let Some(elem) =
handle_array_element(cx, &mut has_errors, &mut missing_literals, expr)
@@ -153,20 +170,23 @@ pub fn expand_concat_bytes(
cx.span_err(count.value.span, "repeat count is not a positive number");
}
}
- ast::ExprKind::Lit(ref lit) => match lit.kind {
- ast::LitKind::Byte(val) => {
+ &ast::ExprKind::Lit(token_lit) => match ast::LitKind::from_token_lit(token_lit) {
+ Ok(ast::LitKind::Byte(val)) => {
accumulator.push(val);
}
- ast::LitKind::ByteStr(ref bytes) => {
+ Ok(ast::LitKind::ByteStr(ref bytes)) => {
accumulator.extend_from_slice(&bytes);
}
_ => {
if !has_errors {
- invalid_type_err(cx, &e, false);
+ invalid_type_err(cx, token_lit, e.span, false);
}
has_errors = true;
}
},
+ ast::ExprKind::IncludedBytes(bytes) => {
+ accumulator.extend_from_slice(bytes);
+ }
ast::ExprKind::Err => {
has_errors = true;
}
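An illustrative input set for the updated expansion (unstable `concat_bytes` feature; `payload.bin` is a made-up file name, so that line is left commented out):

    #![feature(concat_bytes)]

    // Byte literals, byte strings, `u8` arrays and `u8` repeat expressions are
    // flattened into a single byte string:
    const ABCDEF: &[u8; 6] = concat_bytes!(b'A', b"BC", [68u8, 69], [70u8; 1]);

    // With the new `IncludedBytes` arm, bytes pulled in via `include_bytes!`
    // are spliced into the accumulator as well:
    // const WITH_FILE: &[u8] = concat_bytes!(b"header", include_bytes!("payload.bin"));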
diff --git a/compiler/rustc_builtin_macros/src/derive.rs b/compiler/rustc_builtin_macros/src/derive.rs
index e0fb7affb..fa5a45730 100644
--- a/compiler/rustc_builtin_macros/src/derive.rs
+++ b/compiler/rustc_builtin_macros/src/derive.rs
@@ -1,7 +1,7 @@
use crate::cfg_eval::cfg_eval;
use rustc_ast as ast;
-use rustc_ast::{attr, token, GenericParamKind, ItemKind, MetaItemKind, NestedMetaItem, StmtKind};
+use rustc_ast::{token, GenericParamKind, ItemKind, MetaItemKind, NestedMetaItem, StmtKind};
use rustc_errors::{struct_span_err, Applicability};
use rustc_expand::base::{Annotatable, ExpandResult, ExtCtxt, Indeterminate, MultiItemModifier};
use rustc_feature::AttributeTemplate;
@@ -10,7 +10,7 @@ use rustc_session::Session;
use rustc_span::symbol::{sym, Ident};
use rustc_span::Span;
-pub(crate) struct Expander;
+pub(crate) struct Expander(pub bool);
impl MultiItemModifier for Expander {
fn expand(
@@ -19,6 +19,7 @@ impl MultiItemModifier for Expander {
span: Span,
meta_item: &ast::MetaItem,
item: Annotatable,
+ _: bool,
) -> ExpandResult<Vec<Annotatable>, Annotatable> {
let sess = ecx.sess;
if report_bad_target(sess, &item, span) {
@@ -32,46 +33,48 @@ impl MultiItemModifier for Expander {
ecx.resolver.resolve_derives(ecx.current_expansion.id, ecx.force_mode, &|| {
let template =
AttributeTemplate { list: Some("Trait1, Trait2, ..."), ..Default::default() };
- let attr =
- attr::mk_attr_outer(&sess.parse_sess.attr_id_generator, meta_item.clone());
- validate_attr::check_builtin_attribute(
+ validate_attr::check_builtin_meta_item(
&sess.parse_sess,
- &attr,
+ &meta_item,
+ ast::AttrStyle::Outer,
sym::derive,
template,
);
- let mut resolutions: Vec<_> = attr
- .meta_item_list()
- .unwrap_or_default()
- .into_iter()
- .filter_map(|nested_meta| match nested_meta {
- NestedMetaItem::MetaItem(meta) => Some(meta),
- NestedMetaItem::Literal(lit) => {
- // Reject `#[derive("Debug")]`.
- report_unexpected_literal(sess, &lit);
- None
- }
- })
- .map(|meta| {
- // Reject `#[derive(Debug = "value", Debug(abc))]`, but recover the paths.
- report_path_args(sess, &meta);
- meta.path
- })
- .map(|path| (path, dummy_annotatable(), None))
- .collect();
+ let mut resolutions = match &meta_item.kind {
+ MetaItemKind::List(list) => {
+ list.iter()
+ .filter_map(|nested_meta| match nested_meta {
+ NestedMetaItem::MetaItem(meta) => Some(meta),
+ NestedMetaItem::Lit(lit) => {
+ // Reject `#[derive("Debug")]`.
+ report_unexpected_meta_item_lit(sess, &lit);
+ None
+ }
+ })
+ .map(|meta| {
+ // Reject `#[derive(Debug = "value", Debug(abc))]`, but recover the
+ // paths.
+ report_path_args(sess, &meta);
+ meta.path.clone()
+ })
+ .map(|path| (path, dummy_annotatable(), None, self.0))
+ .collect()
+ }
+ _ => vec![],
+ };
// Do not configure or clone items unless necessary.
match &mut resolutions[..] {
[] => {}
- [(_, first_item, _), others @ ..] => {
+ [(_, first_item, ..), others @ ..] => {
*first_item = cfg_eval(
sess,
features,
item.clone(),
ecx.current_expansion.lint_node_id,
);
- for (_, item, _) in others {
+ for (_, item, _, _) in others {
*item = first_item.clone();
}
}
@@ -126,7 +129,7 @@ fn report_bad_target(sess: &Session, item: &Annotatable, span: Span) -> bool {
bad_target
}
-fn report_unexpected_literal(sess: &Session, lit: &ast::Lit) {
+fn report_unexpected_meta_item_lit(sess: &Session, lit: &ast::MetaItemLit) {
let help_msg = match lit.token_lit.kind {
token::Str if rustc_lexer::is_ident(lit.token_lit.symbol.as_str()) => {
format!("try using `#[derive({})]`", lit.token_lit.symbol)
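The two "Reject ..." comments in the hunk above correspond to inputs like the following sketch (messages paraphrased, struct name invented):

    #[derive("Debug")]         // rejected: a literal is not a trait path; when the
                               // string looks like an identifier, the help suggests
                               // `#[derive(Debug)]`
    #[derive(Debug = "value")] // rejected: derive paths take no arguments, but the
                               // `Debug` path is still recovered so expansion can
                               // continue
    struct S;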
diff --git a/compiler/rustc_builtin_macros/src/deriving/bounds.rs b/compiler/rustc_builtin_macros/src/deriving/bounds.rs
index 7bd344467..240167146 100644
--- a/compiler/rustc_builtin_macros/src/deriving/bounds.rs
+++ b/compiler/rustc_builtin_macros/src/deriving/bounds.rs
@@ -1,4 +1,3 @@
-use crate::deriving::generic::ty::*;
use crate::deriving::generic::*;
use crate::deriving::path_std;
@@ -12,16 +11,17 @@ pub fn expand_deriving_copy(
mitem: &MetaItem,
item: &Annotatable,
push: &mut dyn FnMut(Annotatable),
+ is_const: bool,
) {
let trait_def = TraitDef {
span,
path: path_std!(marker::Copy),
skip_path_as_bound: false,
additional_bounds: Vec::new(),
- generics: Bounds::empty(),
supports_unions: true,
methods: Vec::new(),
associated_types: Vec::new(),
+ is_const,
};
trait_def.expand(cx, mitem, item, push);
diff --git a/compiler/rustc_builtin_macros/src/deriving/clone.rs b/compiler/rustc_builtin_macros/src/deriving/clone.rs
index fa8685f5f..d59b3b8c8 100644
--- a/compiler/rustc_builtin_macros/src/deriving/clone.rs
+++ b/compiler/rustc_builtin_macros/src/deriving/clone.rs
@@ -14,6 +14,7 @@ pub fn expand_deriving_clone(
mitem: &MetaItem,
item: &Annotatable,
push: &mut dyn FnMut(Annotatable),
+ is_const: bool,
) {
// The simple form is `fn clone(&self) -> Self { *self }`, possibly with
// some additional `AssertParamIsClone` assertions.
@@ -31,10 +32,10 @@ pub fn expand_deriving_clone(
let bounds;
let substructure;
let is_simple;
- match *item {
- Annotatable::Item(ref annitem) => match annitem.kind {
- ItemKind::Struct(_, Generics { ref params, .. })
- | ItemKind::Enum(_, Generics { ref params, .. }) => {
+ match item {
+ Annotatable::Item(annitem) => match &annitem.kind {
+ ItemKind::Struct(_, Generics { params, .. })
+ | ItemKind::Enum(_, Generics { params, .. }) => {
let container_id = cx.current_expansion.id.expn_data().parent.expect_local();
let has_derive_copy = cx.resolver.has_derive_copy(container_id);
if has_derive_copy
@@ -67,14 +68,12 @@ pub fn expand_deriving_clone(
_ => cx.span_bug(span, "`#[derive(Clone)]` on trait item or impl item"),
}
- let inline = cx.meta_word(span, sym::inline);
- let attrs = thin_vec![cx.attribute(inline)];
+ let attrs = thin_vec![cx.attr_word(sym::inline, span)];
let trait_def = TraitDef {
span,
path: path_std!(clone::Clone),
skip_path_as_bound: false,
additional_bounds: bounds,
- generics: Bounds::empty(),
supports_unions: true,
methods: vec![MethodDef {
name: sym::clone,
@@ -87,6 +86,7 @@ pub fn expand_deriving_clone(
combine_substructure: substructure,
}],
associated_types: Vec::new(),
+ is_const,
};
trait_def.expand_ext(cx, mitem, item, push, is_simple)
@@ -166,13 +166,13 @@ fn cs_clone(
};
let vdata;
- match *substr.fields {
- Struct(vdata_, ref af) => {
+ match substr.fields {
+ Struct(vdata_, af) => {
ctor_path = cx.path(trait_span, vec![substr.type_ident]);
all_fields = af;
- vdata = vdata_;
+ vdata = *vdata_;
}
- EnumMatching(.., variant, ref af) => {
+ EnumMatching(.., variant, af) => {
ctor_path = cx.path(trait_span, vec![substr.type_ident, variant.ident]);
all_fields = af;
vdata = &variant.data;
diff --git a/compiler/rustc_builtin_macros/src/deriving/cmp/eq.rs b/compiler/rustc_builtin_macros/src/deriving/cmp/eq.rs
index eab67b0d3..f861d47ed 100644
--- a/compiler/rustc_builtin_macros/src/deriving/cmp/eq.rs
+++ b/compiler/rustc_builtin_macros/src/deriving/cmp/eq.rs
@@ -5,7 +5,7 @@ use crate::deriving::path_std;
use rustc_ast::{self as ast, MetaItem};
use rustc_data_structures::fx::FxHashSet;
use rustc_expand::base::{Annotatable, ExtCtxt};
-use rustc_span::symbol::{sym, Ident};
+use rustc_span::symbol::sym;
use rustc_span::Span;
use thin_vec::thin_vec;
@@ -15,19 +15,19 @@ pub fn expand_deriving_eq(
mitem: &MetaItem,
item: &Annotatable,
push: &mut dyn FnMut(Annotatable),
+ is_const: bool,
) {
let span = cx.with_def_site_ctxt(span);
- let inline = cx.meta_word(span, sym::inline);
- let hidden = rustc_ast::attr::mk_nested_word_item(Ident::new(sym::hidden, span));
- let doc = rustc_ast::attr::mk_list_item(Ident::new(sym::doc, span), vec![hidden]);
- let no_coverage = cx.meta_word(span, sym::no_coverage);
- let attrs = thin_vec![cx.attribute(inline), cx.attribute(doc), cx.attribute(no_coverage)];
+ let attrs = thin_vec![
+ cx.attr_word(sym::inline, span),
+ cx.attr_nested_word(sym::doc, sym::hidden, span),
+ cx.attr_word(sym::no_coverage, span)
+ ];
let trait_def = TraitDef {
span,
path: path_std!(cmp::Eq),
skip_path_as_bound: false,
additional_bounds: Vec::new(),
- generics: Bounds::empty(),
supports_unions: true,
methods: vec![MethodDef {
name: sym::assert_receiver_is_total_eq,
@@ -42,6 +42,7 @@ pub fn expand_deriving_eq(
})),
}],
associated_types: Vec::new(),
+ is_const,
};
super::inject_impl_of_structural_trait(cx, span, item, path_std!(marker::StructuralEq), push);
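The `cx.attr_word`/`cx.attr_nested_word` calls above replace the older two-step `meta_word` + `attribute` construction. A hedged sketch of the surface-syntax attributes they build for the generated `assert_receiver_is_total_eq` method; the free function below is only a stand-in so the snippet compiles, and `no_coverage` is left as a comment because it is an unstable attribute:

    // cx.attr_word(sym::inline, span)                  ->  #[inline]
    // cx.attr_nested_word(sym::doc, sym::hidden, span) ->  #[doc(hidden)]
    // cx.attr_word(sym::no_coverage, span)             ->  #[no_coverage]   (nightly-only)
    #[inline]
    #[doc(hidden)]
    fn assert_receiver_is_total_eq() {}

    fn main() {}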
diff --git a/compiler/rustc_builtin_macros/src/deriving/cmp/ord.rs b/compiler/rustc_builtin_macros/src/deriving/cmp/ord.rs
index 7f117981a..96d18c7af 100644
--- a/compiler/rustc_builtin_macros/src/deriving/cmp/ord.rs
+++ b/compiler/rustc_builtin_macros/src/deriving/cmp/ord.rs
@@ -13,15 +13,14 @@ pub fn expand_deriving_ord(
mitem: &MetaItem,
item: &Annotatable,
push: &mut dyn FnMut(Annotatable),
+ is_const: bool,
) {
- let inline = cx.meta_word(span, sym::inline);
- let attrs = thin_vec![cx.attribute(inline)];
+ let attrs = thin_vec![cx.attr_word(sym::inline, span)];
let trait_def = TraitDef {
span,
path: path_std!(cmp::Ord),
skip_path_as_bound: false,
additional_bounds: Vec::new(),
- generics: Bounds::empty(),
supports_unions: false,
methods: vec![MethodDef {
name: sym::cmp,
@@ -34,6 +33,7 @@ pub fn expand_deriving_ord(
combine_substructure: combine_substructure(Box::new(|a, b, c| cs_cmp(a, b, c))),
}],
associated_types: Vec::new(),
+ is_const,
};
trait_def.expand(cx, mitem, item, push)
diff --git a/compiler/rustc_builtin_macros/src/deriving/cmp/partial_eq.rs b/compiler/rustc_builtin_macros/src/deriving/cmp/partial_eq.rs
index 236cbccaf..7f95551fc 100644
--- a/compiler/rustc_builtin_macros/src/deriving/cmp/partial_eq.rs
+++ b/compiler/rustc_builtin_macros/src/deriving/cmp/partial_eq.rs
@@ -14,6 +14,7 @@ pub fn expand_deriving_partial_eq(
mitem: &MetaItem,
item: &Annotatable,
push: &mut dyn FnMut(Annotatable),
+ is_const: bool,
) {
fn cs_eq(cx: &mut ExtCtxt<'_>, span: Span, substr: &Substructure<'_>) -> BlockOrExpr {
let base = true;
@@ -67,8 +68,7 @@ pub fn expand_deriving_partial_eq(
// No need to generate `ne`, the default suffices, and not generating it is
// faster.
- let inline = cx.meta_word(span, sym::inline);
- let attrs = thin_vec![cx.attribute(inline)];
+ let attrs = thin_vec![cx.attr_word(sym::inline, span)];
let methods = vec![MethodDef {
name: sym::eq,
generics: Bounds::empty(),
@@ -85,10 +85,10 @@ pub fn expand_deriving_partial_eq(
path: path_std!(cmp::PartialEq),
skip_path_as_bound: false,
additional_bounds: Vec::new(),
- generics: Bounds::empty(),
supports_unions: false,
methods,
associated_types: Vec::new(),
+ is_const,
};
trait_def.expand(cx, mitem, item, push)
}
diff --git a/compiler/rustc_builtin_macros/src/deriving/cmp/partial_ord.rs b/compiler/rustc_builtin_macros/src/deriving/cmp/partial_ord.rs
index 4173403a1..5c4e5b7f8 100644
--- a/compiler/rustc_builtin_macros/src/deriving/cmp/partial_ord.rs
+++ b/compiler/rustc_builtin_macros/src/deriving/cmp/partial_ord.rs
@@ -13,13 +13,13 @@ pub fn expand_deriving_partial_ord(
mitem: &MetaItem,
item: &Annotatable,
push: &mut dyn FnMut(Annotatable),
+ is_const: bool,
) {
let ordering_ty = Path(path_std!(cmp::Ordering));
let ret_ty =
Path(Path::new_(pathvec_std!(option::Option), vec![Box::new(ordering_ty)], PathKind::Std));
- let inline = cx.meta_word(span, sym::inline);
- let attrs = thin_vec![cx.attribute(inline)];
+ let attrs = thin_vec![cx.attr_word(sym::inline, span)];
let partial_cmp_def = MethodDef {
name: sym::partial_cmp,
@@ -39,10 +39,10 @@ pub fn expand_deriving_partial_ord(
path: path_std!(cmp::PartialOrd),
skip_path_as_bound: false,
additional_bounds: vec![],
- generics: Bounds::empty(),
supports_unions: false,
methods: vec![partial_cmp_def],
associated_types: Vec::new(),
+ is_const,
};
trait_def.expand(cx, mitem, item, push)
}
diff --git a/compiler/rustc_builtin_macros/src/deriving/debug.rs b/compiler/rustc_builtin_macros/src/deriving/debug.rs
index 2cf614ed9..544d971b2 100644
--- a/compiler/rustc_builtin_macros/src/deriving/debug.rs
+++ b/compiler/rustc_builtin_macros/src/deriving/debug.rs
@@ -13,6 +13,7 @@ pub fn expand_deriving_debug(
mitem: &MetaItem,
item: &Annotatable,
push: &mut dyn FnMut(Annotatable),
+ is_const: bool,
) {
// &mut ::std::fmt::Formatter
let fmtr = Ref(Box::new(Path(path_std!(fmt::Formatter))), ast::Mutability::Mut);
@@ -22,7 +23,6 @@ pub fn expand_deriving_debug(
path: path_std!(fmt::Debug),
skip_path_as_bound: false,
additional_bounds: Vec::new(),
- generics: Bounds::empty(),
supports_unions: false,
methods: vec![MethodDef {
name: sym::fmt,
@@ -37,6 +37,7 @@ pub fn expand_deriving_debug(
})),
}],
associated_types: Vec::new(),
+ is_const,
};
trait_def.expand(cx, mitem, item, push)
}
diff --git a/compiler/rustc_builtin_macros/src/deriving/decodable.rs b/compiler/rustc_builtin_macros/src/deriving/decodable.rs
index d669f6168..62af02c2b 100644
--- a/compiler/rustc_builtin_macros/src/deriving/decodable.rs
+++ b/compiler/rustc_builtin_macros/src/deriving/decodable.rs
@@ -16,6 +16,7 @@ pub fn expand_deriving_rustc_decodable(
mitem: &MetaItem,
item: &Annotatable,
push: &mut dyn FnMut(Annotatable),
+ is_const: bool,
) {
let krate = sym::rustc_serialize;
let typaram = sym::__D;
@@ -25,7 +26,6 @@ pub fn expand_deriving_rustc_decodable(
path: Path::new_(vec![krate, sym::Decodable], vec![], PathKind::Global),
skip_path_as_bound: false,
additional_bounds: Vec::new(),
- generics: Bounds::empty(),
supports_unions: false,
methods: vec![MethodDef {
name: sym::decode,
@@ -55,6 +55,7 @@ pub fn expand_deriving_rustc_decodable(
})),
}],
associated_types: Vec::new(),
+ is_const,
};
trait_def.expand(cx, mitem, item, push)
@@ -77,11 +78,11 @@ fn decodable_substructure(
let blkarg = Ident::new(sym::_d, trait_span);
let blkdecoder = cx.expr_ident(trait_span, blkarg);
- let expr = match *substr.fields {
- StaticStruct(_, ref summary) => {
- let nfields = match *summary {
- Unnamed(ref fields, _) => fields.len(),
- Named(ref fields) => fields.len(),
+ let expr = match substr.fields {
+ StaticStruct(_, summary) => {
+ let nfields = match summary {
+ Unnamed(fields, _) => fields.len(),
+ Named(fields) => fields.len(),
};
let fn_read_struct_field_path: Vec<_> =
cx.def_site_path(&[sym::rustc_serialize, sym::Decoder, sym::read_struct_field]);
@@ -118,7 +119,7 @@ fn decodable_substructure(
],
)
}
- StaticEnum(_, ref fields) => {
+ StaticEnum(_, fields) => {
let variant = Ident::new(sym::i, trait_span);
let mut arms = Vec::with_capacity(fields.len() + 1);
@@ -193,10 +194,10 @@ fn decode_static_fields<F>(
where
F: FnMut(&mut ExtCtxt<'_>, Span, Symbol, usize) -> P<Expr>,
{
- match *fields {
- Unnamed(ref fields, is_tuple) => {
+ match fields {
+ Unnamed(fields, is_tuple) => {
let path_expr = cx.expr_path(outer_pat_path);
- if !is_tuple {
+ if !*is_tuple {
path_expr
} else {
let fields = fields
@@ -208,7 +209,7 @@ where
cx.expr_call(trait_span, path_expr, fields)
}
}
- Named(ref fields) => {
+ Named(fields) => {
// use the field's span to get nicer error messages.
let fields = fields
.iter()
diff --git a/compiler/rustc_builtin_macros/src/deriving/default.rs b/compiler/rustc_builtin_macros/src/deriving/default.rs
index 17df9fb27..eb66c4a69 100644
--- a/compiler/rustc_builtin_macros/src/deriving/default.rs
+++ b/compiler/rustc_builtin_macros/src/deriving/default.rs
@@ -16,17 +16,16 @@ pub fn expand_deriving_default(
mitem: &ast::MetaItem,
item: &Annotatable,
push: &mut dyn FnMut(Annotatable),
+ is_const: bool,
) {
item.visit_with(&mut DetectNonVariantDefaultAttr { cx });
- let inline = cx.meta_word(span, sym::inline);
- let attrs = thin_vec![cx.attribute(inline)];
+ let attrs = thin_vec![cx.attr_word(sym::inline, span)];
let trait_def = TraitDef {
span,
path: Path::new(vec![kw::Default, sym::Default]),
skip_path_as_bound: has_a_default_variant(item),
additional_bounds: Vec::new(),
- generics: Bounds::empty(),
supports_unions: false,
methods: vec![MethodDef {
name: kw::Default,
@@ -47,6 +46,7 @@ pub fn expand_deriving_default(
})),
}],
associated_types: Vec::new(),
+ is_const,
};
trait_def.expand(cx, mitem, item, push)
}
@@ -62,15 +62,12 @@ fn default_struct_substructure(
let default_call = |span| cx.expr_call_global(span, default_ident.clone(), Vec::new());
let expr = match summary {
- Unnamed(ref fields, is_tuple) => {
- if !is_tuple {
- cx.expr_ident(trait_span, substr.type_ident)
- } else {
- let exprs = fields.iter().map(|sp| default_call(*sp)).collect();
- cx.expr_call_ident(trait_span, substr.type_ident, exprs)
- }
+ Unnamed(_, false) => cx.expr_ident(trait_span, substr.type_ident),
+ Unnamed(fields, true) => {
+ let exprs = fields.iter().map(|sp| default_call(*sp)).collect();
+ cx.expr_call_ident(trait_span, substr.type_ident, exprs)
}
- Named(ref fields) => {
+ Named(fields) => {
let default_fields = fields
.iter()
.map(|&(ident, span)| cx.field_imm(span, ident, default_call(span)))
@@ -145,7 +142,7 @@ fn extract_default_variant<'a>(
let suggestion = default_variants
.iter()
.filter_map(|v| {
- if v.ident == variant.ident {
+ if v.span == variant.span {
None
} else {
Some((cx.sess.find_by_name(&v.attrs, kw::Default)?.span, String::new()))
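The rewritten `default_struct_substructure` arms map one-to-one onto the three struct shapes. Hand-written equivalents of what the derive is describing (illustrative only, not the macro's literal output):

    struct Unit;                      // Unnamed(_, false): just the unit constructor
    struct Tuple(u8, String);         // Unnamed(fields, true): one Default::default() per field
    struct Named { x: u8, y: bool }   // Named(fields): field-by-field Default::default()

    impl Default for Unit {
        fn default() -> Self { Unit }
    }
    impl Default for Tuple {
        fn default() -> Self { Tuple(Default::default(), Default::default()) }
    }
    impl Default for Named {
        fn default() -> Self { Named { x: Default::default(), y: Default::default() } }
    }

    fn main() {
        let _ = (Unit::default(), Tuple::default(), Named::default());
    }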
diff --git a/compiler/rustc_builtin_macros/src/deriving/encodable.rs b/compiler/rustc_builtin_macros/src/deriving/encodable.rs
index f83f58b97..68bc0ff2e 100644
--- a/compiler/rustc_builtin_macros/src/deriving/encodable.rs
+++ b/compiler/rustc_builtin_macros/src/deriving/encodable.rs
@@ -100,6 +100,7 @@ pub fn expand_deriving_rustc_encodable(
mitem: &MetaItem,
item: &Annotatable,
push: &mut dyn FnMut(Annotatable),
+ is_const: bool,
) {
let krate = sym::rustc_serialize;
let typaram = sym::__S;
@@ -109,7 +110,6 @@ pub fn expand_deriving_rustc_encodable(
path: Path::new_(vec![krate, sym::Encodable], vec![], PathKind::Global),
skip_path_as_bound: false,
additional_bounds: Vec::new(),
- generics: Bounds::empty(),
supports_unions: false,
methods: vec![MethodDef {
name: sym::encode,
@@ -139,6 +139,7 @@ pub fn expand_deriving_rustc_encodable(
})),
}],
associated_types: Vec::new(),
+ is_const,
};
trait_def.expand(cx, mitem, item, push)
@@ -163,8 +164,8 @@ fn encodable_substructure(
],
));
- match *substr.fields {
- Struct(_, ref fields) => {
+ match substr.fields {
+ Struct(_, fields) => {
let fn_emit_struct_field_path =
cx.def_site_path(&[sym::rustc_serialize, sym::Encoder, sym::emit_struct_field]);
let mut stmts = Vec::new();
@@ -223,7 +224,7 @@ fn encodable_substructure(
BlockOrExpr::new_expr(expr)
}
- EnumMatching(idx, _, variant, ref fields) => {
+ EnumMatching(idx, _, variant, fields) => {
// We're not generating an AST that the borrow checker is expecting,
// so we need to generate a unique local variable to take the
// mutable loan out on, otherwise we get conflicts which don't
@@ -273,7 +274,7 @@ fn encodable_substructure(
vec![
blkencoder,
name,
- cx.expr_usize(trait_span, idx),
+ cx.expr_usize(trait_span, *idx),
cx.expr_usize(trait_span, fields.len()),
blk,
],
diff --git a/compiler/rustc_builtin_macros/src/deriving/generic/mod.rs b/compiler/rustc_builtin_macros/src/deriving/generic/mod.rs
index 16ee3aa89..beac591bf 100644
--- a/compiler/rustc_builtin_macros/src/deriving/generic/mod.rs
+++ b/compiler/rustc_builtin_macros/src/deriving/generic/mod.rs
@@ -171,7 +171,7 @@ use rustc_ast::{GenericArg, GenericParamKind, VariantData};
use rustc_attr as attr;
use rustc_expand::base::{Annotatable, ExtCtxt};
use rustc_span::symbol::{kw, sym, Ident, Symbol};
-use rustc_span::Span;
+use rustc_span::{Span, DUMMY_SP};
use std::cell::RefCell;
use std::iter;
use std::ops::Not;
@@ -195,15 +195,14 @@ pub struct TraitDef<'a> {
/// other than the current trait
pub additional_bounds: Vec<Ty>,
- /// Any extra lifetimes and/or bounds, e.g., `D: serialize::Decoder`
- pub generics: Bounds,
-
/// Can this trait be derived for unions?
pub supports_unions: bool,
pub methods: Vec<MethodDef<'a>>,
pub associated_types: Vec<(Ident, Ty)>,
+
+ pub is_const: bool,
}
pub struct MethodDef<'a> {
@@ -301,12 +300,12 @@ struct TypeParameter {
ty: P<ast::Ty>,
}
-// The code snippets built up for derived code are sometimes used as blocks
-// (e.g. in a function body) and sometimes used as expressions (e.g. in a match
-// arm). This structure avoids committing to either form until necessary,
-// avoiding the insertion of any unnecessary blocks.
-//
-// The statements come before the expression.
+/// The code snippets built up for derived code are sometimes used as blocks
+/// (e.g. in a function body) and sometimes used as expressions (e.g. in a match
+/// arm). This structure avoids committing to either form until necessary,
+/// avoiding the insertion of any unnecessary blocks.
+///
+/// The statements come before the expression.
pub struct BlockOrExpr(Vec<ast::Stmt>, Option<P<Expr>>);
impl BlockOrExpr {
@@ -370,15 +369,14 @@ fn find_type_parameters(
impl<'a, 'b> visit::Visitor<'a> for Visitor<'a, 'b> {
fn visit_ty(&mut self, ty: &'a ast::Ty) {
- if let ast::TyKind::Path(_, ref path) = ty.kind {
- if let Some(segment) = path.segments.first() {
- if self.ty_param_names.contains(&segment.ident.name) {
- self.type_params.push(TypeParameter {
- bound_generic_params: self.bound_generic_params_stack.clone(),
- ty: P(ty.clone()),
- });
- }
- }
+ if let ast::TyKind::Path(_, path) = &ty.kind
+ && let Some(segment) = path.segments.first()
+ && self.ty_param_names.contains(&segment.ident.name)
+ {
+ self.type_params.push(TypeParameter {
+ bound_generic_params: self.bound_generic_params_stack.clone(),
+ ty: P(ty.clone()),
+ });
}
visit::walk_ty(self, ty)
@@ -429,8 +427,8 @@ impl<'a> TraitDef<'a> {
push: &mut dyn FnMut(Annotatable),
from_scratch: bool,
) {
- match *item {
- Annotatable::Item(ref item) => {
+ match item {
+ Annotatable::Item(item) => {
let is_packed = item.attrs.iter().any(|attr| {
for r in attr::find_repr_attrs(&cx.sess, attr) {
if let attr::ReprPacked(_) = r {
@@ -439,38 +437,37 @@ impl<'a> TraitDef<'a> {
}
false
});
- let has_no_type_params = match item.kind {
- ast::ItemKind::Struct(_, ref generics)
- | ast::ItemKind::Enum(_, ref generics)
- | ast::ItemKind::Union(_, ref generics) => !generics
+ let has_no_type_params = match &item.kind {
+ ast::ItemKind::Struct(_, generics)
+ | ast::ItemKind::Enum(_, generics)
+ | ast::ItemKind::Union(_, generics) => !generics
.params
.iter()
.any(|param| matches!(param.kind, ast::GenericParamKind::Type { .. })),
_ => unreachable!(),
};
let container_id = cx.current_expansion.id.expn_data().parent.expect_local();
- let always_copy = has_no_type_params && cx.resolver.has_derive_copy(container_id);
+ let copy_fields =
+ is_packed && has_no_type_params && cx.resolver.has_derive_copy(container_id);
- let newitem = match item.kind {
- ast::ItemKind::Struct(ref struct_def, ref generics) => self.expand_struct_def(
+ let newitem = match &item.kind {
+ ast::ItemKind::Struct(struct_def, generics) => self.expand_struct_def(
cx,
&struct_def,
item.ident,
generics,
from_scratch,
- is_packed,
- always_copy,
+ copy_fields,
),
- ast::ItemKind::Enum(ref enum_def, ref generics) => {
- // We ignore `is_packed`/`always_copy` here, because
- // `repr(packed)` enums cause an error later on.
+ ast::ItemKind::Enum(enum_def, generics) => {
+ // We ignore `is_packed` here, because `repr(packed)`
+ // enums cause an error later on.
//
// This can only cause further compilation errors
- // downstream in blatantly illegal code, so it
- // is fine.
+ // downstream in blatantly illegal code, so it is fine.
self.expand_enum_def(cx, enum_def, item.ident, generics, from_scratch)
}
- ast::ItemKind::Union(ref struct_def, ref generics) => {
+ ast::ItemKind::Union(struct_def, generics) => {
if self.supports_unions {
self.expand_struct_def(
cx,
@@ -478,8 +475,7 @@ impl<'a> TraitDef<'a> {
item.ident,
generics,
from_scratch,
- is_packed,
- always_copy,
+ copy_fields,
)
} else {
cx.span_err(mitem.span, "this trait cannot be derived for unions");
@@ -581,19 +577,21 @@ impl<'a> TraitDef<'a> {
})
});
- let Generics { mut params, mut where_clause, .. } =
- self.generics.to_generics(cx, self.span, type_ident, generics);
+ let mut where_clause = ast::WhereClause::default();
where_clause.span = generics.where_clause.span;
let ctxt = self.span.ctxt();
let span = generics.span.with_ctxt(ctxt);
// Create the generic parameters
- params.extend(generics.params.iter().map(|param| match &param.kind {
- GenericParamKind::Lifetime { .. } => param.clone(),
- GenericParamKind::Type { .. } => {
- // I don't think this can be moved out of the loop, since
- // a GenericBound requires an ast id
- let bounds: Vec<_> =
+ let params: Vec<_> = generics
+ .params
+ .iter()
+ .map(|param| match &param.kind {
+ GenericParamKind::Lifetime { .. } => param.clone(),
+ GenericParamKind::Type { .. } => {
+ // I don't think this can be moved out of the loop, since
+ // a GenericBound requires an ast id
+ let bounds: Vec<_> =
// extra restrictions on the generics parameters to the
// type being derived upon
self.additional_bounds.iter().map(|p| {
@@ -606,21 +604,22 @@ impl<'a> TraitDef<'a> {
param.bounds.iter().cloned()
).collect();
- cx.typaram(param.ident.span.with_ctxt(ctxt), param.ident, bounds, None)
- }
- GenericParamKind::Const { ty, kw_span, .. } => {
- let const_nodefault_kind = GenericParamKind::Const {
- ty: ty.clone(),
- kw_span: kw_span.with_ctxt(ctxt),
-
- // We can't have default values inside impl block
- default: None,
- };
- let mut param_clone = param.clone();
- param_clone.kind = const_nodefault_kind;
- param_clone
- }
- }));
+ cx.typaram(param.ident.span.with_ctxt(ctxt), param.ident, bounds, None)
+ }
+ GenericParamKind::Const { ty, kw_span, .. } => {
+ let const_nodefault_kind = GenericParamKind::Const {
+ ty: ty.clone(),
+ kw_span: kw_span.with_ctxt(ctxt),
+
+ // We can't have default values inside impl block
+ default: None,
+ };
+ let mut param_clone = param.clone();
+ param_clone.kind = const_nodefault_kind;
+ param_clone
+ }
+ })
+ .collect();
// and similarly for where clauses
where_clause.predicates.extend(generics.where_clause.predicates.iter().map(|clause| {
@@ -663,12 +662,11 @@ impl<'a> TraitDef<'a> {
for field_ty_param in field_ty_params {
// if we have already handled this type, skip it
- if let ast::TyKind::Path(_, ref p) = field_ty_param.ty.kind {
- if p.segments.len() == 1
- && ty_param_names.contains(&p.segments[0].ident.name)
- {
- continue;
- };
+ if let ast::TyKind::Path(_, p) = &field_ty_param.ty.kind
+ && let [sole_segment] = &*p.segments
+ && ty_param_names.contains(&sole_segment.ident.name)
+ {
+ continue;
}
let mut bounds: Vec<_> = self
.additional_bounds
@@ -718,7 +716,7 @@ impl<'a> TraitDef<'a> {
let path = cx.path_all(self.span, false, vec![type_ident], self_params);
let self_type = cx.ty_path(path);
- let attr = cx.attribute(cx.meta_word(self.span, sym::automatically_derived));
+ let attr = cx.attr_word(sym::automatically_derived, self.span);
let attrs = thin_vec![attr];
let opt_trait_ref = Some(trait_ref);
@@ -730,7 +728,7 @@ impl<'a> TraitDef<'a> {
unsafety: ast::Unsafe::No,
polarity: ast::ImplPolarity::Positive,
defaultness: ast::Defaultness::Final,
- constness: ast::Const::No,
+ constness: if self.is_const { ast::Const::Yes(DUMMY_SP) } else { ast::Const::No },
generics: trait_generics,
of_trait: opt_trait_ref,
self_ty: self_type,
@@ -746,8 +744,7 @@ impl<'a> TraitDef<'a> {
type_ident: Ident,
generics: &Generics,
from_scratch: bool,
- is_packed: bool,
- always_copy: bool,
+ copy_fields: bool,
) -> P<ast::Item> {
let field_tys: Vec<P<ast::Ty>> =
struct_def.fields().iter().map(|field| field.ty.clone()).collect();
@@ -775,8 +772,7 @@ impl<'a> TraitDef<'a> {
type_ident,
&selflike_args,
&nonselflike_args,
- is_packed,
- always_copy,
+ copy_fields,
)
};
@@ -1014,19 +1010,9 @@ impl<'a> MethodDef<'a> {
/// }
/// }
/// ```
- /// If the struct doesn't impl `Copy`, we use let-destructuring with `ref`:
- /// ```
- /// # struct A { x: u8, y: u8 }
- /// impl PartialEq for A {
- /// fn eq(&self, other: &A) -> bool {
- /// let Self { x: ref __self_0_0, y: ref __self_0_1 } = *self;
- /// let Self { x: ref __self_1_0, y: ref __self_1_1 } = *other;
- /// *__self_0_0 == *__self_1_0 && *__self_0_1 == *__self_1_1
- /// }
- /// }
- /// ```
- /// This latter case only works if the fields match the alignment required
- /// by the `packed(N)` attribute. (We'll get errors later on if not.)
+ /// If the struct doesn't impl `Copy`, we use the normal `&self.x`. This
+ /// only works if the fields match the alignment required by the
+ /// `packed(N)` attribute. (We'll get errors later on if not.)
fn expand_struct_method_body<'b>(
&self,
cx: &mut ExtCtxt<'_>,
@@ -1035,54 +1021,19 @@ impl<'a> MethodDef<'a> {
type_ident: Ident,
selflike_args: &[P<Expr>],
nonselflike_args: &[P<Expr>],
- is_packed: bool,
- always_copy: bool,
+ copy_fields: bool,
) -> BlockOrExpr {
- let span = trait_.span;
assert!(selflike_args.len() == 1 || selflike_args.len() == 2);
- let mk_body = |cx, selflike_fields| {
- self.call_substructure_method(
- cx,
- trait_,
- type_ident,
- nonselflike_args,
- &Struct(struct_def, selflike_fields),
- )
- };
-
- if !is_packed {
- let selflike_fields =
- trait_.create_struct_field_access_fields(cx, selflike_args, struct_def, false);
- mk_body(cx, selflike_fields)
- } else if always_copy {
- let selflike_fields =
- trait_.create_struct_field_access_fields(cx, selflike_args, struct_def, true);
- mk_body(cx, selflike_fields)
- } else {
- // Neither packed nor copy. Need to use ref patterns.
- let prefixes: Vec<_> =
- (0..selflike_args.len()).map(|i| format!("__self_{}", i)).collect();
- let addr_of = always_copy;
- let selflike_fields =
- trait_.create_struct_pattern_fields(cx, struct_def, &prefixes, addr_of);
- let mut body = mk_body(cx, selflike_fields);
-
- let struct_path = cx.path(span, vec![Ident::new(kw::SelfUpper, type_ident.span)]);
- let by_ref = ByRef::from(is_packed && !always_copy);
- let patterns =
- trait_.create_struct_patterns(cx, struct_path, struct_def, &prefixes, by_ref);
-
- // Do the let-destructuring.
- let mut stmts: Vec<_> = iter::zip(selflike_args, patterns)
- .map(|(selflike_arg_expr, pat)| {
- let selflike_arg_expr = cx.expr_deref(span, selflike_arg_expr.clone());
- cx.stmt_let_pat(span, pat, selflike_arg_expr)
- })
- .collect();
- stmts.extend(std::mem::take(&mut body.0));
- BlockOrExpr(stmts, body.1)
- }
+ let selflike_fields =
+ trait_.create_struct_field_access_fields(cx, selflike_args, struct_def, copy_fields);
+ self.call_substructure_method(
+ cx,
+ trait_,
+ type_ident,
+ nonselflike_args,
+ &Struct(struct_def, selflike_fields),
+ )
}
fn expand_static_struct_method_body(
@@ -1252,9 +1203,7 @@ impl<'a> MethodDef<'a> {
// A single arm has form (&VariantK, &VariantK, ...) => BodyK
// (see "Final wrinkle" note below for why.)
- let addr_of = false; // because enums can't be repr(packed)
- let fields =
- trait_.create_struct_pattern_fields(cx, &variant.data, &prefixes, addr_of);
+ let fields = trait_.create_struct_pattern_fields(cx, &variant.data, &prefixes);
let sp = variant.span.with_ctxt(trait_.span.ctxt());
let variant_path = cx.path(sp, vec![type_ident, variant.ident]);
@@ -1517,15 +1466,13 @@ impl<'a> TraitDef<'a> {
cx: &mut ExtCtxt<'_>,
struct_def: &'a VariantData,
prefixes: &[String],
- addr_of: bool,
) -> Vec<FieldInfo> {
self.create_fields(struct_def, |i, _struct_field, sp| {
prefixes
.iter()
.map(|prefix| {
let ident = self.mk_pattern_ident(prefix, i);
- let expr = cx.expr_path(cx.path_ident(sp, ident));
- if addr_of { cx.expr_addr_of(sp, expr) } else { expr }
+ cx.expr_path(cx.path_ident(sp, ident))
})
.collect()
})
@@ -1536,7 +1483,7 @@ impl<'a> TraitDef<'a> {
cx: &mut ExtCtxt<'_>,
selflike_args: &[P<Expr>],
struct_def: &'a VariantData,
- copy: bool,
+ copy_fields: bool,
) -> Vec<FieldInfo> {
self.create_fields(struct_def, |i, struct_field, sp| {
selflike_args
@@ -1555,7 +1502,7 @@ impl<'a> TraitDef<'a> {
}),
),
);
- if copy {
+ if copy_fields {
field_expr = cx.expr_block(
cx.block(struct_field.span, vec![cx.stmt_expr(field_expr)]),
);
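The `BlockOrExpr` doc comment promoted earlier in this file's hunks (statements plus an optional trailing expression, committed to a block or a bare expression only when finally needed) is easy to mimic outside the compiler. A tiny standalone analogue with strings standing in for AST nodes (not the rustc type, just the idea):

    struct BlockOrExpr(Vec<String>, Option<String>);

    impl BlockOrExpr {
        // Wrap in a block only when statements force it; otherwise keep the bare expression.
        fn into_expr(self) -> String {
            if self.0.is_empty() && self.1.is_some() {
                self.1.unwrap()
            } else {
                let mut out = String::from("{ ");
                for stmt in &self.0 {
                    out.push_str(stmt);
                    out.push_str("; ");
                }
                if let Some(tail) = &self.1 {
                    out.push_str(tail);
                    out.push(' ');
                }
                out.push('}');
                out
            }
        }
    }

    fn main() {
        assert_eq!(BlockOrExpr(vec![], Some("a == b".into())).into_expr(), "a == b");
        assert_eq!(
            BlockOrExpr(vec!["let t = a".into()], Some("t == b".into())).into_expr(),
            "{ let t = a; t == b }"
        );
    }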
diff --git a/compiler/rustc_builtin_macros/src/deriving/generic/ty.rs b/compiler/rustc_builtin_macros/src/deriving/generic/ty.rs
index 36e2e2930..eaa488190 100644
--- a/compiler/rustc_builtin_macros/src/deriving/generic/ty.rs
+++ b/compiler/rustc_builtin_macros/src/deriving/generic/ty.rs
@@ -115,7 +115,7 @@ impl Ty {
self_ty: Ident,
generics: &Generics,
) -> ast::Path {
- match *self {
+ match self {
Self_ => {
let params: Vec<_> = generics
.params
@@ -135,7 +135,7 @@ impl Ty {
cx.path_all(span, false, vec![self_ty], params)
}
- Path(ref p) => p.to_path(cx, span, self_ty, generics),
+ Path(p) => p.to_path(cx, span, self_ty, generics),
Ref(..) => cx.span_bug(span, "ref in a path in generic `derive`"),
Unit => cx.span_bug(span, "unit in a path in generic `derive`"),
}
@@ -180,10 +180,7 @@ impl Bounds {
let params = self
.bounds
.iter()
- .map(|t| {
- let (name, ref bounds) = *t;
- mk_ty_param(cx, span, name, &bounds, self_ty, self_generics)
- })
+ .map(|&(name, ref bounds)| mk_ty_param(cx, span, name, &bounds, self_ty, self_generics))
.collect();
Generics {
diff --git a/compiler/rustc_builtin_macros/src/deriving/hash.rs b/compiler/rustc_builtin_macros/src/deriving/hash.rs
index 6e9d5f08b..c136bb714 100644
--- a/compiler/rustc_builtin_macros/src/deriving/hash.rs
+++ b/compiler/rustc_builtin_macros/src/deriving/hash.rs
@@ -13,6 +13,7 @@ pub fn expand_deriving_hash(
mitem: &MetaItem,
item: &Annotatable,
push: &mut dyn FnMut(Annotatable),
+ is_const: bool,
) {
let path = Path::new_(pathvec_std!(hash::Hash), vec![], PathKind::Std);
@@ -24,7 +25,6 @@ pub fn expand_deriving_hash(
path,
skip_path_as_bound: false,
additional_bounds: Vec::new(),
- generics: Bounds::empty(),
supports_unions: false,
methods: vec![MethodDef {
name: sym::hash,
@@ -39,6 +39,7 @@ pub fn expand_deriving_hash(
})),
}],
associated_types: Vec::new(),
+ is_const,
};
hash_trait_def.expand(cx, mitem, item, push);
diff --git a/compiler/rustc_builtin_macros/src/deriving/mod.rs b/compiler/rustc_builtin_macros/src/deriving/mod.rs
index ee346047a..de657e4e6 100644
--- a/compiler/rustc_builtin_macros/src/deriving/mod.rs
+++ b/compiler/rustc_builtin_macros/src/deriving/mod.rs
@@ -38,9 +38,10 @@ pub mod partial_ord;
pub mod generic;
-pub(crate) struct BuiltinDerive(
- pub(crate) fn(&mut ExtCtxt<'_>, Span, &MetaItem, &Annotatable, &mut dyn FnMut(Annotatable)),
-);
+pub(crate) type BuiltinDeriveFn =
+ fn(&mut ExtCtxt<'_>, Span, &MetaItem, &Annotatable, &mut dyn FnMut(Annotatable), bool);
+
+pub(crate) struct BuiltinDerive(pub(crate) BuiltinDeriveFn);
impl MultiItemModifier for BuiltinDerive {
fn expand(
@@ -49,6 +50,7 @@ impl MultiItemModifier for BuiltinDerive {
span: Span,
meta_item: &MetaItem,
item: Annotatable,
+ is_derive_const: bool,
) -> ExpandResult<Vec<Annotatable>, Annotatable> {
// FIXME: Built-in derives often forget to give spans contexts,
// so we are doing it here in a centralized way.
@@ -57,21 +59,28 @@ impl MultiItemModifier for BuiltinDerive {
match item {
Annotatable::Stmt(stmt) => {
if let ast::StmtKind::Item(item) = stmt.into_inner().kind {
- (self.0)(ecx, span, meta_item, &Annotatable::Item(item), &mut |a| {
- // Cannot use 'ecx.stmt_item' here, because we need to pass 'ecx'
- // to the function
- items.push(Annotatable::Stmt(P(ast::Stmt {
- id: ast::DUMMY_NODE_ID,
- kind: ast::StmtKind::Item(a.expect_item()),
- span,
- })));
- });
+ (self.0)(
+ ecx,
+ span,
+ meta_item,
+ &Annotatable::Item(item),
+ &mut |a| {
+ // Cannot use 'ecx.stmt_item' here, because we need to pass 'ecx'
+ // to the function
+ items.push(Annotatable::Stmt(P(ast::Stmt {
+ id: ast::DUMMY_NODE_ID,
+ kind: ast::StmtKind::Item(a.expect_item()),
+ span,
+ })));
+ },
+ is_derive_const,
+ );
} else {
unreachable!("should have already errored on non-item statement")
}
}
_ => {
- (self.0)(ecx, span, meta_item, &item, &mut |a| items.push(a));
+ (self.0)(ecx, span, meta_item, &item, &mut |a| items.push(a), is_derive_const);
}
}
ExpandResult::Ready(items)
@@ -116,12 +125,12 @@ fn inject_impl_of_structural_trait(
structural_path: generic::ty::Path,
push: &mut dyn FnMut(Annotatable),
) {
- let Annotatable::Item(ref item) = *item else {
+ let Annotatable::Item(item) = item else {
unreachable!();
};
- let generics = match item.kind {
- ItemKind::Struct(_, ref generics) | ItemKind::Enum(_, ref generics) => generics,
+ let generics = match &item.kind {
+ ItemKind::Struct(_, generics) | ItemKind::Enum(_, generics) => generics,
// Do not inject `impl Structural for Union`. (`PartialEq` does not
// support unions, so we will see error downstream.)
ItemKind::Union(..) => return,
@@ -179,7 +188,7 @@ fn inject_impl_of_structural_trait(
.cloned(),
);
// Mark as `automatically_derived` to avoid some silly lints.
- attrs.push(cx.attribute(cx.meta_word(span, sym::automatically_derived)));
+ attrs.push(cx.attr_word(sym::automatically_derived, span));
let newitem = cx.item(
span,
diff --git a/compiler/rustc_builtin_macros/src/edition_panic.rs b/compiler/rustc_builtin_macros/src/edition_panic.rs
index 3f1a8b3bc..b2a21611d 100644
--- a/compiler/rustc_builtin_macros/src/edition_panic.rs
+++ b/compiler/rustc_builtin_macros/src/edition_panic.rs
@@ -6,15 +6,15 @@ use rustc_span::edition::Edition;
use rustc_span::symbol::sym;
use rustc_span::Span;
-// This expands to either
-// - `$crate::panic::panic_2015!(...)` or
-// - `$crate::panic::panic_2021!(...)`
-// depending on the edition.
-//
-// This is used for both std::panic!() and core::panic!().
-//
-// `$crate` will refer to either the `std` or `core` crate depending on which
-// one we're expanding from.
+/// This expands to either
+/// - `$crate::panic::panic_2015!(...)` or
+/// - `$crate::panic::panic_2021!(...)`
+/// depending on the edition.
+///
+/// This is used for both std::panic!() and core::panic!().
+///
+/// `$crate` will refer to either the `std` or `core` crate depending on which
+/// one we're expanding from.
pub fn expand_panic<'cx>(
cx: &'cx mut ExtCtxt<'_>,
sp: Span,
@@ -24,10 +24,10 @@ pub fn expand_panic<'cx>(
expand(mac, cx, sp, tts)
}
-// This expands to either
-// - `$crate::panic::unreachable_2015!(...)` or
-// - `$crate::panic::unreachable_2021!(...)`
-// depending on the edition.
+/// This expands to either
+/// - `$crate::panic::unreachable_2015!(...)` or
+/// - `$crate::panic::unreachable_2021!(...)`
+/// depending on the edition.
pub fn expand_unreachable<'cx>(
cx: &'cx mut ExtCtxt<'_>,
sp: Span,
@@ -58,11 +58,11 @@ fn expand<'cx>(
.collect(),
tokens: None,
},
- args: P(MacArgs::Delimited(
- DelimSpan::from_single(sp),
- MacDelimiter::Parenthesis,
- tts,
- )),
+ args: P(DelimArgs {
+ dspan: DelimSpan::from_single(sp),
+ delim: MacDelimiter::Parenthesis,
+ tokens: tts,
+ }),
prior_type_ascription: None,
})),
),
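The promoted doc comments describe the edition dispatch; a hedged illustration of the two expansions a plain `panic!` invocation resolves to (the arrows show intent, not literal expander output):

    // 2015/2018 crate:  panic!("boom")  ->  $crate::panic::panic_2015!("boom")
    // 2021+ crate:      panic!("boom")  ->  $crate::panic::panic_2021!("boom")
    // `$crate` is `std` or `core`, depending on whose `panic!` was invoked.
    fn main() {
        let answer = 41 + 1;
        if answer != 42 {
            panic!("expected 42, got {answer}"); // 2021 edition form with implicit capture
        }
    }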
diff --git a/compiler/rustc_builtin_macros/src/env.rs b/compiler/rustc_builtin_macros/src/env.rs
index b8828fa67..0b4e545f7 100644
--- a/compiler/rustc_builtin_macros/src/env.rs
+++ b/compiler/rustc_builtin_macros/src/env.rs
@@ -53,7 +53,7 @@ pub fn expand_env<'cx>(
tts: TokenStream,
) -> Box<dyn base::MacResult + 'cx> {
let mut exprs = match get_exprs_from_tts(cx, sp, tts) {
- Some(ref exprs) if exprs.is_empty() => {
+ Some(exprs) if exprs.is_empty() => {
cx.span_err(sp, "env! takes 1 or 2 arguments");
return DummyResult::any(sp);
}
diff --git a/compiler/rustc_builtin_macros/src/format_foreign.rs b/compiler/rustc_builtin_macros/src/format_foreign.rs
index ecd16736e..6f7fc3a95 100644
--- a/compiler/rustc_builtin_macros/src/format_foreign.rs
+++ b/compiler/rustc_builtin_macros/src/format_foreign.rs
@@ -13,23 +13,23 @@ pub(crate) mod printf {
impl<'a> Substitution<'a> {
pub fn as_str(&self) -> &str {
- match *self {
- Substitution::Format(ref fmt) => fmt.span,
+ match self {
+ Substitution::Format(fmt) => fmt.span,
Substitution::Escape(_) => "%%",
}
}
pub fn position(&self) -> Option<InnerSpan> {
- match *self {
- Substitution::Format(ref fmt) => Some(fmt.position),
- Substitution::Escape((start, end)) => Some(InnerSpan::new(start, end)),
+ match self {
+ Substitution::Format(fmt) => Some(fmt.position),
+ &Substitution::Escape((start, end)) => Some(InnerSpan::new(start, end)),
}
}
pub fn set_position(&mut self, start: usize, end: usize) {
match self {
- Substitution::Format(ref mut fmt) => fmt.position = InnerSpan::new(start, end),
- Substitution::Escape(ref mut pos) => *pos = (start, end),
+ Substitution::Format(fmt) => fmt.position = InnerSpan::new(start, end),
+ Substitution::Escape(pos) => *pos = (start, end),
}
}
@@ -38,8 +38,8 @@ pub(crate) mod printf {
/// This ignores cases where the substitution does not have an exact equivalent, or where
/// the substitution would be unnecessary.
pub fn translate(&self) -> Result<String, Option<String>> {
- match *self {
- Substitution::Format(ref fmt) => fmt.translate(),
+ match self {
+ Substitution::Format(fmt) => fmt.translate(),
Substitution::Escape(_) => Err(None),
}
}
@@ -635,23 +635,17 @@ pub mod shell {
}
pub fn position(&self) -> Option<InnerSpan> {
- match self {
- Substitution::Ordinal(_, pos)
- | Substitution::Name(_, pos)
- | Substitution::Escape(pos) => Some(InnerSpan::new(pos.0, pos.1)),
- }
+ let (Self::Ordinal(_, pos) | Self::Name(_, pos) | Self::Escape(pos)) = self;
+ Some(InnerSpan::new(pos.0, pos.1))
}
pub fn set_position(&mut self, start: usize, end: usize) {
- match self {
- Substitution::Ordinal(_, ref mut pos)
- | Substitution::Name(_, ref mut pos)
- | Substitution::Escape(ref mut pos) => *pos = (start, end),
- }
+ let (Self::Ordinal(_, pos) | Self::Name(_, pos) | Self::Escape(pos)) = self;
+ *pos = (start, end);
}
pub fn translate(&self) -> Result<String, Option<String>> {
- match *self {
+ match self {
Substitution::Ordinal(n, _) => Ok(format!("{{{}}}", n)),
Substitution::Name(n, _) => Ok(format!("{{{}}}", n)),
Substitution::Escape(_) => Err(None),
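The `position`/`set_position` rewrite relies on an irrefutable or-pattern in a `let`, which works because every variant binds `pos` at the same type. A minimal standalone sketch of the same idiom (the enum below only loosely mirrors the shell `Substitution` type):

    enum Sub {
        Ordinal(usize, (usize, usize)),
        Name(String, (usize, usize)),
        Escape((usize, usize)),
    }

    impl Sub {
        fn position(&self) -> (usize, usize) {
            // Irrefutable: all three variants are covered and all bind `pos`.
            let (Sub::Ordinal(_, pos) | Sub::Name(_, pos) | Sub::Escape(pos)) = self;
            *pos
        }
    }

    fn main() {
        assert_eq!(Sub::Escape((3, 5)).position(), (3, 5));
        assert_eq!(Sub::Name("WORD".into(), (7, 12)).position(), (7, 12));
    }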
diff --git a/compiler/rustc_builtin_macros/src/format_foreign/printf/tests.rs b/compiler/rustc_builtin_macros/src/format_foreign/printf/tests.rs
index fc7442470..df773910d 100644
--- a/compiler/rustc_builtin_macros/src/format_foreign/printf/tests.rs
+++ b/compiler/rustc_builtin_macros/src/format_foreign/printf/tests.rs
@@ -100,7 +100,7 @@ fn test_iter() {
let s = "The %d'th word %% is: `%.*s` %!\n";
let subs: Vec<_> = iter_subs(s, 0).map(|sub| sub.translate().ok()).collect();
assert_eq!(
- subs.iter().map(|ms| ms.as_ref().map(|s| &s[..])).collect::<Vec<_>>(),
+ subs.iter().map(Option::as_deref).collect::<Vec<_>>(),
vec![Some("{}"), None, Some("{:.*}"), None]
);
}
diff --git a/compiler/rustc_builtin_macros/src/format_foreign/shell/tests.rs b/compiler/rustc_builtin_macros/src/format_foreign/shell/tests.rs
index f5f82732f..93a7afcd6 100644
--- a/compiler/rustc_builtin_macros/src/format_foreign/shell/tests.rs
+++ b/compiler/rustc_builtin_macros/src/format_foreign/shell/tests.rs
@@ -39,7 +39,7 @@ fn test_iter() {
let s = "The $0'th word $$ is: `$WORD` $!\n";
let subs: Vec<_> = iter_subs(s, 0).map(|sub| sub.translate().ok()).collect();
assert_eq!(
- subs.iter().map(|ms| ms.as_ref().map(|s| &s[..])).collect::<Vec<_>>(),
+ subs.iter().map(Option::as_deref).collect::<Vec<_>>(),
vec![Some("{0}"), None, Some("{WORD}")]
);
}
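Both test files swap the closure `|ms| ms.as_ref().map(|s| &s[..])` for `Option::as_deref`. A small standalone check of the equivalence (the sample data is made up):

    fn main() {
        let subs: Vec<Option<String>> = vec![Some("{}".to_string()), None];

        // Old spelling: borrow the Option, then re-slice the String into a &str.
        let old: Vec<Option<&str>> = subs.iter().map(|ms| ms.as_ref().map(|s| &s[..])).collect();
        // New spelling: Option::as_deref performs the same Deref-based conversion.
        let new: Vec<Option<&str>> = subs.iter().map(Option::as_deref).collect();

        assert_eq!(old, new);
    }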
diff --git a/compiler/rustc_builtin_macros/src/global_allocator.rs b/compiler/rustc_builtin_macros/src/global_allocator.rs
index 45b9b8ab6..41531580c 100644
--- a/compiler/rustc_builtin_macros/src/global_allocator.rs
+++ b/compiler/rustc_builtin_macros/src/global_allocator.rs
@@ -20,27 +20,23 @@ pub fn expand(
check_builtin_macro_attribute(ecx, meta_item, sym::global_allocator);
let orig_item = item.clone();
- let not_static = || {
- ecx.sess.parse_sess.span_diagnostic.span_err(item.span(), "allocators must be statics");
- vec![orig_item.clone()]
- };
// Allow using `#[global_allocator]` on an item statement
// FIXME - if we get deref patterns, use them to reduce duplication here
- let (item, is_stmt, ty_span) = match &item {
- Annotatable::Item(item) => match item.kind {
- ItemKind::Static(ref ty, ..) => (item, false, ecx.with_def_site_ctxt(ty.span)),
- _ => return not_static(),
- },
- Annotatable::Stmt(stmt) => match &stmt.kind {
- StmtKind::Item(item_) => match item_.kind {
- ItemKind::Static(ref ty, ..) => (item_, true, ecx.with_def_site_ctxt(ty.span)),
- _ => return not_static(),
- },
- _ => return not_static(),
- },
- _ => return not_static(),
- };
+ let (item, is_stmt, ty_span) =
+ if let Annotatable::Item(item) = &item
+ && let ItemKind::Static(ty, ..) = &item.kind
+ {
+ (item, false, ecx.with_def_site_ctxt(ty.span))
+ } else if let Annotatable::Stmt(stmt) = &item
+ && let StmtKind::Item(item) = &stmt.kind
+ && let ItemKind::Static(ty, ..) = &item.kind
+ {
+ (item, true, ecx.with_def_site_ctxt(ty.span))
+ } else {
+ ecx.sess.parse_sess.span_diagnostic.span_err(item.span(), "allocators must be statics");
+ return vec![orig_item.clone()]
+ };
// Generate a bunch of new items using the AllocFnFactory
let span = ecx.with_def_site_ctxt(item.span);
@@ -115,9 +111,7 @@ impl AllocFnFactory<'_, '_> {
}
fn attrs(&self) -> AttrVec {
- let special = sym::rustc_std_internal_symbol;
- let special = self.cx.meta_word(self.span, special);
- thin_vec![self.cx.attribute(special)]
+ thin_vec![self.cx.attr_word(sym::rustc_std_internal_symbol, self.span)]
}
fn arg_ty(
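The flattened control flow in `global_allocator::expand` uses `let`-chains (`if let … && let …`), which rustc's own sources can rely on but which still need a feature gate outside the compiler. A minimal sketch of the same shape under that assumption; the enum names below are stand-ins, not the real AST types:

    // Nightly-only: let-chains are unstable at the time of this patch.
    #![feature(let_chains)]

    #[allow(dead_code)]
    enum ItemKind { Static(u32), Other }
    enum Node { Item(ItemKind), Stmt(ItemKind) }

    fn static_ty(node: &Node) -> Option<(u32, bool)> {
        if let Node::Item(item) = node
            && let ItemKind::Static(ty) = item
        {
            Some((*ty, false))
        } else if let Node::Stmt(item) = node
            && let ItemKind::Static(ty) = item
        {
            Some((*ty, true))
        } else {
            None
        }
    }

    fn main() {
        assert_eq!(static_ty(&Node::Item(ItemKind::Static(1))), Some((1, false)));
        assert_eq!(static_ty(&Node::Stmt(ItemKind::Static(7))), Some((7, true)));
    }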
diff --git a/compiler/rustc_builtin_macros/src/lib.rs b/compiler/rustc_builtin_macros/src/lib.rs
index c7ea7de8f..75cfac723 100644
--- a/compiler/rustc_builtin_macros/src/lib.rs
+++ b/compiler/rustc_builtin_macros/src/lib.rs
@@ -25,6 +25,7 @@ use rustc_expand::base::{MacroExpanderFn, ResolverExpand, SyntaxExtensionKind};
use rustc_expand::proc_macro::BangProcMacro;
use rustc_span::symbol::sym;
+mod alloc_error_handler;
mod assert;
mod cfg;
mod cfg_accessible;
@@ -44,6 +45,7 @@ mod log_syntax;
mod source_util;
mod test;
mod trace_macros;
+mod type_ascribe;
mod util;
pub mod asm;
@@ -91,13 +93,16 @@ pub fn register_builtin_macros(resolver: &mut dyn ResolverExpand) {
unreachable: edition_panic::expand_unreachable,
stringify: source_util::expand_stringify,
trace_macros: trace_macros::expand_trace_macros,
+ type_ascribe: type_ascribe::expand_type_ascribe,
}
register_attr! {
+ alloc_error_handler: alloc_error_handler::expand,
bench: test::expand_bench,
cfg_accessible: cfg_accessible::Expander,
cfg_eval: cfg_eval::expand,
- derive: derive::Expander,
+ derive: derive::Expander(false),
+ derive_const: derive::Expander(true),
global_allocator: global_allocator::expand,
test: test::expand_test,
test_case: test::expand_test_case,
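`derive::Expander` now carries a flag and is registered twice, as `derive` and `derive_const`; together with the `is_const`/`constness` plumbing earlier in this patch, the second registration is what backs the nightly `derive_const` attribute. A hedged usage sketch; the exact feature gates are an assumption based on this patch series:

    // Nightly-only sketch.
    #![feature(derive_const)]
    #![feature(const_trait_impl)]

    #[derive_const(PartialEq)]
    pub struct Point {
        x: i32,
        y: i32,
    }

    fn main() {
        // With is_const == true the expander emits roughly
        //     impl const PartialEq for Point { ... }
        // instead of a plain impl, so the comparison is also usable in const contexts.
        assert!(Point { x: 1, y: 2 } == Point { x: 1, y: 2 });
    }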
diff --git a/compiler/rustc_builtin_macros/src/proc_macro_harness.rs b/compiler/rustc_builtin_macros/src/proc_macro_harness.rs
index ebe1c3663..ece660cf6 100644
--- a/compiler/rustc_builtin_macros/src/proc_macro_harness.rs
+++ b/compiler/rustc_builtin_macros/src/proc_macro_harness.rs
@@ -1,6 +1,3 @@
-use std::mem;
-
-use rustc_ast::attr;
use rustc_ast::ptr::P;
use rustc_ast::visit::{self, Visitor};
use rustc_ast::{self as ast, NodeId};
@@ -13,6 +10,7 @@ use rustc_span::source_map::SourceMap;
use rustc_span::symbol::{kw, sym, Ident, Symbol};
use rustc_span::{Span, DUMMY_SP};
use smallvec::smallvec;
+use std::mem;
struct ProcMacroDerive {
id: NodeId,
@@ -365,14 +363,8 @@ fn mk_decls(cx: &mut ExtCtxt<'_>, macros: &[ProcMacro]) -> P<ast::Item> {
cx.expr_array_ref(span, decls),
)
.map(|mut i| {
- let attr = cx.meta_word(span, sym::rustc_proc_macro_decls);
- i.attrs.push(cx.attribute(attr));
-
- let deprecated_attr = attr::mk_nested_word_item(Ident::new(sym::deprecated, span));
- let allow_deprecated_attr =
- attr::mk_list_item(Ident::new(sym::allow, span), vec![deprecated_attr]);
- i.attrs.push(cx.attribute(allow_deprecated_attr));
-
+ i.attrs.push(cx.attr_word(sym::rustc_proc_macro_decls, span));
+ i.attrs.push(cx.attr_nested_word(sym::allow, sym::deprecated, span));
i
});
diff --git a/compiler/rustc_builtin_macros/src/source_util.rs b/compiler/rustc_builtin_macros/src/source_util.rs
index d78bbc3c9..0b17e92ef 100644
--- a/compiler/rustc_builtin_macros/src/source_util.rs
+++ b/compiler/rustc_builtin_macros/src/source_util.rs
@@ -164,7 +164,7 @@ pub fn expand_include<'cx>(
Box::new(ExpandResult { p, node_id: cx.current_expansion.lint_node_id })
}
-// include_str! : read the given file, insert it as a literal string expr
+/// `include_str!`: read the given file, insert it as a literal string expr
pub fn expand_include_str(
cx: &mut ExtCtxt<'_>,
sp: Span,
@@ -216,7 +216,10 @@ pub fn expand_include_bytes(
}
};
match cx.source_map().load_binary_file(&file) {
- Ok(bytes) => base::MacEager::expr(cx.expr_byte_str(sp, bytes)),
+ Ok(bytes) => {
+ let expr = cx.expr(sp, ast::ExprKind::IncludedBytes(bytes.into()));
+ base::MacEager::expr(expr)
+ }
Err(e) => {
cx.span_err(sp, &format!("couldn't read {}: {}", file.display(), e));
DummyResult::any(sp)
diff --git a/compiler/rustc_builtin_macros/src/standard_library_imports.rs b/compiler/rustc_builtin_macros/src/standard_library_imports.rs
index 49ef538f0..f73f20c84 100644
--- a/compiler/rustc_builtin_macros/src/standard_library_imports.rs
+++ b/compiler/rustc_builtin_macros/src/standard_library_imports.rs
@@ -52,7 +52,7 @@ pub fn inject(
cx.item(
span,
ident,
- thin_vec![cx.attribute(cx.meta_word(span, sym::macro_use))],
+ thin_vec![cx.attr_word(sym::macro_use, span)],
ast::ItemKind::ExternCrate(None),
),
);
@@ -79,7 +79,7 @@ pub fn inject(
let use_item = cx.item(
span,
Ident::empty(),
- thin_vec![cx.attribute(cx.meta_word(span, sym::prelude_import))],
+ thin_vec![cx.attr_word(sym::prelude_import, span)],
ast::ItemKind::Use(ast::UseTree {
prefix: cx.path(span, import_path),
kind: ast::UseTreeKind::Glob,
diff --git a/compiler/rustc_builtin_macros/src/test.rs b/compiler/rustc_builtin_macros/src/test.rs
index fee5d04cd..f5f02fc77 100644
--- a/compiler/rustc_builtin_macros/src/test.rs
+++ b/compiler/rustc_builtin_macros/src/test.rs
@@ -2,7 +2,6 @@
/// Ideally, this code would be in libtest but for efficiency and error messages it lives here.
use crate::util::{check_builtin_macro_attribute, warn_on_duplicate_attribute};
use rustc_ast as ast;
-use rustc_ast::attr;
use rustc_ast::ptr::P;
use rustc_ast_pretty::pprust;
use rustc_errors::Applicability;
@@ -13,13 +12,13 @@ use rustc_span::Span;
use std::iter;
use thin_vec::thin_vec;
-// #[test_case] is used by custom test authors to mark tests
-// When building for test, it needs to make the item public and gensym the name
-// Otherwise, we'll omit the item. This behavior means that any item annotated
-// with #[test_case] is never addressable.
-//
-// We mark item with an inert attribute "rustc_test_marker" which the test generation
-// logic will pick up on.
+/// #[test_case] is used by custom test authors to mark tests
+/// When building for test, it needs to make the item public and gensym the name
+/// Otherwise, we'll omit the item. This behavior means that any item annotated
+/// with #[test_case] is never addressable.
+///
+/// We mark the item with an inert attribute "rustc_test_marker" which the test generation
+/// logic will pick up on.
pub fn expand_test_case(
ecx: &mut ExtCtxt<'_>,
attr_sp: Span,
@@ -47,11 +46,7 @@ pub fn expand_test_case(
tokens: None,
};
item.ident.span = item.ident.span.with_ctxt(sp.ctxt());
- item.attrs.push(ecx.attribute(attr::mk_name_value_item_str(
- Ident::new(sym::rustc_test_marker, sp),
- test_path_symbol,
- sp,
- )));
+ item.attrs.push(ecx.attr_name_value_str(sym::rustc_test_marker, test_path_symbol, sp));
item
});
@@ -112,7 +107,7 @@ pub fn expand_test_or_bench(
};
// Note: non-associated fn items are already handled by `expand_test_or_bench`
- if !matches!(item.kind, ast::ItemKind::Fn(_)) {
+ let ast::ItemKind::Fn(fn_) = &item.kind else {
let diag = &cx.sess.parse_sess.span_diagnostic;
let msg = "the `#[test]` attribute may only be used on a non-associated function";
let mut err = match item.kind {
@@ -130,7 +125,7 @@ pub fn expand_test_or_bench(
.emit();
return vec![Annotatable::Item(item)];
- }
+ };
// has_*_signature will report any errors in the type so compilation
// will fail. We shouldn't try to expand in this case because the errors
@@ -141,12 +136,14 @@ pub fn expand_test_or_bench(
return vec![Annotatable::Item(item)];
}
- let (sp, attr_sp) = (cx.with_def_site_ctxt(item.span), cx.with_def_site_ctxt(attr_sp));
+ let sp = cx.with_def_site_ctxt(item.span);
+ let ret_ty_sp = cx.with_def_site_ctxt(fn_.sig.decl.output.span());
+ let attr_sp = cx.with_def_site_ctxt(attr_sp);
let test_id = Ident::new(sym::test, attr_sp);
// creates test::$name
- let test_path = |name| cx.path(sp, vec![test_id, Ident::from_str_and_span(name, sp)]);
+ let test_path = |name| cx.path(ret_ty_sp, vec![test_id, Ident::from_str_and_span(name, sp)]);
// creates test::ShouldPanic::$name
let should_panic_path = |name| {
@@ -192,7 +189,7 @@ pub fn expand_test_or_bench(
vec![
// super::$test_fn(b)
cx.expr_call(
- sp,
+ ret_ty_sp,
cx.expr_path(cx.path(sp, vec![item.ident])),
vec![cx.expr_ident(sp, b)],
),
@@ -216,7 +213,11 @@ pub fn expand_test_or_bench(
cx.expr_path(test_path("assert_test_result")),
vec![
// $test_fn()
- cx.expr_call(sp, cx.expr_path(cx.path(sp, vec![item.ident])), vec![]), // )
+ cx.expr_call(
+ ret_ty_sp,
+ cx.expr_path(cx.path(sp, vec![item.ident])),
+ vec![],
+ ), // )
],
), // }
), // )
@@ -235,16 +236,9 @@ pub fn expand_test_or_bench(
Ident::new(item.ident.name, sp),
thin_vec![
// #[cfg(test)]
- cx.attribute(attr::mk_list_item(
- Ident::new(sym::cfg, attr_sp),
- vec![attr::mk_nested_word_item(Ident::new(sym::test, attr_sp))],
- )),
+ cx.attr_nested_word(sym::cfg, sym::test, attr_sp),
// #[rustc_test_marker = "test_case_sort_key"]
- cx.attribute(attr::mk_name_value_item_str(
- Ident::new(sym::rustc_test_marker, attr_sp),
- test_path_symbol,
- attr_sp,
- )),
+ cx.attr_name_value_str(sym::rustc_test_marker, test_path_symbol, attr_sp),
]
.into(),
// const $ident: test::TestDescAndFn =
@@ -472,61 +466,67 @@ fn test_type(cx: &ExtCtxt<'_>) -> TestType {
fn has_test_signature(cx: &ExtCtxt<'_>, i: &ast::Item) -> bool {
let has_should_panic_attr = cx.sess.contains_name(&i.attrs, sym::should_panic);
let sd = &cx.sess.parse_sess.span_diagnostic;
- if let ast::ItemKind::Fn(box ast::Fn { ref sig, ref generics, .. }) = i.kind {
- if let ast::Unsafe::Yes(span) = sig.header.unsafety {
- sd.struct_span_err(i.span, "unsafe functions cannot be used for tests")
- .span_label(span, "`unsafe` because of this")
- .emit();
- return false;
- }
- if let ast::Async::Yes { span, .. } = sig.header.asyncness {
- sd.struct_span_err(i.span, "async functions cannot be used for tests")
- .span_label(span, "`async` because of this")
- .emit();
- return false;
- }
-
- // If the termination trait is active, the compiler will check that the output
- // type implements the `Termination` trait as `libtest` enforces that.
- let has_output = match sig.decl.output {
- ast::FnRetTy::Default(..) => false,
- ast::FnRetTy::Ty(ref t) if t.kind.is_unit() => false,
- _ => true,
- };
-
- if !sig.decl.inputs.is_empty() {
- sd.span_err(i.span, "functions used as tests can not have any arguments");
- return false;
- }
+ match &i.kind {
+ ast::ItemKind::Fn(box ast::Fn { sig, generics, .. }) => {
+ if let ast::Unsafe::Yes(span) = sig.header.unsafety {
+ sd.struct_span_err(i.span, "unsafe functions cannot be used for tests")
+ .span_label(span, "`unsafe` because of this")
+ .emit();
+ return false;
+ }
+ if let ast::Async::Yes { span, .. } = sig.header.asyncness {
+ sd.struct_span_err(i.span, "async functions cannot be used for tests")
+ .span_label(span, "`async` because of this")
+ .emit();
+ return false;
+ }
- match (has_output, has_should_panic_attr) {
- (true, true) => {
- sd.span_err(i.span, "functions using `#[should_panic]` must return `()`");
- false
+ // If the termination trait is active, the compiler will check that the output
+ // type implements the `Termination` trait as `libtest` enforces that.
+ let has_output = match &sig.decl.output {
+ ast::FnRetTy::Default(..) => false,
+ ast::FnRetTy::Ty(t) if t.kind.is_unit() => false,
+ _ => true,
+ };
+
+ if !sig.decl.inputs.is_empty() {
+ sd.span_err(i.span, "functions used as tests can not have any arguments");
+ return false;
}
- (true, false) => {
- if !generics.params.is_empty() {
- sd.span_err(i.span, "functions used as tests must have signature fn() -> ()");
+
+ match (has_output, has_should_panic_attr) {
+ (true, true) => {
+ sd.span_err(i.span, "functions using `#[should_panic]` must return `()`");
false
- } else {
- true
}
+ (true, false) => {
+ if !generics.params.is_empty() {
+ sd.span_err(
+ i.span,
+ "functions used as tests must have signature fn() -> ()",
+ );
+ false
+ } else {
+ true
+ }
+ }
+ (false, _) => true,
}
- (false, _) => true,
}
- } else {
- // should be unreachable because `is_test_fn_item` should catch all non-fn items
- false
+ _ => {
+ // should be unreachable because `is_test_fn_item` should catch all non-fn items
+ debug_assert!(false);
+ false
+ }
}
}
fn has_bench_signature(cx: &ExtCtxt<'_>, i: &ast::Item) -> bool {
- let has_sig = if let ast::ItemKind::Fn(box ast::Fn { ref sig, .. }) = i.kind {
+ let has_sig = match &i.kind {
// N.B., inadequate check, but we're running
// well before resolve, can't get too deep.
- sig.decl.inputs.len() == 1
- } else {
- false
+ ast::ItemKind::Fn(box ast::Fn { sig, .. }) => sig.decl.inputs.len() == 1,
+ _ => false,
};
if !has_sig {
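The `if !matches!(…)` check becomes a `let … else` so that `fn_` stays bound for the `ret_ty_sp` span computed just below it. A minimal standalone sketch of the same shape (types invented for illustration):

    enum ItemKind { Fn(String), Struct }

    fn describe(kind: &ItemKind) -> String {
        // Bind the payload on the happy path; the else arm must diverge.
        let ItemKind::Fn(name) = kind else {
            return "not a function".to_string();
        };
        format!("function `{name}`")
    }

    fn main() {
        assert_eq!(describe(&ItemKind::Struct), "not a function");
        assert_eq!(describe(&ItemKind::Fn("main".into())), "function `main`");
    }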
diff --git a/compiler/rustc_builtin_macros/src/test_harness.rs b/compiler/rustc_builtin_macros/src/test_harness.rs
index b8b8351a3..ad8871080 100644
--- a/compiler/rustc_builtin_macros/src/test_harness.rs
+++ b/compiler/rustc_builtin_macros/src/test_harness.rs
@@ -34,8 +34,8 @@ struct TestCtxt<'a> {
test_runner: Option<ast::Path>,
}
-// Traverse the crate, collecting all the test functions, eliding any
-// existing main functions, and synthesizing a main test harness
+/// Traverse the crate, collecting all the test functions, eliding any
+/// existing main functions, and synthesizing a main test harness
pub fn inject(sess: &Session, resolver: &mut dyn ResolverExpand, krate: &mut ast::Crate) {
let span_diagnostic = sess.diagnostic();
let panic_strategy = sess.panic_strategy();
@@ -131,8 +131,9 @@ impl<'a> MutVisitor for TestHarnessGenerator<'a> {
// We don't want to recurse into anything other than mods, since
// mods or tests inside of functions will break things
- if let ast::ItemKind::Mod(_, ModKind::Loaded(.., ref spans)) = item.kind {
- let ast::ModSpans { inner_span: span, inject_use_span: _ } = *spans;
+ if let ast::ItemKind::Mod(_, ModKind::Loaded(.., ast::ModSpans { inner_span: span, .. })) =
+ item.kind
+ {
let prev_tests = mem::take(&mut self.tests);
noop_visit_item_kind(&mut item.kind, self);
self.add_test_cases(item.id, span, prev_tests);
@@ -185,13 +186,12 @@ impl<'a> MutVisitor for EntryPointCleaner<'a> {
let item = match entry_point_type(self.sess, &item, self.depth) {
EntryPointType::MainNamed | EntryPointType::RustcMainAttr | EntryPointType::Start => {
item.map(|ast::Item { id, ident, attrs, kind, vis, span, tokens }| {
- let allow_ident = Ident::new(sym::allow, self.def_site);
- let dc_nested =
- attr::mk_nested_word_item(Ident::new(sym::dead_code, self.def_site));
- let allow_dead_code_item = attr::mk_list_item(allow_ident, vec![dc_nested]);
- let allow_dead_code = attr::mk_attr_outer(
+ let allow_dead_code = attr::mk_attr_nested_word(
&self.sess.parse_sess.attr_id_generator,
- allow_dead_code_item,
+ ast::AttrStyle::Outer,
+ sym::allow,
+ sym::dead_code,
+ self.def_site,
);
let attrs = attrs
.into_iter()
@@ -309,8 +309,7 @@ fn mk_main(cx: &mut TestCtxt<'_>) -> P<ast::Item> {
);
// #[rustc_main]
- let main_meta = ecx.meta_word(sp, sym::rustc_main);
- let main_attr = ecx.attribute(main_meta);
+ let main_attr = ecx.attr_word(sym::rustc_main, sp);
// pub fn main() { ... }
let main_ret_ty = ecx.ty(sp, ast::TyKind::Tup(vec![]));
diff --git a/compiler/rustc_builtin_macros/src/type_ascribe.rs b/compiler/rustc_builtin_macros/src/type_ascribe.rs
new file mode 100644
index 000000000..72b85af14
--- /dev/null
+++ b/compiler/rustc_builtin_macros/src/type_ascribe.rs
@@ -0,0 +1,35 @@
+use rustc_ast::ptr::P;
+use rustc_ast::tokenstream::TokenStream;
+use rustc_ast::{token, Expr, ExprKind, Ty};
+use rustc_errors::PResult;
+use rustc_expand::base::{self, DummyResult, ExtCtxt, MacEager};
+use rustc_span::Span;
+
+pub fn expand_type_ascribe(
+ cx: &mut ExtCtxt<'_>,
+ span: Span,
+ tts: TokenStream,
+) -> Box<dyn base::MacResult + 'static> {
+ let (expr, ty) = match parse_ascribe(cx, tts) {
+ Ok(parsed) => parsed,
+ Err(mut err) => {
+ err.emit();
+ return DummyResult::any(span);
+ }
+ };
+
+ let asc_expr = cx.expr(span, ExprKind::Type(expr, ty));
+
+ return MacEager::expr(asc_expr);
+}
+
+fn parse_ascribe<'a>(cx: &mut ExtCtxt<'a>, stream: TokenStream) -> PResult<'a, (P<Expr>, P<Ty>)> {
+ let mut parser = cx.new_parser_from_tts(stream);
+
+ let expr = parser.parse_expr()?;
+ parser.expect(&token::Comma)?;
+
+ let ty = parser.parse_ty()?;
+
+ Ok((expr, ty))
+}
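The new builtin parses an expression, a comma, then a type, and wraps them in `ExprKind::Type`. A hedged usage sketch, assuming the macro is reachable by its bare name once the type ascription feature gate is enabled (the gate name is an assumption, and this builtin is nightly-only):

    #![feature(type_ascription)]

    fn main() {
        // Expands to an ExprKind::Type node: "the expression 1 + 1, ascribed to i32".
        let x = type_ascribe!(1 + 1, i32);
        assert_eq!(x, 2);
    }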
diff --git a/compiler/rustc_builtin_macros/src/util.rs b/compiler/rustc_builtin_macros/src/util.rs
index 527fe50ef..83812631c 100644
--- a/compiler/rustc_builtin_macros/src/util.rs
+++ b/compiler/rustc_builtin_macros/src/util.rs
@@ -1,4 +1,4 @@
-use rustc_ast::{Attribute, MetaItem};
+use rustc_ast::{AttrStyle, Attribute, MetaItem};
use rustc_expand::base::{Annotatable, ExtCtxt};
use rustc_feature::AttributeTemplate;
use rustc_lint_defs::builtin::DUPLICATE_MACRO_ATTRIBUTES;
@@ -8,8 +8,13 @@ use rustc_span::Symbol;
pub fn check_builtin_macro_attribute(ecx: &ExtCtxt<'_>, meta_item: &MetaItem, name: Symbol) {
// All the built-in macro attributes are "words" at the moment.
let template = AttributeTemplate { word: true, ..Default::default() };
- let attr = ecx.attribute(meta_item.clone());
- validate_attr::check_builtin_attribute(&ecx.sess.parse_sess, &attr, name, template);
+ validate_attr::check_builtin_meta_item(
+ &ecx.sess.parse_sess,
+ &meta_item,
+ AttrStyle::Outer,
+ name,
+ template,
+ );
}
/// Emit a warning if the item is annotated with the given attribute. This is used to diagnose when
diff --git a/compiler/rustc_codegen_cranelift/Cargo.lock b/compiler/rustc_codegen_cranelift/Cargo.lock
index 3fa9d56cd..3b406036c 100644
--- a/compiler/rustc_codegen_cranelift/Cargo.lock
+++ b/compiler/rustc_codegen_cranelift/Cargo.lock
@@ -20,11 +20,6 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c794e162a5eff65c72ef524dfe393eb923c354e350bb78b9c7383df13f3bc142"
[[package]]
-name = "ar"
-version = "0.8.0"
-source = "git+https://github.com/bjorn3/rust-ar.git?branch=do_not_remove_cg_clif_ranlib#de9ab0e56bf3a208381d342aa5b60f9ff2891648"
-
-[[package]]
name = "arrayvec"
version = "0.7.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
@@ -324,7 +319,6 @@ dependencies = [
name = "rustc_codegen_cranelift"
version = "0.1.0"
dependencies = [
- "ar",
"cranelift-codegen",
"cranelift-frontend",
"cranelift-jit",
diff --git a/compiler/rustc_codegen_cranelift/Cargo.toml b/compiler/rustc_codegen_cranelift/Cargo.toml
index 09cf5b4a1..0fdd5de11 100644
--- a/compiler/rustc_codegen_cranelift/Cargo.toml
+++ b/compiler/rustc_codegen_cranelift/Cargo.toml
@@ -18,7 +18,6 @@ target-lexicon = "0.12.0"
gimli = { version = "0.26.0", default-features = false, features = ["write"]}
object = { version = "0.29.0", default-features = false, features = ["std", "read_core", "write", "archive", "coff", "elf", "macho", "pe"] }
-ar = { git = "https://github.com/bjorn3/rust-ar.git", branch = "do_not_remove_cg_clif_ranlib" }
indexmap = "1.9.1"
libloading = { version = "0.7.3", optional = true }
once_cell = "1.10.0"
diff --git a/compiler/rustc_codegen_cranelift/build_system/utils.rs b/compiler/rustc_codegen_cranelift/build_system/utils.rs
index 48da64906..c627af4e6 100644
--- a/compiler/rustc_codegen_cranelift/build_system/utils.rs
+++ b/compiler/rustc_codegen_cranelift/build_system/utils.rs
@@ -104,5 +104,5 @@ pub(crate) fn copy_dir_recursively(from: &Path, to: &Path) {
}
pub(crate) fn is_ci() -> bool {
- env::var("CI").as_ref().map(|val| &**val) == Ok("true")
+ env::var("CI").as_deref() == Ok("true")
}
diff --git a/compiler/rustc_codegen_cranelift/src/abi/mod.rs b/compiler/rustc_codegen_cranelift/src/abi/mod.rs
index 99059e788..1e22537c2 100644
--- a/compiler/rustc_codegen_cranelift/src/abi/mod.rs
+++ b/compiler/rustc_codegen_cranelift/src/abi/mod.rs
@@ -22,7 +22,19 @@ fn clif_sig_from_fn_abi<'tcx>(
default_call_conv: CallConv,
fn_abi: &FnAbi<'tcx, Ty<'tcx>>,
) -> Signature {
- let call_conv = match fn_abi.conv {
+ let call_conv = conv_to_call_conv(fn_abi.conv, default_call_conv);
+
+ let inputs = fn_abi.args.iter().map(|arg_abi| arg_abi.get_abi_param(tcx).into_iter()).flatten();
+
+ let (return_ptr, returns) = fn_abi.ret.get_abi_return(tcx);
+ // Sometimes the first param is a pointer to the place where the return value needs to be stored.
+ let params: Vec<_> = return_ptr.into_iter().chain(inputs).collect();
+
+ Signature { params, returns, call_conv }
+}
+
+pub(crate) fn conv_to_call_conv(c: Conv, default_call_conv: CallConv) -> CallConv {
+ match c {
Conv::Rust | Conv::C => default_call_conv,
Conv::RustCold => CallConv::Cold,
Conv::X86_64SysV => CallConv::SystemV,
@@ -38,15 +50,8 @@ fn clif_sig_from_fn_abi<'tcx>(
| Conv::X86VectorCall
| Conv::AmdGpuKernel
| Conv::AvrInterrupt
- | Conv::AvrNonBlockingInterrupt => todo!("{:?}", fn_abi.conv),
- };
- let inputs = fn_abi.args.iter().map(|arg_abi| arg_abi.get_abi_param(tcx).into_iter()).flatten();
-
- let (return_ptr, returns) = fn_abi.ret.get_abi_return(tcx);
- // Sometimes the first param is an pointer to the place where the return value needs to be stored.
- let params: Vec<_> = return_ptr.into_iter().chain(inputs).collect();
-
- Signature { params, returns, call_conv }
+ | Conv::AvrNonBlockingInterrupt => todo!("{:?}", c),
+ }
}
pub(crate) fn get_function_sig<'tcx>(
diff --git a/compiler/rustc_codegen_cranelift/src/allocator.rs b/compiler/rustc_codegen_cranelift/src/allocator.rs
index bad8a87b9..12bb00d34 100644
--- a/compiler/rustc_codegen_cranelift/src/allocator.rs
+++ b/compiler/rustc_codegen_cranelift/src/allocator.rs
@@ -5,6 +5,7 @@ use crate::prelude::*;
use rustc_ast::expand::allocator::{AllocatorKind, AllocatorTy, ALLOCATOR_METHODS};
use rustc_session::config::OomStrategy;
+use rustc_span::symbol::sym;
/// Returns whether an allocator shim was created
pub(crate) fn codegen(
@@ -23,7 +24,7 @@ pub(crate) fn codegen(
module,
unwind_context,
kind,
- tcx.lang_items().oom().is_some(),
+ tcx.alloc_error_handler_kind(()).unwrap(),
tcx.sess.opts.unstable_opts.oom,
);
true
@@ -36,7 +37,7 @@ fn codegen_inner(
module: &mut impl Module,
unwind_context: &mut UnwindContext,
kind: AllocatorKind,
- has_alloc_error_handler: bool,
+ alloc_error_handler_kind: AllocatorKind,
oom_strategy: OomStrategy,
) {
let usize_ty = module.target_config().pointer_type();
@@ -108,12 +109,12 @@ fn codegen_inner(
returns: vec![],
};
- let callee_name = if has_alloc_error_handler { "__rg_oom" } else { "__rdl_oom" };
+ let callee_name = alloc_error_handler_kind.fn_name(sym::oom);
let func_id =
module.declare_function("__rust_alloc_error_handler", Linkage::Export, &sig).unwrap();
- let callee_func_id = module.declare_function(callee_name, Linkage::Import, &sig).unwrap();
+ let callee_func_id = module.declare_function(&callee_name, Linkage::Import, &sig).unwrap();
let mut ctx = Context::new();
ctx.func.signature = sig;
diff --git a/compiler/rustc_codegen_cranelift/src/archive.rs b/compiler/rustc_codegen_cranelift/src/archive.rs
index f2e3bf16e..5a29bc18d 100644
--- a/compiler/rustc_codegen_cranelift/src/archive.rs
+++ b/compiler/rustc_codegen_cranelift/src/archive.rs
@@ -1,35 +1,15 @@
-//! Creation of ar archives like for the lib and staticlib crate type
-
-use std::collections::BTreeMap;
-use std::fs::File;
-use std::io::{self, Read, Seek};
use std::path::{Path, PathBuf};
-use rustc_codegen_ssa::back::archive::{ArchiveBuilder, ArchiveBuilderBuilder};
+use rustc_codegen_ssa::back::archive::{
+ get_native_object_symbols, ArArchiveBuilder, ArchiveBuilder, ArchiveBuilderBuilder,
+};
use rustc_session::Session;
-use object::read::archive::ArchiveFile;
-use object::{Object, ObjectSymbol, ReadCache};
-
-#[derive(Debug)]
-enum ArchiveEntry {
- FromArchive { archive_index: usize, file_range: (u64, u64) },
- File(PathBuf),
-}
-
pub(crate) struct ArArchiveBuilderBuilder;
impl ArchiveBuilderBuilder for ArArchiveBuilderBuilder {
fn new_archive_builder<'a>(&self, sess: &'a Session) -> Box<dyn ArchiveBuilder<'a> + 'a> {
- Box::new(ArArchiveBuilder {
- sess,
- use_gnu_style_archive: sess.target.archive_format == "gnu",
- // FIXME fix builtin ranlib on macOS
- no_builtin_ranlib: sess.target.is_like_osx,
-
- src_archives: vec![],
- entries: vec![],
- })
+ Box::new(ArArchiveBuilder::new(sess, get_native_object_symbols))
}
fn create_dll_import_lib(
@@ -40,200 +20,6 @@ impl ArchiveBuilderBuilder for ArArchiveBuilderBuilder {
_tmpdir: &Path,
_is_direct_dependency: bool,
) -> PathBuf {
- bug!("creating dll imports is not supported");
- }
-}
-
-pub(crate) struct ArArchiveBuilder<'a> {
- sess: &'a Session,
- use_gnu_style_archive: bool,
- no_builtin_ranlib: bool,
-
- src_archives: Vec<File>,
- // Don't use `HashMap` here, as the order is important. `rust.metadata.bin` must always be at
- // the end of an archive for linkers to not get confused.
- entries: Vec<(Vec<u8>, ArchiveEntry)>,
-}
-
-impl<'a> ArchiveBuilder<'a> for ArArchiveBuilder<'a> {
- fn add_file(&mut self, file: &Path) {
- self.entries.push((
- file.file_name().unwrap().to_str().unwrap().to_string().into_bytes(),
- ArchiveEntry::File(file.to_owned()),
- ));
- }
-
- fn add_archive(
- &mut self,
- archive_path: &Path,
- mut skip: Box<dyn FnMut(&str) -> bool + 'static>,
- ) -> std::io::Result<()> {
- let read_cache = ReadCache::new(std::fs::File::open(&archive_path)?);
- let archive = ArchiveFile::parse(&read_cache).unwrap();
- let archive_index = self.src_archives.len();
-
- for entry in archive.members() {
- let entry = entry.map_err(|err| io::Error::new(io::ErrorKind::InvalidData, err))?;
- let file_name = String::from_utf8(entry.name().to_vec())
- .map_err(|err| io::Error::new(io::ErrorKind::InvalidData, err))?;
- if !skip(&file_name) {
- self.entries.push((
- file_name.into_bytes(),
- ArchiveEntry::FromArchive { archive_index, file_range: entry.file_range() },
- ));
- }
- }
-
- self.src_archives.push(read_cache.into_inner());
- Ok(())
- }
-
- fn build(mut self: Box<Self>, output: &Path) -> bool {
- enum BuilderKind {
- Bsd(ar::Builder<File>),
- Gnu(ar::GnuBuilder<File>),
- }
-
- let sess = self.sess;
-
- let mut symbol_table = BTreeMap::new();
-
- let mut entries = Vec::new();
-
- for (mut entry_name, entry) in self.entries {
- // FIXME only read the symbol table of the object files to avoid having to keep all
- // object files in memory at once, or read them twice.
- let data = match entry {
- ArchiveEntry::FromArchive { archive_index, file_range } => {
- // FIXME read symbols from symtab
- let src_read_cache = &mut self.src_archives[archive_index];
-
- src_read_cache.seek(io::SeekFrom::Start(file_range.0)).unwrap();
- let mut data = std::vec::from_elem(0, usize::try_from(file_range.1).unwrap());
- src_read_cache.read_exact(&mut data).unwrap();
-
- data
- }
- ArchiveEntry::File(file) => std::fs::read(file).unwrap_or_else(|err| {
- sess.fatal(&format!(
- "error while reading object file during archive building: {}",
- err
- ));
- }),
- };
-
- if !self.no_builtin_ranlib {
- if symbol_table.contains_key(&entry_name) {
- // The ar crate can't handle creating a symbol table in case of multiple archive
- // members with the same name. Work around this by prepending a number until we
- // get a unique name.
- for i in 1.. {
- let new_name = format!("{}_", i)
- .into_bytes()
- .into_iter()
- .chain(entry_name.iter().copied())
- .collect::<Vec<_>>();
- if !symbol_table.contains_key(&new_name) {
- entry_name = new_name;
- break;
- }
- }
- }
-
- match object::File::parse(&*data) {
- Ok(object) => {
- symbol_table.insert(
- entry_name.to_vec(),
- object
- .symbols()
- .filter_map(|symbol| {
- if symbol.is_undefined() || symbol.is_local() {
- None
- } else {
- symbol.name().map(|name| name.as_bytes().to_vec()).ok()
- }
- })
- .collect::<Vec<_>>(),
- );
- }
- Err(err) => {
- let err = err.to_string();
- if err == "Unknown file magic" {
- // Not an object file; skip it.
- } else if object::read::archive::ArchiveFile::parse(&*data).is_ok() {
- // Nested archive file; skip it.
- } else {
- sess.fatal(&format!(
- "error parsing `{}` during archive creation: {}",
- String::from_utf8_lossy(&entry_name),
- err
- ));
- }
- }
- }
- }
-
- entries.push((entry_name, data));
- }
-
- let mut builder = if self.use_gnu_style_archive {
- BuilderKind::Gnu(
- ar::GnuBuilder::new(
- File::create(output).unwrap_or_else(|err| {
- sess.fatal(&format!(
- "error opening destination during archive building: {}",
- err
- ));
- }),
- entries.iter().map(|(name, _)| name.clone()).collect(),
- ar::GnuSymbolTableFormat::Size32,
- symbol_table,
- )
- .unwrap(),
- )
- } else {
- BuilderKind::Bsd(
- ar::Builder::new(
- File::create(output).unwrap_or_else(|err| {
- sess.fatal(&format!(
- "error opening destination during archive building: {}",
- err
- ));
- }),
- symbol_table,
- )
- .unwrap(),
- )
- };
-
- let any_members = !entries.is_empty();
-
- // Add all files
- for (entry_name, data) in entries.into_iter() {
- let header = ar::Header::new(entry_name, data.len() as u64);
- match builder {
- BuilderKind::Bsd(ref mut builder) => builder.append(&header, &mut &*data).unwrap(),
- BuilderKind::Gnu(ref mut builder) => builder.append(&header, &mut &*data).unwrap(),
- }
- }
-
- // Finalize archive
- std::mem::drop(builder);
-
- if self.no_builtin_ranlib {
- let ranlib = crate::toolchain::get_toolchain_binary(self.sess, "ranlib");
-
- // Run ranlib to be able to link the archive
- let status = std::process::Command::new(ranlib)
- .arg(output)
- .status()
- .expect("Couldn't run ranlib");
-
- if !status.success() {
- self.sess.fatal(&format!("Ranlib exited with code {:?}", status.code()));
- }
- }
-
- any_members
+ unimplemented!("creating dll imports is not yet supported");
}
}
diff --git a/compiler/rustc_codegen_cranelift/src/config.rs b/compiler/rustc_codegen_cranelift/src/config.rs
index e59a0cb0a..45522fb1a 100644
--- a/compiler/rustc_codegen_cranelift/src/config.rs
+++ b/compiler/rustc_codegen_cranelift/src/config.rs
@@ -2,7 +2,7 @@ use std::env;
use std::str::FromStr;
fn bool_env_var(key: &str) -> bool {
- env::var(key).as_ref().map(|val| &**val) == Ok("1")
+ env::var(key).as_deref() == Ok("1")
}
/// The mode to use for compilation.
diff --git a/compiler/rustc_codegen_cranelift/src/constant.rs b/compiler/rustc_codegen_cranelift/src/constant.rs
index 148b66d95..a6bde8840 100644
--- a/compiler/rustc_codegen_cranelift/src/constant.rs
+++ b/compiler/rustc_codegen_cranelift/src/constant.rs
@@ -38,22 +38,8 @@ impl ConstantCx {
pub(crate) fn check_constants(fx: &mut FunctionCx<'_, '_, '_>) -> bool {
let mut all_constants_ok = true;
for constant in &fx.mir.required_consts {
- let unevaluated = match fx.monomorphize(constant.literal) {
- ConstantKind::Ty(_) => unreachable!(),
- ConstantKind::Unevaluated(uv, _) => uv,
- ConstantKind::Val(..) => continue,
- };
-
- if let Err(err) = fx.tcx.const_eval_resolve(ParamEnv::reveal_all(), unevaluated, None) {
+ if eval_mir_constant(fx, constant).is_none() {
all_constants_ok = false;
- match err {
- ErrorHandled::Reported(_) | ErrorHandled::Linted => {
- fx.tcx.sess.span_err(constant.span, "erroneous constant encountered");
- }
- ErrorHandled::TooGeneric => {
- span_bug!(constant.span, "codegen encountered polymorphic constant: {:?}", err);
- }
- }
}
}
all_constants_ok
@@ -80,15 +66,15 @@ pub(crate) fn codegen_tls_ref<'tcx>(
}
pub(crate) fn eval_mir_constant<'tcx>(
- fx: &mut FunctionCx<'_, '_, 'tcx>,
+ fx: &FunctionCx<'_, '_, 'tcx>,
constant: &Constant<'tcx>,
-) -> (ConstValue<'tcx>, Ty<'tcx>) {
+) -> Option<(ConstValue<'tcx>, Ty<'tcx>)> {
let constant_kind = fx.monomorphize(constant.literal);
let uv = match constant_kind {
ConstantKind::Ty(const_) => match const_.kind() {
ty::ConstKind::Unevaluated(uv) => uv.expand(),
ty::ConstKind::Value(val) => {
- return (fx.tcx.valtree_to_const_val((const_.ty(), val)), const_.ty());
+ return Some((fx.tcx.valtree_to_const_val((const_.ty(), val)), const_.ty()));
}
err => span_bug!(
constant.span,
@@ -102,22 +88,31 @@ pub(crate) fn eval_mir_constant<'tcx>(
span_bug!(constant.span, "MIR constant refers to static");
}
ConstantKind::Unevaluated(uv, _) => uv,
- ConstantKind::Val(val, _) => return (val, constant_kind.ty()),
+ ConstantKind::Val(val, _) => return Some((val, constant_kind.ty())),
};
- (
- fx.tcx.const_eval_resolve(ty::ParamEnv::reveal_all(), uv, None).unwrap_or_else(|_err| {
- span_bug!(constant.span, "erroneous constant not captured by required_consts");
- }),
- constant_kind.ty(),
- )
+ let val = fx
+ .tcx
+ .const_eval_resolve(ty::ParamEnv::reveal_all(), uv, None)
+ .map_err(|err| match err {
+ ErrorHandled::Reported(_) => {
+ fx.tcx.sess.span_err(constant.span, "erroneous constant encountered");
+ }
+ ErrorHandled::TooGeneric => {
+ span_bug!(constant.span, "codegen encountered polymorphic constant: {:?}", err);
+ }
+ })
+ .ok();
+ val.map(|val| (val, constant_kind.ty()))
}
pub(crate) fn codegen_constant_operand<'tcx>(
fx: &mut FunctionCx<'_, '_, 'tcx>,
constant: &Constant<'tcx>,
) -> CValue<'tcx> {
- let (const_val, ty) = eval_mir_constant(fx, constant);
+ let (const_val, ty) = eval_mir_constant(fx, constant).unwrap_or_else(|| {
+ span_bug!(constant.span, "erroneous constant not captured by required_consts")
+ });
codegen_const_value(fx, const_val, ty)
}
@@ -128,7 +123,7 @@ pub(crate) fn codegen_const_value<'tcx>(
ty: Ty<'tcx>,
) -> CValue<'tcx> {
let layout = fx.layout_of(ty);
- assert!(!layout.is_unsized(), "sized const value");
+ assert!(layout.is_sized(), "unsized const value");
if layout.is_zst() {
return CValue::by_ref(crate::Pointer::dangling(layout.align.pref), layout);
@@ -262,9 +257,9 @@ pub(crate) fn data_id_for_alloc_id(
mutability: rustc_hir::Mutability,
) -> DataId {
cx.todo.push(TodoItem::Alloc(alloc_id));
- *cx.anon_allocs.entry(alloc_id).or_insert_with(|| {
- module.declare_anonymous_data(mutability == rustc_hir::Mutability::Mut, false).unwrap()
- })
+ *cx.anon_allocs
+ .entry(alloc_id)
+ .or_insert_with(|| module.declare_anonymous_data(mutability.is_mut(), false).unwrap())
}
fn data_id_for_static(
@@ -348,12 +343,7 @@ fn define_all_allocs(tcx: TyCtxt<'_>, module: &mut dyn Module, cx: &mut Constant
}
};
let data_id = *cx.anon_allocs.entry(alloc_id).or_insert_with(|| {
- module
- .declare_anonymous_data(
- alloc.inner().mutability == rustc_hir::Mutability::Mut,
- false,
- )
- .unwrap()
+ module.declare_anonymous_data(alloc.inner().mutability.is_mut(), false).unwrap()
});
(data_id, alloc, None)
}
@@ -398,7 +388,7 @@ fn define_all_allocs(tcx: TyCtxt<'_>, module: &mut dyn Module, cx: &mut Constant
let bytes = alloc.inspect_with_uninit_and_ptr_outside_interpreter(0..alloc.len()).to_vec();
data_ctx.define(bytes.into_boxed_slice());
- for &(offset, alloc_id) in alloc.provenance().iter() {
+ for &(offset, alloc_id) in alloc.provenance().ptrs().iter() {
let addend = {
let endianness = tcx.data_layout.endian;
let offset = offset.bytes() as usize;
@@ -431,7 +421,7 @@ fn define_all_allocs(tcx: TyCtxt<'_>, module: &mut dyn Module, cx: &mut Constant
{
tcx.sess.fatal(&format!(
"Allocation {:?} contains reference to TLS value {:?}",
- alloc, def_id
+ alloc_id, def_id
));
}
@@ -453,20 +443,13 @@ fn define_all_allocs(tcx: TyCtxt<'_>, module: &mut dyn Module, cx: &mut Constant
assert!(cx.todo.is_empty(), "{:?}", cx.todo);
}
+/// Used only for intrinsic implementations that need a compile-time constant
pub(crate) fn mir_operand_get_const_val<'tcx>(
fx: &FunctionCx<'_, '_, 'tcx>,
operand: &Operand<'tcx>,
) -> Option<ConstValue<'tcx>> {
match operand {
- Operand::Constant(const_) => match fx.monomorphize(const_.literal) {
- ConstantKind::Ty(const_) => Some(
- const_.eval_for_mir(fx.tcx, ParamEnv::reveal_all()).try_to_value(fx.tcx).unwrap(),
- ),
- ConstantKind::Val(val, _) => Some(val),
- ConstantKind::Unevaluated(uv, _) => {
- Some(fx.tcx.const_eval_resolve(ParamEnv::reveal_all(), uv, None).unwrap())
- }
- },
+ Operand::Constant(const_) => Some(eval_mir_constant(fx, const_).unwrap().0),
// FIXME(rust-lang/rust#85105): Casts like `IMM8 as u32` result in the const being stored
// inside a temporary before being passed to the intrinsic requiring the const argument.
// This code tries to find a single constant defining definition of the referenced local.
diff --git a/compiler/rustc_codegen_cranelift/src/debuginfo/mod.rs b/compiler/rustc_codegen_cranelift/src/debuginfo/mod.rs
index c55db2017..2ba012a77 100644
--- a/compiler/rustc_codegen_cranelift/src/debuginfo/mod.rs
+++ b/compiler/rustc_codegen_cranelift/src/debuginfo/mod.rs
@@ -59,7 +59,7 @@ impl DebugContext {
let producer = format!(
"cg_clif (rustc {}, cranelift {})",
- rustc_interface::util::version_str().unwrap_or("unknown version"),
+ rustc_interface::util::rustc_version_str().unwrap_or("unknown version"),
cranelift_codegen::VERSION,
);
let comp_dir = tcx
diff --git a/compiler/rustc_codegen_cranelift/src/main_shim.rs b/compiler/rustc_codegen_cranelift/src/main_shim.rs
index cae6312a6..f7434633e 100644
--- a/compiler/rustc_codegen_cranelift/src/main_shim.rs
+++ b/compiler/rustc_codegen_cranelift/src/main_shim.rs
@@ -63,10 +63,14 @@ pub(crate) fn maybe_create_entry_wrapper(
AbiParam::new(m.target_config().pointer_type()),
],
returns: vec![AbiParam::new(m.target_config().pointer_type() /*isize*/)],
- call_conv: CallConv::triple_default(m.isa().triple()),
+ call_conv: crate::conv_to_call_conv(
+ tcx.sess.target.options.entry_abi,
+ CallConv::triple_default(m.isa().triple()),
+ ),
};
- let cmain_func_id = m.declare_function("main", Linkage::Export, &cmain_sig).unwrap();
+ let entry_name = tcx.sess.target.options.entry_name.as_ref();
+ let cmain_func_id = m.declare_function(entry_name, Linkage::Export, &cmain_sig).unwrap();
let instance = Instance::mono(tcx, rust_main_def_id).polymorphize(tcx);
diff --git a/compiler/rustc_codegen_cranelift/src/value_and_place.rs b/compiler/rustc_codegen_cranelift/src/value_and_place.rs
index c3dfbd372..34746ff6b 100644
--- a/compiler/rustc_codegen_cranelift/src/value_and_place.rs
+++ b/compiler/rustc_codegen_cranelift/src/value_and_place.rs
@@ -19,7 +19,7 @@ fn codegen_field<'tcx>(
};
if let Some(extra) = extra {
- if !field_layout.is_unsized() {
+ if field_layout.is_sized() {
return simple(fx);
}
match field_layout.ty.kind() {
@@ -108,8 +108,8 @@ impl<'tcx> CValue<'tcx> {
}
// FIXME remove
- // Forces the data value of a dyn* value to the stack and returns a pointer to it as well as the
- // vtable pointer.
+ /// Forces the data value of a dyn* value to the stack and returns a pointer to it as well as the
+ /// vtable pointer.
pub(crate) fn dyn_star_force_data_on_stack(
self,
fx: &mut FunctionCx<'_, '_, 'tcx>,
@@ -364,7 +364,7 @@ impl<'tcx> CPlace<'tcx> {
fx: &mut FunctionCx<'_, '_, 'tcx>,
layout: TyAndLayout<'tcx>,
) -> CPlace<'tcx> {
- assert!(!layout.is_unsized());
+ assert!(layout.is_sized());
if layout.size.bytes() == 0 {
return CPlace {
inner: CPlaceInner::Addr(Pointer::dangling(layout.align.pref), None),
@@ -825,7 +825,7 @@ impl<'tcx> CPlace<'tcx> {
fx: &FunctionCx<'_, '_, 'tcx>,
variant: VariantIdx,
) -> Self {
- assert!(!self.layout().is_unsized());
+ assert!(self.layout().is_sized());
let layout = self.layout().for_variant(fx, variant);
CPlace { inner: self.inner, layout }
}
diff --git a/compiler/rustc_codegen_gcc/Cargo.lock b/compiler/rustc_codegen_gcc/Cargo.lock
index 6df210247..1cb219e12 100644
--- a/compiler/rustc_codegen_gcc/Cargo.lock
+++ b/compiler/rustc_codegen_gcc/Cargo.lock
@@ -12,12 +12,6 @@ dependencies = [
]
[[package]]
-name = "ar"
-version = "0.8.0"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "450575f58f7bee32816abbff470cbc47797397c2a81e0eaced4b98436daf52e1"
-
-[[package]]
name = "bitflags"
version = "1.3.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
@@ -212,10 +206,8 @@ dependencies = [
name = "rustc_codegen_gcc"
version = "0.1.0"
dependencies = [
- "ar",
"gccjit",
"lang_tester",
- "target-lexicon",
"tempfile",
]
@@ -229,12 +221,6 @@ dependencies = [
]
[[package]]
-name = "target-lexicon"
-version = "0.10.0"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "ab0e7238dcc7b40a7be719a25365910f6807bd864f4cce6b2e6b873658e2b19d"
-
-[[package]]
name = "tempfile"
version = "3.2.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
diff --git a/compiler/rustc_codegen_gcc/Cargo.toml b/compiler/rustc_codegen_gcc/Cargo.toml
index 211d19a8d..1f3da2f79 100644
--- a/compiler/rustc_codegen_gcc/Cargo.toml
+++ b/compiler/rustc_codegen_gcc/Cargo.toml
@@ -27,10 +27,6 @@ gccjit = { git = "https://github.com/antoyo/gccjit.rs" }
# Local copy.
#gccjit = { path = "../gccjit.rs" }
-target-lexicon = "0.10.0"
-
-ar = "0.8.0"
-
[dev-dependencies]
lang_tester = "0.3.9"
tempfile = "3.1.0"
diff --git a/compiler/rustc_codegen_gcc/example/alloc_system.rs b/compiler/rustc_codegen_gcc/example/alloc_system.rs
index 89661918d..fd01fcf1f 100644
--- a/compiler/rustc_codegen_gcc/example/alloc_system.rs
+++ b/compiler/rustc_codegen_gcc/example/alloc_system.rs
@@ -13,17 +13,17 @@
// The minimum alignment guaranteed by the architecture. This value is used to
// add fast paths for low alignment values.
-#[cfg(all(any(target_arch = "x86",
+#[cfg(any(target_arch = "x86",
target_arch = "arm",
target_arch = "mips",
target_arch = "powerpc",
- target_arch = "powerpc64")))]
+ target_arch = "powerpc64"))]
const MIN_ALIGN: usize = 8;
-#[cfg(all(any(target_arch = "x86_64",
+#[cfg(any(target_arch = "x86_64",
target_arch = "aarch64",
target_arch = "mips64",
target_arch = "s390x",
- target_arch = "sparc64")))]
+ target_arch = "sparc64"))]
const MIN_ALIGN: usize = 16;
pub struct System;
diff --git a/compiler/rustc_codegen_gcc/src/allocator.rs b/compiler/rustc_codegen_gcc/src/allocator.rs
index 58efb81e8..e2c9ffe9c 100644
--- a/compiler/rustc_codegen_gcc/src/allocator.rs
+++ b/compiler/rustc_codegen_gcc/src/allocator.rs
@@ -7,7 +7,7 @@ use rustc_span::symbol::sym;
use crate::GccContext;
-pub(crate) unsafe fn codegen(tcx: TyCtxt<'_>, mods: &mut GccContext, _module_name: &str, kind: AllocatorKind, has_alloc_error_handler: bool) {
+pub(crate) unsafe fn codegen(tcx: TyCtxt<'_>, mods: &mut GccContext, _module_name: &str, kind: AllocatorKind, alloc_error_handler_kind: AllocatorKind) {
let context = &mods.context;
let usize =
match tcx.sess.target.pointer_width {
@@ -90,14 +90,7 @@ pub(crate) unsafe fn codegen(tcx: TyCtxt<'_>, mods: &mut GccContext, _module_nam
.collect();
let func = context.new_function(None, FunctionType::Exported, void, &args, name, false);
- let kind =
- if has_alloc_error_handler {
- AllocatorKind::Global
- }
- else {
- AllocatorKind::Default
- };
- let callee = kind.fn_name(sym::oom);
+ let callee = alloc_error_handler_kind.fn_name(sym::oom);
let args: Vec<_> = types.iter().enumerate()
.map(|(index, typ)| context.new_parameter(None, *typ, &format!("param{}", index)))
.collect();
diff --git a/compiler/rustc_codegen_gcc/src/archive.rs b/compiler/rustc_codegen_gcc/src/archive.rs
index f18ae7ea5..11fa074f5 100644
--- a/compiler/rustc_codegen_gcc/src/archive.rs
+++ b/compiler/rustc_codegen_gcc/src/archive.rs
@@ -1,44 +1,17 @@
-use std::fs::File;
use std::path::{Path, PathBuf};
-use crate::errors::RanlibFailure;
-
-use rustc_codegen_ssa::back::archive::{ArchiveBuilder, ArchiveBuilderBuilder};
+use rustc_codegen_ssa::back::archive::{
+ get_native_object_symbols, ArArchiveBuilder, ArchiveBuilder, ArchiveBuilderBuilder,
+};
use rustc_session::Session;
use rustc_session::cstore::DllImport;
-struct ArchiveConfig<'a> {
- sess: &'a Session,
- use_native_ar: bool,
- use_gnu_style_archive: bool,
-}
-
-#[derive(Debug)]
-enum ArchiveEntry {
- FromArchive {
- archive_index: usize,
- entry_index: usize,
- },
- File(PathBuf),
-}
-
-pub struct ArArchiveBuilderBuilder;
+pub(crate) struct ArArchiveBuilderBuilder;
impl ArchiveBuilderBuilder for ArArchiveBuilderBuilder {
fn new_archive_builder<'a>(&self, sess: &'a Session) -> Box<dyn ArchiveBuilder<'a> + 'a> {
- let config = ArchiveConfig {
- sess,
- use_native_ar: false,
- // FIXME test for linux and System V derivatives instead
- use_gnu_style_archive: sess.target.options.archive_format == "gnu",
- };
-
- Box::new(ArArchiveBuilder {
- config,
- src_archives: vec![],
- entries: vec![],
- })
+ Box::new(ArArchiveBuilder::new(sess, get_native_object_symbols))
}
fn create_dll_import_lib(
@@ -49,144 +22,6 @@ impl ArchiveBuilderBuilder for ArArchiveBuilderBuilder {
_tmpdir: &Path,
_is_direct_dependency: bool,
) -> PathBuf {
- unimplemented!();
- }
-}
-
-pub struct ArArchiveBuilder<'a> {
- config: ArchiveConfig<'a>,
- src_archives: Vec<(PathBuf, ar::Archive<File>)>,
- // Don't use `HashMap` here, as the order is important. `rust.metadata.bin` must always be at
- // the end of an archive for linkers to not get confused.
- entries: Vec<(String, ArchiveEntry)>,
-}
-
-impl<'a> ArchiveBuilder<'a> for ArArchiveBuilder<'a> {
- fn add_file(&mut self, file: &Path) {
- self.entries.push((
- file.file_name().unwrap().to_str().unwrap().to_string(),
- ArchiveEntry::File(file.to_owned()),
- ));
- }
-
- fn add_archive(
- &mut self,
- archive_path: &Path,
- mut skip: Box<dyn FnMut(&str) -> bool + 'static>,
- ) -> std::io::Result<()> {
- let mut archive = ar::Archive::new(std::fs::File::open(&archive_path)?);
- let archive_index = self.src_archives.len();
-
- let mut i = 0;
- while let Some(entry) = archive.next_entry() {
- let entry = entry?;
- let file_name = String::from_utf8(entry.header().identifier().to_vec())
- .map_err(|err| std::io::Error::new(std::io::ErrorKind::InvalidData, err))?;
- if !skip(&file_name) {
- self.entries
- .push((file_name, ArchiveEntry::FromArchive { archive_index, entry_index: i }));
- }
- i += 1;
- }
-
- self.src_archives.push((archive_path.to_owned(), archive));
- Ok(())
- }
-
- fn build(mut self: Box<Self>, output: &Path) -> bool {
- use std::process::Command;
-
- fn add_file_using_ar(archive: &Path, file: &Path) {
- Command::new("ar")
- .arg("r") // add or replace file
- .arg("-c") // silence created file message
- .arg(archive)
- .arg(&file)
- .status()
- .unwrap();
- }
-
- enum BuilderKind<'a> {
- Bsd(ar::Builder<File>),
- Gnu(ar::GnuBuilder<File>),
- NativeAr(&'a Path),
- }
-
- let mut builder = if self.config.use_native_ar {
- BuilderKind::NativeAr(output)
- } else if self.config.use_gnu_style_archive {
- BuilderKind::Gnu(ar::GnuBuilder::new(
- File::create(output).unwrap(),
- self.entries
- .iter()
- .map(|(name, _)| name.as_bytes().to_vec())
- .collect(),
- ))
- } else {
- BuilderKind::Bsd(ar::Builder::new(File::create(output).unwrap()))
- };
-
- let any_members = !self.entries.is_empty();
-
- // Add all files
- for (entry_name, entry) in self.entries.into_iter() {
- match entry {
- ArchiveEntry::FromArchive {
- archive_index,
- entry_index,
- } => {
- let (ref src_archive_path, ref mut src_archive) =
- self.src_archives[archive_index];
- let entry = src_archive.jump_to_entry(entry_index).unwrap();
- let header = entry.header().clone();
-
- match builder {
- BuilderKind::Bsd(ref mut builder) => {
- builder.append(&header, entry).unwrap()
- }
- BuilderKind::Gnu(ref mut builder) => {
- builder.append(&header, entry).unwrap()
- }
- BuilderKind::NativeAr(archive_file) => {
- Command::new("ar")
- .arg("x")
- .arg(src_archive_path)
- .arg(&entry_name)
- .status()
- .unwrap();
- add_file_using_ar(archive_file, Path::new(&entry_name));
- std::fs::remove_file(entry_name).unwrap();
- }
- }
- }
- ArchiveEntry::File(file) =>
- match builder {
- BuilderKind::Bsd(ref mut builder) => {
- builder
- .append_file(entry_name.as_bytes(), &mut File::open(file).expect("file for bsd builder"))
- .unwrap()
- },
- BuilderKind::Gnu(ref mut builder) => {
- builder
- .append_file(entry_name.as_bytes(), &mut File::open(&file).expect(&format!("file {:?} for gnu builder", file)))
- .unwrap()
- },
- BuilderKind::NativeAr(archive_file) => add_file_using_ar(archive_file, &file),
- },
- }
- }
-
- // Finalize archive
- std::mem::drop(builder);
-
- // Run ranlib to be able to link the archive
- let status =
- std::process::Command::new("ranlib").arg(output).status().expect("Couldn't run ranlib");
-
- if !status.success() {
- self.config.sess.emit_fatal(RanlibFailure::new(status.code()));
- }
-
- any_members
+ unimplemented!("creating dll imports is not yet supported");
}
}
diff --git a/compiler/rustc_codegen_gcc/src/builder.rs b/compiler/rustc_codegen_gcc/src/builder.rs
index a314b7cc2..782f68566 100644
--- a/compiler/rustc_codegen_gcc/src/builder.rs
+++ b/compiler/rustc_codegen_gcc/src/builder.rs
@@ -755,11 +755,11 @@ impl<'a, 'gcc, 'tcx> BuilderMethods<'a, 'tcx> for Builder<'a, 'gcc, 'tcx> {
OperandRef { val, layout: place.layout }
}
- fn write_operand_repeatedly(mut self, cg_elem: OperandRef<'tcx, RValue<'gcc>>, count: u64, dest: PlaceRef<'tcx, RValue<'gcc>>) -> Self {
+ fn write_operand_repeatedly(&mut self, cg_elem: OperandRef<'tcx, RValue<'gcc>>, count: u64, dest: PlaceRef<'tcx, RValue<'gcc>>) {
let zero = self.const_usize(0);
let count = self.const_usize(count);
- let start = dest.project_index(&mut self, zero).llval;
- let end = dest.project_index(&mut self, count).llval;
+ let start = dest.project_index(self, zero).llval;
+ let end = dest.project_index(self, count).llval;
let header_bb = self.append_sibling_block("repeat_loop_header");
let body_bb = self.append_sibling_block("repeat_loop_body");
@@ -778,14 +778,13 @@ impl<'a, 'gcc, 'tcx> BuilderMethods<'a, 'tcx> for Builder<'a, 'gcc, 'tcx> {
self.switch_to_block(body_bb);
let align = dest.align.restrict_for_offset(dest.layout.field(self.cx(), 0).size);
- cg_elem.val.store(&mut self, PlaceRef::new_sized_aligned(current_val, cg_elem.layout, align));
+ cg_elem.val.store(self, PlaceRef::new_sized_aligned(current_val, cg_elem.layout, align));
let next = self.inbounds_gep(self.backend_type(cg_elem.layout), current.to_rvalue(), &[self.const_usize(1)]);
self.llbb().add_assignment(None, current, next);
self.br(header_bb);
self.switch_to_block(next_bb);
- self
}
fn range_metadata(&mut self, _load: RValue<'gcc>, _range: WrappingRange) {
diff --git a/compiler/rustc_codegen_gcc/src/consts.rs b/compiler/rustc_codegen_gcc/src/consts.rs
index 81f533288..ea8ab7611 100644
--- a/compiler/rustc_codegen_gcc/src/consts.rs
+++ b/compiler/rustc_codegen_gcc/src/consts.rs
@@ -8,13 +8,11 @@ use rustc_middle::mir::mono::MonoItem;
use rustc_middle::ty::{self, Instance, Ty};
use rustc_middle::ty::layout::LayoutOf;
use rustc_middle::mir::interpret::{self, ConstAllocation, ErrorHandled, Scalar as InterpScalar, read_target_uint};
-use rustc_span::Span;
use rustc_span::def_id::DefId;
use rustc_target::abi::{self, Align, HasDataLayout, Primitive, Size, WrappingRange};
use crate::base;
use crate::context::CodegenCx;
-use crate::errors::LinkageConstOrMutType;
use crate::type_of::LayoutGccExt;
impl<'gcc, 'tcx> CodegenCx<'gcc, 'tcx> {
@@ -239,12 +237,12 @@ impl<'gcc, 'tcx> CodegenCx<'gcc, 'tcx> {
}
Node::ForeignItem(&hir::ForeignItem {
- span,
+ span: _,
kind: hir::ForeignItemKind::Static(..),
..
}) => {
let fn_attrs = self.tcx.codegen_fn_attrs(def_id);
- check_and_apply_linkage(&self, &fn_attrs, ty, sym, span)
+ check_and_apply_linkage(&self, &fn_attrs, ty, sym)
}
item => bug!("get_static: expected static, found {:?}", item),
@@ -257,8 +255,7 @@ impl<'gcc, 'tcx> CodegenCx<'gcc, 'tcx> {
//debug!("get_static: sym={} item_attr={:?}", sym, self.tcx.item_attrs(def_id));
let attrs = self.tcx.codegen_fn_attrs(def_id);
- let span = self.tcx.def_span(def_id);
- let global = check_and_apply_linkage(&self, &attrs, ty, sym, span);
+ let global = check_and_apply_linkage(&self, &attrs, ty, sym);
let needs_dll_storage_attr = false; // TODO(antoyo)
@@ -297,12 +294,12 @@ impl<'gcc, 'tcx> CodegenCx<'gcc, 'tcx> {
pub fn const_alloc_to_gcc<'gcc, 'tcx>(cx: &CodegenCx<'gcc, 'tcx>, alloc: ConstAllocation<'tcx>) -> RValue<'gcc> {
let alloc = alloc.inner();
- let mut llvals = Vec::with_capacity(alloc.provenance().len() + 1);
+ let mut llvals = Vec::with_capacity(alloc.provenance().ptrs().len() + 1);
let dl = cx.data_layout();
let pointer_size = dl.pointer_size.bytes() as usize;
let mut next_offset = 0;
- for &(offset, alloc_id) in alloc.provenance().iter() {
+ for &(offset, alloc_id) in alloc.provenance().ptrs().iter() {
let offset = offset.bytes();
assert_eq!(offset as usize as u64, offset);
let offset = offset as usize;
@@ -355,24 +352,12 @@ pub fn codegen_static_initializer<'gcc, 'tcx>(cx: &CodegenCx<'gcc, 'tcx>, def_id
Ok((const_alloc_to_gcc(cx, alloc), alloc))
}
-fn check_and_apply_linkage<'gcc, 'tcx>(cx: &CodegenCx<'gcc, 'tcx>, attrs: &CodegenFnAttrs, ty: Ty<'tcx>, sym: &str, span: Span) -> LValue<'gcc> {
+fn check_and_apply_linkage<'gcc, 'tcx>(cx: &CodegenCx<'gcc, 'tcx>, attrs: &CodegenFnAttrs, ty: Ty<'tcx>, sym: &str) -> LValue<'gcc> {
let is_tls = attrs.flags.contains(CodegenFnAttrFlags::THREAD_LOCAL);
let llty = cx.layout_of(ty).gcc_type(cx, true);
- if let Some(linkage) = attrs.linkage {
- // If this is a static with a linkage specified, then we need to handle
- // it a little specially. The typesystem prevents things like &T and
- // extern "C" fn() from being non-null, so we can't just declare a
- // static and call it a day. Some linkages (like weak) will make it such
- // that the static actually has a null value.
- let llty2 =
- if let ty::RawPtr(ref mt) = ty.kind() {
- cx.layout_of(mt.ty).gcc_type(cx, true)
- }
- else {
- cx.sess().emit_fatal(LinkageConstOrMutType { span: span })
- };
+ if let Some(linkage) = attrs.import_linkage {
// Declare a symbol `foo` with the desired linkage.
- let global1 = cx.declare_global_with_linkage(&sym, llty2, base::global_linkage_to_gcc(linkage));
+ let global1 = cx.declare_global_with_linkage(&sym, cx.type_i8(), base::global_linkage_to_gcc(linkage));
// Declare an internal global `extern_with_linkage_foo` which
// is initialized with the address of `foo`. If `foo` is
diff --git a/compiler/rustc_codegen_gcc/src/context.rs b/compiler/rustc_codegen_gcc/src/context.rs
index 62a61eb85..837708aeb 100644
--- a/compiler/rustc_codegen_gcc/src/context.rs
+++ b/compiler/rustc_codegen_gcc/src/context.rs
@@ -88,9 +88,9 @@ pub struct CodegenCx<'gcc, 'tcx> {
pub vtables: RefCell<FxHashMap<(Ty<'tcx>, Option<ty::PolyExistentialTraitRef<'tcx>>), RValue<'gcc>>>,
// TODO(antoyo): improve the SSA API to not require those.
- // Mapping from function pointer type to indexes of on stack parameters.
+ /// Mapping from function pointer type to indexes of on stack parameters.
pub on_stack_params: RefCell<FxHashMap<FunctionPtrType<'gcc>, FxHashSet<usize>>>,
- // Mapping from function to indexes of on stack parameters.
+ /// Mapping from function to indexes of on stack parameters.
pub on_stack_function_params: RefCell<FxHashMap<Function<'gcc>, FxHashSet<usize>>>,
/// Cache of emitted const globals (value -> global)
@@ -425,8 +425,9 @@ impl<'gcc, 'tcx> MiscMethods<'tcx> for CodegenCx<'gcc, 'tcx> {
}
fn declare_c_main(&self, fn_type: Self::Type) -> Option<Self::Function> {
- if self.get_declared_value("main").is_none() {
- Some(self.declare_cfn("main", fn_type))
+ let entry_name = self.sess().target.entry_name.as_ref();
+ if self.get_declared_value(entry_name).is_none() {
+ Some(self.declare_entry_fn(entry_name, fn_type, ()))
}
else {
// If the symbol already exists, it is an error: for example, the user wrote
diff --git a/compiler/rustc_codegen_gcc/src/debuginfo.rs b/compiler/rustc_codegen_gcc/src/debuginfo.rs
index 266759ed6..a81585d41 100644
--- a/compiler/rustc_codegen_gcc/src/debuginfo.rs
+++ b/compiler/rustc_codegen_gcc/src/debuginfo.rs
@@ -4,8 +4,9 @@ use rustc_codegen_ssa::traits::{DebugInfoBuilderMethods, DebugInfoMethods};
use rustc_middle::mir;
use rustc_middle::ty::{Instance, PolyExistentialTraitRef, Ty};
use rustc_span::{SourceFile, Span, Symbol};
-use rustc_target::abi::Size;
use rustc_target::abi::call::FnAbi;
+use rustc_target::abi::Size;
+use std::ops::Range;
use crate::builder::Builder;
use crate::context::CodegenCx;
@@ -13,7 +14,15 @@ use crate::context::CodegenCx;
impl<'a, 'gcc, 'tcx> DebugInfoBuilderMethods for Builder<'a, 'gcc, 'tcx> {
// FIXME(eddyb) find a common convention for all of the debuginfo-related
// names (choose between `dbg`, `debug`, `debuginfo`, `debug_info` etc.).
- fn dbg_var_addr(&mut self, _dbg_var: Self::DIVariable, _scope_metadata: Self::DIScope, _variable_alloca: Self::Value, _direct_offset: Size, _indirect_offsets: &[Size]) {
+ fn dbg_var_addr(
+ &mut self,
+ _dbg_var: Self::DIVariable,
+ _scope_metadata: Self::DIScope,
+ _variable_alloca: Self::Value,
+ _direct_offset: Size,
+ _indirect_offsets: &[Size],
+ _fragment: Option<Range<Size>>,
+ ) {
unimplemented!();
}
@@ -31,16 +40,31 @@ impl<'a, 'gcc, 'tcx> DebugInfoBuilderMethods for Builder<'a, 'gcc, 'tcx> {
}
impl<'gcc, 'tcx> DebugInfoMethods<'tcx> for CodegenCx<'gcc, 'tcx> {
- fn create_vtable_debuginfo(&self, _ty: Ty<'tcx>, _trait_ref: Option<PolyExistentialTraitRef<'tcx>>, _vtable: Self::Value) {
+ fn create_vtable_debuginfo(
+ &self,
+ _ty: Ty<'tcx>,
+ _trait_ref: Option<PolyExistentialTraitRef<'tcx>>,
+ _vtable: Self::Value,
+ ) {
// TODO(antoyo)
}
- fn create_function_debug_context(&self, _instance: Instance<'tcx>, _fn_abi: &FnAbi<'tcx, Ty<'tcx>>, _llfn: RValue<'gcc>, _mir: &mir::Body<'tcx>) -> Option<FunctionDebugContext<Self::DIScope, Self::DILocation>> {
+ fn create_function_debug_context(
+ &self,
+ _instance: Instance<'tcx>,
+ _fn_abi: &FnAbi<'tcx, Ty<'tcx>>,
+ _llfn: RValue<'gcc>,
+ _mir: &mir::Body<'tcx>,
+ ) -> Option<FunctionDebugContext<Self::DIScope, Self::DILocation>> {
// TODO(antoyo)
None
}
- fn extend_scope_to_file(&self, _scope_metadata: Self::DIScope, _file: &SourceFile) -> Self::DIScope {
+ fn extend_scope_to_file(
+ &self,
+ _scope_metadata: Self::DIScope,
+ _file: &SourceFile,
+ ) -> Self::DIScope {
unimplemented!();
}
@@ -48,15 +72,32 @@ impl<'gcc, 'tcx> DebugInfoMethods<'tcx> for CodegenCx<'gcc, 'tcx> {
// TODO(antoyo)
}
- fn create_dbg_var(&self, _variable_name: Symbol, _variable_type: Ty<'tcx>, _scope_metadata: Self::DIScope, _variable_kind: VariableKind, _span: Span) -> Self::DIVariable {
+ fn create_dbg_var(
+ &self,
+ _variable_name: Symbol,
+ _variable_type: Ty<'tcx>,
+ _scope_metadata: Self::DIScope,
+ _variable_kind: VariableKind,
+ _span: Span,
+ ) -> Self::DIVariable {
unimplemented!();
}
- fn dbg_scope_fn(&self, _instance: Instance<'tcx>, _fn_abi: &FnAbi<'tcx, Ty<'tcx>>, _maybe_definition_llfn: Option<RValue<'gcc>>) -> Self::DIScope {
+ fn dbg_scope_fn(
+ &self,
+ _instance: Instance<'tcx>,
+ _fn_abi: &FnAbi<'tcx, Ty<'tcx>>,
+ _maybe_definition_llfn: Option<RValue<'gcc>>,
+ ) -> Self::DIScope {
unimplemented!();
}
- fn dbg_loc(&self, _scope: Self::DIScope, _inlined_at: Option<Self::DILocation>, _span: Span) -> Self::DILocation {
+ fn dbg_loc(
+ &self,
+ _scope: Self::DIScope,
+ _inlined_at: Option<Self::DILocation>,
+ _span: Span,
+ ) -> Self::DILocation {
unimplemented!();
}
}
diff --git a/compiler/rustc_codegen_gcc/src/declare.rs b/compiler/rustc_codegen_gcc/src/declare.rs
index a619e2f77..eae77508c 100644
--- a/compiler/rustc_codegen_gcc/src/declare.rs
+++ b/compiler/rustc_codegen_gcc/src/declare.rs
@@ -65,13 +65,13 @@ impl<'gcc, 'tcx> CodegenCx<'gcc, 'tcx> {
global
}
- pub fn declare_cfn(&self, name: &str, _fn_type: Type<'gcc>) -> RValue<'gcc> {
+ pub fn declare_entry_fn(&self, name: &str, _fn_type: Type<'gcc>, callconv: () /*llvm::CCallConv*/) -> RValue<'gcc> {
// TODO(antoyo): use the fn_type parameter.
let const_string = self.context.new_type::<u8>().make_pointer().make_pointer();
let return_type = self.type_i32();
let variadic = false;
self.linkage.set(FunctionType::Exported);
- let func = declare_raw_fn(self, name, () /*llvm::CCallConv*/, return_type, &[self.type_i32(), const_string], variadic);
+ let func = declare_raw_fn(self, name, callconv, return_type, &[self.type_i32(), const_string], variadic);
// NOTE: it is needed to set the current_func here as well, because get_fn() is not called
// for the main function.
*self.current_func.borrow_mut() = Some(func);
diff --git a/compiler/rustc_codegen_gcc/src/errors.rs b/compiler/rustc_codegen_gcc/src/errors.rs
index 15ad90f90..d0ba7e247 100644
--- a/compiler/rustc_codegen_gcc/src/errors.rs
+++ b/compiler/rustc_codegen_gcc/src/errors.rs
@@ -17,18 +17,6 @@ impl IntoDiagnosticArg for ExitCode {
}
#[derive(Diagnostic)]
-#[diag(codegen_gcc_ranlib_failure)]
-pub(crate) struct RanlibFailure {
- exit_code: ExitCode,
-}
-
-impl RanlibFailure {
- pub fn new(exit_code: Option<i32>) -> Self {
- RanlibFailure { exit_code: ExitCode(exit_code) }
- }
-}
-
-#[derive(Diagnostic)]
#[diag(codegen_gcc_invalid_monomorphization_basic_integer, code = "E0511")]
pub(crate) struct InvalidMonomorphizationBasicInteger<'a> {
#[primary_span]
@@ -224,13 +212,6 @@ pub(crate) struct InvalidMonomorphizationUnsupportedOperation<'a> {
}
#[derive(Diagnostic)]
-#[diag(codegen_gcc_linkage_const_or_mut_type)]
-pub(crate) struct LinkageConstOrMutType {
- #[primary_span]
- pub span: Span
-}
-
-#[derive(Diagnostic)]
#[diag(codegen_gcc_lto_not_supported)]
pub(crate) struct LTONotSupported;
@@ -238,5 +219,5 @@ pub(crate) struct LTONotSupported;
#[diag(codegen_gcc_unwinding_inline_asm)]
pub(crate) struct UnwindingInlineAsm {
#[primary_span]
- pub span: Span
+ pub span: Span,
}
diff --git a/compiler/rustc_codegen_gcc/src/lib.rs b/compiler/rustc_codegen_gcc/src/lib.rs
index accd02ab0..b9600da5c 100644
--- a/compiler/rustc_codegen_gcc/src/lib.rs
+++ b/compiler/rustc_codegen_gcc/src/lib.rs
@@ -153,11 +153,11 @@ impl CodegenBackend for GccCodegenBackend {
}
impl ExtraBackendMethods for GccCodegenBackend {
- fn codegen_allocator<'tcx>(&self, tcx: TyCtxt<'tcx>, module_name: &str, kind: AllocatorKind, has_alloc_error_handler: bool) -> Self::Module {
+ fn codegen_allocator<'tcx>(&self, tcx: TyCtxt<'tcx>, module_name: &str, kind: AllocatorKind, alloc_error_handler_kind: AllocatorKind) -> Self::Module {
let mut mods = GccContext {
context: Context::default(),
};
- unsafe { allocator::codegen(tcx, &mut mods, module_name, kind, has_alloc_error_handler); }
+ unsafe { allocator::codegen(tcx, &mut mods, module_name, kind, alloc_error_handler_kind); }
mods
}
@@ -315,10 +315,10 @@ pub fn target_features(sess: &Session, allow_unstable: bool) -> Vec<Symbol> {
false
}
/*
- adx, aes, avx, avx2, avx512bf16, avx512bitalg, avx512bw, avx512cd, avx512dq, avx512er, avx512f, avx512gfni,
- avx512ifma, avx512pf, avx512vaes, avx512vbmi, avx512vbmi2, avx512vl, avx512vnni, avx512vp2intersect, avx512vpclmulqdq,
- avx512vpopcntdq, bmi1, bmi2, cmpxchg16b, ermsb, f16c, fma, fxsr, lzcnt, movbe, pclmulqdq, popcnt, rdrand, rdseed, rtm,
- sha, sse, sse2, sse3, sse4.1, sse4.2, sse4a, ssse3, tbm, xsave, xsavec, xsaveopt, xsaves
+ adx, aes, avx, avx2, avx512bf16, avx512bitalg, avx512bw, avx512cd, avx512dq, avx512er, avx512f, avx512ifma,
+ avx512pf, avx512vbmi, avx512vbmi2, avx512vl, avx512vnni, avx512vp2intersect, avx512vpopcntdq,
+ bmi1, bmi2, cmpxchg16b, ermsb, f16c, fma, fxsr, gfni, lzcnt, movbe, pclmulqdq, popcnt, rdrand, rdseed, rtm,
+ sha, sse, sse2, sse3, sse4.1, sse4.2, sse4a, ssse3, tbm, vaes, vpclmulqdq, xsave, xsavec, xsaveopt, xsaves
*/
//false
})
diff --git a/compiler/rustc_codegen_gcc/src/type_.rs b/compiler/rustc_codegen_gcc/src/type_.rs
index 68bdb8d4e..bdf7318ce 100644
--- a/compiler/rustc_codegen_gcc/src/type_.rs
+++ b/compiler/rustc_codegen_gcc/src/type_.rs
@@ -201,6 +201,27 @@ impl<'gcc, 'tcx> BaseTypeMethods<'tcx> for CodegenCx<'gcc, 'tcx> {
fn val_ty(&self, value: RValue<'gcc>) -> Type<'gcc> {
value.get_type()
}
+
+ fn type_array(&self, ty: Type<'gcc>, mut len: u64) -> Type<'gcc> {
+ if let Some(struct_type) = ty.is_struct() {
+ if struct_type.get_field_count() == 0 {
+ // NOTE: since gccjit only supports i32 for the array size and libcore's tests use a
+ // size of usize::MAX in test_binary_search, we work around this by setting the size to
+ // zero for ZSTs.
+ // FIXME(antoyo): fix gccjit API.
+ len = 0;
+ }
+ }
+
+ // NOTE: see note above. Some other test uses usize::MAX.
+ if len == u64::MAX {
+ len = 0;
+ }
+
+ let len: i32 = len.try_into().expect("array len");
+
+ self.context.new_array_type(None, ty, len)
+ }
}
impl<'gcc, 'tcx> CodegenCx<'gcc, 'tcx> {
@@ -227,27 +248,6 @@ impl<'gcc, 'tcx> CodegenCx<'gcc, 'tcx> {
self.context.new_opaque_struct_type(None, name)
}
- pub fn type_array(&self, ty: Type<'gcc>, mut len: u64) -> Type<'gcc> {
- if let Some(struct_type) = ty.is_struct() {
- if struct_type.get_field_count() == 0 {
- // NOTE: since gccjit only supports i32 for the array size and libcore's tests uses a
- // size of usize::MAX in test_binary_search, we workaround this by setting the size to
- // zero for ZSTs.
- // FIXME(antoyo): fix gccjit API.
- len = 0;
- }
- }
-
- // NOTE: see note above. Some other test uses usize::MAX.
- if len == u64::MAX {
- len = 0;
- }
-
- let len: i32 = len.try_into().expect("array len");
-
- self.context.new_array_type(None, ty, len)
- }
-
pub fn type_bool(&self) -> Type<'gcc> {
self.context.new_type::<bool>()
}
@@ -277,7 +277,7 @@ pub fn struct_fields<'gcc, 'tcx>(cx: &CodegenCx<'gcc, 'tcx>, layout: TyAndLayout
offset = target_offset + field.size;
prev_effective_align = effective_field_align;
}
- if !layout.is_unsized() && field_count > 0 {
+ if layout.is_sized() && field_count > 0 {
if offset > layout.size {
bug!("layout: {:#?} stride: {:?} offset: {:?}", layout, layout.size, offset);
}
diff --git a/compiler/rustc_codegen_llvm/Cargo.toml b/compiler/rustc_codegen_llvm/Cargo.toml
index 0ad39c240..93d6234dc 100644
--- a/compiler/rustc_codegen_llvm/Cargo.toml
+++ b/compiler/rustc_codegen_llvm/Cargo.toml
@@ -11,7 +11,7 @@ bitflags = "1.0"
cstr = "0.2"
libc = "0.2"
measureme = "10.0.0"
-object = { version = "0.29.0", default-features = false, features = ["std", "read_core", "archive", "coff", "elf", "macho", "pe"] }
+object = { version = "0.29.0", default-features = false, features = ["std", "read"] }
tracing = "0.1"
rustc_middle = { path = "../rustc_middle" }
rustc-demangle = "0.1.21"
diff --git a/compiler/rustc_codegen_llvm/src/abi.rs b/compiler/rustc_codegen_llvm/src/abi.rs
index d478efc86..a6fd2a7de 100644
--- a/compiler/rustc_codegen_llvm/src/abi.rs
+++ b/compiler/rustc_codegen_llvm/src/abi.rs
@@ -398,23 +398,7 @@ impl<'ll, 'tcx> FnAbiLlvmExt<'ll, 'tcx> for FnAbi<'tcx, Ty<'tcx>> {
}
fn llvm_cconv(&self) -> llvm::CallConv {
- match self.conv {
- Conv::C | Conv::Rust | Conv::CCmseNonSecureCall => llvm::CCallConv,
- Conv::RustCold => llvm::ColdCallConv,
- Conv::AmdGpuKernel => llvm::AmdGpuKernel,
- Conv::AvrInterrupt => llvm::AvrInterrupt,
- Conv::AvrNonBlockingInterrupt => llvm::AvrNonBlockingInterrupt,
- Conv::ArmAapcs => llvm::ArmAapcsCallConv,
- Conv::Msp430Intr => llvm::Msp430Intr,
- Conv::PtxKernel => llvm::PtxKernel,
- Conv::X86Fastcall => llvm::X86FastcallCallConv,
- Conv::X86Intr => llvm::X86_Intr,
- Conv::X86Stdcall => llvm::X86StdcallCallConv,
- Conv::X86ThisCall => llvm::X86_ThisCall,
- Conv::X86VectorCall => llvm::X86_VectorCall,
- Conv::X86_64SysV => llvm::X86_64_SysV,
- Conv::X86_64Win64 => llvm::X86_64_Win64,
- }
+ self.conv.into()
}
fn apply_attrs_llfn(&self, cx: &CodegenCx<'ll, 'tcx>, llfn: &'ll Value) {
@@ -596,3 +580,25 @@ impl<'tcx> AbiBuilderMethods<'tcx> for Builder<'_, '_, 'tcx> {
llvm::get_param(self.llfn(), index as c_uint)
}
}
+
+impl From<Conv> for llvm::CallConv {
+ fn from(conv: Conv) -> Self {
+ match conv {
+ Conv::C | Conv::Rust | Conv::CCmseNonSecureCall => llvm::CCallConv,
+ Conv::RustCold => llvm::ColdCallConv,
+ Conv::AmdGpuKernel => llvm::AmdGpuKernel,
+ Conv::AvrInterrupt => llvm::AvrInterrupt,
+ Conv::AvrNonBlockingInterrupt => llvm::AvrNonBlockingInterrupt,
+ Conv::ArmAapcs => llvm::ArmAapcsCallConv,
+ Conv::Msp430Intr => llvm::Msp430Intr,
+ Conv::PtxKernel => llvm::PtxKernel,
+ Conv::X86Fastcall => llvm::X86FastcallCallConv,
+ Conv::X86Intr => llvm::X86_Intr,
+ Conv::X86Stdcall => llvm::X86StdcallCallConv,
+ Conv::X86ThisCall => llvm::X86_ThisCall,
+ Conv::X86VectorCall => llvm::X86_VectorCall,
+ Conv::X86_64SysV => llvm::X86_64_SysV,
+ Conv::X86_64Win64 => llvm::X86_64_Win64,
+ }
+ }
+}
diff --git a/compiler/rustc_codegen_llvm/src/allocator.rs b/compiler/rustc_codegen_llvm/src/allocator.rs
index 72961ae88..fed56cdd4 100644
--- a/compiler/rustc_codegen_llvm/src/allocator.rs
+++ b/compiler/rustc_codegen_llvm/src/allocator.rs
@@ -15,7 +15,7 @@ pub(crate) unsafe fn codegen(
module_llvm: &mut ModuleLlvm,
module_name: &str,
kind: AllocatorKind,
- has_alloc_error_handler: bool,
+ alloc_error_handler_kind: AllocatorKind,
) {
let llcx = &*module_llvm.llcx;
let llmod = module_llvm.llmod();
@@ -117,8 +117,7 @@ pub(crate) unsafe fn codegen(
attributes::apply_to_llfn(llfn, llvm::AttributePlace::Function, &[uwtable]);
}
- let kind = if has_alloc_error_handler { AllocatorKind::Global } else { AllocatorKind::Default };
- let callee = kind.fn_name(sym::oom);
+ let callee = alloc_error_handler_kind.fn_name(sym::oom);
let callee = llvm::LLVMRustGetOrInsertFunction(llmod, callee.as_ptr().cast(), callee.len(), ty);
// -> ! DIFlagNoReturn
attributes::apply_to_llfn(callee, llvm::AttributePlace::Function, &[no_return]);
diff --git a/compiler/rustc_codegen_llvm/src/asm.rs b/compiler/rustc_codegen_llvm/src/asm.rs
index 017513721..219a4f8fa 100644
--- a/compiler/rustc_codegen_llvm/src/asm.rs
+++ b/compiler/rustc_codegen_llvm/src/asm.rs
@@ -130,7 +130,7 @@ impl<'ll, 'tcx> AsmBuilderMethods<'tcx> for Builder<'_, 'll, 'tcx> {
op_idx.insert(idx, constraints.len());
constraints.push(reg_to_llvm(reg, Some(&value.layout)));
}
- InlineAsmOperandRef::InOut { reg, late: _, in_value, out_place: _ } => {
+ InlineAsmOperandRef::InOut { reg, late, in_value, out_place: _ } => {
let value = llvm_fixup_input(
self,
in_value.immediate(),
@@ -138,7 +138,16 @@ impl<'ll, 'tcx> AsmBuilderMethods<'tcx> for Builder<'_, 'll, 'tcx> {
&in_value.layout,
);
inputs.push(value);
- constraints.push(format!("{}", op_idx[&idx]));
+
+ // In the case of fixed registers, we have the choice of
+ // either using a tied operand or duplicating the constraint.
+ // We prefer the latter because it matches the behavior of
+ // Clang.
+ if late && matches!(reg, InlineAsmRegOrRegClass::Reg(_)) {
+ constraints.push(format!("{}", reg_to_llvm(reg, Some(&in_value.layout))));
+ } else {
+ constraints.push(format!("{}", op_idx[&idx]));
+ }
}
InlineAsmOperandRef::SymFn { instance } => {
inputs.push(self.cx.get_fn(instance));
@@ -276,13 +285,13 @@ impl<'ll, 'tcx> AsmBuilderMethods<'tcx> for Builder<'_, 'll, 'tcx> {
let mut attrs = SmallVec::<[_; 2]>::new();
if options.contains(InlineAsmOptions::PURE) {
if options.contains(InlineAsmOptions::NOMEM) {
- attrs.push(llvm::AttributeKind::ReadNone.create_attr(self.cx.llcx));
+ attrs.push(llvm::MemoryEffects::None.create_attr(self.cx.llcx));
} else if options.contains(InlineAsmOptions::READONLY) {
- attrs.push(llvm::AttributeKind::ReadOnly.create_attr(self.cx.llcx));
+ attrs.push(llvm::MemoryEffects::ReadOnly.create_attr(self.cx.llcx));
}
attrs.push(llvm::AttributeKind::WillReturn.create_attr(self.cx.llcx));
} else if options.contains(InlineAsmOptions::NOMEM) {
- attrs.push(llvm::AttributeKind::InaccessibleMemOnly.create_attr(self.cx.llcx));
+ attrs.push(llvm::MemoryEffects::InaccessibleMemOnly.create_attr(self.cx.llcx));
} else {
// LLVM doesn't have an attribute to represent ReadOnly + SideEffect
}
@@ -496,6 +505,44 @@ fn xmm_reg_index(reg: InlineAsmReg) -> Option<u32> {
}
}
+/// If the register is an AArch64 integer register then return its index.
+fn a64_reg_index(reg: InlineAsmReg) -> Option<u32> {
+ match reg {
+ InlineAsmReg::AArch64(AArch64InlineAsmReg::x0) => Some(0),
+ InlineAsmReg::AArch64(AArch64InlineAsmReg::x1) => Some(1),
+ InlineAsmReg::AArch64(AArch64InlineAsmReg::x2) => Some(2),
+ InlineAsmReg::AArch64(AArch64InlineAsmReg::x3) => Some(3),
+ InlineAsmReg::AArch64(AArch64InlineAsmReg::x4) => Some(4),
+ InlineAsmReg::AArch64(AArch64InlineAsmReg::x5) => Some(5),
+ InlineAsmReg::AArch64(AArch64InlineAsmReg::x6) => Some(6),
+ InlineAsmReg::AArch64(AArch64InlineAsmReg::x7) => Some(7),
+ InlineAsmReg::AArch64(AArch64InlineAsmReg::x8) => Some(8),
+ InlineAsmReg::AArch64(AArch64InlineAsmReg::x9) => Some(9),
+ InlineAsmReg::AArch64(AArch64InlineAsmReg::x10) => Some(10),
+ InlineAsmReg::AArch64(AArch64InlineAsmReg::x11) => Some(11),
+ InlineAsmReg::AArch64(AArch64InlineAsmReg::x12) => Some(12),
+ InlineAsmReg::AArch64(AArch64InlineAsmReg::x13) => Some(13),
+ InlineAsmReg::AArch64(AArch64InlineAsmReg::x14) => Some(14),
+ InlineAsmReg::AArch64(AArch64InlineAsmReg::x15) => Some(15),
+ InlineAsmReg::AArch64(AArch64InlineAsmReg::x16) => Some(16),
+ InlineAsmReg::AArch64(AArch64InlineAsmReg::x17) => Some(17),
+ InlineAsmReg::AArch64(AArch64InlineAsmReg::x18) => Some(18),
+ // x19 is reserved
+ InlineAsmReg::AArch64(AArch64InlineAsmReg::x20) => Some(20),
+ InlineAsmReg::AArch64(AArch64InlineAsmReg::x21) => Some(21),
+ InlineAsmReg::AArch64(AArch64InlineAsmReg::x22) => Some(22),
+ InlineAsmReg::AArch64(AArch64InlineAsmReg::x23) => Some(23),
+ InlineAsmReg::AArch64(AArch64InlineAsmReg::x24) => Some(24),
+ InlineAsmReg::AArch64(AArch64InlineAsmReg::x25) => Some(25),
+ InlineAsmReg::AArch64(AArch64InlineAsmReg::x26) => Some(26),
+ InlineAsmReg::AArch64(AArch64InlineAsmReg::x27) => Some(27),
+ InlineAsmReg::AArch64(AArch64InlineAsmReg::x28) => Some(28),
+ // x29 is reserved
+ InlineAsmReg::AArch64(AArch64InlineAsmReg::x30) => Some(30),
+ _ => None,
+ }
+}
+
/// If the register is an AArch64 vector register then return its index.
fn a64_vreg_index(reg: InlineAsmReg) -> Option<u32> {
match reg {
@@ -526,6 +573,22 @@ fn reg_to_llvm(reg: InlineAsmRegOrRegClass, layout: Option<&TyAndLayout<'_>>) ->
'x'
};
format!("{{{}mm{}}}", class, idx)
+ } else if let Some(idx) = a64_reg_index(reg) {
+ let class = if let Some(layout) = layout {
+ match layout.size.bytes() {
+ 8 => 'x',
+ _ => 'w',
+ }
+ } else {
+ // We use i32 as the type for discarded outputs
+ 'w'
+ };
+ if class == 'x' && reg == InlineAsmReg::AArch64(AArch64InlineAsmReg::x30) {
+ // LLVM doesn't recognize x30; use lr instead.
+ "{lr}".to_string()
+ } else {
+ format!("{{{}{}}}", class, idx)
+ }
} else if let Some(idx) = a64_vreg_index(reg) {
let class = if let Some(layout) = layout {
match layout.size.bytes() {
@@ -541,9 +604,6 @@ fn reg_to_llvm(reg: InlineAsmRegOrRegClass, layout: Option<&TyAndLayout<'_>>) ->
'q'
};
format!("{{{}{}}}", class, idx)
- } else if reg == InlineAsmReg::AArch64(AArch64InlineAsmReg::x30) {
- // LLVM doesn't recognize x30
- "{lr}".to_string()
} else if reg == InlineAsmReg::Arm(ArmInlineAsmReg::r14) {
// LLVM doesn't recognize r14
"{lr}".to_string()
diff --git a/compiler/rustc_codegen_llvm/src/attributes.rs b/compiler/rustc_codegen_llvm/src/attributes.rs
index eff2436d4..f3bdacf60 100644
--- a/compiler/rustc_codegen_llvm/src/attributes.rs
+++ b/compiler/rustc_codegen_llvm/src/attributes.rs
@@ -12,8 +12,9 @@ use rustc_target::spec::{FramePointer, SanitizerSet, StackProbeType, StackProtec
use smallvec::SmallVec;
use crate::attributes;
+use crate::errors::{MissingFeatures, SanitizerMemtagRequiresMte, TargetFeatureDisableOrEnable};
use crate::llvm::AttributePlace::Function;
-use crate::llvm::{self, AllocKindFlags, Attribute, AttributeKind, AttributePlace};
+use crate::llvm::{self, AllocKindFlags, Attribute, AttributeKind, AttributePlace, MemoryEffects};
use crate::llvm_util;
pub use rustc_attr::{InlineAttr, InstructionSetAttr, OptimizeAttr};
@@ -82,7 +83,7 @@ pub fn sanitize_attrs<'ll>(
let mte_feature =
features.iter().map(|s| &s[..]).rfind(|n| ["+mte", "-mte"].contains(&&n[..]));
if let None | Some("-mte") = mte_feature {
- cx.tcx.sess.err("`-Zsanitizer=memtag` requires `-Ctarget-feature=+mte`");
+ cx.tcx.sess.emit_err(SanitizerMemtagRequiresMte);
}
attrs.push(llvm::AttributeKind::SanitizeMemTag.create_attr(cx.llcx));
@@ -257,13 +258,12 @@ pub fn from_fn_attrs<'ll, 'tcx>(
OptimizeAttr::Speed => {}
}
- let inline = if codegen_fn_attrs.flags.contains(CodegenFnAttrFlags::NAKED) {
- InlineAttr::Never
- } else if codegen_fn_attrs.inline == InlineAttr::None && instance.def.requires_inline(cx.tcx) {
- InlineAttr::Hint
- } else {
- codegen_fn_attrs.inline
- };
+ let inline =
+ if codegen_fn_attrs.inline == InlineAttr::None && instance.def.requires_inline(cx.tcx) {
+ InlineAttr::Hint
+ } else {
+ codegen_fn_attrs.inline
+ };
to_add.extend(inline_attr(cx, inline));
// The `uwtable` attribute according to LLVM is:
@@ -303,10 +303,10 @@ pub fn from_fn_attrs<'ll, 'tcx>(
to_add.push(AttributeKind::ReturnsTwice.create_attr(cx.llcx));
}
if codegen_fn_attrs.flags.contains(CodegenFnAttrFlags::FFI_PURE) {
- to_add.push(AttributeKind::ReadOnly.create_attr(cx.llcx));
+ to_add.push(MemoryEffects::ReadOnly.create_attr(cx.llcx));
}
if codegen_fn_attrs.flags.contains(CodegenFnAttrFlags::FFI_CONST) {
- to_add.push(AttributeKind::ReadNone.create_attr(cx.llcx));
+ to_add.push(MemoryEffects::None.create_attr(cx.llcx));
}
if codegen_fn_attrs.flags.contains(CodegenFnAttrFlags::NAKED) {
to_add.push(AttributeKind::Naked.create_attr(cx.llcx));
@@ -393,13 +393,14 @@ pub fn from_fn_attrs<'ll, 'tcx>(
.get_attrs(instance.def_id(), sym::target_feature)
.next()
.map_or_else(|| cx.tcx.def_span(instance.def_id()), |a| a.span);
- let msg = format!(
- "the target features {} must all be either enabled or disabled together",
- f.join(", ")
- );
- let mut err = cx.tcx.sess.struct_span_err(span, &msg);
- err.help("add the missing features in a `target_feature` attribute");
- err.emit();
+ cx.tcx
+ .sess
+ .create_err(TargetFeatureDisableOrEnable {
+ features: f,
+ span: Some(span),
+ missing_features: Some(MissingFeatures),
+ })
+ .emit();
return;
}
diff --git a/compiler/rustc_codegen_llvm/src/back/archive.rs b/compiler/rustc_codegen_llvm/src/back/archive.rs
index 082665bba..2cf2f585f 100644
--- a/compiler/rustc_codegen_llvm/src/back/archive.rs
+++ b/compiler/rustc_codegen_llvm/src/back/archive.rs
@@ -1,27 +1,30 @@
//! A helper class for dealing with static archives
use std::env;
-use std::ffi::{CStr, CString, OsString};
-use std::fs;
-use std::io::{self, Write};
+use std::ffi::{c_char, c_void, CStr, CString, OsString};
+use std::io;
use std::mem;
use std::path::{Path, PathBuf};
use std::ptr;
use std::str;
-use object::read::macho::FatArch;
-
use crate::common;
+use crate::errors::{
+ DlltoolFailImportLibrary, ErrorCallingDllTool, ErrorCreatingImportLibrary, ErrorWritingDEFFile,
+};
use crate::llvm::archive_ro::{ArchiveRO, Child};
use crate::llvm::{self, ArchiveKind, LLVMMachineType, LLVMRustCOFFShortExport};
-use rustc_codegen_ssa::back::archive::{ArchiveBuilder, ArchiveBuilderBuilder};
-use rustc_data_structures::memmap::Mmap;
+use rustc_codegen_ssa::back::archive::{
+ get_native_object_symbols, try_extract_macho_fat_archive, ArArchiveBuilder,
+ ArchiveBuildFailure, ArchiveBuilder, ArchiveBuilderBuilder, UnknownArchiveKind,
+};
+
use rustc_session::cstore::DllImport;
use rustc_session::Session;
/// Helper for adding many files to an archive.
#[must_use = "must call build() to finish building the archive"]
-pub struct LlvmArchiveBuilder<'a> {
+pub(crate) struct LlvmArchiveBuilder<'a> {
sess: &'a Session,
additions: Vec<Addition>,
}
@@ -57,57 +60,6 @@ fn llvm_machine_type(cpu: &str) -> LLVMMachineType {
}
}
-fn try_filter_fat_archs(
- archs: object::read::Result<&[impl FatArch]>,
- target_arch: object::Architecture,
- archive_path: &Path,
- archive_map_data: &[u8],
-) -> io::Result<Option<PathBuf>> {
- let archs = archs.map_err(|e| io::Error::new(io::ErrorKind::Other, e))?;
-
- let desired = match archs.iter().filter(|a| a.architecture() == target_arch).next() {
- Some(a) => a,
- None => return Ok(None),
- };
-
- let (mut new_f, extracted_path) = tempfile::Builder::new()
- .suffix(archive_path.file_name().unwrap())
- .tempfile()?
- .keep()
- .unwrap();
-
- new_f.write_all(
- desired.data(archive_map_data).map_err(|e| io::Error::new(io::ErrorKind::Other, e))?,
- )?;
-
- Ok(Some(extracted_path))
-}
-
-fn try_extract_macho_fat_archive(
- sess: &Session,
- archive_path: &Path,
-) -> io::Result<Option<PathBuf>> {
- let archive_map = unsafe { Mmap::map(fs::File::open(&archive_path)?)? };
- let target_arch = match sess.target.arch.as_ref() {
- "aarch64" => object::Architecture::Aarch64,
- "x86_64" => object::Architecture::X86_64,
- _ => return Ok(None),
- };
-
- match object::macho::FatHeader::parse(&*archive_map) {
- Ok(h) if h.magic.get(object::endian::BigEndian) == object::macho::FAT_MAGIC => {
- let archs = object::macho::FatHeader::parse_arch32(&*archive_map);
- try_filter_fat_archs(archs, target_arch, archive_path, &*archive_map)
- }
- Ok(h) if h.magic.get(object::endian::BigEndian) == object::macho::FAT_MAGIC_64 => {
- let archs = object::macho::FatHeader::parse_arch64(&*archive_map);
- try_filter_fat_archs(archs, target_arch, archive_path, &*archive_map)
- }
- // Not a FatHeader at all, just return None.
- _ => Ok(None),
- }
-}
-
impl<'a> ArchiveBuilder<'a> for LlvmArchiveBuilder<'a> {
fn add_archive(
&mut self,
@@ -147,7 +99,7 @@ impl<'a> ArchiveBuilder<'a> for LlvmArchiveBuilder<'a> {
fn build(mut self: Box<Self>, output: &Path) -> bool {
match self.build_with_llvm(output) {
Ok(any_members) => any_members,
- Err(e) => self.sess.fatal(&format!("failed to build archive: {}", e)),
+ Err(e) => self.sess.emit_fatal(ArchiveBuildFailure { error: e }),
}
}
}
@@ -156,7 +108,13 @@ pub struct LlvmArchiveBuilderBuilder;
impl ArchiveBuilderBuilder for LlvmArchiveBuilderBuilder {
fn new_archive_builder<'a>(&self, sess: &'a Session) -> Box<dyn ArchiveBuilder<'a> + 'a> {
- Box::new(LlvmArchiveBuilder { sess, additions: Vec::new() })
+ // FIXME use ArArchiveBuilder on most targets again once reading thin archives is
+ // implemented
+ if true || sess.target.arch == "wasm32" || sess.target.arch == "wasm64" {
+ Box::new(LlvmArchiveBuilder { sess, additions: Vec::new() })
+ } else {
+ Box::new(ArArchiveBuilder::new(sess, get_llvm_object_symbols))
+ }
}
fn create_dll_import_lib(
@@ -217,7 +175,7 @@ impl ArchiveBuilderBuilder for LlvmArchiveBuilderBuilder {
match std::fs::write(&def_file_path, def_file_content) {
Ok(_) => {}
Err(e) => {
- sess.fatal(&format!("Error writing .DEF file: {}", e));
+ sess.emit_fatal(ErrorWritingDEFFile { error: e });
}
};
@@ -239,13 +197,14 @@ impl ArchiveBuilderBuilder for LlvmArchiveBuilderBuilder {
match result {
Err(e) => {
- sess.fatal(&format!("Error calling dlltool: {}", e));
+ sess.emit_fatal(ErrorCallingDllTool { error: e });
+ }
+ Ok(output) if !output.status.success() => {
+ sess.emit_fatal(DlltoolFailImportLibrary {
+ stdout: String::from_utf8_lossy(&output.stdout),
+ stderr: String::from_utf8_lossy(&output.stderr),
+ })
}
- Ok(output) if !output.status.success() => sess.fatal(&format!(
- "Dlltool could not create import library: {}\n{}",
- String::from_utf8_lossy(&output.stdout),
- String::from_utf8_lossy(&output.stderr)
- )),
_ => {}
}
} else {
@@ -293,11 +252,10 @@ impl ArchiveBuilderBuilder for LlvmArchiveBuilderBuilder {
};
if result == crate::llvm::LLVMRustResult::Failure {
- sess.fatal(&format!(
- "Error creating import library for {}: {}",
+ sess.emit_fatal(ErrorCreatingImportLibrary {
lib_name,
- llvm::last_error().unwrap_or("unknown LLVM error".to_string())
- ));
+ error: llvm::last_error().unwrap_or("unknown LLVM error".to_string()),
+ });
}
};
@@ -305,12 +263,68 @@ impl ArchiveBuilderBuilder for LlvmArchiveBuilderBuilder {
}
}
+// The object crate doesn't know how to get symbols for LLVM bitcode and COFF bigobj files.
+// As such we need to use LLVM for them.
+#[deny(unsafe_op_in_unsafe_fn)]
+fn get_llvm_object_symbols(
+ buf: &[u8],
+ f: &mut dyn FnMut(&[u8]) -> io::Result<()>,
+) -> io::Result<bool> {
+ let is_bitcode = unsafe { llvm::LLVMRustIsBitcode(buf.as_ptr(), buf.len()) };
+
+ // COFF bigobj file, msvc LTO file or import library. See
+ // https://github.com/llvm/llvm-project/blob/453f27bc9/llvm/lib/BinaryFormat/Magic.cpp#L38-L51
+ let is_unsupported_windows_obj_file = buf.get(0..4) == Some(b"\0\0\xFF\xFF");
+
+ if is_bitcode || is_unsupported_windows_obj_file {
+ let mut state = Box::new(f);
+
+ let err = unsafe {
+ llvm::LLVMRustGetSymbols(
+ buf.as_ptr(),
+ buf.len(),
+ &mut *state as *mut &mut _ as *mut c_void,
+ callback,
+ error_callback,
+ )
+ };
+
+ if err.is_null() {
+ return Ok(true);
+ } else {
+ return Err(unsafe { *Box::from_raw(err as *mut io::Error) });
+ }
+
+ unsafe extern "C" fn callback(
+ state: *mut c_void,
+ symbol_name: *const c_char,
+ ) -> *mut c_void {
+ let f = unsafe { &mut *(state as *mut &mut dyn FnMut(&[u8]) -> io::Result<()>) };
+ match f(unsafe { CStr::from_ptr(symbol_name) }.to_bytes()) {
+ Ok(()) => std::ptr::null_mut(),
+ Err(err) => Box::into_raw(Box::new(err)) as *mut c_void,
+ }
+ }
+
+ unsafe extern "C" fn error_callback(error: *const c_char) -> *mut c_void {
+ let error = unsafe { CStr::from_ptr(error) };
+ Box::into_raw(Box::new(io::Error::new(
+ io::ErrorKind::Other,
+ format!("LLVM error: {}", error.to_string_lossy()),
+ ))) as *mut c_void
+ }
+ } else {
+ get_native_object_symbols(buf, f)
+ }
+}
+
impl<'a> LlvmArchiveBuilder<'a> {
fn build_with_llvm(&mut self, output: &Path) -> io::Result<bool> {
let kind = &*self.sess.target.archive_format;
- let kind = kind.parse::<ArchiveKind>().map_err(|_| kind).unwrap_or_else(|kind| {
- self.sess.fatal(&format!("Don't know how to build archive of type: {}", kind))
- });
+ let kind = kind
+ .parse::<ArchiveKind>()
+ .map_err(|_| kind)
+ .unwrap_or_else(|kind| self.sess.emit_fatal(UnknownArchiveKind { kind }));
let mut additions = mem::take(&mut self.additions);
let mut strings = Vec::new();
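
A hedged usage sketch of the callback shape `get_llvm_object_symbols` above (and the `get_native_object_symbols` fallback it delegates to) expects: the caller hands in a closure that receives each symbol name as raw bytes and can abort enumeration by returning an error. The driver function below is my own illustration, not part of the diff.

use std::io;

// Hypothetical driver collecting symbol names through the FnMut callback.
fn collect_symbol_names(
    buf: &[u8],
    get_symbols: fn(&[u8], &mut dyn FnMut(&[u8]) -> io::Result<()>) -> io::Result<bool>,
) -> io::Result<Vec<String>> {
    let mut names = Vec::new();
    // The closure is invoked once per symbol; an Err return propagates out.
    let _has_symbols = get_symbols(buf, &mut |name| {
        names.push(String::from_utf8_lossy(name).into_owned());
        Ok(())
    })?;
    Ok(names)
}
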
diff --git a/compiler/rustc_codegen_llvm/src/back/lto.rs b/compiler/rustc_codegen_llvm/src/back/lto.rs
index a49cc7f8d..3fa21355b 100644
--- a/compiler/rustc_codegen_llvm/src/back/lto.rs
+++ b/compiler/rustc_codegen_llvm/src/back/lto.rs
@@ -1,4 +1,5 @@
use crate::back::write::{self, save_temp_bitcode, DiagnosticHandlers};
+use crate::errors::DynamicLinkingWithLTO;
use crate::llvm::{self, build_string};
use crate::{LlvmCodegenBackend, ModuleLlvm};
use object::read::archive::ArchiveFile;
@@ -90,13 +91,7 @@ fn prepare_lto(
}
if cgcx.opts.cg.prefer_dynamic && !cgcx.opts.unstable_opts.dylib_lto {
- diag_handler
- .struct_err("cannot prefer dynamic linking when performing LTO")
- .note(
- "only 'staticlib', 'bin', and 'cdylib' outputs are \
- supported with LTO",
- )
- .emit();
+ diag_handler.emit_err(DynamicLinkingWithLTO);
return Err(FatalError);
}
diff --git a/compiler/rustc_codegen_llvm/src/back/write.rs b/compiler/rustc_codegen_llvm/src/back/write.rs
index 11053a8f6..97d0de47b 100644
--- a/compiler/rustc_codegen_llvm/src/back/write.rs
+++ b/compiler/rustc_codegen_llvm/src/back/write.rs
@@ -765,11 +765,21 @@ pub(crate) unsafe fn codegen(
drop(handlers);
}
+ // `.dwo` files are only emitted if:
+ //
+ // - Object files are being emitted (i.e. bitcode only or metadata only compilations will not
+ // produce dwarf objects, even if otherwise enabled)
+ // - Target supports Split DWARF
+ // - Split debuginfo is enabled
+ // - Split DWARF kind is `split` (i.e. debuginfo is split into `.dwo` files, not different
+ // sections in the `.o` files).
+ let dwarf_object_emitted = matches!(config.emit_obj, EmitObj::ObjectCode(_))
+ && cgcx.target_can_use_split_dwarf
+ && cgcx.split_debuginfo != SplitDebuginfo::Off
+ && cgcx.split_dwarf_kind == SplitDwarfKind::Split;
Ok(module.into_compiled_module(
config.emit_obj != EmitObj::None,
- cgcx.target_can_use_split_dwarf
- && cgcx.split_debuginfo != SplitDebuginfo::Off
- && cgcx.split_dwarf_kind == SplitDwarfKind::Split,
+ dwarf_object_emitted,
config.emit_bc,
&cgcx.output_filenames,
))
diff --git a/compiler/rustc_codegen_llvm/src/builder.rs b/compiler/rustc_codegen_llvm/src/builder.rs
index fca43a0d8..77dd15ef4 100644
--- a/compiler/rustc_codegen_llvm/src/builder.rs
+++ b/compiler/rustc_codegen_llvm/src/builder.rs
@@ -365,11 +365,14 @@ impl<'a, 'll, 'tcx> BuilderMethods<'a, 'tcx> for Builder<'a, 'll, 'tcx> {
Int(I64) => "llvm.ssub.with.overflow.i64",
Int(I128) => "llvm.ssub.with.overflow.i128",
- Uint(U8) => "llvm.usub.with.overflow.i8",
- Uint(U16) => "llvm.usub.with.overflow.i16",
- Uint(U32) => "llvm.usub.with.overflow.i32",
- Uint(U64) => "llvm.usub.with.overflow.i64",
- Uint(U128) => "llvm.usub.with.overflow.i128",
+ Uint(_) => {
+ // Emit sub and icmp instead of llvm.usub.with.overflow. LLVM considers these
+ // to be the canonical form. It will attempt to reform llvm.usub.with.overflow
+ // in the backend if profitable.
+ let sub = self.sub(lhs, rhs);
+ let cmp = self.icmp(IntPredicate::IntULT, lhs, rhs);
+ return (sub, cmp);
+ }
_ => unreachable!(),
},
@@ -553,15 +556,15 @@ impl<'a, 'll, 'tcx> BuilderMethods<'a, 'tcx> for Builder<'a, 'll, 'tcx> {
}
fn write_operand_repeatedly(
- mut self,
+ &mut self,
cg_elem: OperandRef<'tcx, &'ll Value>,
count: u64,
dest: PlaceRef<'tcx, &'ll Value>,
- ) -> Self {
+ ) {
let zero = self.const_usize(0);
let count = self.const_usize(count);
- let start = dest.project_index(&mut self, zero).llval;
- let end = dest.project_index(&mut self, count).llval;
+ let start = dest.project_index(self, zero).llval;
+ let end = dest.project_index(self, count).llval;
let header_bb = self.append_sibling_block("repeat_loop_header");
let body_bb = self.append_sibling_block("repeat_loop_body");
@@ -589,7 +592,7 @@ impl<'a, 'll, 'tcx> BuilderMethods<'a, 'tcx> for Builder<'a, 'll, 'tcx> {
body_bx.br(header_bb);
header_bx.add_incoming_to_phi(current, next, body_bb);
- Self::build(self.cx, next_bb)
+ *self = Self::build(self.cx, next_bb);
}
fn range_metadata(&mut self, load: &'ll Value, range: WrappingRange) {
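
A plain-Rust illustration (my own, outside the backend's types) of the form the `Uint(_)` arm now emits for checked unsigned subtraction: a wrapping subtract plus an unsigned less-than compare, which is what LLVM treats as canonical.

// Checked unsigned subtraction as the canonical `sub` + `icmp ult` pair.
fn usub_with_overflow(lhs: u32, rhs: u32) -> (u32, bool) {
    let sub = lhs.wrapping_sub(rhs); // the plain sub instruction
    let overflow = lhs < rhs;        // icmp ult lhs, rhs
    (sub, overflow)
}

fn main() {
    assert_eq!(usub_with_overflow(5, 3), (2, false));
    assert_eq!(usub_with_overflow(3, 5), (u32::MAX - 1, true));
}
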
diff --git a/compiler/rustc_codegen_llvm/src/callee.rs b/compiler/rustc_codegen_llvm/src/callee.rs
index 6f0d1b7ce..70ff5c961 100644
--- a/compiler/rustc_codegen_llvm/src/callee.rs
+++ b/compiler/rustc_codegen_llvm/src/callee.rs
@@ -83,7 +83,20 @@ pub fn get_fn<'ll, 'tcx>(cx: &CodegenCx<'ll, 'tcx>, instance: Instance<'tcx>) ->
let llfn = if tcx.sess.target.arch == "x86" &&
let Some(dllimport) = common::get_dllimport(tcx, instance_def_id, sym)
{
- cx.declare_fn(&common::i686_decorated_name(&dllimport, common::is_mingw_gnu_toolchain(&tcx.sess.target), true), fn_abi)
+ // Fix for https://github.com/rust-lang/rust/issues/104453
+ // On x86 Windows, LLVM uses 'L' as the prefix for any private
+ // global symbols, so when we create an undecorated function symbol
+ // that begins with an 'L' LLVM misinterprets that as a private
+ // global symbol that it created and so fails the compilation at a
+ // later stage since such a symbol must have a definition.
+ //
+ // To avoid this, we set the Storage Class to "DllImport" so that
+ // LLVM will prefix the name with `__imp_`. Ideally, we'd like the
+ // existing logic below to set the Storage Class, but it has an
+ // exemption for MinGW for backwards compatibility.
+ let llfn = cx.declare_fn(&common::i686_decorated_name(&dllimport, common::is_mingw_gnu_toolchain(&tcx.sess.target), true), fn_abi);
+ unsafe { llvm::LLVMSetDLLStorageClass(llfn, llvm::DLLStorageClass::DllImport); }
+ llfn
} else {
cx.declare_fn(sym, fn_abi)
};
diff --git a/compiler/rustc_codegen_llvm/src/consts.rs b/compiler/rustc_codegen_llvm/src/consts.rs
index dd3c43ba5..3626aa901 100644
--- a/compiler/rustc_codegen_llvm/src/consts.rs
+++ b/compiler/rustc_codegen_llvm/src/consts.rs
@@ -1,6 +1,7 @@
use crate::base;
use crate::common::{self, CodegenCx};
use crate::debuginfo;
+use crate::errors::{InvalidMinimumAlignment, SymbolAlreadyDefined};
use crate::llvm::{self, True};
use crate::llvm_util;
use crate::type_::Type;
@@ -19,6 +20,7 @@ use rustc_middle::mir::mono::MonoItem;
use rustc_middle::ty::layout::LayoutOf;
use rustc_middle::ty::{self, Instance, Ty};
use rustc_middle::{bug, span_bug};
+use rustc_session::config::Lto;
use rustc_target::abi::{
AddressSpace, Align, HasDataLayout, Primitive, Scalar, Size, WrappingRange,
};
@@ -26,7 +28,7 @@ use std::ops::Range;
pub fn const_alloc_to_llvm<'ll>(cx: &CodegenCx<'ll, '_>, alloc: ConstAllocation<'_>) -> &'ll Value {
let alloc = alloc.inner();
- let mut llvals = Vec::with_capacity(alloc.provenance().len() + 1);
+ let mut llvals = Vec::with_capacity(alloc.provenance().ptrs().len() + 1);
let dl = cx.data_layout();
let pointer_size = dl.pointer_size.bytes() as usize;
@@ -38,9 +40,7 @@ pub fn const_alloc_to_llvm<'ll>(cx: &CodegenCx<'ll, '_>, alloc: ConstAllocation<
alloc: &'a Allocation,
range: Range<usize>,
) {
- let chunks = alloc
- .init_mask()
- .range_as_init_chunks(Size::from_bytes(range.start), Size::from_bytes(range.end));
+ let chunks = alloc.init_mask().range_as_init_chunks(range.clone().into());
let chunk_to_llval = move |chunk| match chunk {
InitChunk::Init(range) => {
@@ -78,7 +78,7 @@ pub fn const_alloc_to_llvm<'ll>(cx: &CodegenCx<'ll, '_>, alloc: ConstAllocation<
}
let mut next_offset = 0;
- for &(offset, alloc_id) in alloc.provenance().iter() {
+ for &(offset, alloc_id) in alloc.provenance().ptrs().iter() {
let offset = offset.bytes();
assert_eq!(offset as usize as u64, offset);
let offset = offset as usize;
@@ -145,7 +145,7 @@ fn set_global_alignment<'ll>(cx: &CodegenCx<'ll, '_>, gv: &'ll Value, mut align:
match Align::from_bits(min) {
Ok(min) => align = align.max(min),
Err(err) => {
- cx.sess().err(&format!("invalid minimum global alignment: {}", err));
+ cx.sess().emit_err(InvalidMinimumAlignment { err });
}
}
}
@@ -162,25 +162,12 @@ fn check_and_apply_linkage<'ll, 'tcx>(
def_id: DefId,
) -> &'ll Value {
let llty = cx.layout_of(ty).llvm_type(cx);
- if let Some(linkage) = attrs.linkage {
+ if let Some(linkage) = attrs.import_linkage {
debug!("get_static: sym={} linkage={:?}", sym, linkage);
- // If this is a static with a linkage specified, then we need to handle
- // it a little specially. The typesystem prevents things like &T and
- // extern "C" fn() from being non-null, so we can't just declare a
- // static and call it a day. Some linkages (like weak) will make it such
- // that the static actually has a null value.
- let llty2 = if let ty::RawPtr(ref mt) = ty.kind() {
- cx.layout_of(mt.ty).llvm_type(cx)
- } else {
- cx.sess().span_fatal(
- cx.tcx.def_span(def_id),
- "must have type `*const T` or `*mut T` due to `#[linkage]` attribute",
- )
- };
unsafe {
// Declare a symbol `foo` with the desired linkage.
- let g1 = cx.declare_global(sym, llty2);
+ let g1 = cx.declare_global(sym, cx.type_i8());
llvm::LLVMRustSetLinkage(g1, base::linkage_to_llvm(linkage));
// Declare an internal global `extern_with_linkage_foo` which
@@ -192,13 +179,13 @@ fn check_and_apply_linkage<'ll, 'tcx>(
let mut real_name = "_rust_extern_with_linkage_".to_string();
real_name.push_str(sym);
let g2 = cx.define_global(&real_name, llty).unwrap_or_else(|| {
- cx.sess().span_fatal(
- cx.tcx.def_span(def_id),
- &format!("symbol `{}` is already defined", &sym),
- )
+ cx.sess().emit_fatal(SymbolAlreadyDefined {
+ span: cx.tcx.def_span(def_id),
+ symbol_name: sym,
+ })
});
llvm::LLVMRustSetLinkage(g2, llvm::Linkage::InternalLinkage);
- llvm::LLVMSetInitializer(g2, g1);
+ llvm::LLVMSetInitializer(g2, cx.const_ptrcast(g1, llty));
g2
}
} else if cx.tcx.sess.target.arch == "x86" &&
@@ -298,12 +285,23 @@ impl<'ll> CodegenCx<'ll, '_> {
llvm::set_thread_local_mode(g, self.tls_model);
}
+ let dso_local = unsafe { self.should_assume_dso_local(g, true) };
+ if dso_local {
+ unsafe {
+ llvm::LLVMRustSetDSOLocal(g, true);
+ }
+ }
+
if !def_id.is_local() {
let needs_dll_storage_attr = self.use_dll_storage_attrs && !self.tcx.is_foreign_item(def_id) &&
+ // Local definitions can never be imported, so we must not apply
+ // the DLLImport annotation.
+ !dso_local &&
// ThinLTO can't handle this workaround in all cases, so we don't
// emit the attrs. Instead we make them unnecessary by disallowing
// dynamic linking when linker plugin based LTO is enabled.
- !self.tcx.sess.opts.cg.linker_plugin_lto.enabled();
+ !self.tcx.sess.opts.cg.linker_plugin_lto.enabled() &&
+ self.tcx.sess.lto() != Lto::Thin;
// If this assertion triggers, there's something wrong with commandline
// argument validation.
@@ -342,12 +340,6 @@ impl<'ll> CodegenCx<'ll, '_> {
}
}
- unsafe {
- if self.should_assume_dso_local(g, true) {
- llvm::LLVMRustSetDSOLocal(g, true);
- }
- }
-
self.instances.borrow_mut().insert(instance, g);
g
}
@@ -489,7 +481,7 @@ impl<'ll> StaticMethods for CodegenCx<'ll, '_> {
// happens to be zero. Instead, we should only check the value of defined bytes
// and set all undefined bytes to zero if this allocation is headed for the
// BSS.
- let all_bytes_are_zero = alloc.provenance().is_empty()
+ let all_bytes_are_zero = alloc.provenance().ptrs().is_empty()
&& alloc
.inspect_with_uninit_and_ptr_outside_interpreter(0..alloc.len())
.iter()
@@ -513,7 +505,7 @@ impl<'ll> StaticMethods for CodegenCx<'ll, '_> {
section.as_str().as_ptr().cast(),
section.as_str().len() as c_uint,
);
- assert!(alloc.provenance().is_empty());
+ assert!(alloc.provenance().ptrs().is_empty());
// The `inspect` method is okay here because we checked for provenance, and
// because we are doing this access to inspect the final interpreter state (not
diff --git a/compiler/rustc_codegen_llvm/src/context.rs b/compiler/rustc_codegen_llvm/src/context.rs
index 79ddfd884..4dcc7cd54 100644
--- a/compiler/rustc_codegen_llvm/src/context.rs
+++ b/compiler/rustc_codegen_llvm/src/context.rs
@@ -3,6 +3,7 @@ use crate::back::write::to_llvm_code_model;
use crate::callee::get_fn;
use crate::coverageinfo;
use crate::debuginfo;
+use crate::errors::BranchProtectionRequiresAArch64;
use crate::llvm;
use crate::llvm_util;
use crate::type_::Type;
@@ -26,6 +27,7 @@ use rustc_session::config::{BranchProtection, CFGuard, CFProtection};
use rustc_session::config::{CrateType, DebugInfo, PAuthKey, PacRet};
use rustc_session::Session;
use rustc_span::source_map::Span;
+use rustc_span::source_map::Spanned;
use rustc_target::abi::{
call::FnAbi, HasDataLayout, PointeeInfo, Size, TargetDataLayout, VariantIdx,
};
@@ -158,6 +160,10 @@ pub unsafe fn create_module<'ll>(
if sess.target.arch == "s390x" {
target_data_layout = target_data_layout.replace("-v128:64", "");
}
+
+ if sess.target.arch == "riscv64" {
+ target_data_layout = target_data_layout.replace("-n32:64-", "-n64-");
+ }
}
// Ensure the data-layout values hardcoded remain the defaults.
@@ -271,7 +277,7 @@ pub unsafe fn create_module<'ll>(
if let Some(BranchProtection { bti, pac_ret }) = sess.opts.unstable_opts.branch_protection {
if sess.target.arch != "aarch64" {
- sess.err("-Zbranch-protection is only supported on aarch64");
+ sess.emit_err(BranchProtectionRequiresAArch64);
} else {
llvm::LLVMRustAddModuleFlag(
llmod,
@@ -570,8 +576,14 @@ impl<'ll, 'tcx> MiscMethods<'tcx> for CodegenCx<'ll, 'tcx> {
}
fn declare_c_main(&self, fn_type: Self::Type) -> Option<Self::Function> {
- if self.get_declared_value("main").is_none() {
- Some(self.declare_cfn("main", llvm::UnnamedAddr::Global, fn_type))
+ let entry_name = self.sess().target.entry_name.as_ref();
+ if self.get_declared_value(entry_name).is_none() {
+ Some(self.declare_entry_fn(
+ entry_name,
+ self.sess().target.entry_abi.into(),
+ llvm::UnnamedAddr::Global,
+ fn_type,
+ ))
} else {
// If the symbol already exists, it is an error: for example, the user wrote
// #[no_mangle] extern "C" fn main(..) {..}
@@ -947,7 +959,7 @@ impl<'tcx> LayoutOfHelpers<'tcx> for CodegenCx<'_, 'tcx> {
#[inline]
fn handle_layout_err(&self, err: LayoutError<'tcx>, span: Span, ty: Ty<'tcx>) -> ! {
if let LayoutError::SizeOverflow(_) = err {
- self.sess().span_fatal(span, &err.to_string())
+ self.sess().emit_fatal(Spanned { span, node: err })
} else {
span_bug!(span, "failed to get layout for `{}`: {}", ty, err)
}
@@ -965,7 +977,7 @@ impl<'tcx> FnAbiOfHelpers<'tcx> for CodegenCx<'_, 'tcx> {
fn_abi_request: FnAbiRequest<'tcx>,
) -> ! {
if let FnAbiError::Layout(LayoutError::SizeOverflow(_)) = err {
- self.sess().span_fatal(span, &err.to_string())
+ self.sess().emit_fatal(Spanned { span, node: err })
} else {
match fn_abi_request {
FnAbiRequest::OfFnPtr { sig, extra_args } => {
diff --git a/compiler/rustc_codegen_llvm/src/coverageinfo/mapgen.rs b/compiler/rustc_codegen_llvm/src/coverageinfo/mapgen.rs
index 433f04320..86580d05d 100644
--- a/compiler/rustc_codegen_llvm/src/coverageinfo/mapgen.rs
+++ b/compiler/rustc_codegen_llvm/src/coverageinfo/mapgen.rs
@@ -1,5 +1,6 @@
use crate::common::CodegenCx;
use crate::coverageinfo;
+use crate::errors::InstrumentCoverageRequiresLLVM12;
use crate::llvm;
use llvm::coverageinfo::CounterMappingRegion;
@@ -37,7 +38,7 @@ pub fn finalize<'ll, 'tcx>(cx: &CodegenCx<'ll, 'tcx>) {
// LLVM 12.
let version = coverageinfo::mapping_version();
if version < 4 {
- tcx.sess.fatal("rustc option `-C instrument-coverage` requires LLVM 12 or higher.");
+ tcx.sess.emit_fatal(InstrumentCoverageRequiresLLVM12);
}
debug!("Generating coverage map for CodegenUnit: `{}`", cx.codegen_unit.name());
@@ -173,7 +174,7 @@ impl CoverageMapGenerator {
counter_regions.sort_unstable_by_key(|(_counter, region)| *region);
for (counter, region) in counter_regions {
let CodeRegion { file_name, start_line, start_col, end_line, end_col } = *region;
- let same_file = current_file_name.as_ref().map_or(false, |p| *p == file_name);
+ let same_file = current_file_name.map_or(false, |p| p == file_name);
if !same_file {
if current_file_name.is_some() {
current_file_id += 1;
diff --git a/compiler/rustc_codegen_llvm/src/coverageinfo/mod.rs b/compiler/rustc_codegen_llvm/src/coverageinfo/mod.rs
index 964a632b6..ace15cfb0 100644
--- a/compiler/rustc_codegen_llvm/src/coverageinfo/mod.rs
+++ b/compiler/rustc_codegen_llvm/src/coverageinfo/mod.rs
@@ -37,7 +37,7 @@ const VAR_ALIGN_BYTES: usize = 8;
/// A context object for maintaining all state needed by the coverageinfo module.
pub struct CrateCoverageContext<'ll, 'tcx> {
- // Coverage data for each instrumented function identified by DefId.
+ /// Coverage data for each instrumented function identified by DefId.
pub(crate) function_coverage_map: RefCell<FxHashMap<Instance<'tcx>, FunctionCoverage<'tcx>>>,
pub(crate) pgo_func_name_var_map: RefCell<FxHashMap<Instance<'tcx>, &'ll llvm::Value>>,
}
diff --git a/compiler/rustc_codegen_llvm/src/debuginfo/metadata.rs b/compiler/rustc_codegen_llvm/src/debuginfo/metadata.rs
index 163ccd946..d87117dff 100644
--- a/compiler/rustc_codegen_llvm/src/debuginfo/metadata.rs
+++ b/compiler/rustc_codegen_llvm/src/debuginfo/metadata.rs
@@ -998,7 +998,7 @@ fn build_struct_type_di_node<'ll, 'tcx>(
.iter()
.enumerate()
.map(|(i, f)| {
- let field_name = if variant_def.ctor_kind == CtorKind::Fn {
+ let field_name = if variant_def.ctor_kind() == Some(CtorKind::Fn) {
// This is a tuple struct
tuple_field_name(i)
} else {
diff --git a/compiler/rustc_codegen_llvm/src/debuginfo/metadata/enums/cpp_like.rs b/compiler/rustc_codegen_llvm/src/debuginfo/metadata/enums/cpp_like.rs
index 129e336c7..53e8a291d 100644
--- a/compiler/rustc_codegen_llvm/src/debuginfo/metadata/enums/cpp_like.rs
+++ b/compiler/rustc_codegen_llvm/src/debuginfo/metadata/enums/cpp_like.rs
@@ -462,7 +462,7 @@ fn build_variant_names_type_di_node<'ll, 'tcx>(
cx,
"VariantNames",
variant_names_enum_base_type(cx),
- variants.map(|(variant_index, variant_name)| (variant_name, variant_index.as_u32() as u64)),
+ variants.map(|(variant_index, variant_name)| (variant_name, variant_index.as_u32().into())),
containing_scope,
)
}
diff --git a/compiler/rustc_codegen_llvm/src/debuginfo/metadata/enums/mod.rs b/compiler/rustc_codegen_llvm/src/debuginfo/metadata/enums/mod.rs
index 14044d0f9..564ab351b 100644
--- a/compiler/rustc_codegen_llvm/src/debuginfo/metadata/enums/mod.rs
+++ b/compiler/rustc_codegen_llvm/src/debuginfo/metadata/enums/mod.rs
@@ -91,9 +91,7 @@ fn build_c_style_enum_di_node<'ll, 'tcx>(
tag_base_type(cx, enum_type_and_layout),
enum_adt_def.discriminants(cx.tcx).map(|(variant_index, discr)| {
let name = Cow::from(enum_adt_def.variant(variant_index).name.as_str());
- // Is there anything we can do to support 128-bit C-Style enums?
- let value = discr.val as u64;
- (name, value)
+ (name, discr.val)
}),
containing_scope,
),
@@ -147,14 +145,11 @@ fn tag_base_type<'ll, 'tcx>(
/// This is a helper function and does not register anything in the type map by itself.
///
/// `variants` is an iterator of (discr-value, variant-name).
-///
-// NOTE: Handling of discriminant values is somewhat inconsistent. They can appear as u128,
-// u64, and i64. Here everything gets mapped to i64 because that's what LLVM's API expects.
fn build_enumeration_type_di_node<'ll, 'tcx>(
cx: &CodegenCx<'ll, 'tcx>,
type_name: &str,
base_type: Ty<'tcx>,
- enumerators: impl Iterator<Item = (Cow<'tcx, str>, u64)>,
+ enumerators: impl Iterator<Item = (Cow<'tcx, str>, u128)>,
containing_scope: &'ll DIType,
) -> &'ll DIType {
let is_unsigned = match base_type.kind() {
@@ -162,21 +157,22 @@ fn build_enumeration_type_di_node<'ll, 'tcx>(
ty::Uint(_) => true,
_ => bug!("build_enumeration_type_di_node() called with non-integer tag type."),
};
+ let (size, align) = cx.size_and_align_of(base_type);
let enumerator_di_nodes: SmallVec<Option<&'ll DIType>> = enumerators
.map(|(name, value)| unsafe {
+ let value = [value as u64, (value >> 64) as u64];
Some(llvm::LLVMRustDIBuilderCreateEnumerator(
DIB(cx),
name.as_ptr().cast(),
name.len(),
- value as i64,
+ value.as_ptr(),
+ size.bits() as libc::c_uint,
is_unsigned,
))
})
.collect();
- let (size, align) = cx.size_and_align_of(base_type);
-
unsafe {
llvm::LLVMRustDIBuilderCreateEnumerationType(
DIB(cx),
@@ -273,7 +269,7 @@ fn build_enum_variant_struct_type_di_node<'ll, 'tcx>(
|cx, struct_type_di_node| {
(0..variant_layout.fields.count())
.map(|field_index| {
- let field_name = if variant_def.ctor_kind != CtorKind::Fn {
+ let field_name = if variant_def.ctor_kind() != Some(CtorKind::Fn) {
// Fields have names
Cow::from(variant_def.fields[field_index].name.as_str())
} else {
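
A minimal sketch (my own illustration) of the word split the new enumerator path earlier in this file's diff performs, so that 128-bit discriminants can be handed to `LLVMRustDIBuilderCreateEnumerator` as a low/high pair of `u64` words.

// Split a 128-bit discriminant into the [low, high] pair passed across FFI.
fn split_u128(value: u128) -> [u64; 2] {
    [value as u64, (value >> 64) as u64]
}

fn main() {
    assert_eq!(split_u128(1), [1, 0]);
    assert_eq!(split_u128((u64::MAX as u128) + 1), [0, 1]);
}
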
diff --git a/compiler/rustc_codegen_llvm/src/debuginfo/mod.rs b/compiler/rustc_codegen_llvm/src/debuginfo/mod.rs
index b23fe3fc9..ca7a07d83 100644
--- a/compiler/rustc_codegen_llvm/src/debuginfo/mod.rs
+++ b/compiler/rustc_codegen_llvm/src/debuginfo/mod.rs
@@ -39,6 +39,7 @@ use smallvec::SmallVec;
use std::cell::OnceCell;
use std::cell::RefCell;
use std::iter;
+use std::ops::Range;
mod create_scope_map;
pub mod gdb;
@@ -163,12 +164,14 @@ impl<'ll> DebugInfoBuilderMethods for Builder<'_, 'll, '_> {
variable_alloca: Self::Value,
direct_offset: Size,
indirect_offsets: &[Size],
+ fragment: Option<Range<Size>>,
) {
- // Convert the direct and indirect offsets to address ops.
+ // Convert the direct and indirect offsets and fragment byte range to address ops.
// FIXME(eddyb) use `const`s instead of getting the values via FFI,
// the values should match the ones in the DWARF standard anyway.
let op_deref = || unsafe { llvm::LLVMRustDIBuilderCreateOpDeref() };
let op_plus_uconst = || unsafe { llvm::LLVMRustDIBuilderCreateOpPlusUconst() };
+ let op_llvm_fragment = || unsafe { llvm::LLVMRustDIBuilderCreateOpLLVMFragment() };
let mut addr_ops = SmallVec::<[u64; 8]>::new();
if direct_offset.bytes() > 0 {
@@ -182,6 +185,13 @@ impl<'ll> DebugInfoBuilderMethods for Builder<'_, 'll, '_> {
addr_ops.push(offset.bytes() as u64);
}
}
+ if let Some(fragment) = fragment {
+ // `DW_OP_LLVM_fragment` takes as arguments the fragment's
+ // offset and size, both of them in bits.
+ addr_ops.push(op_llvm_fragment());
+ addr_ops.push(fragment.start.bits() as u64);
+ addr_ops.push((fragment.end - fragment.start).bits() as u64);
+ }
unsafe {
// FIXME(eddyb) replace `llvm.dbg.declare` with `llvm.dbg.addr`.
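
A small sketch, under simplified types of my own, of how the byte range passed as `fragment` becomes the three `DW_OP_LLVM_fragment` operands (opcode, bit offset, bit size) appended to the expression above. The opcode constant here is a hypothetical stand-in; the real value is obtained from LLVM through `LLVMRustDIBuilderCreateOpLLVMFragment`.

// Hypothetical stand-in for the DW_OP_LLVM_fragment opcode value.
const OP_LLVM_FRAGMENT: u64 = 0x1000;

// Convert a byte range into the (offset, size) pair in bits that DWARF expects.
fn fragment_ops(start_bytes: u64, end_bytes: u64) -> [u64; 3] {
    [OP_LLVM_FRAGMENT, start_bytes * 8, (end_bytes - start_bytes) * 8]
}

fn main() {
    // Bytes 8..16 of a variable -> bit offset 64, bit size 64.
    assert_eq!(fragment_ops(8, 16), [OP_LLVM_FRAGMENT, 64, 64]);
}
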
diff --git a/compiler/rustc_codegen_llvm/src/debuginfo/utils.rs b/compiler/rustc_codegen_llvm/src/debuginfo/utils.rs
index a40cfc8b2..5cd0e1cb6 100644
--- a/compiler/rustc_codegen_llvm/src/debuginfo/utils.rs
+++ b/compiler/rustc_codegen_llvm/src/debuginfo/utils.rs
@@ -72,7 +72,7 @@ pub(crate) fn fat_pointer_kind<'ll, 'tcx>(
layout.is_unsized()
);
- if !layout.is_unsized() {
+ if layout.is_sized() {
return None;
}
diff --git a/compiler/rustc_codegen_llvm/src/declare.rs b/compiler/rustc_codegen_llvm/src/declare.rs
index f79ef1172..dc21a02ce 100644
--- a/compiler/rustc_codegen_llvm/src/declare.rs
+++ b/compiler/rustc_codegen_llvm/src/declare.rs
@@ -90,6 +90,28 @@ impl<'ll, 'tcx> CodegenCx<'ll, 'tcx> {
declare_raw_fn(self, name, llvm::CCallConv, unnamed, visibility, fn_type)
}
+ /// Declare an entry Function
+ ///
+ /// The ABI of this function can change depending on the target (although for now the same as
+ /// `declare_cfn`)
+ ///
+ /// If there’s a value with the same name already declared, the function will
+ /// update the declaration and return existing Value instead.
+ pub fn declare_entry_fn(
+ &self,
+ name: &str,
+ callconv: llvm::CallConv,
+ unnamed: llvm::UnnamedAddr,
+ fn_type: &'ll Type,
+ ) -> &'ll Value {
+ let visibility = if self.tcx.sess.target.default_hidden_visibility {
+ llvm::Visibility::Hidden
+ } else {
+ llvm::Visibility::Default
+ };
+ declare_raw_fn(self, name, callconv, unnamed, visibility, fn_type)
+ }
+
/// Declare a Rust function.
///
/// If there’s a value with the same name already declared, the function will
diff --git a/compiler/rustc_codegen_llvm/src/errors.rs b/compiler/rustc_codegen_llvm/src/errors.rs
new file mode 100644
index 000000000..af9f31fc3
--- /dev/null
+++ b/compiler/rustc_codegen_llvm/src/errors.rs
@@ -0,0 +1,120 @@
+use std::borrow::Cow;
+
+use rustc_errors::fluent;
+use rustc_errors::DiagnosticBuilder;
+use rustc_errors::ErrorGuaranteed;
+use rustc_errors::Handler;
+use rustc_errors::IntoDiagnostic;
+use rustc_macros::{Diagnostic, Subdiagnostic};
+use rustc_span::Span;
+
+#[derive(Diagnostic)]
+#[diag(codegen_llvm_unknown_ctarget_feature_prefix)]
+#[note]
+pub(crate) struct UnknownCTargetFeaturePrefix<'a> {
+ pub feature: &'a str,
+}
+
+#[derive(Diagnostic)]
+#[diag(codegen_llvm_unknown_ctarget_feature)]
+#[note]
+pub(crate) struct UnknownCTargetFeature<'a> {
+ pub feature: &'a str,
+ #[subdiagnostic]
+ pub rust_feature: PossibleFeature<'a>,
+}
+
+#[derive(Subdiagnostic)]
+pub(crate) enum PossibleFeature<'a> {
+ #[help(possible_feature)]
+ Some { rust_feature: &'a str },
+ #[help(consider_filing_feature_request)]
+ None,
+}
+
+#[derive(Diagnostic)]
+#[diag(codegen_llvm_error_creating_import_library)]
+pub(crate) struct ErrorCreatingImportLibrary<'a> {
+ pub lib_name: &'a str,
+ pub error: String,
+}
+
+#[derive(Diagnostic)]
+#[diag(codegen_llvm_instrument_coverage_requires_llvm_12)]
+pub(crate) struct InstrumentCoverageRequiresLLVM12;
+
+#[derive(Diagnostic)]
+#[diag(codegen_llvm_symbol_already_defined)]
+pub(crate) struct SymbolAlreadyDefined<'a> {
+ #[primary_span]
+ pub span: Span,
+ pub symbol_name: &'a str,
+}
+
+#[derive(Diagnostic)]
+#[diag(codegen_llvm_branch_protection_requires_aarch64)]
+pub(crate) struct BranchProtectionRequiresAArch64;
+
+#[derive(Diagnostic)]
+#[diag(codegen_llvm_invalid_minimum_alignment)]
+pub(crate) struct InvalidMinimumAlignment {
+ pub err: String,
+}
+
+#[derive(Diagnostic)]
+#[diag(codegen_llvm_sanitizer_memtag_requires_mte)]
+pub(crate) struct SanitizerMemtagRequiresMte;
+
+#[derive(Diagnostic)]
+#[diag(codegen_llvm_error_writing_def_file)]
+pub(crate) struct ErrorWritingDEFFile {
+ pub error: std::io::Error,
+}
+
+#[derive(Diagnostic)]
+#[diag(codegen_llvm_error_calling_dlltool)]
+pub(crate) struct ErrorCallingDllTool {
+ pub error: std::io::Error,
+}
+
+#[derive(Diagnostic)]
+#[diag(codegen_llvm_dlltool_fail_import_library)]
+pub(crate) struct DlltoolFailImportLibrary<'a> {
+ pub stdout: Cow<'a, str>,
+ pub stderr: Cow<'a, str>,
+}
+
+#[derive(Diagnostic)]
+#[diag(codegen_llvm_dynamic_linking_with_lto)]
+#[note]
+pub(crate) struct DynamicLinkingWithLTO;
+
+#[derive(Diagnostic)]
+#[diag(codegen_llvm_fail_parsing_target_machine_config_to_target_machine)]
+pub(crate) struct FailParsingTargetMachineConfigToTargetMachine {
+ pub error: String,
+}
+
+pub(crate) struct TargetFeatureDisableOrEnable<'a> {
+ pub features: &'a [&'a str],
+ pub span: Option<Span>,
+ pub missing_features: Option<MissingFeatures>,
+}
+
+#[derive(Subdiagnostic)]
+#[help(codegen_llvm_missing_features)]
+pub(crate) struct MissingFeatures;
+
+impl IntoDiagnostic<'_, ErrorGuaranteed> for TargetFeatureDisableOrEnable<'_> {
+ fn into_diagnostic(self, sess: &'_ Handler) -> DiagnosticBuilder<'_, ErrorGuaranteed> {
+ let mut diag = sess.struct_err(fluent::codegen_llvm_target_feature_disable_or_enable);
+ if let Some(span) = self.span {
+ diag.set_span(span);
+ };
+ if let Some(missing_features) = self.missing_features {
+ diag.subdiagnostic(missing_features);
+ }
+ diag.set_arg("features", self.features.join(", "));
+ diag
+ }
+}
diff --git a/compiler/rustc_codegen_llvm/src/intrinsic.rs b/compiler/rustc_codegen_llvm/src/intrinsic.rs
index 825011941..2f5dd519b 100644
--- a/compiler/rustc_codegen_llvm/src/intrinsic.rs
+++ b/compiler/rustc_codegen_llvm/src/intrinsic.rs
@@ -340,17 +340,26 @@ impl<'ll, 'tcx> IntrinsicCallMethods<'tcx> for Builder<'_, 'll, 'tcx> {
sym::black_box => {
args[0].val.store(self, result);
-
+ let result_val_span = [result.llval];
// We need to "use" the argument in some way LLVM can't introspect, and on
// targets that support it we can typically leverage inline assembly to do
// this. LLVM's interpretation of inline assembly is that it's, well, a black
// box. This isn't the greatest implementation since it probably deoptimizes
// more than we want, but it's so far good enough.
+ //
+ // For zero-sized types, the location pointed to by the result may be
+ // uninitialized. Do not "use" the result in this case; instead just clobber
+ // the memory.
+ let (constraint, inputs): (&str, &[_]) = if result.layout.is_zst() {
+ ("~{memory}", &[])
+ } else {
+ ("r,~{memory}", &result_val_span)
+ };
crate::asm::inline_asm_call(
self,
"",
- "r,~{memory}",
- &[result.llval],
+ constraint,
+ inputs,
self.type_void(),
true,
false,
@@ -1491,7 +1500,7 @@ fn generic_simd_intrinsic<'ll, 'tcx>(
let (_, element_ty1) = arg_tys[1].simd_size_and_type(bx.tcx());
let (_, element_ty2) = arg_tys[2].simd_size_and_type(bx.tcx());
let (pointer_count, underlying_ty) = match element_ty1.kind() {
- ty::RawPtr(p) if p.ty == in_elem && p.mutbl == hir::Mutability::Mut => {
+ ty::RawPtr(p) if p.ty == in_elem && p.mutbl.is_mut() => {
(ptr_count(element_ty1), non_ptr(element_ty1))
}
_ => {
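
A standalone sketch (assumptions mine) of the constraint choice the `black_box` change above makes: a zero-sized result has no initialized value to feed the asm, so only the memory clobber is kept, while other types still pass the result pointer in a register.

// Choose the inline-asm constraint string and inputs for black_box.
fn black_box_asm_args<'a>(is_zst: bool, result_ptr: &'a str) -> (&'static str, Vec<&'a str>) {
    if is_zst {
        // Nothing to "use"; just tell LLVM that memory may be clobbered.
        ("~{memory}", vec![])
    } else {
        // Feed the result pointer through a register and clobber memory.
        ("r,~{memory}", vec![result_ptr])
    }
}

fn main() {
    let (constraint, inputs) = black_box_asm_args(false, "%result");
    println!("constraint = {constraint}, inputs = {inputs:?}");
}
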
diff --git a/compiler/rustc_codegen_llvm/src/lib.rs b/compiler/rustc_codegen_llvm/src/lib.rs
index 89c7e51d0..246e82545 100644
--- a/compiler/rustc_codegen_llvm/src/lib.rs
+++ b/compiler/rustc_codegen_llvm/src/lib.rs
@@ -12,6 +12,8 @@
#![feature(iter_intersperse)]
#![recursion_limit = "256"]
#![allow(rustc::potential_query_instability)]
+#![deny(rustc::untranslatable_diagnostic)]
+#![deny(rustc::diagnostic_outside_of_impl)]
#[macro_use]
extern crate rustc_macros;
@@ -20,6 +22,7 @@ extern crate tracing;
use back::write::{create_informational_target_machine, create_target_machine};
+use errors::FailParsingTargetMachineConfigToTargetMachine;
pub use llvm_util::target_features;
use rustc_ast::expand::allocator::AllocatorKind;
use rustc_codegen_ssa::back::lto::{LtoModuleCodegen, SerializedModule, ThinModule};
@@ -62,6 +65,7 @@ mod context;
mod coverageinfo;
mod debuginfo;
mod declare;
+mod errors;
mod intrinsic;
// The following is a work around that replaces `pub mod llvm;` and that fixes issue 53912.
@@ -108,11 +112,11 @@ impl ExtraBackendMethods for LlvmCodegenBackend {
tcx: TyCtxt<'tcx>,
module_name: &str,
kind: AllocatorKind,
- has_alloc_error_handler: bool,
+ alloc_error_handler_kind: AllocatorKind,
) -> ModuleLlvm {
let mut module_llvm = ModuleLlvm::new_metadata(tcx, module_name);
unsafe {
- allocator::codegen(tcx, &mut module_llvm, module_name, kind, has_alloc_error_handler);
+ allocator::codegen(tcx, &mut module_llvm, module_name, kind, alloc_error_handler_kind);
}
module_llvm
}
@@ -412,7 +416,7 @@ impl ModuleLlvm {
let tm = match (cgcx.tm_factory)(tm_factory_config) {
Ok(m) => m,
Err(e) => {
- handler.struct_err(&e).emit();
+ handler.emit_err(FailParsingTargetMachineConfigToTargetMachine { error: e });
return Err(FatalError);
}
};
diff --git a/compiler/rustc_codegen_llvm/src/llvm/ffi.rs b/compiler/rustc_codegen_llvm/src/llvm/ffi.rs
index 42cb694c0..8a9392255 100644
--- a/compiler/rustc_codegen_llvm/src/llvm/ffi.rs
+++ b/compiler/rustc_codegen_llvm/src/llvm/ffi.rs
@@ -35,7 +35,7 @@ pub enum LLVMRustResult {
pub struct LLVMRustCOFFShortExport {
pub name: *const c_char,
pub ordinal_present: bool,
- // value of `ordinal` only important when `ordinal_present` is true
+ /// value of `ordinal` only important when `ordinal_present` is true
pub ordinal: u16,
}
@@ -183,7 +183,6 @@ pub enum AttributeKind {
OptimizeNone = 24,
ReturnsTwice = 25,
ReadNone = 26,
- InaccessibleMemOnly = 27,
SanitizeHWAddress = 28,
WillReturn = 29,
StackProtectReq = 30,
@@ -590,6 +589,15 @@ pub enum ChecksumKind {
SHA256,
}
+/// LLVMRustMemoryEffects
+#[derive(Copy, Clone)]
+#[repr(C)]
+pub enum MemoryEffects {
+ None,
+ ReadOnly,
+ InaccessibleMemOnly,
+}
+
extern "C" {
type Opaque;
}
@@ -975,6 +983,9 @@ pub type SelfProfileBeforePassCallback =
unsafe extern "C" fn(*mut c_void, *const c_char, *const c_char);
pub type SelfProfileAfterPassCallback = unsafe extern "C" fn(*mut c_void);
+pub type GetSymbolsCallback = unsafe extern "C" fn(*mut c_void, *const c_char) -> *mut c_void;
+pub type GetSymbolsErrorCallback = unsafe extern "C" fn(*const c_char) -> *mut c_void;
+
extern "C" {
pub fn LLVMRustInstallFatalErrorHandler();
pub fn LLVMRustDisableSystemDialogsOnCrash();
@@ -1175,6 +1186,7 @@ extern "C" {
pub fn LLVMRustCreateUWTableAttr(C: &Context, async_: bool) -> &Attribute;
pub fn LLVMRustCreateAllocSizeAttr(C: &Context, size_arg: u32) -> &Attribute;
pub fn LLVMRustCreateAllocKindAttr(C: &Context, size_arg: u64) -> &Attribute;
+ pub fn LLVMRustCreateMemoryEffectsAttr(C: &Context, effects: MemoryEffects) -> &Attribute;
// Operations on functions
pub fn LLVMRustGetOrInsertFunction<'a>(
@@ -2118,7 +2130,8 @@ extern "C" {
Builder: &DIBuilder<'a>,
Name: *const c_char,
NameLen: size_t,
- Value: i64,
+ Value: *const u64,
+ SizeInBits: c_uint,
IsUnsigned: bool,
) -> &'a DIEnumerator;
@@ -2201,6 +2214,7 @@ extern "C" {
) -> &'a DILocation;
pub fn LLVMRustDIBuilderCreateOpDeref() -> u64;
pub fn LLVMRustDIBuilderCreateOpPlusUconst() -> u64;
+ pub fn LLVMRustDIBuilderCreateOpLLVMFragment() -> u64;
#[allow(improper_ctypes)]
pub fn LLVMRustWriteTypeToString(Type: &Type, s: &RustString);
@@ -2463,4 +2477,14 @@ extern "C" {
pub fn LLVMRustGetMangledName(V: &Value, out: &RustString);
pub fn LLVMRustGetElementTypeArgIndex(CallSite: &Value) -> i32;
+
+ pub fn LLVMRustIsBitcode(ptr: *const u8, len: usize) -> bool;
+
+ pub fn LLVMRustGetSymbols(
+ buf_ptr: *const u8,
+ buf_len: usize,
+ state: *mut c_void,
+ callback: GetSymbolsCallback,
+ error_callback: GetSymbolsErrorCallback,
+ ) -> *mut c_void;
}
diff --git a/compiler/rustc_codegen_llvm/src/llvm/mod.rs b/compiler/rustc_codegen_llvm/src/llvm/mod.rs
index 6602a4ab8..f820e7523 100644
--- a/compiler/rustc_codegen_llvm/src/llvm/mod.rs
+++ b/compiler/rustc_codegen_llvm/src/llvm/mod.rs
@@ -185,6 +185,13 @@ impl AttributeKind {
}
}
+impl MemoryEffects {
+ /// Create an LLVM Attribute with these memory effects.
+ pub fn create_attr(self, llcx: &Context) -> &Attribute {
+ unsafe { LLVMRustCreateMemoryEffectsAttr(llcx, self) }
+ }
+}
+
pub fn set_section(llglobal: &Value, section_name: &str) {
let section_name_cstr = CString::new(section_name).expect("unexpected CString error");
unsafe {
diff --git a/compiler/rustc_codegen_llvm/src/llvm_util.rs b/compiler/rustc_codegen_llvm/src/llvm_util.rs
index 2fd58567c..2fa602520 100644
--- a/compiler/rustc_codegen_llvm/src/llvm_util.rs
+++ b/compiler/rustc_codegen_llvm/src/llvm_util.rs
@@ -1,4 +1,8 @@
use crate::back::write::create_informational_target_machine;
+use crate::errors::{
+ PossibleFeature, TargetFeatureDisableOrEnable, UnknownCTargetFeature,
+ UnknownCTargetFeaturePrefix,
+};
use crate::llvm;
use libc::c_int;
use rustc_codegen_ssa::target_features::{
@@ -159,6 +163,9 @@ pub fn to_llvm_features<'a>(sess: &Session, s: &'a str) -> SmallVec<[&'a str; 2]
("x86", "rdrand") => smallvec!["rdrnd"],
("x86", "bmi1") => smallvec!["bmi"],
("x86", "cmpxchg16b") => smallvec!["cx16"],
+ // FIXME: These aliases are misleading, and should be removed before avx512_target_feature is
+ // stabilized. They must remain until std::arch stops using them.
+ // rust#100752
("x86", "avx512vaes") => smallvec!["vaes"],
("x86", "avx512gfni") => smallvec!["gfni"],
("x86", "avx512vpclmulqdq") => smallvec!["vpclmulqdq"],
@@ -187,8 +194,8 @@ pub fn to_llvm_features<'a>(sess: &Session, s: &'a str) -> SmallVec<[&'a str; 2]
}
}
-// Given a map from target_features to whether they are enabled or disabled,
-// ensure only valid combinations are allowed.
+/// Given a map from target_features to whether they are enabled or disabled,
+/// ensure only valid combinations are allowed.
pub fn check_tied_features(
sess: &Session,
features: &FxHashMap<&str, bool>,
@@ -206,8 +213,8 @@ pub fn check_tied_features(
return None;
}
-// Used to generate cfg variables and apply features
-// Must express features in the way Rust understands them
+/// Used to generate cfg variables and apply features
+/// Must express features in the way Rust understands them
pub fn target_features(sess: &Session, allow_unstable: bool) -> Vec<Symbol> {
let target_machine = create_informational_target_machine(sess);
let mut features: Vec<Symbol> = supported_target_features(sess)
@@ -285,30 +292,33 @@ fn llvm_target_features(tm: &llvm::TargetMachine) -> Vec<(&str, &str)> {
}
fn print_target_features(sess: &Session, tm: &llvm::TargetMachine) {
- let mut target_features = llvm_target_features(tm);
+ let mut llvm_target_features = llvm_target_features(tm);
+ let mut known_llvm_target_features = FxHashSet::<&'static str>::default();
let mut rustc_target_features = supported_target_features(sess)
.iter()
- .filter_map(|(feature, _gate)| {
- for llvm_feature in to_llvm_features(sess, *feature) {
+ .map(|(feature, _gate)| {
+ let desc = if let Some(llvm_feature) = to_llvm_features(sess, *feature).first() {
// LLVM asserts that these are sorted. LLVM and Rust both use byte comparison for these strings.
- match target_features.binary_search_by_key(&llvm_feature, |(f, _d)| f).ok().map(
- |index| {
- let (_f, desc) = target_features.remove(index);
- (*feature, desc)
- },
- ) {
- Some(v) => return Some(v),
- None => {}
+ match llvm_target_features.binary_search_by_key(&llvm_feature, |(f, _d)| f).ok() {
+ Some(index) => {
+ known_llvm_target_features.insert(llvm_feature);
+ llvm_target_features[index].1
+ }
+ None => "",
}
- }
- None
+ } else {
+ ""
+ };
+ (*feature, desc)
})
.collect::<Vec<_>>();
rustc_target_features.extend_from_slice(&[(
"crt-static",
"Enables C Run-time Libraries to be statically linked",
)]);
- let max_feature_len = target_features
+ llvm_target_features.retain(|(f, _d)| !known_llvm_target_features.contains(f));
+
+ let max_feature_len = llvm_target_features
.iter()
.chain(rustc_target_features.iter())
.map(|(feature, _desc)| feature.len())
@@ -320,10 +330,10 @@ fn print_target_features(sess: &Session, tm: &llvm::TargetMachine) {
println!(" {1:0$} - {2}.", max_feature_len, feature, desc);
}
println!("\nCode-generation features supported by LLVM for this target:");
- for (feature, desc) in &target_features {
+ for (feature, desc) in &llvm_target_features {
println!(" {1:0$} - {2}.", max_feature_len, feature, desc);
}
- if target_features.is_empty() {
+ if llvm_target_features.is_empty() {
println!(" Target features listing is not supported by this LLVM version.");
}
println!("\nUse +feature to enable a feature, or -feature to disable it.");
@@ -434,12 +444,7 @@ pub(crate) fn global_llvm_features(sess: &Session, diagnostics: bool) -> Vec<Str
Some(c @ '+' | c @ '-') => c,
Some(_) => {
if diagnostics {
- let mut diag = sess.struct_warn(&format!(
- "unknown feature specified for `-Ctarget-feature`: `{}`",
- s
- ));
- diag.note("features must begin with a `+` to enable or `-` to disable it");
- diag.emit();
+ sess.emit_warning(UnknownCTargetFeaturePrefix { feature: s });
}
return None;
}
@@ -456,17 +461,15 @@ pub(crate) fn global_llvm_features(sess: &Session, diagnostics: bool) -> Vec<Str
None
}
});
- let mut diag = sess.struct_warn(&format!(
- "unknown feature specified for `-Ctarget-feature`: `{}`",
- feature
- ));
- diag.note("it is still passed through to the codegen backend");
- if let Some(rust_feature) = rust_feature {
- diag.help(&format!("you might have meant: `{}`", rust_feature));
+ let unknown_feature = if let Some(rust_feature) = rust_feature {
+ UnknownCTargetFeature {
+ feature,
+ rust_feature: PossibleFeature::Some { rust_feature },
+ }
} else {
- diag.note("consider filing a feature request");
- }
- diag.emit();
+ UnknownCTargetFeature { feature, rust_feature: PossibleFeature::None }
+ };
+ sess.emit_warning(unknown_feature);
}
if diagnostics {
@@ -491,11 +494,17 @@ pub(crate) fn global_llvm_features(sess: &Session, diagnostics: bool) -> Vec<Str
.flatten();
features.extend(feats);
+ // FIXME: Move v8a to target definition list when earliest supported LLVM is 14.
+ if get_version() >= (14, 0, 0) && sess.target.arch == "aarch64" {
+ features.push("+v8a".into());
+ }
+
if diagnostics && let Some(f) = check_tied_features(sess, &featsmap) {
- sess.err(&format!(
- "target features {} must all be enabled or disabled together",
- f.join(", ")
- ));
+ sess.emit_err(TargetFeatureDisableOrEnable {
+ features: f,
+ span: None,
+ missing_features: None,
+ });
}
features
diff --git a/compiler/rustc_codegen_llvm/src/mono_item.rs b/compiler/rustc_codegen_llvm/src/mono_item.rs
index 1eceb7f5c..76f692b20 100644
--- a/compiler/rustc_codegen_llvm/src/mono_item.rs
+++ b/compiler/rustc_codegen_llvm/src/mono_item.rs
@@ -1,6 +1,7 @@
use crate::attributes;
use crate::base;
use crate::context::CodegenCx;
+use crate::errors::SymbolAlreadyDefined;
use crate::llvm;
use crate::type_of::LayoutLlvmExt;
use rustc_codegen_ssa::traits::*;
@@ -25,10 +26,8 @@ impl<'tcx> PreDefineMethods<'tcx> for CodegenCx<'_, 'tcx> {
let llty = self.layout_of(ty).llvm_type(self);
let g = self.define_global(symbol_name, llty).unwrap_or_else(|| {
- self.sess().span_fatal(
- self.tcx.def_span(def_id),
- &format!("symbol `{}` is already defined", symbol_name),
- )
+ self.sess()
+ .emit_fatal(SymbolAlreadyDefined { span: self.tcx.def_span(def_id), symbol_name })
});
unsafe {
diff --git a/compiler/rustc_codegen_llvm/src/type_.rs b/compiler/rustc_codegen_llvm/src/type_.rs
index eeb38d4ec..5772b7e1d 100644
--- a/compiler/rustc_codegen_llvm/src/type_.rs
+++ b/compiler/rustc_codegen_llvm/src/type_.rs
@@ -127,10 +127,6 @@ impl<'ll> CodegenCx<'ll, '_> {
pub(crate) fn type_variadic_func(&self, args: &[&'ll Type], ret: &'ll Type) -> &'ll Type {
unsafe { llvm::LLVMFunctionType(ret, args.as_ptr(), args.len() as c_uint, True) }
}
-
- pub(crate) fn type_array(&self, ty: &'ll Type, len: u64) -> &'ll Type {
- unsafe { llvm::LLVMRustArrayType(ty, len) }
- }
}
impl<'ll, 'tcx> BaseTypeMethods<'tcx> for CodegenCx<'ll, 'tcx> {
@@ -231,6 +227,10 @@ impl<'ll, 'tcx> BaseTypeMethods<'tcx> for CodegenCx<'ll, 'tcx> {
fn val_ty(&self, v: &'ll Value) -> &'ll Type {
common::val_ty(v)
}
+
+ fn type_array(&self, ty: &'ll Type, len: u64) -> &'ll Type {
+ unsafe { llvm::LLVMRustArrayType(ty, len) }
+ }
}
impl Type {
@@ -238,7 +238,7 @@ impl Type {
unsafe { llvm::LLVMInt8TypeInContext(llcx) }
}
- // Creates an integer type with the given number of bits, e.g., i24
+ /// Creates an integer type with the given number of bits, e.g., i24
pub fn ix_llcx(llcx: &llvm::Context, num_bits: u64) -> &Type {
unsafe { llvm::LLVMIntTypeInContext(llcx, num_bits as c_uint) }
}
diff --git a/compiler/rustc_codegen_llvm/src/type_of.rs b/compiler/rustc_codegen_llvm/src/type_of.rs
index dc1165835..182adf817 100644
--- a/compiler/rustc_codegen_llvm/src/type_of.rs
+++ b/compiler/rustc_codegen_llvm/src/type_of.rs
@@ -140,7 +140,7 @@ fn struct_llfields<'a, 'tcx>(
prev_effective_align = effective_field_align;
}
let padding_used = result.len() > field_count;
- if !layout.is_unsized() && field_count > 0 {
+ if layout.is_sized() && field_count > 0 {
if offset > layout.size {
bug!("layout: {:#?} stride: {:?} offset: {:?}", layout, layout.size, offset);
}
diff --git a/compiler/rustc_codegen_ssa/Cargo.toml b/compiler/rustc_codegen_ssa/Cargo.toml
index d868e3d56..345174fb5 100644
--- a/compiler/rustc_codegen_ssa/Cargo.toml
+++ b/compiler/rustc_codegen_ssa/Cargo.toml
@@ -7,6 +7,7 @@ edition = "2021"
test = false
[dependencies]
+ar_archive_writer = "0.1.1"
bitflags = "1.2.1"
cc = "1.0.69"
itertools = "0.10.1"
diff --git a/compiler/rustc_codegen_ssa/src/back/archive.rs b/compiler/rustc_codegen_ssa/src/back/archive.rs
index bb76ca5d2..58558fb8c 100644
--- a/compiler/rustc_codegen_ssa/src/back/archive.rs
+++ b/compiler/rustc_codegen_ssa/src/back/archive.rs
@@ -4,13 +4,22 @@ use rustc_session::cstore::DllImport;
use rustc_session::Session;
use rustc_span::symbol::Symbol;
+use super::metadata::search_for_section;
+
+pub use ar_archive_writer::get_native_object_symbols;
+use ar_archive_writer::{write_archive_to_stream, ArchiveKind, NewArchiveMember};
use object::read::archive::ArchiveFile;
+use object::read::macho::FatArch;
+use tempfile::Builder as TempFileBuilder;
-use std::fmt::Display;
+use std::error::Error;
use std::fs::File;
-use std::io;
+use std::io::{self, Write};
use std::path::{Path, PathBuf};
+// Re-exporting for rustc_codegen_llvm::back::archive
+pub use crate::errors::{ArchiveBuildFailure, ExtractBundledLibsError, UnknownArchiveKind};
+
pub trait ArchiveBuilderBuilder {
fn new_archive_builder<'a>(&self, sess: &'a Session) -> Box<dyn ArchiveBuilder<'a> + 'a>;
@@ -28,32 +37,38 @@ pub trait ArchiveBuilderBuilder {
is_direct_dependency: bool,
) -> PathBuf;
- fn extract_bundled_libs(
- &self,
- rlib: &Path,
+ fn extract_bundled_libs<'a>(
+ &'a self,
+ rlib: &'a Path,
outdir: &Path,
bundled_lib_file_names: &FxHashSet<Symbol>,
- ) -> Result<(), String> {
- let message = |msg: &str, e: &dyn Display| format!("{} '{}': {}", msg, &rlib.display(), e);
+ ) -> Result<(), ExtractBundledLibsError<'_>> {
let archive_map = unsafe {
- Mmap::map(File::open(rlib).map_err(|e| message("failed to open file", &e))?)
- .map_err(|e| message("failed to mmap file", &e))?
+ Mmap::map(
+ File::open(rlib)
+ .map_err(|e| ExtractBundledLibsError::OpenFile { rlib, error: Box::new(e) })?,
+ )
+ .map_err(|e| ExtractBundledLibsError::MmapFile { rlib, error: Box::new(e) })?
};
let archive = ArchiveFile::parse(&*archive_map)
- .map_err(|e| message("failed to parse archive", &e))?;
+ .map_err(|e| ExtractBundledLibsError::ParseArchive { rlib, error: Box::new(e) })?;
for entry in archive.members() {
- let entry = entry.map_err(|e| message("failed to read entry", &e))?;
+ let entry = entry
+ .map_err(|e| ExtractBundledLibsError::ReadEntry { rlib, error: Box::new(e) })?;
let data = entry
.data(&*archive_map)
- .map_err(|e| message("failed to get data from archive member", &e))?;
+ .map_err(|e| ExtractBundledLibsError::ArchiveMember { rlib, error: Box::new(e) })?;
let name = std::str::from_utf8(entry.name())
- .map_err(|e| message("failed to convert name", &e))?;
+ .map_err(|e| ExtractBundledLibsError::ConvertName { rlib, error: Box::new(e) })?;
if !bundled_lib_file_names.contains(&Symbol::intern(name)) {
continue; // We need to extract only native libraries.
}
+ let data = search_for_section(rlib, data, ".bundled_lib").map_err(|e| {
+ ExtractBundledLibsError::ExtractSection { rlib, error: Box::<dyn Error>::from(e) }
+ })?;
std::fs::write(&outdir.join(&name), data)
- .map_err(|e| message("failed to write file", &e))?;
+ .map_err(|e| ExtractBundledLibsError::WriteFile { rlib, error: Box::new(e) })?;
}
Ok(())
}
@@ -70,3 +85,225 @@ pub trait ArchiveBuilder<'a> {
fn build(self: Box<Self>, output: &Path) -> bool;
}
+
+#[must_use = "must call build() to finish building the archive"]
+pub struct ArArchiveBuilder<'a> {
+ sess: &'a Session,
+ get_object_symbols:
+ fn(buf: &[u8], f: &mut dyn FnMut(&[u8]) -> io::Result<()>) -> io::Result<bool>,
+
+ src_archives: Vec<(PathBuf, Mmap)>,
+ // Don't use a `HashMap` here, as the order is important. `lib.rmeta` needs
+ // to be at the end of an archive in some cases for linkers to not get confused.
+ entries: Vec<(Vec<u8>, ArchiveEntry)>,
+}
+
+#[derive(Debug)]
+enum ArchiveEntry {
+ FromArchive { archive_index: usize, file_range: (u64, u64) },
+ File(PathBuf),
+}
+
+impl<'a> ArArchiveBuilder<'a> {
+ pub fn new(
+ sess: &'a Session,
+ get_object_symbols: fn(
+ buf: &[u8],
+ f: &mut dyn FnMut(&[u8]) -> io::Result<()>,
+ ) -> io::Result<bool>,
+ ) -> ArArchiveBuilder<'a> {
+ ArArchiveBuilder { sess, get_object_symbols, src_archives: vec![], entries: vec![] }
+ }
+}
+
+fn try_filter_fat_archs(
+ archs: object::read::Result<&[impl FatArch]>,
+ target_arch: object::Architecture,
+ archive_path: &Path,
+ archive_map_data: &[u8],
+) -> io::Result<Option<PathBuf>> {
+ let archs = archs.map_err(|e| io::Error::new(io::ErrorKind::Other, e))?;
+
+ let desired = match archs.iter().filter(|a| a.architecture() == target_arch).next() {
+ Some(a) => a,
+ None => return Ok(None),
+ };
+
+ let (mut new_f, extracted_path) = tempfile::Builder::new()
+ .suffix(archive_path.file_name().unwrap())
+ .tempfile()?
+ .keep()
+ .unwrap();
+
+ new_f.write_all(
+ desired.data(archive_map_data).map_err(|e| io::Error::new(io::ErrorKind::Other, e))?,
+ )?;
+
+ Ok(Some(extracted_path))
+}
+
+pub fn try_extract_macho_fat_archive(
+ sess: &Session,
+ archive_path: &Path,
+) -> io::Result<Option<PathBuf>> {
+ let archive_map = unsafe { Mmap::map(File::open(&archive_path)?)? };
+ let target_arch = match sess.target.arch.as_ref() {
+ "aarch64" => object::Architecture::Aarch64,
+ "x86_64" => object::Architecture::X86_64,
+ _ => return Ok(None),
+ };
+
+ match object::macho::FatHeader::parse(&*archive_map) {
+ Ok(h) if h.magic.get(object::endian::BigEndian) == object::macho::FAT_MAGIC => {
+ let archs = object::macho::FatHeader::parse_arch32(&*archive_map);
+ try_filter_fat_archs(archs, target_arch, archive_path, &*archive_map)
+ }
+ Ok(h) if h.magic.get(object::endian::BigEndian) == object::macho::FAT_MAGIC_64 => {
+ let archs = object::macho::FatHeader::parse_arch64(&*archive_map);
+ try_filter_fat_archs(archs, target_arch, archive_path, &*archive_map)
+ }
+ // Not a FatHeader at all, just return None.
+ _ => Ok(None),
+ }
+}
+
+impl<'a> ArchiveBuilder<'a> for ArArchiveBuilder<'a> {
+ fn add_archive(
+ &mut self,
+ archive_path: &Path,
+ mut skip: Box<dyn FnMut(&str) -> bool + 'static>,
+ ) -> io::Result<()> {
+ let mut archive_path = archive_path.to_path_buf();
+ if self.sess.target.llvm_target.contains("-apple-macosx") {
+ if let Some(new_archive_path) =
+ try_extract_macho_fat_archive(&self.sess, &archive_path)?
+ {
+ archive_path = new_archive_path
+ }
+ }
+
+ if self.src_archives.iter().any(|archive| archive.0 == archive_path) {
+ return Ok(());
+ }
+
+ let archive_map = unsafe { Mmap::map(File::open(&archive_path)?)? };
+ let archive = ArchiveFile::parse(&*archive_map)
+ .map_err(|err| io::Error::new(io::ErrorKind::InvalidData, err))?;
+ let archive_index = self.src_archives.len();
+
+ for entry in archive.members() {
+ let entry = entry.map_err(|err| io::Error::new(io::ErrorKind::InvalidData, err))?;
+ let file_name = String::from_utf8(entry.name().to_vec())
+ .map_err(|err| io::Error::new(io::ErrorKind::InvalidData, err))?;
+ if !skip(&file_name) {
+ self.entries.push((
+ file_name.into_bytes(),
+ ArchiveEntry::FromArchive { archive_index, file_range: entry.file_range() },
+ ));
+ }
+ }
+
+ self.src_archives.push((archive_path.to_owned(), archive_map));
+ Ok(())
+ }
+
+ /// Adds an arbitrary file to this archive
+ fn add_file(&mut self, file: &Path) {
+ self.entries.push((
+ file.file_name().unwrap().to_str().unwrap().to_string().into_bytes(),
+ ArchiveEntry::File(file.to_owned()),
+ ));
+ }
+
+ /// Combine the provided files, rlibs, and native libraries into a single
+ /// `Archive`.
+ fn build(self: Box<Self>, output: &Path) -> bool {
+ let sess = self.sess;
+ match self.build_inner(output) {
+ Ok(any_members) => any_members,
+ Err(e) => sess.emit_fatal(ArchiveBuildFailure { error: e }),
+ }
+ }
+}
+
+impl<'a> ArArchiveBuilder<'a> {
+ fn build_inner(self, output: &Path) -> io::Result<bool> {
+ let archive_kind = match &*self.sess.target.archive_format {
+ "gnu" => ArchiveKind::Gnu,
+ "bsd" => ArchiveKind::Bsd,
+ "darwin" => ArchiveKind::Darwin,
+ "coff" => ArchiveKind::Coff,
+ kind => {
+ self.sess.emit_fatal(UnknownArchiveKind { kind });
+ }
+ };
+
+ let mut entries = Vec::new();
+
+ for (entry_name, entry) in self.entries {
+ let data =
+ match entry {
+ ArchiveEntry::FromArchive { archive_index, file_range } => {
+ let src_archive = &self.src_archives[archive_index];
+
+ let data = &src_archive.1
+ [file_range.0 as usize..file_range.0 as usize + file_range.1 as usize];
+
+ Box::new(data) as Box<dyn AsRef<[u8]>>
+ }
+ ArchiveEntry::File(file) => unsafe {
+ Box::new(
+ Mmap::map(File::open(file).map_err(|err| {
+ io_error_context("failed to open object file", err)
+ })?)
+ .map_err(|err| io_error_context("failed to map object file", err))?,
+ ) as Box<dyn AsRef<[u8]>>
+ },
+ };
+
+ entries.push(NewArchiveMember {
+ buf: data,
+ get_symbols: self.get_object_symbols,
+ member_name: String::from_utf8(entry_name).unwrap(),
+ mtime: 0,
+ uid: 0,
+ gid: 0,
+ perms: 0o644,
+ })
+ }
+
+ // Write to a temporary file first before atomically renaming to the final name.
+ // This prevents programs (including rustc) from attempting to read a partial archive.
+ // It also enables writing an archive with the same filename as a dependency on Windows as
+ // required by a test.
+ let mut archive_tmpfile = TempFileBuilder::new()
+ .suffix(".temp-archive")
+ .tempfile_in(output.parent().unwrap_or_else(|| Path::new("")))
+ .map_err(|err| io_error_context("couldn't create a temp file", err))?;
+
+ write_archive_to_stream(
+ archive_tmpfile.as_file_mut(),
+ &entries,
+ true,
+ archive_kind,
+ true,
+ false,
+ )?;
+
+ let any_entries = !entries.is_empty();
+ drop(entries);
+ // Drop src_archives to unmap all input archives, which is necessary if we want to write the
+ // output archive to the same location as an input archive on Windows.
+ drop(self.src_archives);
+
+ archive_tmpfile
+ .persist(output)
+ .map_err(|err| io_error_context("failed to rename archive file", err.error))?;
+
+ Ok(any_entries)
+ }
+}
+
+fn io_error_context(context: &str, err: io::Error) -> io::Error {
+ io::Error::new(io::ErrorKind::Other, format!("{context}: {err}"))
+}
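
A minimal standalone sketch of the write-then-persist pattern used by `build_inner` above, assuming only the `tempfile` crate; the function name and byte payload are illustrative, not part of the patch:

    use std::io::{self, Write};
    use std::path::Path;

    use tempfile::Builder as TempFileBuilder;

    // Write `bytes` to a temp file in the destination directory, then rename it
    // into place so readers never observe a partially written archive.
    fn write_atomically(output: &Path, bytes: &[u8]) -> io::Result<()> {
        let mut tmp = TempFileBuilder::new()
            .suffix(".temp-archive")
            .tempfile_in(output.parent().unwrap_or_else(|| Path::new("")))?;
        tmp.as_file_mut().write_all(bytes)?;
        // `persist` renames over `output`; creating the temp file next to the
        // destination keeps the rename on one filesystem.
        tmp.persist(output).map_err(|e| e.error)?;
        Ok(())
    }
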
diff --git a/compiler/rustc_codegen_ssa/src/back/link.rs b/compiler/rustc_codegen_ssa/src/back/link.rs
index 0dc0dee86..fe2e4b36c 100644
--- a/compiler/rustc_codegen_ssa/src/back/link.rs
+++ b/compiler/rustc_codegen_ssa/src/back/link.rs
@@ -6,9 +6,9 @@ use rustc_data_structures::memmap::Mmap;
use rustc_data_structures::temp_dir::MaybeTempDir;
use rustc_errors::{ErrorGuaranteed, Handler};
use rustc_fs_util::fix_windows_verbatim_for_gcc;
-use rustc_hir::def_id::CrateNum;
+use rustc_hir::def_id::{CrateNum, LOCAL_CRATE};
use rustc_metadata::find_native_static_library;
-use rustc_metadata::fs::{emit_metadata, METADATA_FILENAME};
+use rustc_metadata::fs::{emit_wrapper_file, METADATA_FILENAME};
use rustc_middle::middle::dependency_format::Linkage;
use rustc_middle::middle::exported_symbols::SymbolExportKind;
use rustc_session::config::{self, CFGuard, CrateType, DebugInfo, LdImpl, Lto, Strip};
@@ -24,12 +24,12 @@ use rustc_span::symbol::Symbol;
use rustc_span::DebuggerVisualizerFile;
use rustc_target::spec::crt_objects::{CrtObjects, LinkSelfContainedDefault};
use rustc_target::spec::{Cc, LinkOutputKind, LinkerFlavor, LinkerFlavorCli, Lld, PanicStrategy};
-use rustc_target::spec::{RelocModel, RelroLevel, SanitizerSet, SplitDebuginfo, Target};
+use rustc_target::spec::{RelocModel, RelroLevel, SanitizerSet, SplitDebuginfo};
use super::archive::{ArchiveBuilder, ArchiveBuilderBuilder};
use super::command::Command;
use super::linker::{self, Linker};
-use super::metadata::{create_rmeta_file, MetadataPosition};
+use super::metadata::{create_wrapper_file, MetadataPosition};
use super::rpath::{self, RPathConfig};
use crate::{
errors, looks_like_rust_object_file, CodegenResults, CompiledModule, CrateInfo, NativeLib,
@@ -44,7 +44,7 @@ use std::borrow::Borrow;
use std::cell::OnceCell;
use std::collections::BTreeSet;
use std::ffi::OsString;
-use std::fs::{File, OpenOptions};
+use std::fs::{read, File, OpenOptions};
use std::io::{BufWriter, Write};
use std::ops::Deref;
use std::path::{Path, PathBuf};
@@ -102,7 +102,7 @@ pub fn link_binary<'a>(
sess,
crate_type,
outputs,
- codegen_results.crate_info.local_crate_name.as_str(),
+ codegen_results.crate_info.local_crate_name,
);
match crate_type {
CrateType::Rlib => {
@@ -253,7 +253,7 @@ pub fn each_linked_rlib(
};
for &cnum in crates {
match fmts.get(cnum.as_usize() - 1) {
- Some(&Linkage::NotLinked | &Linkage::IncludedFromDylib) => continue,
+ Some(&Linkage::NotLinked | &Linkage::Dynamic | &Linkage::IncludedFromDylib) => continue,
Some(_) => {}
None => return Err(errors::LinkRlibError::MissingFormat),
}
@@ -292,8 +292,8 @@ fn link_rlib<'a>(
let trailing_metadata = match flavor {
RlibFlavor::Normal => {
let (metadata, metadata_position) =
- create_rmeta_file(sess, codegen_results.metadata.raw_data());
- let metadata = emit_metadata(sess, &metadata, tmpdir);
+ create_wrapper_file(sess, b".rmeta".to_vec(), codegen_results.metadata.raw_data());
+ let metadata = emit_wrapper_file(sess, &metadata, tmpdir, METADATA_FILENAME);
match metadata_position {
MetadataPosition::First => {
// Most of the time metadata in rlib files is wrapped in a "dummy" object
@@ -376,12 +376,14 @@ fn link_rlib<'a>(
let location =
find_native_static_library(name.as_str(), lib.verbatim, &lib_search_paths, sess);
if sess.opts.unstable_opts.packed_bundled_libs && flavor == RlibFlavor::Normal {
- packed_bundled_libs.push(find_native_static_library(
- lib.filename.unwrap().as_str(),
- Some(true),
- &lib_search_paths,
- sess,
- ));
+ let filename = lib.filename.unwrap();
+ let lib_path =
+ find_native_static_library(filename.as_str(), true, &lib_search_paths, sess);
+ let src = read(lib_path)
+ .map_err(|e| sess.emit_fatal(errors::ReadFileError { message: e }))?;
+ let (data, _) = create_wrapper_file(sess, b".bundled_lib".to_vec(), &src);
+ let wrapper_file = emit_wrapper_file(sess, &data, tmpdir, filename.as_str());
+ packed_bundled_libs.push(wrapper_file);
continue;
}
ab.add_archive(&location, Box::new(|_| false)).unwrap_or_else(|error| {
@@ -459,7 +461,7 @@ fn collate_raw_dylibs<'a, 'b>(
for lib in used_libraries {
if lib.kind == NativeLibKind::RawDylib {
- let ext = if matches!(lib.verbatim, Some(true)) { "" } else { ".dll" };
+ let ext = if lib.verbatim { "" } else { ".dll" };
let name = format!("{}{}", lib.name.expect("unnamed raw-dylib library"), ext);
let imports = dylib_table.entry(name.clone()).or_default();
for import in &lib.dll_imports {
@@ -670,8 +672,7 @@ fn link_dwarf_object<'a>(
thorin::MissingReferencedObjectBehaviour::Skip,
)?;
- let output = package.finish()?.write()?;
- let mut output_stream = BufWriter::new(
+ let output_stream = BufWriter::new(
OpenOptions::new()
.read(true)
.write(true)
@@ -679,8 +680,10 @@ fn link_dwarf_object<'a>(
.truncate(true)
.open(dwp_out_filename)?,
);
- output_stream.write_all(&output)?;
- output_stream.flush()?;
+ let mut output_stream = object::write::StreamingBuffer::new(output_stream);
+ package.finish()?.emit(&mut output_stream)?;
+ output_stream.result()?;
+ output_stream.into_inner().flush()?;
Ok(())
}) {
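
The streaming rewrite above avoids buffering the whole DWARF package in memory before writing it out. A hedged sketch of the same `StreamingBuffer` plumbing, assuming `object::write::Object::emit` as a stand-in emitter since the thorin types are not shown here; the output path is illustrative:

    use std::fs::File;
    use std::io::{BufWriter, Write};

    use object::write::{Object, StreamingBuffer};
    use object::{Architecture, BinaryFormat, Endianness};

    fn stream_object_to(path: &str) -> Result<(), Box<dyn std::error::Error>> {
        let file = BufWriter::new(File::create(path)?);
        // `StreamingBuffer` implements `WritableBuffer`, so emitters write
        // straight through to the underlying writer.
        let mut out = StreamingBuffer::new(file);
        let obj = Object::new(BinaryFormat::Elf, Architecture::X86_64, Endianness::Little);
        obj.emit(&mut out)?;
        out.result()?; // surface any deferred I/O error
        out.into_inner().flush()?;
        Ok(())
    }
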
@@ -919,29 +922,17 @@ fn link_natively<'a>(
)
.is_some();
- sess.note_without_error("`link.exe` returned an unexpected error");
+ sess.emit_note(errors::LinkExeUnexpectedError);
if is_vs_installed && has_linker {
// the linker is broken
- sess.note_without_error(
- "the Visual Studio build tools may need to be repaired \
- using the Visual Studio installer",
- );
- sess.note_without_error(
- "or a necessary component may be missing from the \
- \"C++ build tools\" workload",
- );
+ sess.emit_note(errors::RepairVSBuildTools);
+ sess.emit_note(errors::MissingCppBuildToolComponent);
} else if is_vs_installed {
// the linker is not installed
- sess.note_without_error(
- "in the Visual Studio installer, ensure the \
- \"C++ build tools\" workload is selected",
- );
+ sess.emit_note(errors::SelectCppBuildToolWorkload);
} else {
// visual studio is not installed
- sess.note_without_error(
- "you may need to install Visual Studio build tools with the \
- \"C++ build tools\" workload",
- );
+ sess.emit_note(errors::VisualStudioNotInstalled);
}
}
}
@@ -954,35 +945,20 @@ fn link_natively<'a>(
Err(e) => {
let linker_not_found = e.kind() == io::ErrorKind::NotFound;
- let mut linker_error = {
- if linker_not_found {
- sess.struct_err(&format!("linker `{}` not found", linker_path.display()))
- } else {
- sess.struct_err(&format!(
- "could not exec the linker `{}`",
- linker_path.display()
- ))
- }
- };
-
- linker_error.note(&e.to_string());
-
- if !linker_not_found {
- linker_error.note(&format!("{:?}", &cmd));
+ if linker_not_found {
+ sess.emit_err(errors::LinkerNotFound { linker_path, error: e });
+ } else {
+ sess.emit_err(errors::UnableToExeLinker {
+ linker_path,
+ error: e,
+ command_formatted: format!("{:?}", &cmd),
+ });
}
- linker_error.emit();
-
if sess.target.is_like_msvc && linker_not_found {
- sess.note_without_error(
- "the msvc targets depend on the msvc linker \
- but `link.exe` was not found",
- );
- sess.note_without_error(
- "please ensure that Visual Studio 2017 or later, or Build Tools \
- for Visual Studio were installed with the Visual C++ option.",
- );
- sess.note_without_error("VS Code is a different product, and is not sufficient.");
+ sess.emit_note(errors::MsvcMissingLinker);
+ sess.emit_note(errors::CheckInstalledVisualStudio);
+ sess.emit_note(errors::UnsufficientVSCodeProduct);
}
sess.abort_if_errors();
}
@@ -1007,15 +983,13 @@ fn link_natively<'a>(
if !prog.status.success() {
let mut output = prog.stderr.clone();
output.extend_from_slice(&prog.stdout);
- sess.struct_warn(&format!(
- "processing debug info with `dsymutil` failed: {}",
- prog.status
- ))
- .note(&escape_string(&output))
- .emit();
+ sess.emit_warning(errors::ProcessingDymutilFailed {
+ status: prog.status,
+ output: escape_string(&output),
+ });
}
}
- Err(e) => sess.fatal(&format!("unable to run `dsymutil`: {}", e)),
+ Err(error) => sess.emit_fatal(errors::UnableToRunDsymutil { error }),
}
}
@@ -1092,15 +1066,14 @@ fn strip_symbols_with_external_utility<'a>(
if !prog.status.success() {
let mut output = prog.stderr.clone();
output.extend_from_slice(&prog.stdout);
- sess.struct_warn(&format!(
- "stripping debug info with `{}` failed: {}",
- util, prog.status
- ))
- .note(&escape_string(&output))
- .emit();
+ sess.emit_warning(errors::StrippingDebugInfoFailed {
+ util,
+ status: prog.status,
+ output: escape_string(&output),
+ });
}
}
- Err(e) => sess.fatal(&format!("unable to run `{}`: {}", util, e)),
+ Err(error) => sess.emit_fatal(errors::UnableToRun { util, error }),
}
}
@@ -1153,7 +1126,8 @@ fn link_sanitizer_runtime(sess: &Session, linker: &mut dyn Linker, name: &str) {
if path.exists() {
return session_tlib;
} else {
- let default_sysroot = filesearch::get_or_default_sysroot();
+ let default_sysroot =
+ filesearch::get_or_default_sysroot().expect("Failed finding sysroot");
let default_tlib = filesearch::make_target_lib_path(
&default_sysroot,
sess.opts.target_triple.triple(),
@@ -1201,7 +1175,7 @@ pub fn ignored_for_lto(sess: &Session, info: &CrateInfo, cnum: CrateNum) -> bool
&& (info.compiler_builtins == Some(cnum) || info.is_no_builtins.contains(&cnum))
}
-// This functions tries to determine the appropriate linker (and corresponding LinkerFlavor) to use
+/// This function tries to determine the appropriate linker (and corresponding LinkerFlavor) to use
pub fn linker_and_flavor(sess: &Session) -> (PathBuf, LinkerFlavor) {
fn infer_from(
sess: &Session,
@@ -1251,7 +1225,7 @@ pub fn linker_and_flavor(sess: &Session) -> (PathBuf, LinkerFlavor) {
)),
(Some(linker), None) => {
let stem = linker.file_stem().and_then(|stem| stem.to_str()).unwrap_or_else(|| {
- sess.fatal("couldn't extract file stem from specified linker")
+ sess.emit_fatal(errors::LinkerFileStem);
});
let flavor = if stem == "emcc" {
@@ -1357,7 +1331,7 @@ fn print_native_static_libs(sess: &Session, all_native_libs: &[NativeLib]) {
NativeLibKind::Static { bundle: Some(false), .. }
| NativeLibKind::Dylib { .. }
| NativeLibKind::Unspecified => {
- let verbatim = lib.verbatim.unwrap_or(false);
+ let verbatim = lib.verbatim;
if sess.target.is_like_msvc {
Some(format!("{}{}", name, if verbatim { "" } else { ".lib" }))
} else if sess.target.linker_flavor.is_gnu() {
@@ -1378,13 +1352,9 @@ fn print_native_static_libs(sess: &Session, all_native_libs: &[NativeLib]) {
})
.collect();
if !lib_args.is_empty() {
- sess.note_without_error(
- "Link against the following native artifacts when linking \
- against this static library. The order and any duplication \
- can be significant on some platforms.",
- );
+ sess.emit_note(errors::StaticLibraryNativeArtifacts);
// Prefix for greppability
- sess.note_without_error(&format!("native-static-libs: {}", &lib_args.join(" ")));
+ sess.emit_note(errors::NativeStaticLibs { arguments: lib_args.join(" ") });
}
}
@@ -1615,6 +1585,9 @@ fn detect_self_contained_mingw(sess: &Session) -> bool {
/// We only provide such support for a very limited number of targets.
fn self_contained(sess: &Session, crate_type: CrateType) -> bool {
if let Some(self_contained) = sess.opts.cg.link_self_contained {
+ if sess.target.link_self_contained == LinkSelfContainedDefault::False {
+ sess.emit_err(errors::UnsupportedLinkSelfContained);
+ }
return self_contained;
}
@@ -1688,14 +1661,14 @@ fn add_link_script(cmd: &mut dyn Linker, sess: &Session, tmpdir: &Path, crate_ty
match (crate_type, &sess.target.link_script) {
(CrateType::Cdylib | CrateType::Executable, Some(script)) => {
if !sess.target.linker_flavor.is_gnu() {
- sess.fatal("can only use link script when linking with GNU-like linker");
+ sess.emit_fatal(errors::LinkScriptUnavailable);
}
let file_name = ["rustc", &sess.target.llvm_target, "linkfile.ld"].join("-");
let path = tmpdir.join(file_name);
- if let Err(e) = fs::write(&path, script.as_ref()) {
- sess.fatal(&format!("failed to write link script to {}: {}", path.display(), e));
+ if let Err(error) = fs::write(&path, script.as_ref()) {
+ sess.emit_fatal(errors::LinkScriptWriteFailure { path, error });
}
cmd.arg("--script");
@@ -1841,8 +1814,8 @@ fn add_linked_symbol_object(
let path = tmpdir.join("symbols.o");
let result = std::fs::write(&path, file.write().unwrap());
- if let Err(e) = result {
- sess.fatal(&format!("failed to write {}: {}", path.display(), e));
+ if let Err(error) = result {
+ sess.emit_fatal(errors::FailedToWrite { path, error });
}
cmd.add_object(&path);
}
@@ -2040,15 +2013,9 @@ fn linker_with_args<'a>(
cmd.add_as_needed();
// Local native libraries of all kinds.
- //
- // If `-Zlink-native-libraries=false` is set, then the assumption is that an
- // external build system already has the native dependencies defined, and it
- // will provide them to the linker itself.
- if sess.opts.unstable_opts.link_native_libraries {
- add_local_native_libraries(cmd, sess, codegen_results);
- }
+ add_local_native_libraries(cmd, sess, archive_builder_builder, codegen_results, tmpdir);
- // Upstream rust libraries and their (possibly bundled) static native libraries.
+ // Upstream rust crates and their non-dynamic native libraries.
add_upstream_rust_crates(
cmd,
sess,
@@ -2059,13 +2026,7 @@ fn linker_with_args<'a>(
);
// Dynamic native libraries from upstream crates.
- //
- // FIXME: Merge this to `add_upstream_rust_crates` so that all native libraries are linked
- // together with their respective upstream crates, and in their originally specified order.
- // This may be slightly breaking due to our use of `--as-needed` and needs a crater run.
- if sess.opts.unstable_opts.link_native_libraries {
- add_upstream_native_libraries(cmd, sess, codegen_results);
- }
+ add_upstream_native_libraries(cmd, sess, archive_builder_builder, codegen_results, tmpdir);
// Link with the import library generated for any raw-dylib functions.
for (raw_dylib_name, raw_dylib_imports) in
@@ -2299,56 +2260,56 @@ fn collect_natvis_visualizers(
visualizer_paths.push(visualizer_out_file);
}
Err(error) => {
- sess.warn(
- format!(
- "Unable to write debugger visualizer file `{}`: {} ",
- visualizer_out_file.display(),
- error
- )
- .as_str(),
- );
+ sess.emit_warning(errors::UnableToWriteDebuggerVisualizer {
+ path: visualizer_out_file,
+ error,
+ });
}
};
}
visualizer_paths
}
-/// # Native library linking
-///
-/// User-supplied library search paths (-L on the command line). These are the same paths used to
-/// find Rust crates, so some of them may have been added already by the previous crate linking
-/// code. This only allows them to be found at compile time so it is still entirely up to outside
-/// forces to make sure that library can be found at runtime.
-///
-/// Also note that the native libraries linked here are only the ones located in the current crate.
-/// Upstream crates with native library dependencies may have their native library pulled in above.
-fn add_local_native_libraries(
+fn add_native_libs_from_crate(
cmd: &mut dyn Linker,
sess: &Session,
+ archive_builder_builder: &dyn ArchiveBuilderBuilder,
codegen_results: &CodegenResults,
+ tmpdir: &Path,
+ search_paths: &OnceCell<Vec<PathBuf>>,
+ bundled_libs: &FxHashSet<Symbol>,
+ cnum: CrateNum,
+ link_static: bool,
+ link_dynamic: bool,
) {
- let filesearch = sess.target_filesearch(PathKind::All);
- for search_path in filesearch.search_paths() {
- match search_path.kind {
- PathKind::Framework => {
- cmd.framework_path(&search_path.dir);
- }
- _ => {
- cmd.include_path(&fix_windows_verbatim_for_gcc(&search_path.dir));
- }
- }
+ if !sess.opts.unstable_opts.link_native_libraries {
+ // If `-Zlink-native-libraries=false` is set, then the assumption is that an
+ // external build system already has the native dependencies defined, and it
+ // will provide them to the linker itself.
+ return;
}
- let relevant_libs =
- codegen_results.crate_info.used_libraries.iter().filter(|l| relevant_lib(sess, l));
+ if link_static && cnum != LOCAL_CRATE && !bundled_libs.is_empty() {
+ // If rlib contains native libs as archives, unpack them to tmpdir.
+ let rlib = &codegen_results.crate_info.used_crate_source[&cnum].rlib.as_ref().unwrap().0;
+ archive_builder_builder
+ .extract_bundled_libs(rlib, tmpdir, &bundled_libs)
+ .unwrap_or_else(|e| sess.emit_fatal(e));
+ }
- let search_path = OnceCell::new();
- let mut last = (None, NativeLibKind::Unspecified, None);
- for lib in relevant_libs {
+ let native_libs = match cnum {
+ LOCAL_CRATE => &codegen_results.crate_info.used_libraries,
+ _ => &codegen_results.crate_info.native_libraries[&cnum],
+ };
+
+ let mut last = (None, NativeLibKind::Unspecified, false);
+ for lib in native_libs {
let Some(name) = lib.name else {
continue;
};
- let name = name.as_str();
+ if !relevant_lib(sess, lib) {
+ continue;
+ }
// Skip if this library is the same as the last.
last = if (lib.name, lib.kind, lib.verbatim) == last {
@@ -2357,46 +2318,110 @@ fn add_local_native_libraries(
(lib.name, lib.kind, lib.verbatim)
};
- let verbatim = lib.verbatim.unwrap_or(false);
+ let name = name.as_str();
+ let verbatim = lib.verbatim;
match lib.kind {
+ NativeLibKind::Static { bundle, whole_archive } => {
+ if link_static {
+ let bundle = bundle.unwrap_or(true);
+ let whole_archive = whole_archive == Some(true)
+ // Backward compatibility case: this can be a rlib (so `+whole-archive`
+ // cannot be added explicitly if necessary, see the error in `fn link_rlib`)
+ // compiled as an executable due to `--test`. Use whole-archive implicitly,
+ // like before the introduction of native lib modifiers.
+ || (whole_archive == None
+ && bundle
+ && cnum == LOCAL_CRATE
+ && sess.opts.test);
+
+ if bundle && cnum != LOCAL_CRATE {
+ if let Some(filename) = lib.filename {
+ // If rlib contains native libs as archives, they are unpacked to tmpdir.
+ let path = tmpdir.join(filename.as_str());
+ if whole_archive {
+ cmd.link_whole_rlib(&path);
+ } else {
+ cmd.link_rlib(&path);
+ }
+ }
+ } else {
+ if whole_archive {
+ cmd.link_whole_staticlib(
+ name,
+ verbatim,
+ &search_paths.get_or_init(|| archive_search_paths(sess)),
+ );
+ } else {
+ cmd.link_staticlib(name, verbatim)
+ }
+ }
+ }
+ }
NativeLibKind::Dylib { as_needed } => {
- cmd.link_dylib(name, verbatim, as_needed.unwrap_or(true))
+ if link_dynamic {
+ cmd.link_dylib(name, verbatim, as_needed.unwrap_or(true))
+ }
}
- NativeLibKind::Unspecified => cmd.link_dylib(name, verbatim, true),
- NativeLibKind::Framework { as_needed } => {
- cmd.link_framework(name, as_needed.unwrap_or(true))
+ NativeLibKind::Unspecified => {
+ if link_dynamic {
+ cmd.link_dylib(name, verbatim, true);
+ }
}
- NativeLibKind::Static { whole_archive, bundle, .. } => {
- if whole_archive == Some(true)
- // Backward compatibility case: this can be a rlib (so `+whole-archive` cannot
- // be added explicitly if necessary, see the error in `fn link_rlib`) compiled
- // as an executable due to `--test`. Use whole-archive implicitly, like before
- // the introduction of native lib modifiers.
- || (whole_archive == None && bundle != Some(false) && sess.opts.test)
- {
- cmd.link_whole_staticlib(
- name,
- verbatim,
- &search_path.get_or_init(|| archive_search_paths(sess)),
- );
- } else {
- cmd.link_staticlib(name, verbatim)
+ NativeLibKind::Framework { as_needed } => {
+ if link_dynamic {
+ cmd.link_framework(name, as_needed.unwrap_or(true))
}
}
NativeLibKind::RawDylib => {
- // Ignore RawDylib here, they are handled separately in linker_with_args().
+ // Handled separately in `linker_with_args`.
}
NativeLibKind::LinkArg => {
- cmd.arg(name);
+ if link_static {
+ cmd.arg(name);
+ }
}
}
}
}
-/// # Linking Rust crates and their non-bundled static libraries
-///
-/// Rust crates are not considered at all when creating an rlib output. All dependencies will be
-/// linked when producing the final output (instead of the intermediate rlib version).
+fn add_local_native_libraries(
+ cmd: &mut dyn Linker,
+ sess: &Session,
+ archive_builder_builder: &dyn ArchiveBuilderBuilder,
+ codegen_results: &CodegenResults,
+ tmpdir: &Path,
+) {
+ if sess.opts.unstable_opts.link_native_libraries {
+ // User-supplied library search paths (-L on the command line). These are the same paths
+ // used to find Rust crates, so some of them may have been added already by the previous
+ // crate linking code. This only allows them to be found at compile time so it is still
+ // entirely up to outside forces to make sure that library can be found at runtime.
+ for search_path in sess.target_filesearch(PathKind::All).search_paths() {
+ match search_path.kind {
+ PathKind::Framework => cmd.framework_path(&search_path.dir),
+ _ => cmd.include_path(&fix_windows_verbatim_for_gcc(&search_path.dir)),
+ }
+ }
+ }
+
+ let search_paths = OnceCell::new();
+ // All static and dynamic native library dependencies are linked to the local crate.
+ let link_static = true;
+ let link_dynamic = true;
+ add_native_libs_from_crate(
+ cmd,
+ sess,
+ archive_builder_builder,
+ codegen_results,
+ tmpdir,
+ &search_paths,
+ &Default::default(),
+ LOCAL_CRATE,
+ link_static,
+ link_dynamic,
+ );
+}
+
fn add_upstream_rust_crates<'a>(
cmd: &mut dyn Linker,
sess: &'a Session,
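
The `--test` backward-compatibility rule in `add_native_libs_from_crate` above is easy to state as a standalone predicate. A sketch where `is_local_crate` and `in_test_harness` stand in for `cnum == LOCAL_CRATE` and `sess.opts.test`:

    // Mirrors the whole-archive decision in the hunk above.
    fn use_whole_archive(
        bundle: Option<bool>,
        whole_archive: Option<bool>,
        is_local_crate: bool,
        in_test_harness: bool,
    ) -> bool {
        let bundle = bundle.unwrap_or(true);
        whole_archive == Some(true)
            // Backward compatibility: a bundled static lib in the local crate,
            // compiled as an executable via `--test`, is linked whole-archive
            // even though `+whole-archive` was never written explicitly.
            || (whole_archive.is_none() && bundle && is_local_crate && in_test_harness)
    }
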
@@ -2412,7 +2437,6 @@ fn add_upstream_rust_crates<'a>(
// Linking to a rlib involves just passing it to the linker (the linker
// will slurp up the object files inside), and linking to a dynamic library
// involves just passing the right -l flag.
-
let (_, data) = codegen_results
.crate_info
.dependency_formats
@@ -2420,346 +2444,234 @@ fn add_upstream_rust_crates<'a>(
.find(|(ty, _)| *ty == crate_type)
.expect("failed to find crate type in dependency format list");
- // Invoke get_used_crates to ensure that we get a topological sorting of
- // crates.
- let deps = &codegen_results.crate_info.used_crates;
-
- let mut compiler_builtins = None;
- let search_path = OnceCell::new();
-
- for &cnum in deps.iter() {
- // We may not pass all crates through to the linker. Some crates may
- // appear statically in an existing dylib, meaning we'll pick up all the
- // symbols from the dylib.
- let src = &codegen_results.crate_info.used_crate_source[&cnum];
- match data[cnum.as_usize() - 1] {
- _ if codegen_results.crate_info.profiler_runtime == Some(cnum) => {
- add_static_crate(
- cmd,
- sess,
- archive_builder_builder,
- codegen_results,
- tmpdir,
- cnum,
- &Default::default(),
- );
- }
- // compiler-builtins are always placed last to ensure that they're
- // linked correctly.
- _ if codegen_results.crate_info.compiler_builtins == Some(cnum) => {
- assert!(compiler_builtins.is_none());
- compiler_builtins = Some(cnum);
- }
- Linkage::NotLinked | Linkage::IncludedFromDylib => {}
- Linkage::Static => {
- let bundled_libs = if sess.opts.unstable_opts.packed_bundled_libs {
- codegen_results.crate_info.native_libraries[&cnum]
+ let search_paths = OnceCell::new();
+ for &cnum in &codegen_results.crate_info.used_crates {
+ // We may not pass all crates through to the linker. Some crates may appear statically in
+ // an existing dylib, meaning we'll pick up all the symbols from the dylib.
+ // We must always link crates `compiler_builtins` and `profiler_builtins` statically.
+ // Even if they were already included into a dylib
+ // (e.g. `libstd` when `-C prefer-dynamic` is used).
+ // FIXME: `dependency_formats` can report `profiler_builtins` as `NotLinked` for some
+ // reason, it shouldn't do that because `profiler_builtins` should indeed be linked.
+ let linkage = data[cnum.as_usize() - 1];
+ let link_static_crate = linkage == Linkage::Static
+ || (linkage == Linkage::IncludedFromDylib || linkage == Linkage::NotLinked)
+ && (codegen_results.crate_info.compiler_builtins == Some(cnum)
+ || codegen_results.crate_info.profiler_runtime == Some(cnum));
+
+ let mut bundled_libs = Default::default();
+ match linkage {
+ Linkage::Static | Linkage::IncludedFromDylib | Linkage::NotLinked => {
+ if link_static_crate {
+ bundled_libs = codegen_results.crate_info.native_libraries[&cnum]
.iter()
.filter_map(|lib| lib.filename)
- .collect::<FxHashSet<_>>()
- } else {
- Default::default()
- };
- add_static_crate(
- cmd,
- sess,
- archive_builder_builder,
- codegen_results,
- tmpdir,
- cnum,
- &bundled_libs,
- );
-
- // Link static native libs with "-bundle" modifier only if the crate they originate from
- // is being linked statically to the current crate. If it's linked dynamically
- // or is an rlib already included via some other dylib crate, the symbols from
- // native libs will have already been included in that dylib.
- //
- // If `-Zlink-native-libraries=false` is set, then the assumption is that an
- // external build system already has the native dependencies defined, and it
- // will provide them to the linker itself.
- if sess.opts.unstable_opts.link_native_libraries {
- if sess.opts.unstable_opts.packed_bundled_libs {
- // If rlib contains native libs as archives, unpack them to tmpdir.
- let rlib = &src.rlib.as_ref().unwrap().0;
- archive_builder_builder
- .extract_bundled_libs(rlib, tmpdir, &bundled_libs)
- .unwrap_or_else(|e| sess.fatal(e));
- }
-
- let mut last = (None, NativeLibKind::Unspecified, None);
- for lib in &codegen_results.crate_info.native_libraries[&cnum] {
- let Some(name) = lib.name else {
- continue;
- };
- let name = name.as_str();
- if !relevant_lib(sess, lib) {
- continue;
- }
-
- // Skip if this library is the same as the last.
- last = if (lib.name, lib.kind, lib.verbatim) == last {
- continue;
- } else {
- (lib.name, lib.kind, lib.verbatim)
- };
-
- match lib.kind {
- NativeLibKind::Static {
- bundle: Some(false),
- whole_archive: Some(true),
- } => {
- cmd.link_whole_staticlib(
- name,
- lib.verbatim.unwrap_or(false),
- search_path.get_or_init(|| archive_search_paths(sess)),
- );
- }
- NativeLibKind::Static {
- bundle: Some(false),
- whole_archive: Some(false) | None,
- } => {
- // HACK/FIXME: Fixup a circular dependency between libgcc and libc
- // with glibc. This logic should be moved to the libc crate.
- if sess.target.os == "linux"
- && sess.target.env == "gnu"
- && name == "c"
- {
- cmd.link_staticlib("gcc", false);
- }
- cmd.link_staticlib(name, lib.verbatim.unwrap_or(false));
- }
- NativeLibKind::LinkArg => {
- cmd.arg(name);
- }
- NativeLibKind::Dylib { .. }
- | NativeLibKind::Framework { .. }
- | NativeLibKind::Unspecified
- | NativeLibKind::RawDylib => {}
- NativeLibKind::Static { bundle: Some(true) | None, whole_archive } => {
- if sess.opts.unstable_opts.packed_bundled_libs {
- // If rlib contains native libs as archives, they are unpacked to tmpdir.
- let path = tmpdir.join(lib.filename.unwrap().as_str());
- if whole_archive == Some(true) {
- cmd.link_whole_rlib(&path);
- } else {
- cmd.link_rlib(&path);
- }
- }
- }
- }
- }
+ .collect();
+ add_static_crate(
+ cmd,
+ sess,
+ archive_builder_builder,
+ codegen_results,
+ tmpdir,
+ cnum,
+ &bundled_libs,
+ );
}
}
- Linkage::Dynamic => add_dynamic_crate(cmd, sess, &src.dylib.as_ref().unwrap().0),
+ Linkage::Dynamic => {
+ let src = &codegen_results.crate_info.used_crate_source[&cnum];
+ add_dynamic_crate(cmd, sess, &src.dylib.as_ref().unwrap().0);
+ }
}
- }
- // compiler-builtins are always placed last to ensure that they're
- // linked correctly.
- // We must always link the `compiler_builtins` crate statically. Even if it
- // was already "included" in a dylib (e.g., `libstd` when `-C prefer-dynamic`
- // is used)
- if let Some(cnum) = compiler_builtins {
- add_static_crate(
+ // Static libraries are linked for a subset of linked upstream crates.
+ // 1. If the upstream crate is a directly linked rlib then we must link the native library
+ // because the rlib is just an archive.
+ // 2. If the upstream crate is a dylib or a rlib linked through dylib, then we do not link
+ // the native library because it is already linked into the dylib, and even if
+ // inline/const/generic functions from the dylib can refer to symbols from the native
+ // library, those symbols should be exported and available from the dylib anyway.
+ // 3. Libraries bundled into `(compiler,profiler)_builtins` are special, see above.
+ let link_static = link_static_crate;
+ // Dynamic libraries are not linked here, see the FIXME in `add_upstream_native_libraries`.
+ let link_dynamic = false;
+ add_native_libs_from_crate(
cmd,
sess,
archive_builder_builder,
codegen_results,
tmpdir,
+ &search_paths,
+ &bundled_libs,
cnum,
- &Default::default(),
+ link_static,
+ link_dynamic,
);
}
+}
- // Converts a library file-stem into a cc -l argument
- fn unlib<'a>(target: &Target, stem: &'a str) -> &'a str {
- if stem.starts_with("lib") && !target.is_like_windows { &stem[3..] } else { stem }
+fn add_upstream_native_libraries(
+ cmd: &mut dyn Linker,
+ sess: &Session,
+ archive_builder_builder: &dyn ArchiveBuilderBuilder,
+ codegen_results: &CodegenResults,
+ tmpdir: &Path,
+) {
+ let search_path = OnceCell::new();
+ for &cnum in &codegen_results.crate_info.used_crates {
+ // Static libraries are not linked here, they are linked in `add_upstream_rust_crates`.
+ // FIXME: Merge this function to `add_upstream_rust_crates` so that all native libraries
+ // are linked together with their respective upstream crates, and in their originally
+ // specified order. This is slightly breaking due to our use of `--as-needed` (see crater
+ // results in https://github.com/rust-lang/rust/pull/102832#issuecomment-1279772306).
+ let link_static = false;
+ // Dynamic libraries are linked for all linked upstream crates.
+ // 1. If the upstream crate is a directly linked rlib then we must link the native library
+ // because the rlib is just an archive.
+ // 2. If the upstream crate is a dylib or a rlib linked through dylib, then we have to link
+ // the native library too because inline/const/generic functions from the dylib can refer
+ // to symbols from the native library, so the native library providing those symbols should
+ // be available when linking our final binary.
+ let link_dynamic = true;
+ add_native_libs_from_crate(
+ cmd,
+ sess,
+ archive_builder_builder,
+ codegen_results,
+ tmpdir,
+ &search_path,
+ &Default::default(),
+ cnum,
+ link_static,
+ link_dynamic,
+ );
}
+}
- // Adds the static "rlib" versions of all crates to the command line.
- // There's a bit of magic which happens here specifically related to LTO,
- // namely that we remove upstream object files.
- //
- // When performing LTO, almost(*) all of the bytecode from the upstream
- // libraries has already been included in our object file output. As a
- // result we need to remove the object files in the upstream libraries so
- // the linker doesn't try to include them twice (or whine about duplicate
- // symbols). We must continue to include the rest of the rlib, however, as
- // it may contain static native libraries which must be linked in.
- //
- // (*) Crates marked with `#![no_builtins]` don't participate in LTO and
- // their bytecode wasn't included. The object files in those libraries must
- // still be passed to the linker.
- //
- // Note, however, that if we're not doing LTO we can just pass the rlib
- // blindly to the linker (fast) because it's fine if it's not actually
- // included as we're at the end of the dependency chain.
- fn add_static_crate<'a>(
- cmd: &mut dyn Linker,
- sess: &'a Session,
- archive_builder_builder: &dyn ArchiveBuilderBuilder,
- codegen_results: &CodegenResults,
- tmpdir: &Path,
- cnum: CrateNum,
- bundled_lib_file_names: &FxHashSet<Symbol>,
- ) {
- let src = &codegen_results.crate_info.used_crate_source[&cnum];
- let cratepath = &src.rlib.as_ref().unwrap().0;
-
- let mut link_upstream = |path: &Path| {
- cmd.link_rlib(&fix_windows_verbatim_for_gcc(path));
- };
-
- // See the comment above in `link_staticlib` and `link_rlib` for why if
- // there's a static library that's not relevant we skip all object
- // files.
- let native_libs = &codegen_results.crate_info.native_libraries[&cnum];
- let skip_native = native_libs.iter().any(|lib| {
- matches!(lib.kind, NativeLibKind::Static { bundle: None | Some(true), .. })
- && !relevant_lib(sess, lib)
- });
-
- if (!are_upstream_rust_objects_already_included(sess)
- || ignored_for_lto(sess, &codegen_results.crate_info, cnum))
- && !skip_native
- {
- link_upstream(cratepath);
- return;
- }
-
- let dst = tmpdir.join(cratepath.file_name().unwrap());
- let name = cratepath.file_name().unwrap().to_str().unwrap();
- let name = &name[3..name.len() - 5]; // chop off lib/.rlib
- let bundled_lib_file_names = bundled_lib_file_names.clone();
-
- sess.prof.generic_activity_with_arg("link_altering_rlib", name).run(|| {
- let canonical_name = name.replace('-', "_");
- let upstream_rust_objects_already_included =
- are_upstream_rust_objects_already_included(sess);
- let is_builtins = sess.target.no_builtins
- || !codegen_results.crate_info.is_no_builtins.contains(&cnum);
-
- let mut archive = archive_builder_builder.new_archive_builder(sess);
- if let Err(e) = archive.add_archive(
- cratepath,
- Box::new(move |f| {
- if f == METADATA_FILENAME {
- return true;
- }
+// Adds the static "rlib" versions of all crates to the command line.
+// There's a bit of magic which happens here specifically related to LTO,
+// namely that we remove upstream object files.
+//
+// When performing LTO, almost(*) all of the bytecode from the upstream
+// libraries has already been included in our object file output. As a
+// result we need to remove the object files in the upstream libraries so
+// the linker doesn't try to include them twice (or whine about duplicate
+// symbols). We must continue to include the rest of the rlib, however, as
+// it may contain static native libraries which must be linked in.
+//
+// (*) Crates marked with `#![no_builtins]` don't participate in LTO and
+// their bytecode wasn't included. The object files in those libraries must
+// still be passed to the linker.
+//
+// Note, however, that if we're not doing LTO we can just pass the rlib
+// blindly to the linker (fast) because it's fine if it's not actually
+// included as we're at the end of the dependency chain.
+fn add_static_crate<'a>(
+ cmd: &mut dyn Linker,
+ sess: &'a Session,
+ archive_builder_builder: &dyn ArchiveBuilderBuilder,
+ codegen_results: &CodegenResults,
+ tmpdir: &Path,
+ cnum: CrateNum,
+ bundled_lib_file_names: &FxHashSet<Symbol>,
+) {
+ let src = &codegen_results.crate_info.used_crate_source[&cnum];
+ let cratepath = &src.rlib.as_ref().unwrap().0;
- let canonical = f.replace('-', "_");
-
- let is_rust_object =
- canonical.starts_with(&canonical_name) && looks_like_rust_object_file(&f);
-
- // If we've been requested to skip all native object files
- // (those not generated by the rust compiler) then we can skip
- // this file. See above for why we may want to do this.
- let skip_because_cfg_say_so = skip_native && !is_rust_object;
-
- // If we're performing LTO and this is a rust-generated object
- // file, then we don't need the object file as it's part of the
- // LTO module. Note that `#![no_builtins]` is excluded from LTO,
- // though, so we let that object file slide.
- let skip_because_lto =
- upstream_rust_objects_already_included && is_rust_object && is_builtins;
-
- // We skip native libraries because:
- // 1. This native libraries won't be used from the generated rlib,
- // so we can throw them away to avoid the copying work.
- // 2. We can't allow it to be a single remaining entry in archive
- // as some linkers may complain on that.
- if bundled_lib_file_names.contains(&Symbol::intern(f)) {
- return true;
- }
+ let mut link_upstream = |path: &Path| {
+ cmd.link_rlib(&fix_windows_verbatim_for_gcc(path));
+ };
- if skip_because_cfg_say_so || skip_because_lto {
- return true;
- }
+ // See the comment above in `link_staticlib` and `link_rlib` for why if
+ // there's a static library that's not relevant we skip all object
+ // files.
+ let native_libs = &codegen_results.crate_info.native_libraries[&cnum];
+ let skip_native = native_libs.iter().any(|lib| {
+ matches!(lib.kind, NativeLibKind::Static { bundle: None | Some(true), .. })
+ && !relevant_lib(sess, lib)
+ });
- false
- }),
- ) {
- sess.fatal(&format!("failed to build archive from rlib: {}", e));
- }
- if archive.build(&dst) {
- link_upstream(&dst);
- }
- });
+ if (!are_upstream_rust_objects_already_included(sess)
+ || ignored_for_lto(sess, &codegen_results.crate_info, cnum))
+ && !skip_native
+ {
+ link_upstream(cratepath);
+ return;
}
- // Same thing as above, but for dynamic crates instead of static crates.
- fn add_dynamic_crate(cmd: &mut dyn Linker, sess: &Session, cratepath: &Path) {
- // Just need to tell the linker about where the library lives and
- // what its name is
- let parent = cratepath.parent();
- if let Some(dir) = parent {
- cmd.include_path(&fix_windows_verbatim_for_gcc(dir));
- }
- let filestem = cratepath.file_stem().unwrap().to_str().unwrap();
- cmd.link_rust_dylib(
- &unlib(&sess.target, filestem),
- parent.unwrap_or_else(|| Path::new("")),
- );
- }
-}
+ let dst = tmpdir.join(cratepath.file_name().unwrap());
+ let name = cratepath.file_name().unwrap().to_str().unwrap();
+ let name = &name[3..name.len() - 5]; // chop off lib/.rlib
+ let bundled_lib_file_names = bundled_lib_file_names.clone();
-/// Link in all of our upstream crates' native dependencies. Remember that all of these upstream
-/// native dependencies are all non-static dependencies. We've got two cases then:
-///
-/// 1. The upstream crate is an rlib. In this case we *must* link in the native dependency because
-/// the rlib is just an archive.
-///
-/// 2. The upstream crate is a dylib. In order to use the dylib, we have to have the dependency
-/// present on the system somewhere. Thus, we don't gain a whole lot from not linking in the
-/// dynamic dependency to this crate as well.
-///
-/// The use case for this is a little subtle. In theory the native dependencies of a crate are
-/// purely an implementation detail of the crate itself, but the problem arises with generic and
-/// inlined functions. If a generic function calls a native function, then the generic function
-/// must be instantiated in the target crate, meaning that the native symbol must also be resolved
-/// in the target crate.
-fn add_upstream_native_libraries(
- cmd: &mut dyn Linker,
- sess: &Session,
- codegen_results: &CodegenResults,
-) {
- let mut last = (None, NativeLibKind::Unspecified, None);
- for &cnum in &codegen_results.crate_info.used_crates {
- for lib in codegen_results.crate_info.native_libraries[&cnum].iter() {
- let Some(name) = lib.name else {
- continue;
- };
- let name = name.as_str();
- if !relevant_lib(sess, &lib) {
- continue;
- }
+ sess.prof.generic_activity_with_arg("link_altering_rlib", name).run(|| {
+ let canonical_name = name.replace('-', "_");
+ let upstream_rust_objects_already_included =
+ are_upstream_rust_objects_already_included(sess);
+ let is_builtins =
+ sess.target.no_builtins || !codegen_results.crate_info.is_no_builtins.contains(&cnum);
- // Skip if this library is the same as the last.
- last = if (lib.name, lib.kind, lib.verbatim) == last {
- continue;
- } else {
- (lib.name, lib.kind, lib.verbatim)
- };
+ let mut archive = archive_builder_builder.new_archive_builder(sess);
+ if let Err(e) = archive.add_archive(
+ cratepath,
+ Box::new(move |f| {
+ if f == METADATA_FILENAME {
+ return true;
+ }
- let verbatim = lib.verbatim.unwrap_or(false);
- match lib.kind {
- NativeLibKind::Dylib { as_needed } => {
- cmd.link_dylib(name, verbatim, as_needed.unwrap_or(true))
+ let canonical = f.replace('-', "_");
+
+ let is_rust_object =
+ canonical.starts_with(&canonical_name) && looks_like_rust_object_file(&f);
+
+ // If we've been requested to skip all native object files
+ // (those not generated by the rust compiler) then we can skip
+ // this file. See above for why we may want to do this.
+ let skip_because_cfg_say_so = skip_native && !is_rust_object;
+
+ // If we're performing LTO and this is a rust-generated object
+ // file, then we don't need the object file as it's part of the
+ // LTO module. Note that `#![no_builtins]` is excluded from LTO,
+ // though, so we let that object file slide.
+ let skip_because_lto =
+ upstream_rust_objects_already_included && is_rust_object && is_builtins;
+
+ // We skip native libraries because:
+ // 1. These native libraries won't be used from the generated rlib,
+ // so we can throw them away to avoid the copying work.
+ // 2. We can't allow one to be the single remaining entry in the archive,
+ // as some linkers may complain about that.
+ if bundled_lib_file_names.contains(&Symbol::intern(f)) {
+ return true;
}
- NativeLibKind::Unspecified => cmd.link_dylib(name, verbatim, true),
- NativeLibKind::Framework { as_needed } => {
- cmd.link_framework(name, as_needed.unwrap_or(true))
+
+ if skip_because_cfg_say_so || skip_because_lto {
+ return true;
}
- // ignore static native libraries here as we've
- // already included them in add_local_native_libraries and
- // add_upstream_rust_crates
- NativeLibKind::Static { .. } => {}
- NativeLibKind::RawDylib | NativeLibKind::LinkArg => {}
- }
+
+ false
+ }),
+ ) {
+ sess.fatal(&format!("failed to build archive from rlib: {}", e));
}
- }
+ if archive.build(&dst) {
+ link_upstream(&dst);
+ }
+ });
+}
+
+// Same thing as above, but for dynamic crates instead of static crates.
+fn add_dynamic_crate(cmd: &mut dyn Linker, sess: &Session, cratepath: &Path) {
+ // Just need to tell the linker about where the library lives and
+ // what its name is
+ let parent = cratepath.parent();
+ if let Some(dir) = parent {
+ cmd.include_path(&fix_windows_verbatim_for_gcc(dir));
+ }
+ let stem = cratepath.file_stem().unwrap().to_str().unwrap();
+ // Convert library file-stem into a cc -l argument.
+ let prefix = if stem.starts_with("lib") && !sess.target.is_like_windows { 3 } else { 0 };
+ cmd.link_rust_dylib(&stem[prefix..], parent.unwrap_or_else(|| Path::new("")));
}
fn relevant_lib(sess: &Session, lib: &NativeLib) -> bool {
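
A standalone sketch of the file-stem to `-l` conversion done at the end of `add_dynamic_crate` above; `is_like_windows` stands in for `sess.target.is_like_windows`:

    // GNU-style linkers expect `-l foo` for a file named `libfoo.so`, so the
    // `lib` prefix is dropped everywhere except on Windows-like targets.
    fn dylib_link_name(stem: &str, is_like_windows: bool) -> &str {
        let prefix = if stem.starts_with("lib") && !is_like_windows { 3 } else { 0 };
        &stem[prefix..]
    }

    // e.g. dylib_link_name("libstd-0abc", false) == "std-0abc";
    // on Windows-like targets the stem is passed through unchanged.
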
@@ -2813,14 +2725,14 @@ fn add_apple_sdk(cmd: &mut dyn Linker, sess: &Session, flavor: LinkerFlavor) {
("arm", "watchos") => "watchos",
(_, "macos") => "macosx",
_ => {
- sess.err(&format!("unsupported arch `{}` for os `{}`", arch, os));
+ sess.emit_err(errors::UnsupportedArch { arch, os });
return;
}
};
let sdk_root = match get_apple_sdk_root(sdk_name) {
Ok(s) => s,
Err(e) => {
- sess.err(&e);
+ sess.emit_err(e);
return;
}
};
@@ -2836,7 +2748,7 @@ fn add_apple_sdk(cmd: &mut dyn Linker, sess: &Session, flavor: LinkerFlavor) {
}
}
-fn get_apple_sdk_root(sdk_name: &str) -> Result<String, String> {
+fn get_apple_sdk_root(sdk_name: &str) -> Result<String, errors::AppleSdkRootError<'_>> {
// Following what clang does
// (https://github.com/llvm/llvm-project/blob/
// 296a80102a9b72c3eda80558fb78a3ed8849b341/clang/lib/Driver/ToolChains/Darwin.cpp#L1661-L1678)
@@ -2886,7 +2798,7 @@ fn get_apple_sdk_root(sdk_name: &str) -> Result<String, String> {
match res {
Ok(output) => Ok(output.trim().to_string()),
- Err(e) => Err(format!("failed to get {} SDK path: {}", sdk_name, e)),
+ Err(error) => Err(errors::AppleSdkRootError::SdkPath { sdk_name, error }),
}
}
@@ -2919,7 +2831,7 @@ fn add_gcc_ld_path(cmd: &mut dyn Linker, sess: &Session, flavor: LinkerFlavor) {
}
}
} else {
- sess.fatal("option `-Z gcc-ld` is used even though linker flavor is not gcc");
+ sess.emit_fatal(errors::OptionGccOnly);
}
}
}
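
For reference, a hedged sketch of the `xcrun` query that `get_apple_sdk_root` falls back to (the real function also honours a matching `SDKROOT` environment variable and returns the structured `AppleSdkRootError` introduced above; the plain-`String` error here is only for illustration):

    use std::process::Command;

    fn sdk_root_via_xcrun(sdk_name: &str) -> Result<String, String> {
        // Ask the Xcode tooling where the named SDK lives, e.g. "macosx".
        let out = Command::new("xcrun")
            .arg("--show-sdk-path")
            .arg("-sdk")
            .arg(sdk_name)
            .output()
            .map_err(|e| format!("failed to get {sdk_name} SDK path: {e}"))?;
        if out.status.success() {
            Ok(String::from_utf8_lossy(&out.stdout).trim().to_string())
        } else {
            Err(String::from_utf8_lossy(&out.stderr).trim().to_string())
        }
    }
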
diff --git a/compiler/rustc_codegen_ssa/src/back/linker.rs b/compiler/rustc_codegen_ssa/src/back/linker.rs
index c49b19bdf..f087d903e 100644
--- a/compiler/rustc_codegen_ssa/src/back/linker.rs
+++ b/compiler/rustc_codegen_ssa/src/back/linker.rs
@@ -34,9 +34,9 @@ pub fn disable_localization(linker: &mut Command) {
linker.env("VSLANG", "1033");
}
-// The third parameter is for env vars, used on windows to set up the
-// path for MSVC to find its DLLs, and gcc to find its bundled
-// toolchain
+/// The third parameter is for env vars, used on windows to set up the
+/// path for MSVC to find its DLLs, and gcc to find its bundled
+/// toolchain
pub fn get_linker<'a>(
sess: &'a Session,
linker: &Path,
@@ -515,7 +515,7 @@ impl<'a> Linker for GccLinker<'a> {
// -force_load is the macOS equivalent of --whole-archive, but it
// involves passing the full path to the library to link.
self.linker_arg("-force_load");
- let lib = find_native_static_library(lib, Some(verbatim), search_path, &self.sess);
+ let lib = find_native_static_library(lib, verbatim, search_path, &self.sess);
self.linker_arg(&lib);
}
}
@@ -1260,11 +1260,11 @@ impl<'a> Linker for WasmLd<'a> {
}
fn link_whole_staticlib(&mut self, lib: &str, _verbatim: bool, _search_path: &[PathBuf]) {
- self.cmd.arg("-l").arg(lib);
+ self.cmd.arg("--whole-archive").arg("-l").arg(lib).arg("--no-whole-archive");
}
fn link_whole_rlib(&mut self, lib: &Path) {
- self.cmd.arg(lib);
+ self.cmd.arg("--whole-archive").arg(lib).arg("--no-whole-archive");
}
fn gc_sections(&mut self, _keep_metadata: bool) {
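
The wasm-ld change above brackets a single archive with `--whole-archive`/`--no-whole-archive` so the request does not leak onto later linker inputs. A sketch using `std::process::Command` as a stand-in for the linker command wrapper:

    use std::process::Command;

    fn link_whole_staticlib(cmd: &mut Command, lib: &str) {
        // Scope the whole-archive request to exactly one library.
        cmd.arg("--whole-archive").arg("-l").arg(lib).arg("--no-whole-archive");
    }

    fn main() {
        let mut wasm_ld = Command::new("wasm-ld");
        link_whole_staticlib(&mut wasm_ld, "foo");
        // Prints: "wasm-ld" "--whole-archive" "-l" "foo" "--no-whole-archive"
        println!("{wasm_ld:?}");
    }
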
diff --git a/compiler/rustc_codegen_ssa/src/back/metadata.rs b/compiler/rustc_codegen_ssa/src/back/metadata.rs
index 99ddd1764..51c5c375d 100644
--- a/compiler/rustc_codegen_ssa/src/back/metadata.rs
+++ b/compiler/rustc_codegen_ssa/src/back/metadata.rs
@@ -60,7 +60,7 @@ impl MetadataLoader for DefaultMetadataLoader {
let data = entry
.data(data)
.map_err(|e| format!("failed to parse rlib '{}': {}", path.display(), e))?;
- return search_for_metadata(path, data, ".rmeta");
+ return search_for_section(path, data, ".rmeta");
}
}
@@ -69,11 +69,11 @@ impl MetadataLoader for DefaultMetadataLoader {
}
fn get_dylib_metadata(&self, _target: &Target, path: &Path) -> Result<MetadataRef, String> {
- load_metadata_with(path, |data| search_for_metadata(path, data, ".rustc"))
+ load_metadata_with(path, |data| search_for_section(path, data, ".rustc"))
}
}
-fn search_for_metadata<'a>(
+pub(super) fn search_for_section<'a>(
path: &Path,
bytes: &'a [u8],
section: &str,
@@ -191,39 +191,43 @@ pub enum MetadataPosition {
Last,
}
-// For rlibs we "pack" rustc metadata into a dummy object file.
-//
-// Historically it was needed because rustc linked rlibs as whole-archive in some cases.
-// In that case linkers try to include all files located in an archive, so if metadata is stored
-// in an archive then it needs to be of a form that the linker is able to process.
-// Now it's not clear whether metadata still needs to be wrapped into an object file or not.
-//
-// Note, though, that we don't actually want this metadata to show up in any
-// final output of the compiler. Instead this is purely for rustc's own
-// metadata tracking purposes.
-//
-// With the above in mind, each "flavor" of object format gets special
-// handling here depending on the target:
-//
-// * MachO - macos-like targets will insert the metadata into a section that
-// is sort of fake dwarf debug info. Inspecting the source of the macos
-// linker this causes these sections to be skipped automatically because
-// it's not in an allowlist of otherwise well known dwarf section names to
-// go into the final artifact.
-//
-// * WebAssembly - we actually don't have any container format for this
-// target. WebAssembly doesn't support the `dylib` crate type anyway so
-// there's no need for us to support this at this time. Consequently the
-// metadata bytes are simply stored as-is into an rlib.
-//
-// * COFF - Windows-like targets create an object with a section that has
-// the `IMAGE_SCN_LNK_REMOVE` flag set which ensures that if the linker
-// ever sees the section it doesn't process it and it's removed.
-//
-// * ELF - All other targets are similar to Windows in that there's a
-// `SHF_EXCLUDE` flag we can set on sections in an object file to get
-// automatically removed from the final output.
-pub fn create_rmeta_file(sess: &Session, metadata: &[u8]) -> (Vec<u8>, MetadataPosition) {
+/// For rlibs we "pack" rustc metadata into a dummy object file.
+///
+/// Historically it was needed because rustc linked rlibs as whole-archive in some cases.
+/// In that case linkers try to include all files located in an archive, so if metadata is stored
+/// in an archive then it needs to be of a form that the linker is able to process.
+/// Now it's not clear whether metadata still needs to be wrapped into an object file or not.
+///
+/// Note, though, that we don't actually want this metadata to show up in any
+/// final output of the compiler. Instead this is purely for rustc's own
+/// metadata tracking purposes.
+///
+/// With the above in mind, each "flavor" of object format gets special
+/// handling here depending on the target:
+///
+/// * MachO - macos-like targets will insert the metadata into a section that
+/// is sort of fake dwarf debug info. Inspecting the source of the macos
+/// linker shows that these sections are skipped automatically because
+/// they are not in an allowlist of otherwise well known dwarf section
+/// names that go into the final artifact.
+///
+/// * WebAssembly - we actually don't have any container format for this
+/// target. WebAssembly doesn't support the `dylib` crate type anyway so
+/// there's no need for us to support this at this time. Consequently the
+/// metadata bytes are simply stored as-is into an rlib.
+///
+/// * COFF - Windows-like targets create an object with a section that has
+/// the `IMAGE_SCN_LNK_REMOVE` flag set which ensures that if the linker
+/// ever sees the section it doesn't process it and it's removed.
+///
+/// * ELF - All other targets are similar to Windows in that there's a
+/// `SHF_EXCLUDE` flag we can set on sections in an object file to get
+/// automatically removed from the final output.
+pub fn create_wrapper_file(
+ sess: &Session,
+ section_name: Vec<u8>,
+ data: &[u8],
+) -> (Vec<u8>, MetadataPosition) {
let Some(mut file) = create_object_file(sess) else {
// This is used to handle all "other" targets. This includes targets
// in two categories:
@@ -241,11 +245,11 @@ pub fn create_rmeta_file(sess: &Session, metadata: &[u8]) -> (Vec<u8>, MetadataP
// WebAssembly and for targets not supported by the `object` crate
// yet it means that work will need to be done in the `object` crate
// to add a case above.
- return (metadata.to_vec(), MetadataPosition::Last);
+ return (data.to_vec(), MetadataPosition::Last);
};
let section = file.add_section(
file.segment_name(StandardSegment::Debug).to_vec(),
- b".rmeta".to_vec(),
+ section_name,
SectionKind::Debug,
);
match file.format() {
@@ -259,7 +263,7 @@ pub fn create_rmeta_file(sess: &Session, metadata: &[u8]) -> (Vec<u8>, MetadataP
}
_ => {}
};
- file.append_section_data(section, metadata, 1);
+ file.append_section_data(section, data, 1);
(file.write().unwrap(), MetadataPosition::First)
}
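
A minimal sketch of what the renamed `create_wrapper_file` boils down to with the `object` crate, for a fixed ELF/x86-64 target (an assumption for illustration; the real code derives the format, architecture and section flags such as `SHF_EXCLUDE` from the session target):

    use object::write::{Object, StandardSegment};
    use object::{Architecture, BinaryFormat, Endianness, SectionKind};

    // Stash arbitrary bytes (e.g. rustc metadata or a bundled native lib) in a
    // debug section of an otherwise empty object file.
    fn wrap_in_object(section_name: Vec<u8>, data: &[u8]) -> Vec<u8> {
        let mut file = Object::new(BinaryFormat::Elf, Architecture::X86_64, Endianness::Little);
        let section = file.add_section(
            file.segment_name(StandardSegment::Debug).to_vec(),
            section_name,
            SectionKind::Debug,
        );
        file.append_section_data(section, data, 1); // align 1: opaque payload
        file.write().unwrap()
    }

    // e.g. wrap_in_object(b".rmeta".to_vec(), metadata) for rlib metadata, or
    //      wrap_in_object(b".bundled_lib".to_vec(), archive_bytes) for packed libs.
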
diff --git a/compiler/rustc_codegen_ssa/src/back/symbol_export.rs b/compiler/rustc_codegen_ssa/src/back/symbol_export.rs
index c2ecc4160..22f534d90 100644
--- a/compiler/rustc_codegen_ssa/src/back/symbol_export.rs
+++ b/compiler/rustc_codegen_ssa/src/back/symbol_export.rs
@@ -180,7 +180,8 @@ fn exported_symbols_provider_local<'tcx>(
.collect();
if tcx.entry_fn(()).is_some() {
- let exported_symbol = ExportedSymbol::NoDefId(SymbolName::new(tcx, "main"));
+ let exported_symbol =
+ ExportedSymbol::NoDefId(SymbolName::new(tcx, tcx.sess.target.entry_name.as_ref()));
symbols.push((
exported_symbol,
@@ -193,8 +194,11 @@ fn exported_symbols_provider_local<'tcx>(
}
if tcx.allocator_kind(()).is_some() {
- for method in ALLOCATOR_METHODS {
- let symbol_name = format!("__rust_{}", method.name);
+ for symbol_name in ALLOCATOR_METHODS
+ .iter()
+ .map(|method| format!("__rust_{}", method.name))
+ .chain(["__rust_alloc_error_handler".to_string(), OomStrategy::SYMBOL.to_string()])
+ {
let exported_symbol = ExportedSymbol::NoDefId(SymbolName::new(tcx, &symbol_name));
symbols.push((
diff --git a/compiler/rustc_codegen_ssa/src/back/write.rs b/compiler/rustc_codegen_ssa/src/back/write.rs
index d0ac016b0..12fca6496 100644
--- a/compiler/rustc_codegen_ssa/src/back/write.rs
+++ b/compiler/rustc_codegen_ssa/src/back/write.rs
@@ -15,10 +15,8 @@ use rustc_data_structures::profiling::TimingGuard;
use rustc_data_structures::profiling::VerboseTimingGuard;
use rustc_data_structures::sync::Lrc;
use rustc_errors::emitter::Emitter;
-use rustc_errors::{
- translation::{to_fluent_args, Translate},
- DiagnosticId, FatalError, Handler, Level,
-};
+use rustc_errors::{translation::Translate, DiagnosticId, FatalError, Handler, Level};
+use rustc_errors::{DiagnosticMessage, Style};
use rustc_fs_util::link_or_copy;
use rustc_hir::def_id::{CrateNum, LOCAL_CRATE};
use rustc_incremental::{
@@ -38,6 +36,7 @@ use rustc_span::{BytePos, FileName, InnerSpan, Pos, Span};
use rustc_target::spec::{MergeFunctions, SanitizerSet};
use std::any::Any;
+use std::borrow::Cow;
use std::fs;
use std::io;
use std::marker::PhantomData;
@@ -341,20 +340,20 @@ pub struct CodegenContext<B: WriteBackendMethods> {
pub split_debuginfo: rustc_target::spec::SplitDebuginfo,
pub split_dwarf_kind: rustc_session::config::SplitDwarfKind,
- // Number of cgus excluding the allocator/metadata modules
+ /// Number of cgus excluding the allocator/metadata modules
pub total_cgus: usize,
- // Handler to use for diagnostics produced during codegen.
+ /// Handler to use for diagnostics produced during codegen.
pub diag_emitter: SharedEmitter,
- // LLVM optimizations for which we want to print remarks.
+ /// LLVM optimizations for which we want to print remarks.
pub remark: Passes,
- // Worker thread number
+ /// Worker thread number
pub worker: usize,
- // The incremental compilation session directory, or None if we are not
- // compiling incrementally
+ /// The incremental compilation session directory, or None if we are not
+ /// compiling incrementally
pub incr_comp_session_dir: Option<PathBuf>,
- // Used to update CGU re-use information during the thinlto phase.
+ /// Used to update CGU re-use information during the thinlto phase.
pub cgu_reuse_tracker: CguReuseTracker,
- // Channel back to the main control thread to send messages to
+ /// Channel back to the main control thread to send messages to
pub coordinator_send: Sender<Box<dyn Any + Send>>,
}
@@ -757,7 +756,7 @@ fn execute_work_item<B: ExtraBackendMethods>(
}
}
-// Actual LTO type we end up choosing based on multiple factors.
+/// Actual LTO type we end up choosing based on multiple factors.
pub enum ComputedLtoType {
No,
Thin,
@@ -969,8 +968,11 @@ pub enum Message<B: WriteBackendMethods> {
CodegenAborted,
}
+type DiagnosticArgName<'source> = Cow<'source, str>;
+
struct Diagnostic {
- msg: String,
+ msg: Vec<(DiagnosticMessage, Style)>,
+ args: FxHashMap<DiagnosticArgName<'static>, rustc_errors::DiagnosticArgValue<'static>>,
code: Option<DiagnosticId>,
lvl: Level,
}
@@ -1743,15 +1745,18 @@ impl Translate for SharedEmitter {
impl Emitter for SharedEmitter {
fn emit_diagnostic(&mut self, diag: &rustc_errors::Diagnostic) {
- let fluent_args = to_fluent_args(diag.args());
+ let args: FxHashMap<Cow<'_, str>, rustc_errors::DiagnosticArgValue<'_>> =
+ diag.args().map(|(name, arg)| (name.clone(), arg.clone())).collect();
drop(self.sender.send(SharedEmitterMessage::Diagnostic(Diagnostic {
- msg: self.translate_messages(&diag.message, &fluent_args).to_string(),
+ msg: diag.message.clone(),
+ args: args.clone(),
code: diag.code.clone(),
lvl: diag.level(),
})));
for child in &diag.children {
drop(self.sender.send(SharedEmitterMessage::Diagnostic(Diagnostic {
- msg: self.translate_messages(&child.message, &fluent_args).to_string(),
+ msg: child.message.clone(),
+ args: args.clone(),
code: None,
lvl: child.level,
})));
@@ -1782,10 +1787,11 @@ impl SharedEmitterMain {
match message {
Ok(SharedEmitterMessage::Diagnostic(diag)) => {
let handler = sess.diagnostic();
- let mut d = rustc_errors::Diagnostic::new(diag.lvl, &diag.msg);
+ let mut d = rustc_errors::Diagnostic::new_with_messages(diag.lvl, diag.msg);
if let Some(code) = diag.code {
d.code(code);
}
+ d.replace_args(diag.args);
handler.emit_diagnostic(&mut d);
}
Ok(SharedEmitterMessage::InlineAsmError(cookie, msg, level, source)) => {
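
The `SharedEmitter` changes above stop rendering diagnostics on the codegen worker threads: the structured message parts and their arguments now cross the channel as-is, and the main thread rebuilds the `Diagnostic` (via `new_with_messages` plus `replace_args`) before handing it to the real `Handler`, which owns the Fluent machinery. A stripped-down sketch of the same send-structure-not-strings pattern using only standard-library types (the `Payload` struct and its fields are invented for illustration):

    use std::collections::HashMap;
    use std::sync::mpsc;

    // Invented stand-in for the structured diagnostic payload.
    struct Payload {
        // Untranslated message parts, kept structured instead of pre-rendered.
        msg_parts: Vec<String>,
        // Arguments needed to render the message later, on the receiving side.
        args: HashMap<String, String>,
    }

    fn main() {
        let (tx, rx) = mpsc::channel::<Payload>();
        // Worker side: ship structure, not a rendered string.
        tx.send(Payload {
            msg_parts: vec!["linking with `{linker}` failed".to_string()],
            args: HashMap::from([("linker".to_string(), "cc".to_string())]),
        })
        .unwrap();
        drop(tx);
        // Main-thread side: rendering happens only here, where the real
        // `Handler` (and its Fluent bundle) lives.
        for payload in rx {
            let mut rendered = payload.msg_parts.join("");
            for (name, value) in &payload.args {
                rendered = rendered.replace(&format!("{{{name}}}"), value.as_str());
            }
            println!("{rendered}");
        }
    }

The point is only that rendering happens on the receiving side; the real code uses the `Handler`'s translation rather than naive string substitution.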
diff --git a/compiler/rustc_codegen_ssa/src/base.rs b/compiler/rustc_codegen_ssa/src/base.rs
index 84b89cd71..4f396e970 100644
--- a/compiler/rustc_codegen_ssa/src/base.rs
+++ b/compiler/rustc_codegen_ssa/src/base.rs
@@ -22,7 +22,6 @@ use rustc_data_structures::sync::ParallelIterator;
use rustc_hir as hir;
use rustc_hir::def_id::{DefId, LOCAL_CRATE};
use rustc_hir::lang_items::LangItem;
-use rustc_hir::weak_lang_items::WEAK_ITEMS_SYMBOLS;
use rustc_index::vec::Idx;
use rustc_metadata::EncodedMetadata;
use rustc_middle::middle::codegen_fn_attrs::CodegenFnAttrs;
@@ -639,7 +638,14 @@ pub fn codegen_crate<B: ExtraBackendMethods>(
let llmod_id =
cgu_name_builder.build_cgu_name(LOCAL_CRATE, &["crate"], Some("allocator")).to_string();
let module_llvm = tcx.sess.time("write_allocator_module", || {
- backend.codegen_allocator(tcx, &llmod_id, kind, tcx.lang_items().oom().is_some())
+ backend.codegen_allocator(
+ tcx,
+ &llmod_id,
+ kind,
+ // If allocator_kind is Some then alloc_error_handler_kind must
+ // also be Some.
+ tcx.alloc_error_handler_kind(()).unwrap(),
+ )
});
Some(ModuleCodegen { name: llmod_id, module_llvm, kind: ModuleKind::Allocator })
@@ -827,20 +833,30 @@ impl CrateInfo {
//
// In order to get this left-to-right dependency ordering, we use the reverse
// postorder of all crates putting the leaves at the right-most positions.
- let used_crates = tcx
+ let mut compiler_builtins = None;
+ let mut used_crates: Vec<_> = tcx
.postorder_cnums(())
.iter()
.rev()
.copied()
- .filter(|&cnum| !tcx.dep_kind(cnum).macros_only())
+ .filter(|&cnum| {
+ let link = !tcx.dep_kind(cnum).macros_only();
+ if link && tcx.is_compiler_builtins(cnum) {
+ compiler_builtins = Some(cnum);
+ return false;
+ }
+ link
+ })
.collect();
+ // `compiler_builtins` are always placed last to ensure that they're linked correctly.
+ used_crates.extend(compiler_builtins);
let mut info = CrateInfo {
target_cpu,
exported_symbols,
linked_symbols,
local_crate_name,
- compiler_builtins: None,
+ compiler_builtins,
profiler_runtime: None,
is_no_builtins: Default::default(),
native_libraries: Default::default(),
@@ -866,9 +882,6 @@ impl CrateInfo {
let used_crate_source = tcx.used_crate_source(cnum);
info.used_crate_source.insert(cnum, used_crate_source.clone());
- if tcx.is_compiler_builtins(cnum) {
- info.compiler_builtins = Some(cnum);
- }
if tcx.is_profiler_runtime(cnum) {
info.profiler_runtime = Some(cnum);
}
@@ -887,14 +900,14 @@ impl CrateInfo {
// by the compiler, but that's ok because all this stuff is unstable anyway.
let target = &tcx.sess.target;
if !are_upstream_rust_objects_already_included(tcx.sess) {
- let missing_weak_lang_items: FxHashSet<&Symbol> = info
+ let missing_weak_lang_items: FxHashSet<Symbol> = info
.used_crates
.iter()
- .flat_map(|cnum| {
- tcx.missing_lang_items(*cnum)
- .iter()
- .filter(|l| lang_items::required(tcx, **l))
- .filter_map(|item| WEAK_ITEMS_SYMBOLS.get(item))
+ .flat_map(|&cnum| tcx.missing_lang_items(cnum))
+ .filter(|l| l.is_weak())
+ .filter_map(|&l| {
+ let name = l.link_name()?;
+ lang_items::required(tcx, l).then_some(name)
})
.collect();
let prefix = if target.is_like_windows && target.arch == "x86" { "_" } else { "" };
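
The `used_crates` change above captures `compiler_builtins` while filtering and re-appends it at the end, matching the comment that it must always be linked last. The same capture-then-append pattern on plain data, as a small sketch (with `u32` standing in for `CrateNum` and a closure for the `is_compiler_builtins` query):

    // Sketch only: keep the input order, but move the one "special" element
    // (if present) to the very end.
    fn order_crates(crates: Vec<u32>, is_compiler_builtins: impl Fn(u32) -> bool) -> Vec<u32> {
        let mut builtins = None;
        let mut ordered: Vec<u32> = crates
            .into_iter()
            .filter(|&c| {
                if is_compiler_builtins(c) {
                    builtins = Some(c);
                    return false;
                }
                true
            })
            .collect();
        // Re-append the filtered-out crate so that it is always last.
        ordered.extend(builtins);
        ordered
    }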
diff --git a/compiler/rustc_codegen_ssa/src/debuginfo/type_names.rs b/compiler/rustc_codegen_ssa/src/debuginfo/type_names.rs
index e05646e1e..b004fbf85 100644
--- a/compiler/rustc_codegen_ssa/src/debuginfo/type_names.rs
+++ b/compiler/rustc_codegen_ssa/src/debuginfo/type_names.rs
@@ -1,4 +1,4 @@
-// Type Names for Debug Info.
+//! Type Names for Debug Info.
// Notes on targeting MSVC:
// In general, MSVC's debugger attempts to parse all arguments as C++ expressions,
@@ -26,10 +26,10 @@ use std::fmt::Write;
use crate::debuginfo::wants_c_like_enum_debuginfo;
-// Compute the name of the type as it should be stored in debuginfo. Does not do
-// any caching, i.e., calling the function twice with the same type will also do
-// the work twice. The `qualified` parameter only affects the first level of the
-// type name, further levels (i.e., type parameters) are always fully qualified.
+/// Compute the name of the type as it should be stored in debuginfo. Does not do
+/// any caching, i.e., calling the function twice with the same type will also do
+/// the work twice. The `qualified` parameter only affects the first level of the
+/// type name, further levels (i.e., type parameters) are always fully qualified.
pub fn compute_debuginfo_type_name<'tcx>(
tcx: TyCtxt<'tcx>,
t: Ty<'tcx>,
@@ -59,7 +59,13 @@ fn push_debuginfo_type_name<'tcx>(
match *t.kind() {
ty::Bool => output.push_str("bool"),
ty::Char => output.push_str("char"),
- ty::Str => output.push_str("str"),
+ ty::Str => {
+ if cpp_like_debuginfo {
+ output.push_str("str$")
+ } else {
+ output.push_str("str")
+ }
+ }
ty::Never => {
if cpp_like_debuginfo {
output.push_str("never$");
@@ -152,25 +158,19 @@ fn push_debuginfo_type_name<'tcx>(
}
}
ty::Ref(_, inner_type, mutbl) => {
- // Slices and `&str` are treated like C++ pointers when computing debug
- // info for MSVC debugger. However, wrapping these types' names in a synthetic type
- // causes the .natvis engine for WinDbg to fail to display their data, so we opt these
- // types out to aid debugging in MSVC.
- let is_slice_or_str = matches!(*inner_type.kind(), ty::Slice(_) | ty::Str);
-
- if !cpp_like_debuginfo {
- output.push('&');
- output.push_str(mutbl.prefix_str());
- } else if !is_slice_or_str {
+ if cpp_like_debuginfo {
match mutbl {
Mutability::Not => output.push_str("ref$<"),
Mutability::Mut => output.push_str("ref_mut$<"),
}
+ } else {
+ output.push('&');
+ output.push_str(mutbl.prefix_str());
}
push_debuginfo_type_name(tcx, inner_type, qualified, output, visited);
- if cpp_like_debuginfo && !is_slice_or_str {
+ if cpp_like_debuginfo {
push_close_angle_bracket(cpp_like_debuginfo, output);
}
}
@@ -195,7 +195,7 @@ fn push_debuginfo_type_name<'tcx>(
}
ty::Slice(inner_type) => {
if cpp_like_debuginfo {
- output.push_str("slice$<");
+ output.push_str("slice2$<");
} else {
output.push('[');
}
diff --git a/compiler/rustc_codegen_ssa/src/errors.rs b/compiler/rustc_codegen_ssa/src/errors.rs
index ebb531f1c..e3b6fbf1b 100644
--- a/compiler/rustc_codegen_ssa/src/errors.rs
+++ b/compiler/rustc_codegen_ssa/src/errors.rs
@@ -354,3 +354,197 @@ impl IntoDiagnostic<'_> for LinkingFailed<'_> {
diag
}
}
+
+#[derive(Diagnostic)]
+#[diag(codegen_ssa_link_exe_unexpected_error)]
+pub struct LinkExeUnexpectedError;
+
+#[derive(Diagnostic)]
+#[diag(codegen_ssa_repair_vs_build_tools)]
+pub struct RepairVSBuildTools;
+
+#[derive(Diagnostic)]
+#[diag(codegen_ssa_missing_cpp_build_tool_component)]
+pub struct MissingCppBuildToolComponent;
+
+#[derive(Diagnostic)]
+#[diag(codegen_ssa_select_cpp_build_tool_workload)]
+pub struct SelectCppBuildToolWorkload;
+
+#[derive(Diagnostic)]
+#[diag(codegen_ssa_visual_studio_not_installed)]
+pub struct VisualStudioNotInstalled;
+
+#[derive(Diagnostic)]
+#[diag(codegen_ssa_linker_not_found)]
+#[note]
+pub struct LinkerNotFound {
+ pub linker_path: PathBuf,
+ pub error: Error,
+}
+
+#[derive(Diagnostic)]
+#[diag(codegen_ssa_unable_to_exe_linker)]
+#[note]
+#[note(command_note)]
+pub struct UnableToExeLinker {
+ pub linker_path: PathBuf,
+ pub error: Error,
+ pub command_formatted: String,
+}
+
+#[derive(Diagnostic)]
+#[diag(codegen_ssa_msvc_missing_linker)]
+pub struct MsvcMissingLinker;
+
+#[derive(Diagnostic)]
+#[diag(codegen_ssa_check_installed_visual_studio)]
+pub struct CheckInstalledVisualStudio;
+
+#[derive(Diagnostic)]
+#[diag(codegen_ssa_unsufficient_vs_code_product)]
+pub struct UnsufficientVSCodeProduct;
+
+#[derive(Diagnostic)]
+#[diag(codegen_ssa_processing_dymutil_failed)]
+#[note]
+pub struct ProcessingDymutilFailed {
+ pub status: ExitStatus,
+ pub output: String,
+}
+
+#[derive(Diagnostic)]
+#[diag(codegen_ssa_unable_to_run_dsymutil)]
+#[note]
+pub struct UnableToRunDsymutil {
+ pub error: Error,
+}
+
+#[derive(Diagnostic)]
+#[diag(codegen_ssa_stripping_debu_info_failed)]
+#[note]
+pub struct StrippingDebugInfoFailed<'a> {
+ pub util: &'a str,
+ pub status: ExitStatus,
+ pub output: String,
+}
+
+#[derive(Diagnostic)]
+#[diag(codegen_ssa_unable_to_run)]
+pub struct UnableToRun<'a> {
+ pub util: &'a str,
+ pub error: Error,
+}
+
+#[derive(Diagnostic)]
+#[diag(codegen_ssa_linker_file_stem)]
+pub struct LinkerFileStem;
+
+#[derive(Diagnostic)]
+#[diag(codegen_ssa_static_library_native_artifacts)]
+pub struct StaticLibraryNativeArtifacts;
+
+#[derive(Diagnostic)]
+#[diag(codegen_ssa_native_static_libs)]
+pub struct NativeStaticLibs {
+ pub arguments: String,
+}
+
+#[derive(Diagnostic)]
+#[diag(codegen_ssa_link_script_unavailable)]
+pub struct LinkScriptUnavailable;
+
+#[derive(Diagnostic)]
+#[diag(codegen_ssa_link_script_write_failure)]
+pub struct LinkScriptWriteFailure {
+ pub path: PathBuf,
+ pub error: Error,
+}
+
+#[derive(Diagnostic)]
+#[diag(codegen_ssa_failed_to_write)]
+pub struct FailedToWrite {
+ pub path: PathBuf,
+ pub error: Error,
+}
+
+#[derive(Diagnostic)]
+#[diag(codegen_ssa_unable_to_write_debugger_visualizer)]
+pub struct UnableToWriteDebuggerVisualizer {
+ pub path: PathBuf,
+ pub error: Error,
+}
+
+#[derive(Diagnostic)]
+#[diag(codegen_ssa_rlib_archive_build_failure)]
+pub struct RlibArchiveBuildFailure {
+ pub error: Error,
+}
+
+#[derive(Diagnostic)]
+#[diag(codegen_ssa_option_gcc_only)]
+pub struct OptionGccOnly;
+
+#[derive(Diagnostic)]
+pub enum ExtractBundledLibsError<'a> {
+ #[diag(codegen_ssa_extract_bundled_libs_open_file)]
+ OpenFile { rlib: &'a Path, error: Box<dyn std::error::Error> },
+
+ #[diag(codegen_ssa_extract_bundled_libs_mmap_file)]
+ MmapFile { rlib: &'a Path, error: Box<dyn std::error::Error> },
+
+ #[diag(codegen_ssa_extract_bundled_libs_parse_archive)]
+ ParseArchive { rlib: &'a Path, error: Box<dyn std::error::Error> },
+
+ #[diag(codegen_ssa_extract_bundled_libs_read_entry)]
+ ReadEntry { rlib: &'a Path, error: Box<dyn std::error::Error> },
+
+ #[diag(codegen_ssa_extract_bundled_libs_archive_member)]
+ ArchiveMember { rlib: &'a Path, error: Box<dyn std::error::Error> },
+
+ #[diag(codegen_ssa_extract_bundled_libs_convert_name)]
+ ConvertName { rlib: &'a Path, error: Box<dyn std::error::Error> },
+
+ #[diag(codegen_ssa_extract_bundled_libs_write_file)]
+ WriteFile { rlib: &'a Path, error: Box<dyn std::error::Error> },
+
+ #[diag(codegen_ssa_extract_bundled_libs_write_file)]
+ ExtractSection { rlib: &'a Path, error: Box<dyn std::error::Error> },
+}
+
+#[derive(Diagnostic)]
+#[diag(codegen_ssa_unsupported_arch)]
+pub struct UnsupportedArch<'a> {
+ pub arch: &'a str,
+ pub os: &'a str,
+}
+
+#[derive(Diagnostic)]
+pub enum AppleSdkRootError<'a> {
+ #[diag(codegen_ssa_apple_sdk_error_sdk_path)]
+ SdkPath { sdk_name: &'a str, error: Error },
+}
+
+#[derive(Diagnostic)]
+#[diag(codegen_ssa_read_file)]
+pub struct ReadFileError {
+ pub message: std::io::Error,
+}
+
+#[derive(Diagnostic)]
+#[diag(codegen_ssa_unsupported_link_self_contained)]
+pub struct UnsupportedLinkSelfContained;
+
+#[derive(Diagnostic)]
+#[diag(codegen_ssa_archive_build_failure)]
+// Public for rustc_codegen_llvm::back::archive
+pub struct ArchiveBuildFailure {
+ pub error: std::io::Error,
+}
+
+#[derive(Diagnostic)]
+#[diag(codegen_ssa_unknown_archive_kind)]
+// Public for rustc_codegen_llvm::back::archive
+pub struct UnknownArchiveKind<'a> {
+ pub kind: &'a str,
+}
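
These are ordinary derived diagnostics: a call site fills in the fields and passes the struct to the session, with the message text living in the codegen_ssa Fluent bundle. A hypothetical emission site, assuming it sits somewhere with the session and this `errors` module in scope:

    // Hypothetical helper, for illustration only.
    fn report_archive_failure(sess: &rustc_session::Session, error: std::io::Error) {
        sess.emit_err(crate::errors::ArchiveBuildFailure { error });
    }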
diff --git a/compiler/rustc_codegen_ssa/src/glue.rs b/compiler/rustc_codegen_ssa/src/glue.rs
index e6f402ef1..6015d48de 100644
--- a/compiler/rustc_codegen_ssa/src/glue.rs
+++ b/compiler/rustc_codegen_ssa/src/glue.rs
@@ -15,7 +15,7 @@ pub fn size_and_align_of_dst<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>>(
) -> (Bx::Value, Bx::Value) {
let layout = bx.layout_of(t);
debug!("size_and_align_of_dst(ty={}, info={:?}): layout: {:?}", t, info, layout);
- if !layout.is_unsized() {
+ if layout.is_sized() {
let size = bx.const_usize(layout.size.bytes());
let align = bx.const_usize(layout.align.abi.bytes());
return (size, align);
diff --git a/compiler/rustc_codegen_ssa/src/lib.rs b/compiler/rustc_codegen_ssa/src/lib.rs
index ceebe4d41..def6390f6 100644
--- a/compiler/rustc_codegen_ssa/src/lib.rs
+++ b/compiler/rustc_codegen_ssa/src/lib.rs
@@ -1,12 +1,13 @@
#![doc(html_root_url = "https://doc.rust-lang.org/nightly/nightly-rustc/")]
-#![feature(box_patterns)]
-#![feature(try_blocks)]
-#![feature(once_cell)]
#![feature(associated_type_bounds)]
-#![feature(strict_provenance)]
-#![feature(int_roundings)]
+#![feature(box_patterns)]
#![feature(if_let_guard)]
+#![feature(int_roundings)]
+#![feature(let_chains)]
#![feature(never_type)]
+#![feature(once_cell)]
+#![feature(strict_provenance)]
+#![feature(try_blocks)]
#![recursion_limit = "256"]
#![allow(rustc::potential_query_instability)]
@@ -115,7 +116,7 @@ pub struct NativeLib {
pub name: Option<Symbol>,
pub filename: Option<Symbol>,
pub cfg: Option<ast::MetaItem>,
- pub verbatim: Option<bool>,
+ pub verbatim: bool,
pub dll_imports: Vec<cstore::DllImport>,
}
@@ -126,7 +127,7 @@ impl From<&cstore::NativeLib> for NativeLib {
filename: lib.filename,
name: lib.name,
cfg: lib.cfg.clone(),
- verbatim: lib.verbatim,
+ verbatim: lib.verbatim.unwrap_or(false),
dll_imports: lib.dll_imports.clone(),
}
}
diff --git a/compiler/rustc_codegen_ssa/src/mir/block.rs b/compiler/rustc_codegen_ssa/src/mir/block.rs
index 29b7c9b0a..03d833fbb 100644
--- a/compiler/rustc_codegen_ssa/src/mir/block.rs
+++ b/compiler/rustc_codegen_ssa/src/mir/block.rs
@@ -1,7 +1,7 @@
use super::operand::OperandRef;
use super::operand::OperandValue::{Immediate, Pair, Ref};
use super::place::PlaceRef;
-use super::{FunctionCx, LocalRef};
+use super::{CachedLlbb, FunctionCx, LocalRef};
use crate::base;
use crate::common::{self, IntPredicate};
@@ -17,6 +17,7 @@ use rustc_middle::mir::{self, AssertKind, SwitchTargets};
use rustc_middle::ty::layout::{HasTyCtxt, LayoutOf};
use rustc_middle::ty::print::{with_no_trimmed_paths, with_no_visible_paths};
use rustc_middle::ty::{self, Instance, Ty, TypeVisitable};
+use rustc_session::config::OptLevel;
use rustc_span::source_map::Span;
use rustc_span::{sym, Symbol};
use rustc_symbol_mangling::typeid::typeid_for_fnabi;
@@ -24,6 +25,15 @@ use rustc_target::abi::call::{ArgAbi, FnAbi, PassMode, Reg};
use rustc_target::abi::{self, HasDataLayout, WrappingRange};
use rustc_target::spec::abi::Abi;
+// Indicates if we are in the middle of merging a BB's successor into it. This
+// can happen when BB jumps directly to its successor and the successor has no
+// other predecessors.
+#[derive(Debug, PartialEq)]
+enum MergingSucc {
+ False,
+ True,
+}
+
/// Used by `FunctionCx::codegen_terminator` for emitting common patterns
/// e.g., creating a basic block, calling a function, etc.
struct TerminatorCodegenHelper<'tcx> {
@@ -63,31 +73,6 @@ impl<'a, 'tcx> TerminatorCodegenHelper<'tcx> {
}
}
- /// Get a basic block (creating it if necessary), possibly with a landing
- /// pad next to it.
- fn llbb_with_landing_pad<Bx: BuilderMethods<'a, 'tcx>>(
- &self,
- fx: &mut FunctionCx<'a, 'tcx, Bx>,
- target: mir::BasicBlock,
- ) -> (Bx::BasicBlock, bool) {
- let span = self.terminator.source_info.span;
- let lltarget = fx.llbb(target);
- let target_funclet = fx.cleanup_kinds[target].funclet_bb(target);
- match (self.funclet_bb, target_funclet) {
- (None, None) => (lltarget, false),
- // jump *into* cleanup - need a landing pad if GNU, cleanup pad if MSVC
- (None, Some(_)) => (fx.landing_pad_for(target), false),
- (Some(_), None) => span_bug!(span, "{:?} - jump out of cleanup?", self.terminator),
- (Some(f), Some(t_f)) => {
- if f == t_f || !base::wants_msvc_seh(fx.cx.tcx().sess) {
- (lltarget, false)
- } else {
- (fx.landing_pad_for(target), true)
- }
- }
- }
- }
-
/// Get a basic block (creating it if necessary), possibly with cleanup
/// stuff in it or next to it.
fn llbb_with_cleanup<Bx: BuilderMethods<'a, 'tcx>>(
@@ -95,7 +80,11 @@ impl<'a, 'tcx> TerminatorCodegenHelper<'tcx> {
fx: &mut FunctionCx<'a, 'tcx, Bx>,
target: mir::BasicBlock,
) -> Bx::BasicBlock {
- let (lltarget, is_cleanupret) = self.llbb_with_landing_pad(fx, target);
+ let (needs_landing_pad, is_cleanupret) = self.llbb_characteristics(fx, target);
+ let mut lltarget = fx.llbb(target);
+ if needs_landing_pad {
+ lltarget = fx.landing_pad_for(target);
+ }
if is_cleanupret {
// MSVC cross-funclet jump - need a trampoline
debug_assert!(base::wants_msvc_seh(fx.cx.tcx().sess));
@@ -110,20 +99,54 @@ impl<'a, 'tcx> TerminatorCodegenHelper<'tcx> {
}
}
+ fn llbb_characteristics<Bx: BuilderMethods<'a, 'tcx>>(
+ &self,
+ fx: &mut FunctionCx<'a, 'tcx, Bx>,
+ target: mir::BasicBlock,
+ ) -> (bool, bool) {
+ let target_funclet = fx.cleanup_kinds[target].funclet_bb(target);
+ let (needs_landing_pad, is_cleanupret) = match (self.funclet_bb, target_funclet) {
+ (None, None) => (false, false),
+ (None, Some(_)) => (true, false),
+ (Some(_), None) => {
+ let span = self.terminator.source_info.span;
+ span_bug!(span, "{:?} - jump out of cleanup?", self.terminator);
+ }
+ (Some(f), Some(t_f)) => {
+ if f == t_f || !base::wants_msvc_seh(fx.cx.tcx().sess) {
+ (false, false)
+ } else {
+ (true, true)
+ }
+ }
+ };
+ (needs_landing_pad, is_cleanupret)
+ }
+
fn funclet_br<Bx: BuilderMethods<'a, 'tcx>>(
&self,
fx: &mut FunctionCx<'a, 'tcx, Bx>,
bx: &mut Bx,
target: mir::BasicBlock,
- ) {
- let (lltarget, is_cleanupret) = self.llbb_with_landing_pad(fx, target);
- if is_cleanupret {
- // MSVC micro-optimization: generate a `ret` rather than a jump
- // to a trampoline.
- debug_assert!(base::wants_msvc_seh(fx.cx.tcx().sess));
- bx.cleanup_ret(self.funclet(fx).unwrap(), Some(lltarget));
+ mergeable_succ: bool,
+ ) -> MergingSucc {
+ let (needs_landing_pad, is_cleanupret) = self.llbb_characteristics(fx, target);
+ if mergeable_succ && !needs_landing_pad && !is_cleanupret {
+ // We can merge the successor into this bb, so no need for a `br`.
+ MergingSucc::True
} else {
- bx.br(lltarget);
+ let mut lltarget = fx.llbb(target);
+ if needs_landing_pad {
+ lltarget = fx.landing_pad_for(target);
+ }
+ if is_cleanupret {
+ // micro-optimization: generate a `ret` rather than a jump
+ // to a trampoline.
+ bx.cleanup_ret(self.funclet(fx).unwrap(), Some(lltarget));
+ } else {
+ bx.br(lltarget);
+ }
+ MergingSucc::False
}
}
@@ -139,7 +162,8 @@ impl<'a, 'tcx> TerminatorCodegenHelper<'tcx> {
destination: Option<(ReturnDest<'tcx, Bx::Value>, mir::BasicBlock)>,
cleanup: Option<mir::BasicBlock>,
copied_constant_arguments: &[PlaceRef<'tcx, <Bx as BackendTypes>::Value>],
- ) {
+ mergeable_succ: bool,
+ ) -> MergingSucc {
// If there is a cleanup block and the function we're calling can unwind, then
// do an invoke, otherwise do a call.
let fn_ty = bx.fn_decl_backend_type(&fn_abi);
@@ -190,6 +214,7 @@ impl<'a, 'tcx> TerminatorCodegenHelper<'tcx> {
}
fx.store_return(bx, ret_dest, &fn_abi.ret, invokeret);
}
+ MergingSucc::False
} else {
let llret = bx.call(fn_ty, Some(&fn_abi), fn_ptr, &llargs, self.funclet(fx));
if fx.mir[self.bb].is_cleanup {
@@ -205,9 +230,10 @@ impl<'a, 'tcx> TerminatorCodegenHelper<'tcx> {
bx.lifetime_end(tmp.llval, tmp.layout.size);
}
fx.store_return(bx, ret_dest, &fn_abi.ret, llret);
- self.funclet_br(fx, bx, target);
+ self.funclet_br(fx, bx, target, mergeable_succ)
} else {
bx.unreachable();
+ MergingSucc::False
}
}
}
@@ -224,7 +250,8 @@ impl<'a, 'tcx> TerminatorCodegenHelper<'tcx> {
destination: Option<mir::BasicBlock>,
cleanup: Option<mir::BasicBlock>,
instance: Instance<'_>,
- ) {
+ mergeable_succ: bool,
+ ) -> MergingSucc {
if let Some(cleanup) = cleanup {
let ret_llbb = if let Some(target) = destination {
fx.llbb(target)
@@ -240,13 +267,15 @@ impl<'a, 'tcx> TerminatorCodegenHelper<'tcx> {
instance,
Some((ret_llbb, self.llbb_with_cleanup(fx, cleanup), self.funclet(fx))),
);
+ MergingSucc::False
} else {
bx.codegen_inline_asm(template, &operands, options, line_spans, instance, None);
if let Some(target) = destination {
- self.funclet_br(fx, bx, target);
+ self.funclet_br(fx, bx, target, mergeable_succ)
} else {
bx.unreachable();
+ MergingSucc::False
}
}
}
@@ -255,16 +284,16 @@ impl<'a, 'tcx> TerminatorCodegenHelper<'tcx> {
/// Codegen implementations for some terminator variants.
impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> {
/// Generates code for a `Resume` terminator.
- fn codegen_resume_terminator(&mut self, helper: TerminatorCodegenHelper<'tcx>, mut bx: Bx) {
+ fn codegen_resume_terminator(&mut self, helper: TerminatorCodegenHelper<'tcx>, bx: &mut Bx) {
if let Some(funclet) = helper.funclet(self) {
bx.cleanup_ret(funclet, None);
} else {
- let slot = self.get_personality_slot(&mut bx);
- let lp0 = slot.project_field(&mut bx, 0);
+ let slot = self.get_personality_slot(bx);
+ let lp0 = slot.project_field(bx, 0);
let lp0 = bx.load_operand(lp0).immediate();
- let lp1 = slot.project_field(&mut bx, 1);
+ let lp1 = slot.project_field(bx, 1);
let lp1 = bx.load_operand(lp1).immediate();
- slot.storage_dead(&mut bx);
+ slot.storage_dead(bx);
let mut lp = bx.const_undef(self.landing_pad_type());
lp = bx.insert_value(lp, lp0, 0);
@@ -276,22 +305,23 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> {
fn codegen_switchint_terminator(
&mut self,
helper: TerminatorCodegenHelper<'tcx>,
- mut bx: Bx,
+ bx: &mut Bx,
discr: &mir::Operand<'tcx>,
switch_ty: Ty<'tcx>,
targets: &SwitchTargets,
) {
- let discr = self.codegen_operand(&mut bx, &discr);
+ let discr = self.codegen_operand(bx, &discr);
// `switch_ty` is redundant, sanity-check that.
assert_eq!(discr.layout.ty, switch_ty);
let mut target_iter = targets.iter();
if target_iter.len() == 1 {
- // If there are two targets (one conditional, one fallback), emit br instead of switch
+ // If there are two targets (one conditional, one fallback), emit `br` instead of
+ // `switch`.
let (test_value, target) = target_iter.next().unwrap();
let lltrue = helper.llbb_with_cleanup(self, target);
let llfalse = helper.llbb_with_cleanup(self, targets.otherwise());
if switch_ty == bx.tcx().types.bool {
- // Don't generate trivial icmps when switching on bool
+ // Don't generate trivial icmps when switching on bool.
match test_value {
0 => bx.cond_br(discr.immediate(), llfalse, lltrue),
1 => bx.cond_br(discr.immediate(), lltrue, llfalse),
@@ -303,6 +333,30 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> {
let cmp = bx.icmp(IntPredicate::IntEQ, discr.immediate(), llval);
bx.cond_br(cmp, lltrue, llfalse);
}
+ } else if self.cx.sess().opts.optimize == OptLevel::No
+ && target_iter.len() == 2
+ && self.mir[targets.otherwise()].is_empty_unreachable()
+ {
+ // In unoptimized builds, if there are two normal targets and the `otherwise` target is
+ // an unreachable BB, emit `br` instead of `switch`. This leaves behind the unreachable
+ // BB, which will usually (but not always) be dead code.
+ //
+ // Why only in unoptimized builds?
+ // - In unoptimized builds LLVM uses FastISel which does not support switches, so it
+ // must fall back to the slower SelectionDAG isel. Therefore, using `br` gives
+ // significant compile time speedups for unoptimized builds.
+ // - In optimized builds the above doesn't hold, and using `br` sometimes results in
+ // worse generated code because LLVM can no longer tell that the value being switched
+ // on can only have two values, e.g. 0 and 1.
+ //
+ let (test_value1, target1) = target_iter.next().unwrap();
+ let (_test_value2, target2) = target_iter.next().unwrap();
+ let ll1 = helper.llbb_with_cleanup(self, target1);
+ let ll2 = helper.llbb_with_cleanup(self, target2);
+ let switch_llty = bx.immediate_backend_type(bx.layout_of(switch_ty));
+ let llval = bx.const_uint_big(switch_llty, test_value1);
+ let cmp = bx.icmp(IntPredicate::IntEQ, discr.immediate(), llval);
+ bx.cond_br(cmp, ll1, ll2);
} else {
bx.switch(
discr.immediate(),
@@ -312,7 +366,7 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> {
}
}
- fn codegen_return_terminator(&mut self, mut bx: Bx) {
+ fn codegen_return_terminator(&mut self, bx: &mut Bx) {
// Call `va_end` if this is the definition of a C-variadic function.
if self.fn_abi.c_variadic {
// The `VaList` "spoofed" argument is just after all the real arguments.
@@ -342,11 +396,11 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> {
}
PassMode::Direct(_) | PassMode::Pair(..) => {
- let op = self.codegen_consume(&mut bx, mir::Place::return_place().as_ref());
+ let op = self.codegen_consume(bx, mir::Place::return_place().as_ref());
if let Ref(llval, _, align) = op.val {
bx.load(bx.backend_type(op.layout), llval, align)
} else {
- op.immediate_or_packed_pair(&mut bx)
+ op.immediate_or_packed_pair(bx)
}
}
@@ -362,8 +416,8 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> {
};
let llslot = match op.val {
Immediate(_) | Pair(..) => {
- let scratch = PlaceRef::alloca(&mut bx, self.fn_abi.ret.layout);
- op.val.store(&mut bx, scratch);
+ let scratch = PlaceRef::alloca(bx, self.fn_abi.ret.layout);
+ op.val.store(bx, scratch);
scratch.llval
}
Ref(llval, _, align) => {
@@ -383,22 +437,22 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> {
fn codegen_drop_terminator(
&mut self,
helper: TerminatorCodegenHelper<'tcx>,
- mut bx: Bx,
+ bx: &mut Bx,
location: mir::Place<'tcx>,
target: mir::BasicBlock,
unwind: Option<mir::BasicBlock>,
- ) {
+ mergeable_succ: bool,
+ ) -> MergingSucc {
let ty = location.ty(self.mir, bx.tcx()).ty;
let ty = self.monomorphize(ty);
let drop_fn = Instance::resolve_drop_in_place(bx.tcx(), ty);
if let ty::InstanceDef::DropGlue(_, None) = drop_fn.def {
// we don't actually need to drop anything.
- helper.funclet_br(self, &mut bx, target);
- return;
+ return helper.funclet_br(self, bx, target, mergeable_succ);
}
- let place = self.codegen_place(&mut bx, location.as_ref());
+ let place = self.codegen_place(bx, location.as_ref());
let (args1, args2);
let mut args = if let Some(llextra) = place.llextra {
args2 = [place.llval, llextra];
@@ -436,7 +490,7 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> {
args = &args[..1];
(
meth::VirtualIndex::from_index(ty::COMMON_VTABLE_ENTRIES_DROPINPLACE)
- .get_fn(&mut bx, vtable, ty, &fn_abi),
+ .get_fn(bx, vtable, ty, &fn_abi),
fn_abi,
)
}
@@ -481,7 +535,7 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> {
debug!("args' = {:?}", args);
(
meth::VirtualIndex::from_index(ty::COMMON_VTABLE_ENTRIES_DROPINPLACE)
- .get_fn(&mut bx, vtable, ty, &fn_abi),
+ .get_fn(bx, vtable, ty, &fn_abi),
fn_abi,
)
}
@@ -489,29 +543,31 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> {
};
helper.do_call(
self,
- &mut bx,
+ bx,
fn_abi,
drop_fn,
args,
Some((ReturnDest::Nothing, target)),
unwind,
&[],
- );
+ mergeable_succ,
+ )
}
fn codegen_assert_terminator(
&mut self,
helper: TerminatorCodegenHelper<'tcx>,
- mut bx: Bx,
+ bx: &mut Bx,
terminator: &mir::Terminator<'tcx>,
cond: &mir::Operand<'tcx>,
expected: bool,
msg: &mir::AssertMessage<'tcx>,
target: mir::BasicBlock,
cleanup: Option<mir::BasicBlock>,
- ) {
+ mergeable_succ: bool,
+ ) -> MergingSucc {
let span = terminator.source_info.span;
- let cond = self.codegen_operand(&mut bx, cond).immediate();
+ let cond = self.codegen_operand(bx, cond).immediate();
let mut const_cond = bx.const_to_opt_u128(cond, false).map(|c| c == 1);
// This case can currently arise only from functions marked
@@ -529,8 +585,7 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> {
// Don't codegen the panic block if success is known.
if const_cond == Some(expected) {
- helper.funclet_br(self, &mut bx, target);
- return;
+ return helper.funclet_br(self, bx, target, mergeable_succ);
}
// Pass the condition through llvm.expect for branch hinting.
@@ -547,16 +602,16 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> {
// After this point, bx is the block for the call to panic.
bx.switch_to_block(panic_block);
- self.set_debug_loc(&mut bx, terminator.source_info);
+ self.set_debug_loc(bx, terminator.source_info);
// Get the location information.
- let location = self.get_caller_location(&mut bx, terminator.source_info).immediate();
+ let location = self.get_caller_location(bx, terminator.source_info).immediate();
// Put together the arguments to the panic entry point.
let (lang_item, args) = match msg {
AssertKind::BoundsCheck { ref len, ref index } => {
- let len = self.codegen_operand(&mut bx, len).immediate();
- let index = self.codegen_operand(&mut bx, index).immediate();
+ let len = self.codegen_operand(bx, len).immediate();
+ let index = self.codegen_operand(bx, index).immediate();
// It's `fn panic_bounds_check(index: usize, len: usize)`,
// and `#[track_caller]` adds an implicit third argument.
(LangItem::PanicBoundsCheck, vec![index, len, location])
@@ -569,29 +624,32 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> {
}
};
- let (fn_abi, llfn) = common::build_langcall(&bx, Some(span), lang_item);
+ let (fn_abi, llfn) = common::build_langcall(bx, Some(span), lang_item);
// Codegen the actual panic invoke/call.
- helper.do_call(self, &mut bx, fn_abi, llfn, &args, None, cleanup, &[]);
+ let merging_succ = helper.do_call(self, bx, fn_abi, llfn, &args, None, cleanup, &[], false);
+ assert_eq!(merging_succ, MergingSucc::False);
+ MergingSucc::False
}
fn codegen_abort_terminator(
&mut self,
helper: TerminatorCodegenHelper<'tcx>,
- mut bx: Bx,
+ bx: &mut Bx,
terminator: &mir::Terminator<'tcx>,
) {
let span = terminator.source_info.span;
- self.set_debug_loc(&mut bx, terminator.source_info);
+ self.set_debug_loc(bx, terminator.source_info);
// Obtain the panic entry point.
- let (fn_abi, llfn) = common::build_langcall(&bx, Some(span), LangItem::PanicNoUnwind);
+ let (fn_abi, llfn) = common::build_langcall(bx, Some(span), LangItem::PanicNoUnwind);
// Codegen the actual panic invoke/call.
- helper.do_call(self, &mut bx, fn_abi, llfn, &[], None, None, &[]);
+ let merging_succ = helper.do_call(self, bx, fn_abi, llfn, &[], None, None, &[], false);
+ assert_eq!(merging_succ, MergingSucc::False);
}
- /// Returns `true` if this is indeed a panic intrinsic and codegen is done.
+ /// Returns `Some` if this is indeed a panic intrinsic and codegen is done.
fn codegen_panic_intrinsic(
&mut self,
helper: &TerminatorCodegenHelper<'tcx>,
@@ -601,7 +659,8 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> {
source_info: mir::SourceInfo,
target: Option<mir::BasicBlock>,
cleanup: Option<mir::BasicBlock>,
- ) -> bool {
+ mergeable_succ: bool,
+ ) -> Option<MergingSucc> {
// Emit a panic or a no-op for `assert_*` intrinsics.
// These are intrinsics that compile to panics so that we can get a message
// which mentions the offending type, even from a const context.
@@ -627,7 +686,7 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> {
ZeroValid => !bx.tcx().permits_zero_init(layout),
UninitValid => !bx.tcx().permits_uninit_init(layout),
};
- if do_panic {
+ Some(if do_panic {
let msg_str = with_no_visible_paths!({
with_no_trimmed_paths!({
if layout.abi.is_uninhabited() {
@@ -660,22 +719,22 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> {
target.as_ref().map(|bb| (ReturnDest::Nothing, *bb)),
cleanup,
&[],
- );
+ mergeable_succ,
+ )
} else {
// a NOP
let target = target.unwrap();
- helper.funclet_br(self, bx, target)
- }
- true
+ helper.funclet_br(self, bx, target, mergeable_succ)
+ })
} else {
- false
+ None
}
}
fn codegen_call_terminator(
&mut self,
helper: TerminatorCodegenHelper<'tcx>,
- mut bx: Bx,
+ bx: &mut Bx,
terminator: &mir::Terminator<'tcx>,
func: &mir::Operand<'tcx>,
args: &[mir::Operand<'tcx>],
@@ -683,12 +742,13 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> {
target: Option<mir::BasicBlock>,
cleanup: Option<mir::BasicBlock>,
fn_span: Span,
- ) {
+ mergeable_succ: bool,
+ ) -> MergingSucc {
let source_info = terminator.source_info;
let span = source_info.span;
// Create the callee. This is a fn ptr or zero-sized and hence a kind of scalar.
- let callee = self.codegen_operand(&mut bx, func);
+ let callee = self.codegen_operand(bx, func);
let (instance, mut llfn) = match *callee.layout.ty.kind() {
ty::FnDef(def_id, substs) => (
@@ -708,8 +768,7 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> {
if let Some(ty::InstanceDef::DropGlue(_, None)) = def {
// Empty drop glue; a no-op.
let target = target.unwrap();
- helper.funclet_br(self, &mut bx, target);
- return;
+ return helper.funclet_br(self, bx, target, mergeable_succ);
}
// FIXME(eddyb) avoid computing this if possible, when `instance` is
@@ -736,9 +795,9 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> {
};
if intrinsic == Some(sym::transmute) {
- if let Some(target) = target {
- self.codegen_transmute(&mut bx, &args[0], destination);
- helper.funclet_br(self, &mut bx, target);
+ return if let Some(target) = target {
+ self.codegen_transmute(bx, &args[0], destination);
+ helper.funclet_br(self, bx, target, mergeable_succ)
} else {
// If we are trying to transmute to an uninhabited type,
// it is likely there is no allotted destination. In fact,
@@ -748,20 +807,21 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> {
// it must be unreachable.
assert_eq!(fn_abi.ret.layout.abi, abi::Abi::Uninhabited);
bx.unreachable();
- }
- return;
+ MergingSucc::False
+ };
}
- if self.codegen_panic_intrinsic(
+ if let Some(merging_succ) = self.codegen_panic_intrinsic(
&helper,
- &mut bx,
+ bx,
intrinsic,
instance,
source_info,
target,
cleanup,
+ mergeable_succ,
) {
- return;
+ return merging_succ;
}
// The arguments we'll be passing. Plus one to account for outptr, if used.
@@ -771,23 +831,24 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> {
// Prepare the return value destination
let ret_dest = if target.is_some() {
let is_intrinsic = intrinsic.is_some();
- self.make_return_dest(&mut bx, destination, &fn_abi.ret, &mut llargs, is_intrinsic)
+ self.make_return_dest(bx, destination, &fn_abi.ret, &mut llargs, is_intrinsic)
} else {
ReturnDest::Nothing
};
if intrinsic == Some(sym::caller_location) {
- if let Some(target) = target {
- let location = self
- .get_caller_location(&mut bx, mir::SourceInfo { span: fn_span, ..source_info });
+ return if let Some(target) = target {
+ let location =
+ self.get_caller_location(bx, mir::SourceInfo { span: fn_span, ..source_info });
if let ReturnDest::IndirectOperand(tmp, _) = ret_dest {
- location.val.store(&mut bx, tmp);
+ location.val.store(bx, tmp);
}
- self.store_return(&mut bx, ret_dest, &fn_abi.ret, location.immediate());
- helper.funclet_br(self, &mut bx, target);
- }
- return;
+ self.store_return(bx, ret_dest, &fn_abi.ret, location.immediate());
+ helper.funclet_br(self, bx, target, mergeable_succ)
+ } else {
+ MergingSucc::False
+ };
}
match intrinsic {
@@ -831,12 +892,12 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> {
}
}
- self.codegen_operand(&mut bx, arg)
+ self.codegen_operand(bx, arg)
})
.collect();
Self::codegen_intrinsic_call(
- &mut bx,
+ bx,
*instance.as_ref().unwrap(),
&fn_abi,
&args,
@@ -845,16 +906,15 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> {
);
if let ReturnDest::IndirectOperand(dst, _) = ret_dest {
- self.store_return(&mut bx, ret_dest, &fn_abi.ret, dst.llval);
+ self.store_return(bx, ret_dest, &fn_abi.ret, dst.llval);
}
- if let Some(target) = target {
- helper.funclet_br(self, &mut bx, target);
+ return if let Some(target) = target {
+ helper.funclet_br(self, bx, target, mergeable_succ)
} else {
bx.unreachable();
- }
-
- return;
+ MergingSucc::False
+ };
}
}
@@ -868,7 +928,7 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> {
let mut copied_constant_arguments = vec![];
'make_args: for (i, arg) in first_args.iter().enumerate() {
- let mut op = self.codegen_operand(&mut bx, arg);
+ let mut op = self.codegen_operand(bx, arg);
if let (0, Some(ty::InstanceDef::Virtual(_, idx))) = (i, def) {
match op.val {
@@ -878,12 +938,14 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> {
// that is understood elsewhere in the compiler as a method on
// `dyn Trait`.
// To get a `*mut RcBox<Self>`, we just keep unwrapping newtypes until
- // we get a value of a built-in pointer type
+ // we get a value of a built-in pointer type.
+ //
+ // This is also relevant for `Pin<&mut Self>`, where we need to peel the `Pin`.
'descend_newtypes: while !op.layout.ty.is_unsafe_ptr()
&& !op.layout.ty.is_region_ptr()
{
for i in 0..op.layout.fields.count() {
- let field = op.extract_field(&mut bx, i);
+ let field = op.extract_field(bx, i);
if !field.layout.is_zst() {
// we found the one non-zero-sized field that is allowed
// now find *its* non-zero-sized field, or stop if it's a
@@ -900,7 +962,7 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> {
// data pointer and vtable. Look up the method in the vtable, and pass
// the data pointer as the first argument
llfn = Some(meth::VirtualIndex::from_index(idx).get_fn(
- &mut bx,
+ bx,
meta,
op.layout.ty,
&fn_abi,
@@ -911,7 +973,7 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> {
Ref(data_ptr, Some(meta), _) => {
// by-value dynamic dispatch
llfn = Some(meth::VirtualIndex::from_index(idx).get_fn(
- &mut bx,
+ bx,
meta,
op.layout.ty,
&fn_abi,
@@ -920,19 +982,35 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> {
continue;
}
Immediate(_) => {
- let ty::Ref(_, ty, _) = op.layout.ty.kind() else {
- span_bug!(span, "can't codegen a virtual call on {:#?}", op);
- };
- if !ty.is_dyn_star() {
+ // See comment above explaining why we peel these newtypes
+ 'descend_newtypes: while !op.layout.ty.is_unsafe_ptr()
+ && !op.layout.ty.is_region_ptr()
+ {
+ for i in 0..op.layout.fields.count() {
+ let field = op.extract_field(bx, i);
+ if !field.layout.is_zst() {
+ // we found the one non-zero-sized field that is allowed
+ // now find *its* non-zero-sized field, or stop if it's a
+ // pointer
+ op = field;
+ continue 'descend_newtypes;
+ }
+ }
+
+ span_bug!(span, "receiver has no non-zero-sized fields {:?}", op);
+ }
+
+ // Make sure that we've actually unwrapped the rcvr down
+ // to a pointer or ref to `dyn* Trait`.
+ if !op.layout.ty.builtin_deref(true).unwrap().ty.is_dyn_star() {
span_bug!(span, "can't codegen a virtual call on {:#?}", op);
}
- // FIXME(dyn-star): Make sure this is done on a &dyn* receiver
let place = op.deref(bx.cx());
- let data_ptr = place.project_field(&mut bx, 0);
- let meta_ptr = place.project_field(&mut bx, 1);
+ let data_ptr = place.project_field(bx, 0);
+ let meta_ptr = place.project_field(bx, 1);
let meta = bx.load_operand(meta_ptr);
llfn = Some(meth::VirtualIndex::from_index(idx).get_fn(
- &mut bx,
+ bx,
meta.immediate(),
op.layout.ty,
&fn_abi,
@@ -951,24 +1029,19 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> {
match (arg, op.val) {
(&mir::Operand::Copy(_), Ref(_, None, _))
| (&mir::Operand::Constant(_), Ref(_, None, _)) => {
- let tmp = PlaceRef::alloca(&mut bx, op.layout);
+ let tmp = PlaceRef::alloca(bx, op.layout);
bx.lifetime_start(tmp.llval, tmp.layout.size);
- op.val.store(&mut bx, tmp);
+ op.val.store(bx, tmp);
op.val = Ref(tmp.llval, None, tmp.align);
copied_constant_arguments.push(tmp);
}
_ => {}
}
- self.codegen_argument(&mut bx, op, &mut llargs, &fn_abi.args[i]);
+ self.codegen_argument(bx, op, &mut llargs, &fn_abi.args[i]);
}
let num_untupled = untuple.map(|tup| {
- self.codegen_arguments_untupled(
- &mut bx,
- tup,
- &mut llargs,
- &fn_abi.args[first_args.len()..],
- )
+ self.codegen_arguments_untupled(bx, tup, &mut llargs, &fn_abi.args[first_args.len()..])
});
let needs_location =
@@ -988,14 +1061,14 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> {
fn_abi,
);
let location =
- self.get_caller_location(&mut bx, mir::SourceInfo { span: fn_span, ..source_info });
+ self.get_caller_location(bx, mir::SourceInfo { span: fn_span, ..source_info });
debug!(
"codegen_call_terminator({:?}): location={:?} (fn_span {:?})",
terminator, location, fn_span
);
let last_arg = fn_abi.args.last().unwrap();
- self.codegen_argument(&mut bx, location, &mut llargs, last_arg);
+ self.codegen_argument(bx, location, &mut llargs, last_arg);
}
let (is_indirect_call, fn_ptr) = match (llfn, instance) {
@@ -1020,40 +1093,43 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> {
bx.cond_br(cond, bb_pass, bb_fail);
bx.switch_to_block(bb_pass);
- helper.do_call(
+ let merging_succ = helper.do_call(
self,
- &mut bx,
+ bx,
fn_abi,
fn_ptr,
&llargs,
target.as_ref().map(|&target| (ret_dest, target)),
cleanup,
&copied_constant_arguments,
+ false,
);
+ assert_eq!(merging_succ, MergingSucc::False);
bx.switch_to_block(bb_fail);
bx.abort();
bx.unreachable();
- return;
+ return MergingSucc::False;
}
helper.do_call(
self,
- &mut bx,
+ bx,
fn_abi,
fn_ptr,
&llargs,
target.as_ref().map(|&target| (ret_dest, target)),
cleanup,
&copied_constant_arguments,
- );
+ mergeable_succ,
+ )
}
fn codegen_asm_terminator(
&mut self,
helper: TerminatorCodegenHelper<'tcx>,
- mut bx: Bx,
+ bx: &mut Bx,
terminator: &mir::Terminator<'tcx>,
template: &[ast::InlineAsmTemplatePiece],
operands: &[mir::InlineAsmOperand<'tcx>],
@@ -1062,24 +1138,25 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> {
destination: Option<mir::BasicBlock>,
cleanup: Option<mir::BasicBlock>,
instance: Instance<'_>,
- ) {
+ mergeable_succ: bool,
+ ) -> MergingSucc {
let span = terminator.source_info.span;
let operands: Vec<_> = operands
.iter()
.map(|op| match *op {
mir::InlineAsmOperand::In { reg, ref value } => {
- let value = self.codegen_operand(&mut bx, value);
+ let value = self.codegen_operand(bx, value);
InlineAsmOperandRef::In { reg, value }
}
mir::InlineAsmOperand::Out { reg, late, ref place } => {
- let place = place.map(|place| self.codegen_place(&mut bx, place.as_ref()));
+ let place = place.map(|place| self.codegen_place(bx, place.as_ref()));
InlineAsmOperandRef::Out { reg, late, place }
}
mir::InlineAsmOperand::InOut { reg, late, ref in_value, ref out_place } => {
- let in_value = self.codegen_operand(&mut bx, in_value);
+ let in_value = self.codegen_operand(bx, in_value);
let out_place =
- out_place.map(|out_place| self.codegen_place(&mut bx, out_place.as_ref()));
+ out_place.map(|out_place| self.codegen_place(bx, out_place.as_ref()));
InlineAsmOperandRef::InOut { reg, late, in_value, out_place }
}
mir::InlineAsmOperand::Const { ref value } => {
@@ -1117,7 +1194,7 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> {
helper.do_inlineasm(
self,
- &mut bx,
+ bx,
template,
&operands,
options,
@@ -1125,71 +1202,128 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> {
destination,
cleanup,
instance,
- );
+ mergeable_succ,
+ )
}
}
impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> {
- pub fn codegen_block(&mut self, bb: mir::BasicBlock) {
- let llbb = self.llbb(bb);
- let mut bx = Bx::build(self.cx, llbb);
+ pub fn codegen_block(&mut self, mut bb: mir::BasicBlock) {
+ let llbb = match self.try_llbb(bb) {
+ Some(llbb) => llbb,
+ None => return,
+ };
+ let bx = &mut Bx::build(self.cx, llbb);
let mir = self.mir;
- let data = &mir[bb];
- debug!("codegen_block({:?}={:?})", bb, data);
+ // MIR basic blocks stop at any function call. This may not be the case
+ // for the backend's basic blocks, in which case we might be able to
+ // combine multiple MIR basic blocks into a single backend basic block.
+ loop {
+ let data = &mir[bb];
- for statement in &data.statements {
- bx = self.codegen_statement(bx, statement);
- }
+ debug!("codegen_block({:?}={:?})", bb, data);
+
+ for statement in &data.statements {
+ self.codegen_statement(bx, statement);
+ }
+
+ let merging_succ = self.codegen_terminator(bx, bb, data.terminator());
+ if let MergingSucc::False = merging_succ {
+ break;
+ }
- self.codegen_terminator(bx, bb, data.terminator());
+ // We are merging the successor into the produced backend basic
+ // block. Record that the successor should be skipped when it is
+ // reached.
+ //
+ // Note: we must not have already generated code for the successor.
+ // This is implicitly ensured by the reverse postorder traversal,
+ // and the assertion explicitly guarantees that.
+ let mut successors = data.terminator().successors();
+ let succ = successors.next().unwrap();
+ assert!(matches!(self.cached_llbbs[succ], CachedLlbb::None));
+ self.cached_llbbs[succ] = CachedLlbb::Skip;
+ bb = succ;
+ }
}
fn codegen_terminator(
&mut self,
- mut bx: Bx,
+ bx: &mut Bx,
bb: mir::BasicBlock,
terminator: &'tcx mir::Terminator<'tcx>,
- ) {
+ ) -> MergingSucc {
debug!("codegen_terminator: {:?}", terminator);
// Create the cleanup bundle, if needed.
let funclet_bb = self.cleanup_kinds[bb].funclet_bb(bb);
let helper = TerminatorCodegenHelper { bb, terminator, funclet_bb };
- self.set_debug_loc(&mut bx, terminator.source_info);
+ let mergeable_succ = || {
+ // Note: any call to `switch_to_block` will invalidate a `true` value
+ // of `mergeable_succ`.
+ let mut successors = terminator.successors();
+ if let Some(succ) = successors.next()
+ && successors.next().is_none()
+ && let &[succ_pred] = self.mir.basic_blocks.predecessors()[succ].as_slice()
+ {
+ // bb has a single successor, and bb is its only predecessor. This
+ // makes it a candidate for merging.
+ assert_eq!(succ_pred, bb);
+ true
+ } else {
+ false
+ }
+ };
+
+ self.set_debug_loc(bx, terminator.source_info);
match terminator.kind {
- mir::TerminatorKind::Resume => self.codegen_resume_terminator(helper, bx),
+ mir::TerminatorKind::Resume => {
+ self.codegen_resume_terminator(helper, bx);
+ MergingSucc::False
+ }
mir::TerminatorKind::Abort => {
self.codegen_abort_terminator(helper, bx, terminator);
+ MergingSucc::False
}
mir::TerminatorKind::Goto { target } => {
- helper.funclet_br(self, &mut bx, target);
+ helper.funclet_br(self, bx, target, mergeable_succ())
}
mir::TerminatorKind::SwitchInt { ref discr, switch_ty, ref targets } => {
self.codegen_switchint_terminator(helper, bx, discr, switch_ty, targets);
+ MergingSucc::False
}
mir::TerminatorKind::Return => {
self.codegen_return_terminator(bx);
+ MergingSucc::False
}
mir::TerminatorKind::Unreachable => {
bx.unreachable();
+ MergingSucc::False
}
mir::TerminatorKind::Drop { place, target, unwind } => {
- self.codegen_drop_terminator(helper, bx, place, target, unwind);
+ self.codegen_drop_terminator(helper, bx, place, target, unwind, mergeable_succ())
}
- mir::TerminatorKind::Assert { ref cond, expected, ref msg, target, cleanup } => {
- self.codegen_assert_terminator(
- helper, bx, terminator, cond, expected, msg, target, cleanup,
- );
- }
+ mir::TerminatorKind::Assert { ref cond, expected, ref msg, target, cleanup } => self
+ .codegen_assert_terminator(
+ helper,
+ bx,
+ terminator,
+ cond,
+ expected,
+ msg,
+ target,
+ cleanup,
+ mergeable_succ(),
+ ),
mir::TerminatorKind::DropAndReplace { .. } => {
bug!("undesugared DropAndReplace in codegen: {:?}", terminator);
@@ -1203,19 +1337,18 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> {
cleanup,
from_hir_call: _,
fn_span,
- } => {
- self.codegen_call_terminator(
- helper,
- bx,
- terminator,
- func,
- args,
- destination,
- target,
- cleanup,
- fn_span,
- );
- }
+ } => self.codegen_call_terminator(
+ helper,
+ bx,
+ terminator,
+ func,
+ args,
+ destination,
+ target,
+ cleanup,
+ fn_span,
+ mergeable_succ(),
+ ),
mir::TerminatorKind::GeneratorDrop | mir::TerminatorKind::Yield { .. } => {
bug!("generator ops in codegen")
}
@@ -1230,20 +1363,19 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> {
line_spans,
destination,
cleanup,
- } => {
- self.codegen_asm_terminator(
- helper,
- bx,
- terminator,
- template,
- operands,
- options,
- line_spans,
- destination,
- cleanup,
- self.instance,
- );
- }
+ } => self.codegen_asm_terminator(
+ helper,
+ bx,
+ terminator,
+ template,
+ operands,
+ options,
+ line_spans,
+ destination,
+ cleanup,
+ self.instance,
+ mergeable_succ(),
+ ),
}
}
@@ -1561,12 +1693,21 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> {
// FIXME(eddyb) rename `llbb` and other `ll`-prefixed things to use a
// more backend-agnostic prefix such as `cg` (i.e. this would be `cgbb`).
pub fn llbb(&mut self, bb: mir::BasicBlock) -> Bx::BasicBlock {
- self.cached_llbbs[bb].unwrap_or_else(|| {
- // FIXME(eddyb) only name the block if `fewer_names` is `false`.
- let llbb = Bx::append_block(self.cx, self.llfn, &format!("{:?}", bb));
- self.cached_llbbs[bb] = Some(llbb);
- llbb
- })
+ self.try_llbb(bb).unwrap()
+ }
+
+ /// Like `llbb`, but may fail if the basic block should be skipped.
+ pub fn try_llbb(&mut self, bb: mir::BasicBlock) -> Option<Bx::BasicBlock> {
+ match self.cached_llbbs[bb] {
+ CachedLlbb::None => {
+ // FIXME(eddyb) only name the block if `fewer_names` is `false`.
+ let llbb = Bx::append_block(self.cx, self.llfn, &format!("{:?}", bb));
+ self.cached_llbbs[bb] = CachedLlbb::Some(llbb);
+ Some(llbb)
+ }
+ CachedLlbb::Some(llbb) => Some(llbb),
+ CachedLlbb::Skip => None,
+ }
}
fn make_return_dest(
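
Taken together, the changes in this file turn `codegen_block` into a loop over a chain of MIR blocks: when a terminator's only successor has the current block as its only predecessor, needs no landing pad, and needs no cleanup-return, `funclet_br` reports `MergingSucc::True`, the successor is marked to be skipped, and its statements are emitted into the same backend block. `try_llbb` then returns `None` for skipped blocks so they are never materialized. A minimal sketch of just that bookkeeping, with `usize` indices standing in for basic blocks and a generic `T` for the backend block handle:

    // Sketch only: the three states a MIR block can be in while its backend
    // block is (or isn't) being created.
    enum CachedBlock<T> {
        /// Nothing created yet; `try_llbb` would append a fresh block.
        None,
        /// A backend block already exists for this MIR block.
        Some(T),
        /// The MIR block was merged into its predecessor; never create one.
        Skip,
    }

    /// A successor can be folded into the current block only if it is the sole
    /// successor and the current block is its sole predecessor (the real code
    /// additionally requires that the edge needs no landing pad or cleanup-ret).
    fn mergeable_succ(successors: &[usize], preds_of_succ: &[usize], current: usize) -> bool {
        successors.len() == 1 && preds_of_succ.len() == 1 && preds_of_succ[0] == current
    }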
diff --git a/compiler/rustc_codegen_ssa/src/mir/constant.rs b/compiler/rustc_codegen_ssa/src/mir/constant.rs
index 4c6ab457c..53ff3c240 100644
--- a/compiler/rustc_codegen_ssa/src/mir/constant.rs
+++ b/compiler/rustc_codegen_ssa/src/mir/constant.rs
@@ -42,7 +42,14 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> {
};
self.cx.tcx().const_eval_resolve(ty::ParamEnv::reveal_all(), uv, None).map_err(|err| {
- self.cx.tcx().sess.span_err(constant.span, "erroneous constant encountered");
+ match err {
+ ErrorHandled::Reported(_) => {
+ self.cx.tcx().sess.span_err(constant.span, "erroneous constant encountered");
+ }
+ ErrorHandled::TooGeneric => {
+ span_bug!(constant.span, "codegen encountered polymorphic constant: {:?}", err);
+ }
+ }
err
})
}
diff --git a/compiler/rustc_codegen_ssa/src/mir/debuginfo.rs b/compiler/rustc_codegen_ssa/src/mir/debuginfo.rs
index 157c1c823..99283d3bb 100644
--- a/compiler/rustc_codegen_ssa/src/mir/debuginfo.rs
+++ b/compiler/rustc_codegen_ssa/src/mir/debuginfo.rs
@@ -14,6 +14,8 @@ use super::operand::{OperandRef, OperandValue};
use super::place::PlaceRef;
use super::{FunctionCx, LocalRef};
+use std::ops::Range;
+
pub struct FunctionDebugContext<S, L> {
pub scopes: IndexVec<mir::SourceScope, DebugScope<S, L>>,
}
@@ -25,7 +27,7 @@ pub enum VariableKind {
}
/// Like `mir::VarDebugInfo`, but within a `mir::Local`.
-#[derive(Copy, Clone)]
+#[derive(Clone)]
pub struct PerLocalVarDebugInfo<'tcx, D> {
pub name: Symbol,
pub source_info: mir::SourceInfo,
@@ -33,6 +35,10 @@ pub struct PerLocalVarDebugInfo<'tcx, D> {
/// `DIVariable` returned by `create_dbg_var`.
pub dbg_var: Option<D>,
+ /// Byte range in the `dbg_var` covered by this fragment,
+ /// if this is a fragment of a composite `VarDebugInfo`.
+ pub fragment: Option<Range<Size>>,
+
/// `.place.projection` from `mir::VarDebugInfo`.
pub projection: &'tcx ty::List<mir::PlaceElem<'tcx>>,
}
@@ -145,7 +151,7 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> {
Some(per_local) => &per_local[local],
None => return,
};
- let whole_local_var = vars.iter().find(|var| var.projection.is_empty()).copied();
+ let whole_local_var = vars.iter().find(|var| var.projection.is_empty()).cloned();
let has_proj = || vars.iter().any(|var| !var.projection.is_empty());
let fallback_var = if self.mir.local_kind(local) == mir::LocalKind::Arg {
@@ -187,6 +193,7 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> {
name,
source_info: decl.source_info,
dbg_var,
+ fragment: None,
projection: ty::List::empty(),
})
}
@@ -199,7 +206,7 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> {
let name = if bx.sess().fewer_names() {
None
} else {
- Some(match whole_local_var.or(fallback_var) {
+ Some(match whole_local_var.or(fallback_var.clone()) {
Some(var) if var.name != kw::Empty => var.name.to_string(),
_ => format!("{:?}", local),
})
@@ -249,7 +256,7 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> {
LocalRef::UnsizedPlace(_) => return,
};
- let vars = vars.iter().copied().chain(fallback_var);
+ let vars = vars.iter().cloned().chain(fallback_var);
for var in vars {
let Some(dbg_var) = var.dbg_var else { continue };
@@ -312,9 +319,16 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> {
bx.store(place.llval, alloca.llval, alloca.align);
// Point the debug info to `*alloca` for the current variable
- bx.dbg_var_addr(dbg_var, dbg_loc, alloca.llval, Size::ZERO, &[Size::ZERO]);
+ bx.dbg_var_addr(dbg_var, dbg_loc, alloca.llval, Size::ZERO, &[Size::ZERO], None);
} else {
- bx.dbg_var_addr(dbg_var, dbg_loc, base.llval, direct_offset, &indirect_offsets);
+ bx.dbg_var_addr(
+ dbg_var,
+ dbg_loc,
+ base.llval,
+ direct_offset,
+ &indirect_offsets,
+ None,
+ );
}
}
}
@@ -382,6 +396,10 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> {
let ty = self.monomorphize(c.ty());
(ty, VariableKind::LocalVariable)
}
+ mir::VarDebugInfoContents::Composite { ty, fragments: _ } => {
+ let ty = self.monomorphize(ty);
+ (ty, VariableKind::LocalVariable)
+ }
};
self.cx.create_dbg_var(var.name, var_ty, dbg_scope, var_kind, span)
@@ -393,6 +411,7 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> {
name: var.name,
source_info: var.source_info,
dbg_var,
+ fragment: None,
projection: place.projection,
});
}
@@ -407,10 +426,48 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> {
bx,
);
- bx.dbg_var_addr(dbg_var, dbg_loc, base.llval, Size::ZERO, &[]);
+ bx.dbg_var_addr(dbg_var, dbg_loc, base.llval, Size::ZERO, &[], None);
}
}
}
+ mir::VarDebugInfoContents::Composite { ty, ref fragments } => {
+ let var_ty = self.monomorphize(ty);
+ let var_layout = self.cx.layout_of(var_ty);
+ for fragment in fragments {
+ let mut fragment_start = Size::ZERO;
+ let mut fragment_layout = var_layout;
+
+ for elem in &fragment.projection {
+ match *elem {
+ mir::ProjectionElem::Field(field, _) => {
+ let i = field.index();
+ fragment_start += fragment_layout.fields.offset(i);
+ fragment_layout = fragment_layout.field(self.cx, i);
+ }
+ _ => span_bug!(
+ var.source_info.span,
+ "unsupported fragment projection `{:?}`",
+ elem,
+ ),
+ }
+ }
+
+ let place = fragment.contents;
+ per_local[place.local].push(PerLocalVarDebugInfo {
+ name: var.name,
+ source_info: var.source_info,
+ dbg_var,
+ fragment: if fragment_layout.size == var_layout.size {
+ // Fragment covers entire variable, so as far as
+ // DWARF is concerned, it's not really a fragment.
+ None
+ } else {
+ Some(fragment_start..fragment_start + fragment_layout.size)
+ },
+ projection: place.projection,
+ });
+ }
+ }
}
}
Some(per_local)
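
Note: for composite debug info, the loop above folds each fragment's `Field` projections into a byte offset and keeps the range only when it does not cover the whole variable. A standalone sketch of that computation, with plain byte counts standing in for `Size` and an `(offset, size)` pair standing in for the layout queries:

use std::ops::Range;

// Walk the field projections, accumulate the start offset, and drop the range
// when the fragment covers the whole variable.
fn fragment_range(
    var_size: u64,
    // (offset of the projected field within its parent, size of that field)
    projection: &[(u64, u64)],
) -> Option<Range<u64>> {
    let mut start = 0;
    let mut size = var_size;
    for &(field_offset, field_size) in projection {
        start += field_offset;
        size = field_size;
    }
    if size == var_size {
        // Covers the entire variable, so DWARF does not need a fragment.
        None
    } else {
        Some(start..start + size)
    }
}

fn main() {
    // A 16-byte variable whose fragment is the second 8-byte field.
    assert_eq!(fragment_range(16, &[(8, 8)]), Some(8..16));
    // A "fragment" that spans the whole variable needs no DWARF fragment.
    assert_eq!(fragment_range(16, &[(0, 16)]), None);
}
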
diff --git a/compiler/rustc_codegen_ssa/src/mir/mod.rs b/compiler/rustc_codegen_ssa/src/mir/mod.rs
index da9aaf00e..79c66a955 100644
--- a/compiler/rustc_codegen_ssa/src/mir/mod.rs
+++ b/compiler/rustc_codegen_ssa/src/mir/mod.rs
@@ -16,6 +16,18 @@ use rustc_middle::mir::traversal;
use self::operand::{OperandRef, OperandValue};
+// Used for tracking the state of generated basic blocks.
+enum CachedLlbb<T> {
+ /// Nothing created yet.
+ None,
+
+ /// Has been created.
+ Some(T),
+
+ /// Nothing created yet, and nothing should be.
+ Skip,
+}
+
/// Master context for codegenning from MIR.
pub struct FunctionCx<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> {
instance: Instance<'tcx>,
@@ -43,7 +55,7 @@ pub struct FunctionCx<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> {
/// as-needed (e.g. RPO reaching it or another block branching to it).
// FIXME(eddyb) rename `llbbs` and other `ll`-prefixed things to use a
// more backend-agnostic prefix such as `cg` (i.e. this would be `cgbbs`).
- cached_llbbs: IndexVec<mir::BasicBlock, Option<Bx::BasicBlock>>,
+ cached_llbbs: IndexVec<mir::BasicBlock, CachedLlbb<Bx::BasicBlock>>,
/// The funclet status of each basic block
cleanup_kinds: IndexVec<mir::BasicBlock, analyze::CleanupKind>,
@@ -155,11 +167,13 @@ pub fn codegen_mir<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>>(
}
let cleanup_kinds = analyze::cleanup_kinds(&mir);
- let cached_llbbs: IndexVec<mir::BasicBlock, Option<Bx::BasicBlock>> = mir
- .basic_blocks
- .indices()
- .map(|bb| if bb == mir::START_BLOCK { Some(start_llbb) } else { None })
- .collect();
+ let cached_llbbs: IndexVec<mir::BasicBlock, CachedLlbb<Bx::BasicBlock>> =
+ mir.basic_blocks
+ .indices()
+ .map(|bb| {
+ if bb == mir::START_BLOCK { CachedLlbb::Some(start_llbb) } else { CachedLlbb::None }
+ })
+ .collect();
let mut fx = FunctionCx {
instance,
@@ -189,7 +203,7 @@ pub fn codegen_mir<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>>(
all_consts_ok = false;
match err {
// errored or at least linted
- ErrorHandled::Reported(_) | ErrorHandled::Linted => {}
+ ErrorHandled::Reported(_) => {}
ErrorHandled::TooGeneric => {
span_bug!(const_.span, "codegen encountered polymorphic constant: {:?}", err)
}
diff --git a/compiler/rustc_codegen_ssa/src/mir/operand.rs b/compiler/rustc_codegen_ssa/src/mir/operand.rs
index e6ba642a7..34a5b638d 100644
--- a/compiler/rustc_codegen_ssa/src/mir/operand.rs
+++ b/compiler/rustc_codegen_ssa/src/mir/operand.rs
@@ -40,10 +40,10 @@ pub enum OperandValue<V> {
/// instead.
#[derive(Copy, Clone)]
pub struct OperandRef<'tcx, V> {
- // The value.
+ /// The value.
pub val: OperandValue<V>,
- // The layout of value, based on its Rust type.
+ /// The layout of value, based on its Rust type.
pub layout: TyAndLayout<'tcx>,
}
diff --git a/compiler/rustc_codegen_ssa/src/mir/place.rs b/compiler/rustc_codegen_ssa/src/mir/place.rs
index 9c18df564..fbe30154a 100644
--- a/compiler/rustc_codegen_ssa/src/mir/place.rs
+++ b/compiler/rustc_codegen_ssa/src/mir/place.rs
@@ -29,7 +29,7 @@ pub struct PlaceRef<'tcx, V> {
impl<'a, 'tcx, V: CodegenObject> PlaceRef<'tcx, V> {
pub fn new_sized(llval: V, layout: TyAndLayout<'tcx>) -> PlaceRef<'tcx, V> {
- assert!(!layout.is_unsized());
+ assert!(layout.is_sized());
PlaceRef { llval, llextra: None, layout, align: layout.align.abi }
}
@@ -38,7 +38,7 @@ impl<'a, 'tcx, V: CodegenObject> PlaceRef<'tcx, V> {
layout: TyAndLayout<'tcx>,
align: Align,
) -> PlaceRef<'tcx, V> {
- assert!(!layout.is_unsized());
+ assert!(layout.is_sized());
PlaceRef { llval, llextra: None, layout, align }
}
@@ -48,7 +48,7 @@ impl<'a, 'tcx, V: CodegenObject> PlaceRef<'tcx, V> {
bx: &mut Bx,
layout: TyAndLayout<'tcx>,
) -> Self {
- assert!(!layout.is_unsized(), "tried to statically allocate unsized place");
+ assert!(layout.is_sized(), "tried to statically allocate unsized place");
let tmp = bx.alloca(bx.cx().backend_type(layout), layout.align.abi);
Self::new_sized(tmp, layout)
}
@@ -145,7 +145,7 @@ impl<'a, 'tcx, V: CodegenObject> PlaceRef<'tcx, V> {
);
return simple();
}
- _ if !field.is_unsized() => return simple(),
+ _ if field.is_sized() => return simple(),
ty::Slice(..) | ty::Str | ty::Foreign(..) => return simple(),
ty::Adt(def, _) => {
if def.repr().packed() {
@@ -209,7 +209,9 @@ impl<'a, 'tcx, V: CodegenObject> PlaceRef<'tcx, V> {
bx: &mut Bx,
cast_to: Ty<'tcx>,
) -> V {
- let cast_to = bx.cx().immediate_backend_type(bx.cx().layout_of(cast_to));
+ let cast_to_layout = bx.cx().layout_of(cast_to);
+ let cast_to_size = cast_to_layout.layout.size();
+ let cast_to = bx.cx().immediate_backend_type(cast_to_layout);
if self.layout.abi.is_uninhabited() {
return bx.cx().const_undef(cast_to);
}
@@ -229,7 +231,8 @@ impl<'a, 'tcx, V: CodegenObject> PlaceRef<'tcx, V> {
// Read the tag/niche-encoded discriminant from memory.
let tag = self.project_field(bx, tag_field);
- let tag = bx.load_operand(tag);
+ let tag_op = bx.load_operand(tag);
+ let tag_imm = tag_op.immediate();
// Decode the discriminant (specifically if it's niche-encoded).
match *tag_encoding {
@@ -242,68 +245,170 @@ impl<'a, 'tcx, V: CodegenObject> PlaceRef<'tcx, V> {
Int(_, signed) => !tag_scalar.is_bool() && signed,
_ => false,
};
- bx.intcast(tag.immediate(), cast_to, signed)
+ bx.intcast(tag_imm, cast_to, signed)
}
TagEncoding::Niche { untagged_variant, ref niche_variants, niche_start } => {
- // Rebase from niche values to discriminants, and check
- // whether the result is in range for the niche variants.
- let niche_llty = bx.cx().immediate_backend_type(tag.layout);
- let tag = tag.immediate();
-
- // We first compute the "relative discriminant" (wrt `niche_variants`),
- // that is, if `n = niche_variants.end() - niche_variants.start()`,
- // we remap `niche_start..=niche_start + n` (which may wrap around)
- // to (non-wrap-around) `0..=n`, to be able to check whether the
- // discriminant corresponds to a niche variant with one comparison.
- // We also can't go directly to the (variant index) discriminant
- // and check that it is in the range `niche_variants`, because
- // that might not fit in the same type, on top of needing an extra
- // comparison (see also the comment on `let niche_discr`).
- let relative_discr = if niche_start == 0 {
- // Avoid subtracting `0`, which wouldn't work for pointers.
- // FIXME(eddyb) check the actual primitive type here.
- tag
+ // Cast to an integer so we don't have to treat a pointer as a
+ // special case.
+ let (tag, tag_llty) = if tag_scalar.primitive().is_ptr() {
+ let t = bx.type_isize();
+ let tag = bx.ptrtoint(tag_imm, t);
+ (tag, t)
} else {
- bx.sub(tag, bx.cx().const_uint_big(niche_llty, niche_start))
+ (tag_imm, bx.cx().immediate_backend_type(tag_op.layout))
};
+
+ let tag_size = tag_scalar.size(bx.cx());
+ let max_unsigned = tag_size.unsigned_int_max();
+ let max_signed = tag_size.signed_int_max() as u128;
+ let min_signed = max_signed + 1;
let relative_max = niche_variants.end().as_u32() - niche_variants.start().as_u32();
- let is_niche = if relative_max == 0 {
- // Avoid calling `const_uint`, which wouldn't work for pointers.
- // Also use canonical == 0 instead of non-canonical u<= 0.
- // FIXME(eddyb) check the actual primitive type here.
- bx.icmp(IntPredicate::IntEQ, relative_discr, bx.cx().const_null(niche_llty))
+ let niche_end = niche_start.wrapping_add(relative_max as u128) & max_unsigned;
+ let range = tag_scalar.valid_range(bx.cx());
+
+ let sle = |lhs: u128, rhs: u128| -> bool {
+ // Signed and unsigned comparisons give the same results,
+ // except that in signed comparisons an integer with the
+ // sign bit set is less than one with the sign bit clear.
+ // Toggle the sign bit to do a signed comparison.
+ (lhs ^ min_signed) <= (rhs ^ min_signed)
+ };
+
+ // We have a subrange `niche_start..=niche_end` inside `range`.
+ // If the value of the tag is inside this subrange, it's a
+ // "niche value", an increment of the discriminant. Otherwise it
+ // indicates the untagged variant.
+ // A general algorithm to extract the discriminant from the tag
+ // is:
+ // relative_tag = tag - niche_start
+ // is_niche = relative_tag <= (ule) relative_max
+ // discr = if is_niche {
+ // cast(relative_tag) + niche_variants.start()
+ // } else {
+ // untagged_variant
+ // }
+ // However, we will likely be able to emit simpler code.
+
+ // Find the least and greatest values in `range`, considered
+ // both as signed and unsigned.
+ let (low_unsigned, high_unsigned) = if range.start <= range.end {
+ (range.start, range.end)
} else {
- let relative_max = bx.cx().const_uint(niche_llty, relative_max as u64);
- bx.icmp(IntPredicate::IntULE, relative_discr, relative_max)
+ (0, max_unsigned)
+ };
+ let (low_signed, high_signed) = if sle(range.start, range.end) {
+ (range.start, range.end)
+ } else {
+ (min_signed, max_signed)
};
- // NOTE(eddyb) this addition needs to be performed on the final
- // type, in case the niche itself can't represent all variant
- // indices (e.g. `u8` niche with more than `256` variants,
- // but enough uninhabited variants so that the remaining variants
- // fit in the niche).
- // In other words, `niche_variants.end - niche_variants.start`
- // is representable in the niche, but `niche_variants.end`
- // might not be, in extreme cases.
- let niche_discr = {
- let relative_discr = if relative_max == 0 {
- // HACK(eddyb) since we have only one niche, we know which
- // one it is, and we can avoid having a dynamic value here.
- bx.cx().const_uint(cast_to, 0)
+ let niches_ule = niche_start <= niche_end;
+ let niches_sle = sle(niche_start, niche_end);
+ let cast_smaller = cast_to_size <= tag_size;
+
+ // In the algorithm above, we can change
+ // cast(relative_tag) + niche_variants.start()
+ // into
+ // cast(tag + (niche_variants.start() - niche_start))
+ // if either the casted type is no larger than the original
+ // type, or if the niche values are contiguous (in either the
+ // signed or unsigned sense).
+ let can_incr = cast_smaller || niches_ule || niches_sle;
+
+ let data_for_boundary_niche = || -> Option<(IntPredicate, u128)> {
+ if !can_incr {
+ None
+ } else if niche_start == low_unsigned {
+ Some((IntPredicate::IntULE, niche_end))
+ } else if niche_end == high_unsigned {
+ Some((IntPredicate::IntUGE, niche_start))
+ } else if niche_start == low_signed {
+ Some((IntPredicate::IntSLE, niche_end))
+ } else if niche_end == high_signed {
+ Some((IntPredicate::IntSGE, niche_start))
} else {
- bx.intcast(relative_discr, cast_to, false)
+ None
+ }
+ };
+
+ let (is_niche, tagged_discr, delta) = if relative_max == 0 {
+ // Best case scenario: only one tagged variant. This will
+ // likely become just a comparison and a jump.
+ // The algorithm is:
+ // is_niche = tag == niche_start
+ // discr = if is_niche {
+ // niche_start
+ // } else {
+ // untagged_variant
+ // }
+ let niche_start = bx.cx().const_uint_big(tag_llty, niche_start);
+ let is_niche = bx.icmp(IntPredicate::IntEQ, tag, niche_start);
+ let tagged_discr =
+ bx.cx().const_uint(cast_to, niche_variants.start().as_u32() as u64);
+ (is_niche, tagged_discr, 0)
+ } else if let Some((predicate, constant)) = data_for_boundary_niche() {
+ // The niche values are either the lowest or the highest in
+ // `range`. We can avoid the first subtraction in the
+ // algorithm.
+ // The algorithm is now this:
+ // is_niche = tag <= niche_end
+ // discr = if is_niche {
+ // cast(tag + (niche_variants.start() - niche_start))
+ // } else {
+ // untagged_variant
+ // }
+ // (the first line may instead be tag >= niche_start,
+ // and may be a signed or unsigned comparison)
+ // The arithmetic must be done before the cast, so we can
+ // have the correct wrapping behavior. See issue #104519 for
+ // the consequences of getting this wrong.
+ let is_niche =
+ bx.icmp(predicate, tag, bx.cx().const_uint_big(tag_llty, constant));
+ let delta = (niche_variants.start().as_u32() as u128).wrapping_sub(niche_start);
+ let incr_tag = if delta == 0 {
+ tag
+ } else {
+ bx.add(tag, bx.cx().const_uint_big(tag_llty, delta))
};
- bx.add(
+
+ let cast_tag = if cast_smaller {
+ bx.intcast(incr_tag, cast_to, false)
+ } else if niches_ule {
+ bx.zext(incr_tag, cast_to)
+ } else {
+ bx.sext(incr_tag, cast_to)
+ };
+
+ (is_niche, cast_tag, 0)
+ } else {
+ // The special cases don't apply, so we'll have to go with
+ // the general algorithm.
+ let relative_discr = bx.sub(tag, bx.cx().const_uint_big(tag_llty, niche_start));
+ let cast_tag = bx.intcast(relative_discr, cast_to, false);
+ let is_niche = bx.icmp(
+ IntPredicate::IntULE,
relative_discr,
- bx.cx().const_uint(cast_to, niche_variants.start().as_u32() as u64),
- )
+ bx.cx().const_uint(tag_llty, relative_max as u64),
+ );
+ (is_niche, cast_tag, niche_variants.start().as_u32() as u128)
+ };
+
+ let tagged_discr = if delta == 0 {
+ tagged_discr
+ } else {
+ bx.add(tagged_discr, bx.cx().const_uint_big(cast_to, delta))
};
- bx.select(
+ let discr = bx.select(
is_niche,
- niche_discr,
+ tagged_discr,
bx.cx().const_uint(cast_to, untagged_variant.as_u32() as u64),
- )
+ );
+
+ // In principle we could insert assumes on the possible range of `discr`, but
+ // currently in LLVM this seems to be a pessimization.
+
+ discr
}
}
}
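
Note: the rewritten `codegen_get_discr` follows the general algorithm spelled out in its comments (relative tag, unsigned range check, conditional select), plus the sign-bit XOR trick for signed comparison. A constant-folded sketch of that arithmetic, assuming untruncated `u128` values in place of backend SSA values of the tag's width:

// Constant-folded version of the general decoding algorithm described in the
// comments above. The real code emits the same operations on values of the
// tag's width, so wrap-around differs for narrower tags.
fn decode_niche_discr(
    tag: u128,
    niche_start: u128,
    niche_variants_start: u128,
    relative_max: u128,
    untagged_variant: u128,
) -> u128 {
    // relative_tag = tag - niche_start      (wrapping, like the emitted `sub`)
    let relative_tag = tag.wrapping_sub(niche_start);
    // is_niche = relative_tag <= (ule) relative_max
    if relative_tag <= relative_max {
        // cast(relative_tag) + niche_variants.start()
        relative_tag.wrapping_add(niche_variants_start)
    } else {
        untagged_variant
    }
}

// The `sle` helper above: toggling the sign bit turns an unsigned comparison
// into the signed ordering.
fn signed_le(lhs: u128, rhs: u128) -> bool {
    let min_signed = 1u128 << 127;
    (lhs ^ min_signed) <= (rhs ^ min_signed)
}

fn main() {
    // `Option<&T>`-style layout: the null value 0 is the niche, encoding
    // variant 0 (`None`); any other value means the untagged `Some`.
    assert_eq!(decode_niche_discr(0, 0, 0, 0, 1), 0);
    assert_eq!(decode_niche_discr(0x1000, 0, 0, 0, 1), 1);
    // -1 is less than 0 in the signed order, although u128::MAX > 0 unsigned.
    assert!(signed_le(u128::MAX, 0));
}
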
diff --git a/compiler/rustc_codegen_ssa/src/mir/rvalue.rs b/compiler/rustc_codegen_ssa/src/mir/rvalue.rs
index 4aab31fbf..9ad96f7a4 100644
--- a/compiler/rustc_codegen_ssa/src/mir/rvalue.rs
+++ b/compiler/rustc_codegen_ssa/src/mir/rvalue.rs
@@ -18,17 +18,16 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> {
#[instrument(level = "trace", skip(self, bx))]
pub fn codegen_rvalue(
&mut self,
- mut bx: Bx,
+ bx: &mut Bx,
dest: PlaceRef<'tcx, Bx::Value>,
rvalue: &mir::Rvalue<'tcx>,
- ) -> Bx {
+ ) {
match *rvalue {
mir::Rvalue::Use(ref operand) => {
- let cg_operand = self.codegen_operand(&mut bx, operand);
+ let cg_operand = self.codegen_operand(bx, operand);
// FIXME: consider not copying constants through stack. (Fixable by codegen'ing
// constants into `OperandValue::Ref`; why don’t we do that yet if we don’t?)
- cg_operand.val.store(&mut bx, dest);
- bx
+ cg_operand.val.store(bx, dest);
}
mir::Rvalue::Cast(mir::CastKind::Pointer(PointerCast::Unsize), ref source, _) => {
@@ -37,16 +36,16 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> {
if bx.cx().is_backend_scalar_pair(dest.layout) {
// Into-coerce of a thin pointer to a fat pointer -- just
// use the operand path.
- let (mut bx, temp) = self.codegen_rvalue_operand(bx, rvalue);
- temp.val.store(&mut bx, dest);
- return bx;
+ let temp = self.codegen_rvalue_operand(bx, rvalue);
+ temp.val.store(bx, dest);
+ return;
}
// Unsize of a nontrivial struct. I would prefer for
// this to be eliminated by MIR building, but
// `CoerceUnsized` can be passed by a where-clause,
// so the (generic) MIR may not be able to expand it.
- let operand = self.codegen_operand(&mut bx, source);
+ let operand = self.codegen_operand(bx, source);
match operand.val {
OperandValue::Pair(..) | OperandValue::Immediate(_) => {
// Unsize from an immediate structure. We don't
@@ -56,63 +55,62 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> {
// index into the struct, and this case isn't
// important enough for it.
debug!("codegen_rvalue: creating ugly alloca");
- let scratch = PlaceRef::alloca(&mut bx, operand.layout);
- scratch.storage_live(&mut bx);
- operand.val.store(&mut bx, scratch);
- base::coerce_unsized_into(&mut bx, scratch, dest);
- scratch.storage_dead(&mut bx);
+ let scratch = PlaceRef::alloca(bx, operand.layout);
+ scratch.storage_live(bx);
+ operand.val.store(bx, scratch);
+ base::coerce_unsized_into(bx, scratch, dest);
+ scratch.storage_dead(bx);
}
OperandValue::Ref(llref, None, align) => {
let source = PlaceRef::new_sized_aligned(llref, operand.layout, align);
- base::coerce_unsized_into(&mut bx, source, dest);
+ base::coerce_unsized_into(bx, source, dest);
}
OperandValue::Ref(_, Some(_), _) => {
bug!("unsized coercion on an unsized rvalue");
}
}
- bx
}
mir::Rvalue::Repeat(ref elem, count) => {
- let cg_elem = self.codegen_operand(&mut bx, elem);
+ let cg_elem = self.codegen_operand(bx, elem);
// Do not generate the loop for zero-sized elements or empty arrays.
if dest.layout.is_zst() {
- return bx;
+ return;
}
if let OperandValue::Immediate(v) = cg_elem.val {
let zero = bx.const_usize(0);
- let start = dest.project_index(&mut bx, zero).llval;
+ let start = dest.project_index(bx, zero).llval;
let size = bx.const_usize(dest.layout.size.bytes());
// Use llvm.memset.p0i8.* to initialize all zero arrays
if bx.cx().const_to_opt_u128(v, false) == Some(0) {
let fill = bx.cx().const_u8(0);
bx.memset(start, fill, size, dest.align, MemFlags::empty());
- return bx;
+ return;
}
// Use llvm.memset.p0i8.* to initialize byte arrays
let v = bx.from_immediate(v);
if bx.cx().val_ty(v) == bx.cx().type_i8() {
bx.memset(start, v, size, dest.align, MemFlags::empty());
- return bx;
+ return;
}
}
let count =
self.monomorphize(count).eval_usize(bx.cx().tcx(), ty::ParamEnv::reveal_all());
- bx.write_operand_repeatedly(cg_elem, count, dest)
+ bx.write_operand_repeatedly(cg_elem, count, dest);
}
mir::Rvalue::Aggregate(ref kind, ref operands) => {
let (dest, active_field_index) = match **kind {
mir::AggregateKind::Adt(adt_did, variant_index, _, _, active_field_index) => {
- dest.codegen_set_discr(&mut bx, variant_index);
+ dest.codegen_set_discr(bx, variant_index);
if bx.tcx().adt_def(adt_did).is_enum() {
- (dest.project_downcast(&mut bx, variant_index), active_field_index)
+ (dest.project_downcast(bx, variant_index), active_field_index)
} else {
(dest, active_field_index)
}
@@ -120,37 +118,35 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> {
_ => (dest, None),
};
for (i, operand) in operands.iter().enumerate() {
- let op = self.codegen_operand(&mut bx, operand);
+ let op = self.codegen_operand(bx, operand);
// Do not generate stores and GEPs for zero-sized fields.
if !op.layout.is_zst() {
let field_index = active_field_index.unwrap_or(i);
let field = if let mir::AggregateKind::Array(_) = **kind {
let llindex = bx.cx().const_usize(field_index as u64);
- dest.project_index(&mut bx, llindex)
+ dest.project_index(bx, llindex)
} else {
- dest.project_field(&mut bx, field_index)
+ dest.project_field(bx, field_index)
};
- op.val.store(&mut bx, field);
+ op.val.store(bx, field);
}
}
- bx
}
_ => {
assert!(self.rvalue_creates_operand(rvalue, DUMMY_SP));
- let (mut bx, temp) = self.codegen_rvalue_operand(bx, rvalue);
- temp.val.store(&mut bx, dest);
- bx
+ let temp = self.codegen_rvalue_operand(bx, rvalue);
+ temp.val.store(bx, dest);
}
}
}
pub fn codegen_rvalue_unsized(
&mut self,
- mut bx: Bx,
+ bx: &mut Bx,
indirect_dest: PlaceRef<'tcx, Bx::Value>,
rvalue: &mir::Rvalue<'tcx>,
- ) -> Bx {
+ ) {
debug!(
"codegen_rvalue_unsized(indirect_dest.llval={:?}, rvalue={:?})",
indirect_dest.llval, rvalue
@@ -158,9 +154,8 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> {
match *rvalue {
mir::Rvalue::Use(ref operand) => {
- let cg_operand = self.codegen_operand(&mut bx, operand);
- cg_operand.val.store_unsized(&mut bx, indirect_dest);
- bx
+ let cg_operand = self.codegen_operand(bx, operand);
+ cg_operand.val.store_unsized(bx, indirect_dest);
}
_ => bug!("unsized assignment other than `Rvalue::Use`"),
@@ -169,9 +164,9 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> {
pub fn codegen_rvalue_operand(
&mut self,
- mut bx: Bx,
+ bx: &mut Bx,
rvalue: &mir::Rvalue<'tcx>,
- ) -> (Bx, OperandRef<'tcx, Bx::Value>) {
+ ) -> OperandRef<'tcx, Bx::Value> {
assert!(
self.rvalue_creates_operand(rvalue, DUMMY_SP),
"cannot codegen {:?} to operand",
@@ -180,7 +175,7 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> {
match *rvalue {
mir::Rvalue::Cast(ref kind, ref source, mir_cast_ty) => {
- let operand = self.codegen_operand(&mut bx, source);
+ let operand = self.codegen_operand(bx, source);
debug!("cast operand is {:?}", operand);
let cast = bx.cx().layout_of(self.monomorphize(mir_cast_ty));
@@ -245,7 +240,7 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> {
}
};
let (lldata, llextra) =
- base::unsize_ptr(&mut bx, lldata, operand.layout.ty, cast.ty, llextra);
+ base::unsize_ptr(bx, lldata, operand.layout.ty, cast.ty, llextra);
OperandValue::Pair(lldata, llextra)
}
mir::CastKind::Pointer(PointerCast::MutToConstPointer)
@@ -278,7 +273,7 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> {
OperandValue::Pair(v, l) => (v, Some(l)),
};
let (lldata, llextra) =
- base::cast_to_dyn_star(&mut bx, lldata, operand.layout, cast.ty, llextra);
+ base::cast_to_dyn_star(bx, lldata, operand.layout, cast.ty, llextra);
OperandValue::Pair(lldata, llextra)
}
mir::CastKind::Pointer(
@@ -299,7 +294,7 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> {
let ll_t_out = bx.cx().immediate_backend_type(cast);
if operand.layout.abi.is_uninhabited() {
let val = OperandValue::Immediate(bx.cx().const_undef(ll_t_out));
- return (bx, OperandRef { val, layout: cast });
+ return OperandRef { val, layout: cast };
}
let r_t_in =
CastTy::from_ty(operand.layout.ty).expect("bad input type for cast");
@@ -348,7 +343,7 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> {
OperandValue::Immediate(newval)
}
};
- (bx, OperandRef { val, layout: cast })
+ OperandRef { val, layout: cast }
}
mir::Rvalue::Ref(_, bk, place) => {
@@ -361,10 +356,7 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> {
self.codegen_place_to_pointer(bx, place, mk_ref)
}
- mir::Rvalue::CopyForDeref(place) => {
- let operand = self.codegen_operand(&mut bx, &Operand::Copy(place));
- (bx, operand)
- }
+ mir::Rvalue::CopyForDeref(place) => self.codegen_operand(bx, &Operand::Copy(place)),
mir::Rvalue::AddressOf(mutability, place) => {
let mk_ptr = move |tcx: TyCtxt<'tcx>, ty: Ty<'tcx>| {
tcx.mk_ptr(ty::TypeAndMut { ty, mutbl: mutability })
@@ -373,23 +365,22 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> {
}
mir::Rvalue::Len(place) => {
- let size = self.evaluate_array_len(&mut bx, place);
- let operand = OperandRef {
+ let size = self.evaluate_array_len(bx, place);
+ OperandRef {
val: OperandValue::Immediate(size),
layout: bx.cx().layout_of(bx.tcx().types.usize),
- };
- (bx, operand)
+ }
}
mir::Rvalue::BinaryOp(op, box (ref lhs, ref rhs)) => {
- let lhs = self.codegen_operand(&mut bx, lhs);
- let rhs = self.codegen_operand(&mut bx, rhs);
+ let lhs = self.codegen_operand(bx, lhs);
+ let rhs = self.codegen_operand(bx, rhs);
let llresult = match (lhs.val, rhs.val) {
(
OperandValue::Pair(lhs_addr, lhs_extra),
OperandValue::Pair(rhs_addr, rhs_extra),
) => self.codegen_fat_ptr_binop(
- &mut bx,
+ bx,
op,
lhs_addr,
lhs_extra,
@@ -399,22 +390,21 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> {
),
(OperandValue::Immediate(lhs_val), OperandValue::Immediate(rhs_val)) => {
- self.codegen_scalar_binop(&mut bx, op, lhs_val, rhs_val, lhs.layout.ty)
+ self.codegen_scalar_binop(bx, op, lhs_val, rhs_val, lhs.layout.ty)
}
_ => bug!(),
};
- let operand = OperandRef {
+ OperandRef {
val: OperandValue::Immediate(llresult),
layout: bx.cx().layout_of(op.ty(bx.tcx(), lhs.layout.ty, rhs.layout.ty)),
- };
- (bx, operand)
+ }
}
mir::Rvalue::CheckedBinaryOp(op, box (ref lhs, ref rhs)) => {
- let lhs = self.codegen_operand(&mut bx, lhs);
- let rhs = self.codegen_operand(&mut bx, rhs);
+ let lhs = self.codegen_operand(bx, lhs);
+ let rhs = self.codegen_operand(bx, rhs);
let result = self.codegen_scalar_checked_binop(
- &mut bx,
+ bx,
op,
lhs.immediate(),
rhs.immediate(),
@@ -422,13 +412,11 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> {
);
let val_ty = op.ty(bx.tcx(), lhs.layout.ty, rhs.layout.ty);
let operand_ty = bx.tcx().intern_tup(&[val_ty, bx.tcx().types.bool]);
- let operand = OperandRef { val: result, layout: bx.cx().layout_of(operand_ty) };
-
- (bx, operand)
+ OperandRef { val: result, layout: bx.cx().layout_of(operand_ty) }
}
mir::Rvalue::UnaryOp(op, ref operand) => {
- let operand = self.codegen_operand(&mut bx, operand);
+ let operand = self.codegen_operand(bx, operand);
let lloperand = operand.immediate();
let is_float = operand.layout.ty.is_floating_point();
let llval = match op {
@@ -441,22 +429,17 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> {
}
}
};
- (bx, OperandRef { val: OperandValue::Immediate(llval), layout: operand.layout })
+ OperandRef { val: OperandValue::Immediate(llval), layout: operand.layout }
}
mir::Rvalue::Discriminant(ref place) => {
let discr_ty = rvalue.ty(self.mir, bx.tcx());
let discr_ty = self.monomorphize(discr_ty);
- let discr = self
- .codegen_place(&mut bx, place.as_ref())
- .codegen_get_discr(&mut bx, discr_ty);
- (
- bx,
- OperandRef {
- val: OperandValue::Immediate(discr),
- layout: self.cx.layout_of(discr_ty),
- },
- )
+ let discr = self.codegen_place(bx, place.as_ref()).codegen_get_discr(bx, discr_ty);
+ OperandRef {
+ val: OperandValue::Immediate(discr),
+ layout: self.cx.layout_of(discr_ty),
+ }
}
mir::Rvalue::NullaryOp(null_op, ty) => {
@@ -469,36 +452,27 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> {
};
let val = bx.cx().const_usize(val);
let tcx = self.cx.tcx();
- (
- bx,
- OperandRef {
- val: OperandValue::Immediate(val),
- layout: self.cx.layout_of(tcx.types.usize),
- },
- )
+ OperandRef {
+ val: OperandValue::Immediate(val),
+ layout: self.cx.layout_of(tcx.types.usize),
+ }
}
mir::Rvalue::ThreadLocalRef(def_id) => {
assert!(bx.cx().tcx().is_static(def_id));
let static_ = bx.get_static(def_id);
let layout = bx.layout_of(bx.cx().tcx().static_ptr_ty(def_id));
- let operand = OperandRef::from_immediate_or_packed_pair(&mut bx, static_, layout);
- (bx, operand)
- }
- mir::Rvalue::Use(ref operand) => {
- let operand = self.codegen_operand(&mut bx, operand);
- (bx, operand)
+ OperandRef::from_immediate_or_packed_pair(bx, static_, layout)
}
+ mir::Rvalue::Use(ref operand) => self.codegen_operand(bx, operand),
mir::Rvalue::Repeat(..) | mir::Rvalue::Aggregate(..) => {
// According to `rvalue_creates_operand`, only ZST
// aggregate rvalues are allowed to be operands.
let ty = rvalue.ty(self.mir, self.cx.tcx());
- let operand =
- OperandRef::new_zst(&mut bx, self.cx.layout_of(self.monomorphize(ty)));
- (bx, operand)
+ OperandRef::new_zst(bx, self.cx.layout_of(self.monomorphize(ty)))
}
mir::Rvalue::ShallowInitBox(ref operand, content_ty) => {
- let operand = self.codegen_operand(&mut bx, operand);
+ let operand = self.codegen_operand(bx, operand);
let lloperand = operand.immediate();
let content_ty = self.monomorphize(content_ty);
@@ -506,8 +480,7 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> {
let llty_ptr = bx.cx().backend_type(box_layout);
let val = bx.pointercast(lloperand, llty_ptr);
- let operand = OperandRef { val: OperandValue::Immediate(val), layout: box_layout };
- (bx, operand)
+ OperandRef { val: OperandValue::Immediate(val), layout: box_layout }
}
}
}
@@ -531,11 +504,11 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> {
/// Codegen an `Rvalue::AddressOf` or `Rvalue::Ref`
fn codegen_place_to_pointer(
&mut self,
- mut bx: Bx,
+ bx: &mut Bx,
place: mir::Place<'tcx>,
mk_ptr_ty: impl FnOnce(TyCtxt<'tcx>, Ty<'tcx>) -> Ty<'tcx>,
- ) -> (Bx, OperandRef<'tcx, Bx::Value>) {
- let cg_place = self.codegen_place(&mut bx, place.as_ref());
+ ) -> OperandRef<'tcx, Bx::Value> {
+ let cg_place = self.codegen_place(bx, place.as_ref());
let ty = cg_place.layout.ty;
@@ -546,7 +519,7 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> {
} else {
OperandValue::Pair(cg_place.llval, cg_place.llextra.unwrap())
};
- (bx, OperandRef { val, layout: self.cx.layout_of(mk_ptr_ty(self.cx.tcx(), ty)) })
+ OperandRef { val, layout: self.cx.layout_of(mk_ptr_ty(self.cx.tcx(), ty)) }
}
pub fn codegen_scalar_binop(
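
Note: the changes to `rvalue.rs` (and to `statement.rs` below) are one mechanical refactor: codegen helpers now borrow the builder as `&mut Bx` instead of taking it by value and returning it. A toy illustration of the before/after calling convention, with a hypothetical `Builder` type:

// Shape of the refactor: instead of threading the builder by value, helpers
// just borrow it mutably and return only the value they produce.
struct Builder {
    insts: Vec<String>,
}

// Old style: every helper consumed and returned the builder.
fn emit_old(mut bx: Builder, s: &str) -> Builder {
    bx.insts.push(s.to_string());
    bx
}

// New style: borrow mutably, return nothing.
fn emit_new(bx: &mut Builder, s: &str) {
    bx.insts.push(s.to_string());
}

fn main() {
    let mut bx = Builder { insts: Vec::new() };
    bx = emit_old(bx, "a"); // caller must rebind the builder every call
    emit_new(&mut bx, "b"); // caller just passes `&mut bx`
    assert_eq!(bx.insts, ["a", "b"]);
}
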
diff --git a/compiler/rustc_codegen_ssa/src/mir/statement.rs b/compiler/rustc_codegen_ssa/src/mir/statement.rs
index 1db0fb3a6..19452c8cd 100644
--- a/compiler/rustc_codegen_ssa/src/mir/statement.rs
+++ b/compiler/rustc_codegen_ssa/src/mir/statement.rs
@@ -8,8 +8,8 @@ use crate::traits::*;
impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> {
#[instrument(level = "debug", skip(self, bx))]
- pub fn codegen_statement(&mut self, mut bx: Bx, statement: &mir::Statement<'tcx>) -> Bx {
- self.set_debug_loc(&mut bx, statement.source_info);
+ pub fn codegen_statement(&mut self, bx: &mut Bx, statement: &mir::Statement<'tcx>) {
+ self.set_debug_loc(bx, statement.source_info);
match statement.kind {
mir::StatementKind::Assign(box (ref place, ref rvalue)) => {
if let Some(index) = place.as_local() {
@@ -19,10 +19,9 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> {
self.codegen_rvalue_unsized(bx, cg_indirect_dest, rvalue)
}
LocalRef::Operand(None) => {
- let (mut bx, operand) = self.codegen_rvalue_operand(bx, rvalue);
+ let operand = self.codegen_rvalue_operand(bx, rvalue);
self.locals[index] = LocalRef::Operand(Some(operand));
- self.debug_introduce_local(&mut bx, index);
- bx
+ self.debug_introduce_local(bx, index);
}
LocalRef::Operand(Some(op)) => {
if !op.layout.is_zst() {
@@ -35,59 +34,52 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> {
// If the type is zero-sized, it's already been set here,
// but we still need to make sure we codegen the operand
- self.codegen_rvalue_operand(bx, rvalue).0
+ self.codegen_rvalue_operand(bx, rvalue);
}
}
} else {
- let cg_dest = self.codegen_place(&mut bx, place.as_ref());
- self.codegen_rvalue(bx, cg_dest, rvalue)
+ let cg_dest = self.codegen_place(bx, place.as_ref());
+ self.codegen_rvalue(bx, cg_dest, rvalue);
}
}
mir::StatementKind::SetDiscriminant { box ref place, variant_index } => {
- self.codegen_place(&mut bx, place.as_ref())
- .codegen_set_discr(&mut bx, variant_index);
- bx
+ self.codegen_place(bx, place.as_ref()).codegen_set_discr(bx, variant_index);
}
mir::StatementKind::Deinit(..) => {
// For now, don't codegen this to anything. In the future it may be worth
// experimenting with what kind of information we can emit to LLVM without hurting
// perf here
- bx
}
mir::StatementKind::StorageLive(local) => {
if let LocalRef::Place(cg_place) = self.locals[local] {
- cg_place.storage_live(&mut bx);
+ cg_place.storage_live(bx);
} else if let LocalRef::UnsizedPlace(cg_indirect_place) = self.locals[local] {
- cg_indirect_place.storage_live(&mut bx);
+ cg_indirect_place.storage_live(bx);
}
- bx
}
mir::StatementKind::StorageDead(local) => {
if let LocalRef::Place(cg_place) = self.locals[local] {
- cg_place.storage_dead(&mut bx);
+ cg_place.storage_dead(bx);
} else if let LocalRef::UnsizedPlace(cg_indirect_place) = self.locals[local] {
- cg_indirect_place.storage_dead(&mut bx);
+ cg_indirect_place.storage_dead(bx);
}
- bx
}
mir::StatementKind::Coverage(box ref coverage) => {
- self.codegen_coverage(&mut bx, coverage.clone(), statement.source_info.scope);
- bx
+ self.codegen_coverage(bx, coverage.clone(), statement.source_info.scope);
}
mir::StatementKind::Intrinsic(box NonDivergingIntrinsic::Assume(ref op)) => {
- let op_val = self.codegen_operand(&mut bx, op);
+ let op_val = self.codegen_operand(bx, op);
bx.assume(op_val.immediate());
- bx
}
mir::StatementKind::Intrinsic(box NonDivergingIntrinsic::CopyNonOverlapping(
mir::CopyNonOverlapping { ref count, ref src, ref dst },
)) => {
- let dst_val = self.codegen_operand(&mut bx, dst);
- let src_val = self.codegen_operand(&mut bx, src);
- let count = self.codegen_operand(&mut bx, count).immediate();
+ let dst_val = self.codegen_operand(bx, dst);
+ let src_val = self.codegen_operand(bx, src);
+ let count = self.codegen_operand(bx, count).immediate();
let pointee_layout = dst_val
.layout
- .pointee_info_at(&bx, rustc_target::abi::Size::ZERO)
+ .pointee_info_at(bx, rustc_target::abi::Size::ZERO)
.expect("Expected pointer");
let bytes = bx.mul(count, bx.const_usize(pointee_layout.size.bytes()));
@@ -95,12 +87,11 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> {
let dst = dst_val.immediate();
let src = src_val.immediate();
bx.memcpy(dst, align, src, align, bytes, crate::MemFlags::empty());
- bx
}
mir::StatementKind::FakeRead(..)
| mir::StatementKind::Retag { .. }
| mir::StatementKind::AscribeUserType(..)
- | mir::StatementKind::Nop => bx,
+ | mir::StatementKind::Nop => {}
}
}
}
diff --git a/compiler/rustc_codegen_ssa/src/mono_item.rs b/compiler/rustc_codegen_ssa/src/mono_item.rs
index 5006a2157..27da33581 100644
--- a/compiler/rustc_codegen_ssa/src/mono_item.rs
+++ b/compiler/rustc_codegen_ssa/src/mono_item.rs
@@ -40,12 +40,12 @@ impl<'a, 'tcx: 'a> MonoItemExt<'a, 'tcx> for MonoItem<'tcx> {
.iter()
.map(|(op, op_sp)| match *op {
hir::InlineAsmOperand::Const { ref anon_const } => {
- let anon_const_def_id =
- cx.tcx().hir().local_def_id(anon_const.hir_id).to_def_id();
- let const_value =
- cx.tcx().const_eval_poly(anon_const_def_id).unwrap_or_else(
- |_| span_bug!(*op_sp, "asm const cannot be resolved"),
- );
+ let const_value = cx
+ .tcx()
+ .const_eval_poly(anon_const.def_id.to_def_id())
+ .unwrap_or_else(|_| {
+ span_bug!(*op_sp, "asm const cannot be resolved")
+ });
let ty = cx
.tcx()
.typeck_body(anon_const.body)
diff --git a/compiler/rustc_codegen_ssa/src/target_features.rs b/compiler/rustc_codegen_ssa/src/target_features.rs
index 83407ee8f..301683e8e 100644
--- a/compiler/rustc_codegen_ssa/src/target_features.rs
+++ b/compiler/rustc_codegen_ssa/src/target_features.rs
@@ -179,6 +179,7 @@ const X86_ALLOWED_FEATURES: &[(&str, Option<Symbol>)] = &[
("f16c", Some(sym::f16c_target_feature)),
("fma", None),
("fxsr", None),
+ ("gfni", Some(sym::avx512_target_feature)),
("lzcnt", None),
("movbe", Some(sym::movbe_target_feature)),
("pclmulqdq", None),
@@ -195,6 +196,8 @@ const X86_ALLOWED_FEATURES: &[(&str, Option<Symbol>)] = &[
("sse4a", Some(sym::sse4a_target_feature)),
("ssse3", None),
("tbm", Some(sym::tbm_target_feature)),
+ ("vaes", Some(sym::avx512_target_feature)),
+ ("vpclmulqdq", Some(sym::avx512_target_feature)),
("xsave", None),
("xsavec", None),
("xsaveopt", None),
@@ -212,6 +215,7 @@ const HEXAGON_ALLOWED_FEATURES: &[(&str, Option<Symbol>)] = &[
const POWERPC_ALLOWED_FEATURES: &[(&str, Option<Symbol>)] = &[
// tidy-alphabetical-start
("altivec", Some(sym::powerpc_target_feature)),
+ ("power10-vector", Some(sym::powerpc_target_feature)),
("power8-altivec", Some(sym::powerpc_target_feature)),
("power8-vector", Some(sym::powerpc_target_feature)),
("power9-altivec", Some(sym::powerpc_target_feature)),
@@ -267,6 +271,7 @@ const WASM_ALLOWED_FEATURES: &[(&str, Option<Symbol>)] = &[
// tidy-alphabetical-start
("atomics", Some(sym::wasm_target_feature)),
("bulk-memory", Some(sym::wasm_target_feature)),
+ ("multivalue", Some(sym::wasm_target_feature)),
("mutable-globals", Some(sym::wasm_target_feature)),
("nontrapping-fptoint", Some(sym::wasm_target_feature)),
("reference-types", Some(sym::wasm_target_feature)),
diff --git a/compiler/rustc_codegen_ssa/src/traits/backend.rs b/compiler/rustc_codegen_ssa/src/traits/backend.rs
index 87e347c61..5c35070ea 100644
--- a/compiler/rustc_codegen_ssa/src/traits/backend.rs
+++ b/compiler/rustc_codegen_ssa/src/traits/backend.rs
@@ -119,7 +119,7 @@ pub trait ExtraBackendMethods: CodegenBackend + WriteBackendMethods + Sized + Se
tcx: TyCtxt<'tcx>,
module_name: &str,
kind: AllocatorKind,
- has_alloc_error_handler: bool,
+ alloc_error_handler_kind: AllocatorKind,
) -> Self::Module;
/// This generates the codegen unit and returns it along with
/// a `u64` giving an estimate of the unit's processing cost.
diff --git a/compiler/rustc_codegen_ssa/src/traits/builder.rs b/compiler/rustc_codegen_ssa/src/traits/builder.rs
index 01408f39f..bc679a5dc 100644
--- a/compiler/rustc_codegen_ssa/src/traits/builder.rs
+++ b/compiler/rustc_codegen_ssa/src/traits/builder.rs
@@ -151,11 +151,11 @@ pub trait BuilderMethods<'a, 'tcx>:
/// Called for Rvalue::Repeat when the elem is neither a ZST nor optimizable using memset.
fn write_operand_repeatedly(
- self,
+ &mut self,
elem: OperandRef<'tcx, Self::Value>,
count: u64,
dest: PlaceRef<'tcx, Self::Value>,
- ) -> Self;
+ );
fn range_metadata(&mut self, load: Self::Value, range: WrappingRange);
fn nonnull_metadata(&mut self, load: Self::Value);
diff --git a/compiler/rustc_codegen_ssa/src/traits/debuginfo.rs b/compiler/rustc_codegen_ssa/src/traits/debuginfo.rs
index f310789d1..63fecaf34 100644
--- a/compiler/rustc_codegen_ssa/src/traits/debuginfo.rs
+++ b/compiler/rustc_codegen_ssa/src/traits/debuginfo.rs
@@ -6,6 +6,8 @@ use rustc_span::{SourceFile, Span, Symbol};
use rustc_target::abi::call::FnAbi;
use rustc_target::abi::Size;
+use std::ops::Range;
+
pub trait DebugInfoMethods<'tcx>: BackendTypes {
fn create_vtable_debuginfo(
&self,
@@ -72,6 +74,9 @@ pub trait DebugInfoBuilderMethods: BackendTypes {
direct_offset: Size,
// NB: each offset implies a deref (i.e. they're steps in a pointer chain).
indirect_offsets: &[Size],
+ // Byte range in the `dbg_var` covered by this fragment,
+ // if this is a fragment of a composite `DIVariable`.
+ fragment: Option<Range<Size>>,
);
fn set_dbg_loc(&mut self, dbg_loc: Self::DILocation);
fn insert_reference_to_gdb_debug_scripts_section_global(&mut self);
diff --git a/compiler/rustc_codegen_ssa/src/traits/type_.rs b/compiler/rustc_codegen_ssa/src/traits/type_.rs
index bdc6a91cf..86481d5d7 100644
--- a/compiler/rustc_codegen_ssa/src/traits/type_.rs
+++ b/compiler/rustc_codegen_ssa/src/traits/type_.rs
@@ -22,6 +22,7 @@ pub trait BaseTypeMethods<'tcx>: Backend<'tcx> {
fn type_f32(&self) -> Self::Type;
fn type_f64(&self) -> Self::Type;
+ fn type_array(&self, ty: Self::Type, len: u64) -> Self::Type;
fn type_func(&self, args: &[Self::Type], ret: Self::Type) -> Self::Type;
fn type_struct(&self, els: &[Self::Type], packed: bool) -> Self::Type;
fn type_kind(&self, ty: Self::Type) -> TypeKind;
diff --git a/compiler/rustc_const_eval/Cargo.toml b/compiler/rustc_const_eval/Cargo.toml
index e09a6d1d6..51489e293 100644
--- a/compiler/rustc_const_eval/Cargo.toml
+++ b/compiler/rustc_const_eval/Cargo.toml
@@ -7,6 +7,7 @@ edition = "2021"
[dependencies]
tracing = "0.1"
+either = "1"
rustc_apfloat = { path = "../rustc_apfloat" }
rustc_ast = { path = "../rustc_ast" }
rustc_attr = { path = "../rustc_attr" }
diff --git a/compiler/rustc_const_eval/src/const_eval/error.rs b/compiler/rustc_const_eval/src/const_eval/error.rs
index 4977a5d6b..c60d6e4fe 100644
--- a/compiler/rustc_const_eval/src/const_eval/error.rs
+++ b/compiler/rustc_const_eval/src/const_eval/error.rs
@@ -55,7 +55,7 @@ impl Error for ConstEvalErrKind {}
/// When const-evaluation errors, this type is constructed with the resulting information,
/// and then used to emit the error as a lint or hard error.
#[derive(Debug)]
-pub struct ConstEvalErr<'tcx> {
+pub(super) struct ConstEvalErr<'tcx> {
pub span: Span,
pub error: InterpError<'tcx>,
pub stacktrace: Vec<FrameInfo<'tcx>>,
@@ -82,8 +82,8 @@ impl<'tcx> ConstEvalErr<'tcx> {
ConstEvalErr { error: error.into_kind(), stacktrace, span }
}
- pub fn report_as_error(&self, tcx: TyCtxtAt<'tcx>, message: &str) -> ErrorHandled {
- self.struct_error(tcx, message, |_| {})
+ pub(super) fn report(&self, tcx: TyCtxtAt<'tcx>, message: &str) -> ErrorHandled {
+ self.report_decorated(tcx, message, |_| {})
}
/// Create a diagnostic for this const eval error.
@@ -95,7 +95,7 @@ impl<'tcx> ConstEvalErr<'tcx> {
/// If `lint_root.is_some()` report it as a lint, else report it as a hard error.
/// (Except that for some errors, we ignore all that -- see `must_error` below.)
#[instrument(skip(self, tcx, decorate), level = "debug")]
- pub fn struct_error(
+ pub(super) fn report_decorated(
&self,
tcx: TyCtxtAt<'tcx>,
message: &str,
@@ -123,14 +123,14 @@ impl<'tcx> ConstEvalErr<'tcx> {
// Helper closure to print duplicated lines.
let mut flush_last_line = |last_frame, times| {
if let Some((line, span)) = last_frame {
- err.span_label(span, &line);
+ err.span_note(span, &line);
// Don't print [... additional calls ...] if the number of lines is small
if times < 3 {
for _ in 0..times {
- err.span_label(span, &line);
+ err.span_note(span, &line);
}
} else {
- err.span_label(
+ err.span_note(
span,
format!("[... {} additional calls {} ...]", times, &line),
);
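
Note: the frames emitted through `flush_last_line` above collapse long runs of identical calls into a single `[... N additional calls ...]` note. A self-contained sketch of that deduplication idea, with plain strings standing in for spans and diagnostics:

// A run of identical frames is either repeated verbatim (when short) or
// summarized with an "additional calls" marker.
fn render_backtrace(frames: &[&str]) -> Vec<String> {
    let mut out = Vec::new();
    let mut iter = frames.iter().peekable();
    while let Some(frame) = iter.next() {
        // Count how many times this frame repeats immediately after.
        let mut times = 0;
        while iter.peek() == Some(&frame) {
            iter.next();
            times += 1;
        }
        out.push(frame.to_string());
        if times > 0 && times < 3 {
            for _ in 0..times {
                out.push(frame.to_string());
            }
        } else if times >= 3 {
            out.push(format!("[... {times} additional calls {frame} ...]"));
        }
    }
    out
}

fn main() {
    let rendered = render_backtrace(&[
        "inside `f`", "inside `g`", "inside `g`",
        "inside `g`", "inside `g`", "inside `h`",
    ]);
    assert_eq!(rendered.len(), 4); // f, g, summary marker, h
    assert!(rendered[2].contains("3 additional calls"));
}
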
diff --git a/compiler/rustc_const_eval/src/const_eval/eval_queries.rs b/compiler/rustc_const_eval/src/const_eval/eval_queries.rs
index 1b1052fdf..c27790d88 100644
--- a/compiler/rustc_const_eval/src/const_eval/eval_queries.rs
+++ b/compiler/rustc_const_eval/src/const_eval/eval_queries.rs
@@ -1,10 +1,7 @@
-use super::{CompileTimeEvalContext, CompileTimeInterpreter, ConstEvalErr};
-use crate::interpret::eval_nullary_intrinsic;
-use crate::interpret::{
- intern_const_alloc_recursive, Allocation, ConstAlloc, ConstValue, CtfeValidationMode, GlobalId,
- Immediate, InternKind, InterpCx, InterpError, InterpResult, MPlaceTy, MemoryKind, OpTy,
- RefTracking, StackPopCleanup,
-};
+use std::borrow::Cow;
+use std::convert::TryInto;
+
+use either::{Left, Right};
use rustc_hir::def::DefKind;
use rustc_middle::mir;
@@ -16,8 +13,14 @@ use rustc_middle::ty::print::with_no_trimmed_paths;
use rustc_middle::ty::{self, TyCtxt};
use rustc_span::source_map::Span;
use rustc_target::abi::{self, Abi};
-use std::borrow::Cow;
-use std::convert::TryInto;
+
+use super::{CompileTimeEvalContext, CompileTimeInterpreter, ConstEvalErr};
+use crate::interpret::eval_nullary_intrinsic;
+use crate::interpret::{
+ intern_const_alloc_recursive, Allocation, ConstAlloc, ConstValue, CtfeValidationMode, GlobalId,
+ Immediate, InternKind, InterpCx, InterpError, InterpResult, MPlaceTy, MemoryKind, OpTy,
+ RefTracking, StackPopCleanup,
+};
const NOTE_ON_UNDEFINED_BEHAVIOR_ERROR: &str = "The rules on what exactly is undefined behavior aren't clear, \
so this check might be overzealous. Please open an issue on the rustc \
@@ -46,7 +49,7 @@ fn eval_body_using_ecx<'mir, 'tcx>(
ecx.tcx.def_kind(cid.instance.def_id())
);
let layout = ecx.layout_of(body.bound_return_ty().subst(tcx, cid.instance.substs))?;
- assert!(!layout.is_unsized());
+ assert!(layout.is_sized());
let ret = ecx.allocate(layout, MemoryKind::Stack)?;
trace!(
@@ -63,7 +66,7 @@ fn eval_body_using_ecx<'mir, 'tcx>(
)?;
// The main interpreter loop.
- ecx.run()?;
+ while ecx.step()? {}
// Intern the result
let intern_kind = if cid.promoted.is_some() {
@@ -135,14 +138,14 @@ pub(super) fn op_to_const<'tcx>(
_ => false,
};
let immediate = if try_as_immediate {
- Err(ecx.read_immediate(op).expect("normalization works on validated constants"))
+ Right(ecx.read_immediate(op).expect("normalization works on validated constants"))
} else {
// It is guaranteed that any non-slice scalar pair is actually ByRef here.
// When we come back from raw const eval, we are always by-ref. The only way our op here is
// by-val is if we are in destructure_mir_constant, i.e., if this is (a field of) something that we
// "tried to make immediate" before. We wouldn't do that for non-slice scalar pairs or
// structs containing such.
- op.try_as_mplace()
+ op.as_mplace_or_imm()
};
debug!(?immediate);
@@ -168,9 +171,9 @@ pub(super) fn op_to_const<'tcx>(
}
};
match immediate {
- Ok(ref mplace) => to_const_value(mplace),
+ Left(ref mplace) => to_const_value(mplace),
// see comment on `let try_as_immediate` above
- Err(imm) => match *imm {
+ Right(imm) => match *imm {
_ if imm.layout.is_zst() => ConstValue::ZeroSized,
Immediate::Scalar(x) => ConstValue::Scalar(x),
Immediate::ScalarPair(a, b) => {
@@ -255,7 +258,7 @@ pub fn eval_to_const_value_raw_provider<'tcx>(
return eval_nullary_intrinsic(tcx, key.param_env, def_id, substs).map_err(|error| {
let span = tcx.def_span(def_id);
let error = ConstEvalErr { error: error.into_kind(), stacktrace: vec![], span };
- error.report_as_error(tcx.at(span), "could not evaluate nullary intrinsic")
+ error.report(tcx.at(span), "could not evaluate nullary intrinsic")
});
}
@@ -333,7 +336,7 @@ pub fn eval_to_allocation_raw_provider<'tcx>(
}
};
- Err(err.report_as_error(ecx.tcx.at(err.span), &msg))
+ Err(err.report(ecx.tcx.at(err.span), &msg))
}
Ok(mplace) => {
// Since evaluation had no errors, validate the resulting constant.
@@ -358,7 +361,7 @@ pub fn eval_to_allocation_raw_provider<'tcx>(
if let Err(error) = validation {
// Validation failed, report an error. This is always a hard error.
let err = ConstEvalErr::new(&ecx, error, None);
- Err(err.struct_error(
+ Err(err.report_decorated(
ecx.tcx,
"it is undefined behavior to use this value",
|diag| {
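
Note: `op_to_const` above now uses the `either` crate (added to `Cargo.toml` earlier in this diff) instead of abusing `Result`, so neither branch reads as an error condition. A small sketch of that pattern with toy stand-ins for `MPlaceTy`/`ImmTy`; it requires the `either` crate:

use either::{Either, Left, Right};

struct Place(u64);
struct Immediate(u64);

// Neither outcome is a failure, so `Either` says so directly.
fn as_mplace_or_imm(val: u64, prefer_imm: bool) -> Either<Place, Immediate> {
    if prefer_imm { Right(Immediate(val)) } else { Left(Place(val)) }
}

fn to_const(val: u64, prefer_imm: bool) -> u64 {
    match as_mplace_or_imm(val, prefer_imm) {
        Left(place) => place.0,  // by-ref constant
        Right(imm) => imm.0,     // immediate scalar (or scalar pair)
    }
}

fn main() {
    assert_eq!(to_const(7, true), 7);
    assert_eq!(to_const(7, false), 7);
}
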
diff --git a/compiler/rustc_const_eval/src/const_eval/machine.rs b/compiler/rustc_const_eval/src/const_eval/machine.rs
index 35d58d2f6..3dfded2d9 100644
--- a/compiler/rustc_const_eval/src/const_eval/machine.rs
+++ b/compiler/rustc_const_eval/src/const_eval/machine.rs
@@ -1,8 +1,12 @@
use rustc_hir::def::DefKind;
+use rustc_hir::LangItem;
use rustc_middle::mir;
+use rustc_middle::mir::interpret::PointerArithmetic;
+use rustc_middle::ty::layout::FnAbiOf;
use rustc_middle::ty::{self, Ty, TyCtxt};
use std::borrow::Borrow;
use std::hash::Hash;
+use std::ops::ControlFlow;
use rustc_data_structures::fx::FxIndexMap;
use rustc_data_structures::fx::IndexEntry;
@@ -17,58 +21,12 @@ use rustc_target::abi::{Align, Size};
use rustc_target::spec::abi::Abi as CallAbi;
use crate::interpret::{
- self, compile_time_machine, AllocId, ConstAllocation, Frame, ImmTy, InterpCx, InterpResult,
- OpTy, PlaceTy, Pointer, Scalar, StackPopUnwind,
+ self, compile_time_machine, AllocId, ConstAllocation, FnVal, Frame, ImmTy, InterpCx,
+ InterpResult, OpTy, PlaceTy, Pointer, Scalar, StackPopUnwind,
};
use super::error::*;
-impl<'mir, 'tcx> InterpCx<'mir, 'tcx, CompileTimeInterpreter<'mir, 'tcx>> {
- /// "Intercept" a function call to a panic-related function
- /// because we have something special to do for it.
- /// If this returns successfully (`Ok`), the function should just be evaluated normally.
- fn hook_special_const_fn(
- &mut self,
- instance: ty::Instance<'tcx>,
- args: &[OpTy<'tcx>],
- ) -> InterpResult<'tcx, Option<ty::Instance<'tcx>>> {
- // All `#[rustc_do_not_const_check]` functions should be hooked here.
- let def_id = instance.def_id();
-
- if Some(def_id) == self.tcx.lang_items().panic_display()
- || Some(def_id) == self.tcx.lang_items().begin_panic_fn()
- {
- // &str or &&str
- assert!(args.len() == 1);
-
- let mut msg_place = self.deref_operand(&args[0])?;
- while msg_place.layout.ty.is_ref() {
- msg_place = self.deref_operand(&msg_place.into())?;
- }
-
- let msg = Symbol::intern(self.read_str(&msg_place)?);
- let span = self.find_closest_untracked_caller_location();
- let (file, line, col) = self.location_triple_for_span(span);
- return Err(ConstEvalErrKind::Panic { msg, file, line, col }.into());
- } else if Some(def_id) == self.tcx.lang_items().panic_fmt() {
- // For panic_fmt, call const_panic_fmt instead.
- if let Some(const_panic_fmt) = self.tcx.lang_items().const_panic_fmt() {
- return Ok(Some(
- ty::Instance::resolve(
- *self.tcx,
- ty::ParamEnv::reveal_all(),
- const_panic_fmt,
- self.tcx.intern_substs(&[]),
- )
- .unwrap()
- .unwrap(),
- ));
- }
- }
- Ok(None)
- }
-}
-
/// Extra machine state for CTFE, and the Machine instance
pub struct CompileTimeInterpreter<'mir, 'tcx> {
/// For now, the number of terminators that can be evaluated before we throw a resource
@@ -191,6 +149,125 @@ impl interpret::MayLeak for ! {
}
impl<'mir, 'tcx: 'mir> CompileTimeEvalContext<'mir, 'tcx> {
+ /// "Intercept" a function call, because we have something special to do for it.
+ /// All `#[rustc_do_not_const_check]` functions should be hooked here.
+ /// If this returns `Some` function, which may be `instance` or a different function with
+ /// compatible arguments, then evaluation should continue with that function.
+ /// If this returns `None`, the function call has been handled and the function has returned.
+ fn hook_special_const_fn(
+ &mut self,
+ instance: ty::Instance<'tcx>,
+ args: &[OpTy<'tcx>],
+ dest: &PlaceTy<'tcx>,
+ ret: Option<mir::BasicBlock>,
+ ) -> InterpResult<'tcx, Option<ty::Instance<'tcx>>> {
+ let def_id = instance.def_id();
+
+ if Some(def_id) == self.tcx.lang_items().panic_display()
+ || Some(def_id) == self.tcx.lang_items().begin_panic_fn()
+ {
+ // &str or &&str
+ assert!(args.len() == 1);
+
+ let mut msg_place = self.deref_operand(&args[0])?;
+ while msg_place.layout.ty.is_ref() {
+ msg_place = self.deref_operand(&msg_place.into())?;
+ }
+
+ let msg = Symbol::intern(self.read_str(&msg_place)?);
+ let span = self.find_closest_untracked_caller_location();
+ let (file, line, col) = self.location_triple_for_span(span);
+ return Err(ConstEvalErrKind::Panic { msg, file, line, col }.into());
+ } else if Some(def_id) == self.tcx.lang_items().panic_fmt() {
+ // For panic_fmt, call const_panic_fmt instead.
+ let const_def_id = self.tcx.require_lang_item(LangItem::ConstPanicFmt, None);
+ let new_instance = ty::Instance::resolve(
+ *self.tcx,
+ ty::ParamEnv::reveal_all(),
+ const_def_id,
+ instance.substs,
+ )
+ .unwrap()
+ .unwrap();
+
+ return Ok(Some(new_instance));
+ } else if Some(def_id) == self.tcx.lang_items().align_offset_fn() {
+ // For align_offset, we replace the function call if the pointer has no address.
+ match self.align_offset(instance, args, dest, ret)? {
+ ControlFlow::Continue(()) => return Ok(Some(instance)),
+ ControlFlow::Break(()) => return Ok(None),
+ }
+ }
+ Ok(Some(instance))
+ }
+
+ /// `align_offset(ptr, target_align)` needs special handling in const eval, because the pointer
+ /// may not have an address.
+ ///
+ /// If `ptr` does have a known address, then we return `CONTINUE` and the function call should
+ /// proceed as normal.
+ ///
+ /// If `ptr` doesn't have an address, but its underlying allocation's alignment is at most
+ /// `target_align`, then we call the function again with a dummy address relative to the
+ /// `target_align`, then we call the function again with a dummy address relative to the
+ /// allocation.
+ ///
+ /// If `ptr` doesn't have an address and `target_align` is stricter than the underlying
+ /// allocation's alignment, then we return `usize::MAX` immediately.
+ fn align_offset(
+ &mut self,
+ instance: ty::Instance<'tcx>,
+ args: &[OpTy<'tcx>],
+ dest: &PlaceTy<'tcx>,
+ ret: Option<mir::BasicBlock>,
+ ) -> InterpResult<'tcx, ControlFlow<()>> {
+ assert_eq!(args.len(), 2);
+
+ let ptr = self.read_pointer(&args[0])?;
+ let target_align = self.read_scalar(&args[1])?.to_machine_usize(self)?;
+
+ if !target_align.is_power_of_two() {
+ throw_ub_format!("`align_offset` called with non-power-of-two align: {}", target_align);
+ }
+
+ match self.ptr_try_get_alloc_id(ptr) {
+ Ok((alloc_id, offset, _extra)) => {
+ let (_size, alloc_align, _kind) = self.get_alloc_info(alloc_id);
+
+ if target_align <= alloc_align.bytes() {
+ // Extract the address relative to the allocation base that is definitely
+ // sufficiently aligned and call `align_offset` again.
+ let addr = ImmTy::from_uint(offset.bytes(), args[0].layout).into();
+ let align = ImmTy::from_uint(target_align, args[1].layout).into();
+ let fn_abi = self.fn_abi_of_instance(instance, ty::List::empty())?;
+
+ // We replace the entire function call with a "tail call".
+ // Note that this happens before the frame of the original function
+ // is pushed on the stack.
+ self.eval_fn_call(
+ FnVal::Instance(instance),
+ (CallAbi::Rust, fn_abi),
+ &[addr, align],
+ /* with_caller_location = */ false,
+ dest,
+ ret,
+ StackPopUnwind::NotAllowed,
+ )?;
+ Ok(ControlFlow::BREAK)
+ } else {
+ // Not alignable in const, return `usize::MAX`.
+ let usize_max = Scalar::from_machine_usize(self.machine_usize_max(), self);
+ self.write_scalar(usize_max, dest)?;
+ self.return_to_block(ret)?;
+ Ok(ControlFlow::BREAK)
+ }
+ }
+ Err(_addr) => {
+ // The pointer has an address, continue with function call.
+ Ok(ControlFlow::CONTINUE)
+ }
+ }
+ }
+
/// See documentation on the `ptr_guaranteed_cmp` intrinsic.
fn guaranteed_cmp(&mut self, a: Scalar, b: Scalar) -> InterpResult<'tcx, u8> {
Ok(match (a, b) {
@@ -271,8 +348,8 @@ impl<'mir, 'tcx> interpret::Machine<'mir, 'tcx> for CompileTimeInterpreter<'mir,
instance: ty::Instance<'tcx>,
_abi: CallAbi,
args: &[OpTy<'tcx>],
- _dest: &PlaceTy<'tcx>,
- _ret: Option<mir::BasicBlock>,
+ dest: &PlaceTy<'tcx>,
+ ret: Option<mir::BasicBlock>,
_unwind: StackPopUnwind, // unwinding is not supported in consts
) -> InterpResult<'tcx, Option<(&'mir mir::Body<'tcx>, ty::Instance<'tcx>)>> {
debug!("find_mir_or_eval_fn: {:?}", instance);
@@ -291,7 +368,11 @@ impl<'mir, 'tcx> interpret::Machine<'mir, 'tcx> for CompileTimeInterpreter<'mir,
}
}
- if let Some(new_instance) = ecx.hook_special_const_fn(instance, args)? {
+ let Some(new_instance) = ecx.hook_special_const_fn(instance, args, dest, ret)? else {
+ return Ok(None);
+ };
+
+ if new_instance != instance {
// We call another const fn instead.
// However, we return the *original* instance to make backtraces work out
// (and we hope this does not confuse the FnAbi checks too much).
@@ -300,13 +381,14 @@ impl<'mir, 'tcx> interpret::Machine<'mir, 'tcx> for CompileTimeInterpreter<'mir,
new_instance,
_abi,
args,
- _dest,
- _ret,
+ dest,
+ ret,
_unwind,
)?
.map(|(body, _instance)| (body, instance)));
}
}
+
// This is a const fn. Call it.
Ok(Some((ecx.load_mir(instance.def, None)?, instance)))
}
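
Note: the new `align_offset` hook makes the three-way decision documented in its comments: pointers with a real address are evaluated normally, pointers into a sufficiently aligned allocation are re-evaluated against a dummy in-allocation address, and everything else returns `usize::MAX`. A plain-Rust sketch of just that decision (`Outcome` and the parameter names are hypothetical; addresses, offsets and alignments are plain byte values):

#[derive(Debug, PartialEq)]
enum Outcome {
    /// The pointer has a concrete address: evaluate `align_offset` as usual.
    CallNormally,
    /// No address, but the allocation is aligned enough: re-call the function
    /// with the in-allocation offset as a dummy address.
    RecallWithOffset(u64),
    /// No address and the requested alignment is stricter than the
    /// allocation's: the call immediately returns `usize::MAX`.
    ReturnUsizeMax,
}

fn const_align_offset(
    ptr_addr: Option<u64>, // known absolute address, if any
    alloc_align: u64,      // alignment of the underlying allocation
    offset_in_alloc: u64,  // pointer's offset within that allocation
    target_align: u64,
) -> Outcome {
    assert!(target_align.is_power_of_two(), "`align_offset` requires a power of two");
    match ptr_addr {
        Some(_) => Outcome::CallNormally,
        None if target_align <= alloc_align => Outcome::RecallWithOffset(offset_in_alloc),
        None => Outcome::ReturnUsizeMax,
    }
}

fn main() {
    // An 8-aligned allocation can answer requests up to align 8 by itself,
    // but not a 4096-byte alignment request.
    assert_eq!(const_align_offset(None, 8, 2, 4), Outcome::RecallWithOffset(2));
    assert_eq!(const_align_offset(None, 8, 2, 4096), Outcome::ReturnUsizeMax);
    assert_eq!(const_align_offset(Some(0x2000), 8, 0, 4096), Outcome::CallNormally);
}
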
diff --git a/compiler/rustc_const_eval/src/const_eval/mod.rs b/compiler/rustc_const_eval/src/const_eval/mod.rs
index 1c33e7845..01b2b4b5d 100644
--- a/compiler/rustc_const_eval/src/const_eval/mod.rs
+++ b/compiler/rustc_const_eval/src/const_eval/mod.rs
@@ -103,7 +103,7 @@ pub(crate) fn try_destructure_mir_constant<'tcx>(
) -> InterpResult<'tcx, mir::DestructuredConstant<'tcx>> {
trace!("destructure_mir_constant: {:?}", val);
let ecx = mk_eval_cx(tcx, DUMMY_SP, param_env, false);
- let op = ecx.const_to_op(&val, None)?;
+ let op = ecx.eval_mir_constant(&val, None, None)?;
// We go to `usize` as we cannot allocate anything bigger anyway.
let (field_count, variant, down) = match val.ty().kind() {
@@ -139,7 +139,7 @@ pub(crate) fn deref_mir_constant<'tcx>(
val: mir::ConstantKind<'tcx>,
) -> mir::ConstantKind<'tcx> {
let ecx = mk_eval_cx(tcx, DUMMY_SP, param_env, false);
- let op = ecx.const_to_op(&val, None).unwrap();
+ let op = ecx.eval_mir_constant(&val, None, None).unwrap();
let mplace = ecx.deref_operand(&op).unwrap();
if let Some(alloc_id) = mplace.ptr.provenance {
assert_eq!(
diff --git a/compiler/rustc_const_eval/src/interpret/eval_context.rs b/compiler/rustc_const_eval/src/interpret/eval_context.rs
index a9063ad31..0b2809f1d 100644
--- a/compiler/rustc_const_eval/src/interpret/eval_context.rs
+++ b/compiler/rustc_const_eval/src/interpret/eval_context.rs
@@ -2,10 +2,12 @@ use std::cell::Cell;
use std::fmt;
use std::mem;
+use either::{Either, Left, Right};
+
use rustc_hir::{self as hir, def_id::DefId, definitions::DefPathData};
use rustc_index::vec::IndexVec;
use rustc_middle::mir;
-use rustc_middle::mir::interpret::{InterpError, InvalidProgramInfo};
+use rustc_middle::mir::interpret::{ErrorHandled, InterpError, InvalidProgramInfo};
use rustc_middle::ty::layout::{
self, FnAbiError, FnAbiOfHelpers, FnAbiRequest, LayoutError, LayoutOf, LayoutOfHelpers,
TyAndLayout,
@@ -15,7 +17,7 @@ use rustc_middle::ty::{
};
use rustc_mir_dataflow::storage::always_storage_live_locals;
use rustc_session::Limit;
-use rustc_span::{Pos, Span};
+use rustc_span::Span;
use rustc_target::abi::{call::FnAbi, Align, HasDataLayout, Size, TargetDataLayout};
use super::{
@@ -23,7 +25,7 @@ use super::{
MemPlaceMeta, Memory, MemoryKind, Operand, Place, PlaceTy, PointerArithmetic, Provenance,
Scalar, StackPopJump,
};
-use crate::transform::validate::equal_up_to_regions;
+use crate::util;
pub struct InterpCx<'mir, 'tcx, M: Machine<'mir, 'tcx>> {
/// Stores the `Machine` instance.
@@ -121,13 +123,12 @@ pub struct Frame<'mir, 'tcx, Prov: Provenance = AllocId, Extra = ()> {
////////////////////////////////////////////////////////////////////////////////
// Current position within the function
////////////////////////////////////////////////////////////////////////////////
- /// If this is `Err`, we are not currently executing any particular statement in
+ /// If this is `Right`, we are not currently executing any particular statement in
/// this frame (can happen e.g. during frame initialization, and during unwinding on
/// frames without cleanup code).
- /// We basically abuse `Result` as `Either`.
///
/// Needs to be public because ConstProp does unspeakable things to it.
- pub loc: Result<mir::Location, Span>,
+ pub loc: Either<mir::Location, Span>,
}
/// What we store about a frame in an interpreter backtrace.
@@ -227,25 +228,24 @@ impl<'mir, 'tcx, Prov: Provenance> Frame<'mir, 'tcx, Prov> {
impl<'mir, 'tcx, Prov: Provenance, Extra> Frame<'mir, 'tcx, Prov, Extra> {
/// Get the current location within the Frame.
///
- /// If this is `Err`, we are not currently executing any particular statement in
+ /// If this is `Right`, we are not currently executing any particular statement in
/// this frame (can happen e.g. during frame initialization, and during unwinding on
/// frames without cleanup code).
- /// We basically abuse `Result` as `Either`.
///
/// Used by priroda.
- pub fn current_loc(&self) -> Result<mir::Location, Span> {
+ pub fn current_loc(&self) -> Either<mir::Location, Span> {
self.loc
}
/// Return the `SourceInfo` of the current instruction.
pub fn current_source_info(&self) -> Option<&mir::SourceInfo> {
- self.loc.ok().map(|loc| self.body.source_info(loc))
+ self.loc.left().map(|loc| self.body.source_info(loc))
}
pub fn current_span(&self) -> Span {
match self.loc {
- Ok(loc) => self.body.source_info(loc).span,
- Err(span) => span,
+ Left(loc) => self.body.source_info(loc).span,
+ Right(span) => span,
}
}
}
@@ -256,25 +256,13 @@ impl<'tcx> fmt::Display for FrameInfo<'tcx> {
if tcx.def_key(self.instance.def_id()).disambiguated_data.data
== DefPathData::ClosureExpr
{
- write!(f, "inside closure")?;
+ write!(f, "inside closure")
} else {
// Note: this triggers a `good_path_bug` state, which means that if we ever get here
// we must emit a diagnostic. We should never display a `FrameInfo` unless we
// actually want to emit a warning or error to the user.
- write!(f, "inside `{}`", self.instance)?;
- }
- if !self.span.is_dummy() {
- let sm = tcx.sess.source_map();
- let lo = sm.lookup_char_pos(self.span.lo());
- write!(
- f,
- " at {}:{}:{}",
- sm.filename_for_diagnostics(&lo.file.name),
- lo.line,
- lo.col.to_usize() + 1
- )?;
+ write!(f, "inside `{}`", self.instance)
}
- Ok(())
})
}
}
@@ -354,8 +342,8 @@ pub(super) fn mir_assign_valid_types<'tcx>(
// Type-changing assignments can happen when subtyping is used. While
// all normal lifetimes are erased, higher-ranked types with their
// late-bound lifetimes are still around and can lead to type
- // differences. So we compare ignoring lifetimes.
- if equal_up_to_regions(tcx, param_env, src.ty, dest.ty) {
+ // differences.
+ if util::is_subtype(tcx, param_env, src.ty, dest.ty) {
// Make sure the layout is equal, too -- just to be safe. Miri really
// needs layout equality. For performance reason we skip this check when
// the types are equal. Equal types *can* have different layouts when
@@ -572,7 +560,7 @@ impl<'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
metadata: &MemPlaceMeta<M::Provenance>,
layout: &TyAndLayout<'tcx>,
) -> InterpResult<'tcx, Option<(Size, Align)>> {
- if !layout.is_unsized() {
+ if layout.is_sized() {
return Ok(Some((layout.size, layout.align.abi)));
}
match layout.ty.kind() {
@@ -598,7 +586,7 @@ impl<'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
// the last field). Can't have foreign types here, how would we
// adjust alignment and size for them?
let field = layout.field(self, layout.fields.count() - 1);
- let Some((unsized_size, unsized_align)) = self.size_and_align_of(metadata, &field)? else {
+ let Some((unsized_size, mut unsized_align)) = self.size_and_align_of(metadata, &field)? else {
// A field with an extern type. We don't know the actual dynamic size
// or the alignment.
return Ok(None);
@@ -614,6 +602,13 @@ impl<'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
// Return the sum of sizes and max of aligns.
let size = sized_size + unsized_size; // `Size` addition
+ // Packed types ignore the alignment of their fields.
+ if let ty::Adt(def, _) = layout.ty.kind() {
+ if def.repr().packed() {
+ unsized_align = sized_align;
+ }
+ }
+
// Choose max of two known alignments (combined value must
// be aligned according to more restrictive of the two).
let align = sized_align.max(unsized_align);
@@ -669,10 +664,14 @@ impl<'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
return_to_block: StackPopCleanup,
) -> InterpResult<'tcx> {
trace!("body: {:#?}", body);
+ // Clobber the previous return place contents; nobody is supposed to be able to see them any more.
+ // This also checks dereferenceable, but not align. We rely on all constructed places being
+ // sufficiently aligned (in particular we rely on `deref_operand` checking alignment).
+ self.write_uninit(return_place)?;
// first push a stack frame so we have access to the local substs
let pre_frame = Frame {
body,
- loc: Err(body.span), // Span used for errors caused during preamble.
+ loc: Right(body.span), // Span used for errors caused during preamble.
return_to_block,
return_place: return_place.clone(),
// empty local array, we fill it in below, after we are inside the stack frame and
@@ -689,12 +688,7 @@ impl<'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
for ct in &body.required_consts {
let span = ct.span;
let ct = self.subst_from_current_frame_and_normalize_erasing_regions(ct.literal)?;
- self.const_to_op(&ct, None).map_err(|err| {
- // If there was an error, set the span of the current frame to this constant.
- // Avoiding doing this when evaluation succeeds.
- self.frame_mut().loc = Err(span);
- err
- })?;
+ self.eval_mir_constant(&ct, Some(span), None)?;
}
// Most locals are initially dead.
@@ -711,7 +705,7 @@ impl<'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
// done
self.frame_mut().locals = locals;
M::after_stack_push(self)?;
- self.frame_mut().loc = Ok(mir::Location::START);
+ self.frame_mut().loc = Left(mir::Location::START);
let span = info_span!("frame", "{}", instance);
self.frame_mut().tracing_span.enter(span);
@@ -722,7 +716,7 @@ impl<'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
/// Jump to the given block.
#[inline]
pub fn go_to_block(&mut self, target: mir::BasicBlock) {
- self.frame_mut().loc = Ok(mir::Location { block: target, statement_index: 0 });
+ self.frame_mut().loc = Left(mir::Location { block: target, statement_index: 0 });
}
/// *Return* to the given `target` basic block.
@@ -748,8 +742,8 @@ impl<'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
/// unwinding, and doing so is UB.
pub fn unwind_to_block(&mut self, target: StackPopUnwind) -> InterpResult<'tcx> {
self.frame_mut().loc = match target {
- StackPopUnwind::Cleanup(block) => Ok(mir::Location { block, statement_index: 0 }),
- StackPopUnwind::Skip => Err(self.frame_mut().body.span),
+ StackPopUnwind::Cleanup(block) => Left(mir::Location { block, statement_index: 0 }),
+ StackPopUnwind::Skip => Right(self.frame_mut().body.span),
StackPopUnwind::NotAllowed => {
throw_ub_format!("unwinding past a stack frame that does not allow unwinding")
}
@@ -781,8 +775,8 @@ impl<'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
assert_eq!(
unwinding,
match self.frame().loc {
- Ok(loc) => self.body().basic_blocks[loc.block].is_cleanup,
- Err(_) => true,
+ Left(loc) => self.body().basic_blocks[loc.block].is_cleanup,
+ Right(_) => true,
}
);
if unwinding && self.frame_idx() == 0 {
@@ -905,9 +899,32 @@ impl<'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
Ok(())
}
- pub fn eval_to_allocation(
+ /// Call a query that can return `ErrorHandled`. If `span` is `Some`, point to that span when an error occurs.
+ pub fn ctfe_query<T>(
+ &self,
+ span: Option<Span>,
+ query: impl FnOnce(TyCtxtAt<'tcx>) -> Result<T, ErrorHandled>,
+ ) -> InterpResult<'tcx, T> {
+ // Use a precise span for better cycle errors.
+ query(self.tcx.at(span.unwrap_or_else(|| self.cur_span()))).map_err(|err| {
+ match err {
+ ErrorHandled::Reported(err) => {
+ if let Some(span) = span {
+ // To make it easier to figure out where this error comes from, also add a note at the current location.
+ self.tcx.sess.span_note_without_error(span, "erroneous constant used");
+ }
+ err_inval!(AlreadyReported(err))
+ }
+ ErrorHandled::TooGeneric => err_inval!(TooGeneric),
+ }
+ .into()
+ })
+ }
+
+ pub fn eval_global(
&self,
gid: GlobalId<'tcx>,
+ span: Option<Span>,
) -> InterpResult<'tcx, MPlaceTy<'tcx, M::Provenance>> {
// For statics we pick `ParamEnv::reveal_all`, because statics don't have generics
// and thus don't care about the parameter environment. While we could just use
@@ -920,8 +937,7 @@ impl<'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
self.param_env
};
let param_env = param_env.with_const();
- // Use a precise span for better cycle errors.
- let val = self.tcx.at(self.cur_span()).eval_to_allocation_raw(param_env.and(gid))?;
+ let val = self.ctfe_query(span, |tcx| tcx.eval_to_allocation_raw(param_env.and(gid)))?;
self.raw_const_to_mplace(val)
}
diff --git a/compiler/rustc_const_eval/src/interpret/intern.rs b/compiler/rustc_const_eval/src/interpret/intern.rs
index 6809a42dc..458cc6180 100644
--- a/compiler/rustc_const_eval/src/interpret/intern.rs
+++ b/compiler/rustc_const_eval/src/interpret/intern.rs
@@ -134,7 +134,7 @@ fn intern_shallow<'rt, 'mir, 'tcx, M: CompileTimeMachine<'mir, 'tcx, const_eval:
alloc.mutability = Mutability::Not;
};
// link the alloc id to the actual allocation
- leftover_allocations.extend(alloc.provenance().iter().map(|&(_, alloc_id)| alloc_id));
+ leftover_allocations.extend(alloc.provenance().ptrs().iter().map(|&(_, alloc_id)| alloc_id));
let alloc = tcx.intern_const_alloc(alloc);
tcx.set_alloc_id_memory(alloc_id, alloc);
None
@@ -439,7 +439,7 @@ pub fn intern_const_alloc_recursive<
}
let alloc = tcx.intern_const_alloc(alloc);
tcx.set_alloc_id_memory(alloc_id, alloc);
- for &(_, alloc_id) in alloc.inner().provenance().iter() {
+ for &(_, alloc_id) in alloc.inner().provenance().ptrs().iter() {
if leftover_allocations.insert(alloc_id) {
todo.push(alloc_id);
}
diff --git a/compiler/rustc_const_eval/src/interpret/intrinsics.rs b/compiler/rustc_const_eval/src/interpret/intrinsics.rs
index 8637d6a77..7940efcd2 100644
--- a/compiler/rustc_const_eval/src/interpret/intrinsics.rs
+++ b/compiler/rustc_const_eval/src/interpret/intrinsics.rs
@@ -7,7 +7,9 @@ use std::convert::TryFrom;
use rustc_hir::def_id::DefId;
use rustc_middle::mir::{
self,
- interpret::{ConstValue, GlobalId, InterpResult, PointerArithmetic, Scalar},
+ interpret::{
+ Allocation, ConstAllocation, ConstValue, GlobalId, InterpResult, PointerArithmetic, Scalar,
+ },
BinOp, NonDivergingIntrinsic,
};
use rustc_middle::ty;
@@ -23,7 +25,6 @@ use super::{
};
mod caller_location;
-mod type_name;
fn numeric_intrinsic<Prov>(name: Symbol, bits: u128, kind: Primitive) -> Scalar<Prov> {
let size = match kind {
@@ -42,6 +43,13 @@ fn numeric_intrinsic<Prov>(name: Symbol, bits: u128, kind: Primitive) -> Scalar<
Scalar::from_uint(bits_out, size)
}
+/// Directly returns an `Allocation` containing an absolute path representation of the given type.
+pub(crate) fn alloc_type_name<'tcx>(tcx: TyCtxt<'tcx>, ty: Ty<'tcx>) -> ConstAllocation<'tcx> {
+ let path = crate::util::type_name(tcx, ty);
+ let alloc = Allocation::from_bytes_byte_aligned_immutable(path.into_bytes());
+ tcx.intern_const_alloc(alloc)
+}
+
/// The logic for all nullary intrinsics is implemented here. These intrinsics don't get evaluated
/// inside an `InterpCx` and instead have their value computed directly from rustc internal info.
pub(crate) fn eval_nullary_intrinsic<'tcx>(
@@ -55,7 +63,7 @@ pub(crate) fn eval_nullary_intrinsic<'tcx>(
Ok(match name {
sym::type_name => {
ensure_monomorphic_enough(tcx, tp_ty)?;
- let alloc = type_name::alloc_type_name(tcx, tp_ty);
+ let alloc = alloc_type_name(tcx, tp_ty);
ConstValue::Slice { data: alloc, start: 0, end: alloc.inner().len() }
}
sym::needs_drop => {
@@ -169,8 +177,9 @@ impl<'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
sym::type_name => self.tcx.mk_static_str(),
_ => bug!(),
};
- let val =
- self.tcx.const_eval_global_id(self.param_env, gid, Some(self.tcx.span))?;
+ let val = self.ctfe_query(None, |tcx| {
+ tcx.const_eval_global_id(self.param_env, gid, Some(tcx.span))
+ })?;
let val = self.const_val_to_op(val, ty, Some(dest.layout))?;
self.copy_op(&val, dest, /*allow_transmute*/ false)?;
}
@@ -234,6 +243,11 @@ impl<'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
let discr_val = self.read_discriminant(&place.into())?.0;
self.write_scalar(discr_val, dest)?;
}
+ sym::exact_div => {
+ let l = self.read_immediate(&args[0])?;
+ let r = self.read_immediate(&args[1])?;
+ self.exact_div(&l, &r, dest)?;
+ }
sym::unchecked_shl
| sym::unchecked_shr
| sym::unchecked_add
@@ -705,7 +719,7 @@ impl<'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
rhs: &OpTy<'tcx, <M as Machine<'mir, 'tcx>>::Provenance>,
) -> InterpResult<'tcx, Scalar<M::Provenance>> {
let layout = self.layout_of(lhs.layout.ty.builtin_deref(true).unwrap().ty)?;
- assert!(!layout.is_unsized());
+ assert!(layout.is_sized());
let get_bytes = |this: &InterpCx<'mir, 'tcx, M>,
op: &OpTy<'tcx, <M as Machine<'mir, 'tcx>>::Provenance>,
diff --git a/compiler/rustc_const_eval/src/interpret/intrinsics/caller_location.rs b/compiler/rustc_const_eval/src/interpret/intrinsics/caller_location.rs
index 0e3867557..7d94a22c4 100644
--- a/compiler/rustc_const_eval/src/interpret/intrinsics/caller_location.rs
+++ b/compiler/rustc_const_eval/src/interpret/intrinsics/caller_location.rs
@@ -19,8 +19,8 @@ impl<'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
debug!("find_closest_untracked_caller_location: checking frame {:?}", frame.instance);
// Assert that the frame we look at is actually executing code currently
- // (`loc` is `Err` when we are unwinding and the frame does not require cleanup).
- let loc = frame.loc.unwrap();
+ // (`loc` is `Right` when we are unwinding and the frame does not require cleanup).
+ let loc = frame.loc.left().unwrap();
// This could be a non-`Call` terminator (such as `Drop`), or not a terminator at all
// (such as `box`). Use the normal span by default.
diff --git a/compiler/rustc_const_eval/src/interpret/machine.rs b/compiler/rustc_const_eval/src/interpret/machine.rs
index 351152eba..0604d5ee6 100644
--- a/compiler/rustc_const_eval/src/interpret/machine.rs
+++ b/compiler/rustc_const_eval/src/interpret/machine.rs
@@ -373,9 +373,21 @@ pub trait Machine<'mir, 'tcx>: Sized {
Ok(())
}
- /// Executes a retagging operation.
+ /// Executes a retagging operation for a single pointer.
+ /// Returns the possibly adjusted pointer.
#[inline]
- fn retag(
+ fn retag_ptr_value(
+ _ecx: &mut InterpCx<'mir, 'tcx, Self>,
+ _kind: mir::RetagKind,
+ val: &ImmTy<'tcx, Self::Provenance>,
+ ) -> InterpResult<'tcx, ImmTy<'tcx, Self::Provenance>> {
+ Ok(val.clone())
+ }
+
+ /// Executes a retagging operation on a compound value.
+ /// Replaces all pointers stored in the given place.
+ #[inline]
+ fn retag_place_contents(
_ecx: &mut InterpCx<'mir, 'tcx, Self>,
_kind: mir::RetagKind,
_place: &PlaceTy<'tcx, Self::Provenance>,
@@ -417,8 +429,8 @@ pub trait Machine<'mir, 'tcx>: Sized {
}
}
-// A lot of the flexibility above is just needed for `Miri`, but all "compile-time" machines
-// (CTFE and ConstProp) use the same instance. Here, we share that code.
+/// A lot of the flexibility above is just needed for `Miri`, but all "compile-time" machines
+/// (CTFE and ConstProp) use the same instance. Here, we share that code.
pub macro compile_time_machine(<$mir: lifetime, $tcx: lifetime>) {
type Provenance = AllocId;
type ProvenanceExtra = ();
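
The `retag` hook above is split in two: `retag_ptr_value` adjusts (and returns) a single pointer value, while `retag_place_contents` walks a whole place and fixes up every pointer stored in it. Both default to doing nothing, so only machines that track borrows need to override them. A toy sketch of that shape (not the real `Machine` trait; the tagging below is purely illustrative):

    // Toy trait mirroring the split: the per-value hook returns the (possibly adjusted)
    // value, the per-place hook mutates in place. Both are no-ops by default.
    trait RetagHooks {
        fn retag_ptr_value(&mut self, ptr: u64) -> u64 {
            ptr
        }
        fn retag_place_contents(&mut self, _contents: &mut [u64]) {}
    }

    struct ConstEvalLike; // a machine that does not track borrows keeps the defaults
    impl RetagHooks for ConstEvalLike {}

    struct MiriLike; // a borrow-tracking machine overrides both hooks
    impl RetagHooks for MiriLike {
        fn retag_ptr_value(&mut self, ptr: u64) -> u64 {
            ptr | 1 // pretend "tagging", for illustration only
        }
        fn retag_place_contents(&mut self, contents: &mut [u64]) {
            for p in contents.iter_mut() {
                *p = self.retag_ptr_value(*p);
            }
        }
    }

    fn main() {
        assert_eq!(ConstEvalLike.retag_ptr_value(8), 8);
        let mut vals = [2u64, 4];
        MiriLike.retag_place_contents(&mut vals);
        assert_eq!(vals, [3, 5]);
    }
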
diff --git a/compiler/rustc_const_eval/src/interpret/memory.rs b/compiler/rustc_const_eval/src/interpret/memory.rs
index e5e015c1e..528c1cb06 100644
--- a/compiler/rustc_const_eval/src/interpret/memory.rs
+++ b/compiler/rustc_const_eval/src/interpret/memory.rs
@@ -112,7 +112,7 @@ pub struct Memory<'mir, 'tcx, M: Machine<'mir, 'tcx>> {
/// A reference to some allocation that was already bounds-checked for the given region
/// and had the on-access machine hooks run.
#[derive(Copy, Clone)]
-pub struct AllocRef<'a, 'tcx, Prov, Extra> {
+pub struct AllocRef<'a, 'tcx, Prov: Provenance, Extra> {
alloc: &'a Allocation<Prov, Extra>,
range: AllocRange,
tcx: TyCtxt<'tcx>,
@@ -120,7 +120,7 @@ pub struct AllocRef<'a, 'tcx, Prov, Extra> {
}
/// A reference to some allocation that was already bounds-checked for the given region
/// and had the on-access machine hooks run.
-pub struct AllocRefMut<'a, 'tcx, Prov, Extra> {
+pub struct AllocRefMut<'a, 'tcx, Prov: Provenance, Extra> {
alloc: &'a mut Allocation<Prov, Extra>,
range: AllocRange,
tcx: TyCtxt<'tcx>,
@@ -302,8 +302,6 @@ impl<'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
.into());
};
- debug!(?alloc);
-
if alloc.mutability == Mutability::Not {
throw_ub_format!("deallocating immutable allocation {alloc_id:?}");
}
@@ -503,8 +501,9 @@ impl<'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
throw_unsup!(ReadExternStatic(def_id));
}
- // Use a precise span for better cycle errors.
- (self.tcx.at(self.cur_span()).eval_static_initializer(def_id)?, Some(def_id))
+ // We don't give a span -- statics don't need that, they cannot be generic or associated.
+ let val = self.ctfe_query(None, |tcx| tcx.eval_static_initializer(def_id))?;
+ (val, Some(def_id))
}
};
M::before_access_global(*self.tcx, &self.machine, id, alloc, def_id, is_write)?;
@@ -683,7 +682,7 @@ impl<'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
// Use size and align of the type.
let ty = self.tcx.type_of(def_id);
let layout = self.tcx.layout_of(ParamEnv::empty().and(ty)).unwrap();
- assert!(!layout.is_unsized());
+ assert!(layout.is_sized());
(layout.size, layout.align.abi, AllocKind::LiveData)
}
Some(GlobalAlloc::Memory(alloc)) => {
@@ -797,7 +796,7 @@ impl<'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
// This is a new allocation, add the allocation it points to `todo`.
if let Some((_, alloc)) = self.memory.alloc_map.get(id) {
todo.extend(
- alloc.provenance().values().filter_map(|prov| prov.get_alloc_id()),
+ alloc.provenance().provenances().filter_map(|prov| prov.get_alloc_id()),
);
}
}
@@ -833,7 +832,8 @@ impl<'a, 'mir, 'tcx, M: Machine<'mir, 'tcx>> std::fmt::Debug for DumpAllocs<'a,
allocs_to_print: &mut VecDeque<AllocId>,
alloc: &Allocation<Prov, Extra>,
) -> std::fmt::Result {
- for alloc_id in alloc.provenance().values().filter_map(|prov| prov.get_alloc_id()) {
+ for alloc_id in alloc.provenance().provenances().filter_map(|prov| prov.get_alloc_id())
+ {
allocs_to_print.push_back(alloc_id);
}
write!(fmt, "{}", display_allocation(tcx, alloc))
@@ -962,7 +962,7 @@ impl<'tcx, 'a, Prov: Provenance, Extra> AllocRef<'a, 'tcx, Prov, Extra> {
/// Returns whether the allocation has provenance anywhere in the range of the `AllocRef`.
pub(crate) fn has_provenance(&self) -> bool {
- self.alloc.range_has_provenance(&self.tcx, self.range)
+ !self.alloc.provenance().range_empty(self.range, &self.tcx)
}
}
@@ -1060,7 +1060,7 @@ impl<'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
// Source alloc preparations and access hooks.
let Some((src_alloc_id, src_offset, src_prov)) = src_parts else {
- // Zero-sized *source*, that means dst is also zero-sized and we have nothing to do.
+ // Zero-sized *source*, that means dest is also zero-sized and we have nothing to do.
return Ok(());
};
let src_alloc = self.get_alloc_raw(src_alloc_id)?;
@@ -1079,22 +1079,18 @@ impl<'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
return Ok(());
};
- // Checks provenance edges on the src, which needs to happen before
- // `prepare_provenance_copy`.
- if src_alloc.range_has_provenance(&tcx, alloc_range(src_range.start, Size::ZERO)) {
- throw_unsup!(PartialPointerCopy(Pointer::new(src_alloc_id, src_range.start)));
- }
- if src_alloc.range_has_provenance(&tcx, alloc_range(src_range.end(), Size::ZERO)) {
- throw_unsup!(PartialPointerCopy(Pointer::new(src_alloc_id, src_range.end())));
- }
+ // Prepare getting source provenance.
let src_bytes = src_alloc.get_bytes_unchecked(src_range).as_ptr(); // raw ptr, so we can also get a ptr to the destination allocation
// first copy the provenance to a temporary buffer, because
// `get_bytes_mut` will clear the provenance, which is correct,
// since we don't want to keep any provenance at the target.
- let provenance =
- src_alloc.prepare_provenance_copy(self, src_range, dest_offset, num_copies);
+ // This will also error if copying partial provenance is not supported.
+ let provenance = src_alloc
+ .provenance()
+ .prepare_copy(src_range, dest_offset, num_copies, self)
+ .map_err(|e| e.to_interp_error(dest_alloc_id))?;
// Prepare a copy of the initialization mask.
- let compressed = src_alloc.compress_uninit_range(src_range);
+ let init = src_alloc.init_mask().prepare_copy(src_range);
// Destination alloc preparations and access hooks.
let (dest_alloc, extra) = self.get_alloc_raw_mut(dest_alloc_id)?;
@@ -1111,7 +1107,7 @@ impl<'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
.map_err(|e| e.to_interp_error(dest_alloc_id))?
.as_mut_ptr();
- if compressed.no_bytes_init() {
+ if init.no_bytes_init() {
// Fast path: If all bytes are `uninit` then there is nothing to copy. The target range
// is marked as uninitialized but we otherwise omit changing the byte representation which may
// be arbitrary for uninitialized bytes.
@@ -1160,13 +1156,13 @@ impl<'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
}
// now fill in all the "init" data
- dest_alloc.mark_compressed_init_range(
- &compressed,
+ dest_alloc.init_mask_apply_copy(
+ init,
alloc_range(dest_offset, size), // just a single copy (i.e., not full `dest_range`)
num_copies,
);
// copy the provenance to the destination
- dest_alloc.mark_provenance_range(provenance);
+ dest_alloc.provenance_apply_copy(provenance);
Ok(())
}
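
The memory-copy path above now funnels provenance and init-mask handling through a prepare/apply pair: the source metadata is snapshotted once (erroring out if a pointer would be copied only partially), then applied to the destination once per repetition. A toy model of that prepare-once, apply-many shape, using plain bytes instead of provenance and hypothetical names:

    // Prepare: snapshot the source range once. Apply: replicate it `num_copies` times.
    fn prepare_copy(src: &[u8], start: usize, len: usize) -> Vec<u8> {
        src[start..start + len].to_vec()
    }

    fn apply_copy(dest: &mut Vec<u8>, prepared: &[u8], num_copies: usize) {
        for _ in 0..num_copies {
            dest.extend_from_slice(prepared);
        }
    }

    fn main() {
        let src = [1u8, 2, 3, 4];
        let prepared = prepare_copy(&src, 1, 2); // bytes [2, 3]
        let mut dest = Vec::new();
        apply_copy(&mut dest, &prepared, 3);
        assert_eq!(dest, [2, 3, 2, 3, 2, 3]);
    }
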
diff --git a/compiler/rustc_const_eval/src/interpret/operand.rs b/compiler/rustc_const_eval/src/interpret/operand.rs
index 0c212cf59..221e359d2 100644
--- a/compiler/rustc_const_eval/src/interpret/operand.rs
+++ b/compiler/rustc_const_eval/src/interpret/operand.rs
@@ -1,11 +1,14 @@
//! Functions concerning immediate values and operands, and reading from operands.
//! All high-level functions to read from memory work on operands as sources.
+use either::{Either, Left, Right};
+
use rustc_hir::def::Namespace;
use rustc_middle::ty::layout::{LayoutOf, PrimitiveExt, TyAndLayout};
use rustc_middle::ty::print::{FmtPrinter, PrettyPrinter};
-use rustc_middle::ty::{ConstInt, DelaySpanBugEmitted, Ty};
+use rustc_middle::ty::{ConstInt, Ty, ValTree};
use rustc_middle::{mir, ty};
+use rustc_span::Span;
use rustc_target::abi::{self, Abi, Align, HasDataLayout, Size, TagEncoding};
use rustc_target::abi::{VariantIdx, Variants};
@@ -260,9 +263,9 @@ impl<'tcx, Prov: Provenance> OpTy<'tcx, Prov> {
layout: TyAndLayout<'tcx>,
cx: &impl HasDataLayout,
) -> InterpResult<'tcx, Self> {
- match self.try_as_mplace() {
- Ok(mplace) => Ok(mplace.offset_with_meta(offset, meta, layout, cx)?.into()),
- Err(imm) => {
+ match self.as_mplace_or_imm() {
+ Left(mplace) => Ok(mplace.offset_with_meta(offset, meta, layout, cx)?.into()),
+ Right(imm) => {
assert!(
matches!(*imm, Immediate::Uninit),
"Scalar/ScalarPair cannot be offset into"
@@ -280,7 +283,7 @@ impl<'tcx, Prov: Provenance> OpTy<'tcx, Prov> {
layout: TyAndLayout<'tcx>,
cx: &impl HasDataLayout,
) -> InterpResult<'tcx, Self> {
- assert!(!layout.is_unsized());
+ assert!(layout.is_sized());
self.offset_with_meta(offset, MemPlaceMeta::None, layout, cx)
}
}
@@ -352,8 +355,8 @@ impl<'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
/// Try returning an immediate for the operand. If the layout does not permit loading this as an
/// immediate, return where in memory we can find the data.
- /// Note that for a given layout, this operation will either always fail or always
- /// succeed! Whether it succeeds depends on whether the layout can be represented
+ /// Note that for a given layout, this operation will either always return Left or always
+ /// return Right! Whether it returns Left depends on whether the layout can be represented
/// in an `Immediate`, not on which data is stored there currently.
///
/// This is an internal function that should not usually be used; call `read_immediate` instead.
@@ -361,22 +364,22 @@ impl<'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
pub fn read_immediate_raw(
&self,
src: &OpTy<'tcx, M::Provenance>,
- ) -> InterpResult<'tcx, Result<ImmTy<'tcx, M::Provenance>, MPlaceTy<'tcx, M::Provenance>>> {
- Ok(match src.try_as_mplace() {
- Ok(ref mplace) => {
+ ) -> InterpResult<'tcx, Either<MPlaceTy<'tcx, M::Provenance>, ImmTy<'tcx, M::Provenance>>> {
+ Ok(match src.as_mplace_or_imm() {
+ Left(ref mplace) => {
if let Some(val) = self.read_immediate_from_mplace_raw(mplace)? {
- Ok(val)
+ Right(val)
} else {
- Err(*mplace)
+ Left(*mplace)
}
}
- Err(val) => Ok(val),
+ Right(val) => Right(val),
})
}
/// Read an immediate from a place, asserting that that is possible with the given layout.
///
- /// If this suceeds, the `ImmTy` is never `Uninit`.
+ /// If this succeeds, the `ImmTy` is never `Uninit`.
#[inline(always)]
pub fn read_immediate(
&self,
@@ -389,7 +392,7 @@ impl<'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
) {
span_bug!(self.cur_span(), "primitive read not possible for type: {:?}", op.layout.ty);
}
- let imm = self.read_immediate_raw(op)?.unwrap();
+ let imm = self.read_immediate_raw(op)?.right().unwrap();
if matches!(*imm, Immediate::Uninit) {
throw_ub!(InvalidUninitBytes(None));
}
@@ -431,9 +434,9 @@ impl<'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
// Basically we just transmute this place into an array following simd_size_and_type.
// This only works in memory, but repr(simd) types should never be immediates anyway.
assert!(op.layout.ty.is_simd());
- match op.try_as_mplace() {
- Ok(mplace) => self.mplace_to_simd(&mplace),
- Err(imm) => match *imm {
+ match op.as_mplace_or_imm() {
+ Left(mplace) => self.mplace_to_simd(&mplace),
+ Right(imm) => match *imm {
Immediate::Uninit => {
throw_ub!(InvalidUninitBytes(None))
}
@@ -527,14 +530,14 @@ impl<'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
Copy(place) | Move(place) => self.eval_place_to_op(place, layout)?,
Constant(ref constant) => {
- let val =
+ let c =
self.subst_from_current_frame_and_normalize_erasing_regions(constant.literal)?;
// This can still fail:
// * During ConstProp, with `TooGeneric` or since the `required_consts` were not all
// checked yet.
// * During CTFE, since promoteds in `const`/`static` initializer bodies can fail.
- self.const_to_op(&val, layout)?
+ self.eval_mir_constant(&c, Some(constant.span), layout)?
}
};
trace!("{:?}: {:?}", mir_op, *op);
@@ -549,9 +552,37 @@ impl<'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
ops.iter().map(|op| self.eval_operand(op, None)).collect()
}
- pub fn const_to_op(
+ fn eval_ty_constant(
+ &self,
+ val: ty::Const<'tcx>,
+ span: Option<Span>,
+ ) -> InterpResult<'tcx, ValTree<'tcx>> {
+ Ok(match val.kind() {
+ ty::ConstKind::Param(_) | ty::ConstKind::Placeholder(..) => {
+ throw_inval!(TooGeneric)
+ }
+ // FIXME(generic_const_exprs): `ConstKind::Expr` should be able to be evaluated
+ ty::ConstKind::Expr(_) => throw_inval!(TooGeneric),
+ ty::ConstKind::Error(reported) => {
+ throw_inval!(AlreadyReported(reported))
+ }
+ ty::ConstKind::Unevaluated(uv) => {
+ let instance = self.resolve(uv.def, uv.substs)?;
+ let cid = GlobalId { instance, promoted: None };
+ self.ctfe_query(span, |tcx| tcx.eval_to_valtree(self.param_env.and(cid)))?
+ .unwrap_or_else(|| bug!("unable to create ValTree for {uv:?}"))
+ }
+ ty::ConstKind::Bound(..) | ty::ConstKind::Infer(..) => {
+ span_bug!(self.cur_span(), "unexpected ConstKind in ctfe: {val:?}")
+ }
+ ty::ConstKind::Value(valtree) => valtree,
+ })
+ }
+
+ pub fn eval_mir_constant(
&self,
val: &mir::ConstantKind<'tcx>,
+ span: Option<Span>,
layout: Option<TyAndLayout<'tcx>>,
) -> InterpResult<'tcx, OpTy<'tcx, M::Provenance>> {
// FIXME(const_prop): normalization needed b/c const prop lint in
@@ -563,44 +594,20 @@ impl<'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
let val = self.tcx.normalize_erasing_regions(self.param_env, *val);
match val {
mir::ConstantKind::Ty(ct) => {
- match ct.kind() {
- ty::ConstKind::Param(_) | ty::ConstKind::Placeholder(..) => {
- throw_inval!(TooGeneric)
- }
- ty::ConstKind::Error(DelaySpanBugEmitted { reported, .. }) => {
- throw_inval!(AlreadyReported(reported))
- }
- ty::ConstKind::Unevaluated(uv) => {
- // NOTE: We evaluate to a `ValTree` here as a check to ensure
- // we're working with valid constants, even though we never need it.
- let instance = self.resolve(uv.def, uv.substs)?;
- let cid = GlobalId { instance, promoted: None };
- let _valtree = self
- .tcx
- .eval_to_valtree(self.param_env.and(cid))?
- .unwrap_or_else(|| bug!("unable to create ValTree for {uv:?}"));
-
- Ok(self.eval_to_allocation(cid)?.into())
- }
- ty::ConstKind::Bound(..) | ty::ConstKind::Infer(..) => {
- span_bug!(self.cur_span(), "unexpected ConstKind in ctfe: {ct:?}")
- }
- ty::ConstKind::Value(valtree) => {
- let ty = ct.ty();
- let const_val = self.tcx.valtree_to_const_val((ty, valtree));
- self.const_val_to_op(const_val, ty, layout)
- }
- }
+ let ty = ct.ty();
+ let valtree = self.eval_ty_constant(ct, span)?;
+ let const_val = self.tcx.valtree_to_const_val((ty, valtree));
+ self.const_val_to_op(const_val, ty, layout)
}
mir::ConstantKind::Val(val, ty) => self.const_val_to_op(val, ty, layout),
mir::ConstantKind::Unevaluated(uv, _) => {
let instance = self.resolve(uv.def, uv.substs)?;
- Ok(self.eval_to_allocation(GlobalId { instance, promoted: uv.promoted })?.into())
+ Ok(self.eval_global(GlobalId { instance, promoted: uv.promoted }, span)?.into())
}
}
}
- pub(crate) fn const_val_to_op(
+ pub(super) fn const_val_to_op(
&self,
val_val: ConstValue<'tcx>,
ty: Ty<'tcx>,
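
The operand changes above split constant evaluation into two steps: `eval_ty_constant` reduces a type-system constant to a value tree (running the CTFE query for unevaluated ones), and `eval_mir_constant` dispatches on the MIR constant kind and lowers the result to an operand. A rough, toy-typed sketch of that dispatch (none of these types or signatures are rustc's):

    // Toy stand-ins for the two layers of constants handled above.
    enum TyConst { Value(i64), Unevaluated(&'static str) }
    enum MirConst { Ty(TyConst), Val(i64) }

    // Step 1: reduce a type-system constant to a plain value (the "valtree" stand-in).
    fn eval_ty_constant(c: TyConst) -> Result<i64, String> {
        match c {
            TyConst::Value(v) => Ok(v),
            // In the real code, this is where the eval_to_valtree query runs.
            TyConst::Unevaluated(name) => Err(format!("must evaluate `{name}` first")),
        }
    }

    // Step 2: dispatch on the MIR constant kind and produce the final value (the "operand").
    fn eval_mir_constant(c: MirConst) -> Result<i64, String> {
        match c {
            MirConst::Ty(ct) => eval_ty_constant(ct),
            MirConst::Val(v) => Ok(v),
        }
    }

    fn main() {
        assert_eq!(eval_mir_constant(MirConst::Ty(TyConst::Value(7))), Ok(7));
        assert_eq!(eval_mir_constant(MirConst::Val(9)), Ok(9));
        assert!(eval_mir_constant(MirConst::Ty(TyConst::Unevaluated("FOO"))).is_err());
    }
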
diff --git a/compiler/rustc_const_eval/src/interpret/place.rs b/compiler/rustc_const_eval/src/interpret/place.rs
index b0625b5f4..c47cfe8bb 100644
--- a/compiler/rustc_const_eval/src/interpret/place.rs
+++ b/compiler/rustc_const_eval/src/interpret/place.rs
@@ -2,6 +2,8 @@
//! into a place.
//! All high-level functions to write to memory work on places as destinations.
+use either::{Either, Left, Right};
+
use rustc_ast::Mutability;
use rustc_middle::mir;
use rustc_middle::ty;
@@ -201,7 +203,7 @@ impl<'tcx, Prov: Provenance> MPlaceTy<'tcx, Prov> {
layout: TyAndLayout<'tcx>,
cx: &impl HasDataLayout,
) -> InterpResult<'tcx, Self> {
- assert!(!layout.is_unsized());
+ assert!(layout.is_sized());
self.offset_with_meta(offset, MemPlaceMeta::None, layout, cx)
}
@@ -252,36 +254,36 @@ impl<'tcx, Prov: Provenance> MPlaceTy<'tcx, Prov> {
// These are defined here because they produce a place.
impl<'tcx, Prov: Provenance> OpTy<'tcx, Prov> {
#[inline(always)]
- pub fn try_as_mplace(&self) -> Result<MPlaceTy<'tcx, Prov>, ImmTy<'tcx, Prov>> {
+ pub fn as_mplace_or_imm(&self) -> Either<MPlaceTy<'tcx, Prov>, ImmTy<'tcx, Prov>> {
match **self {
Operand::Indirect(mplace) => {
- Ok(MPlaceTy { mplace, layout: self.layout, align: self.align.unwrap() })
+ Left(MPlaceTy { mplace, layout: self.layout, align: self.align.unwrap() })
}
- Operand::Immediate(imm) => Err(ImmTy::from_immediate(imm, self.layout)),
+ Operand::Immediate(imm) => Right(ImmTy::from_immediate(imm, self.layout)),
}
}
#[inline(always)]
#[cfg_attr(debug_assertions, track_caller)] // only in debug builds due to perf (see #98980)
pub fn assert_mem_place(&self) -> MPlaceTy<'tcx, Prov> {
- self.try_as_mplace().unwrap()
+ self.as_mplace_or_imm().left().unwrap()
}
}
impl<'tcx, Prov: Provenance> PlaceTy<'tcx, Prov> {
/// A place is either an mplace or some local.
#[inline]
- pub fn try_as_mplace(&self) -> Result<MPlaceTy<'tcx, Prov>, (usize, mir::Local)> {
+ pub fn as_mplace_or_local(&self) -> Either<MPlaceTy<'tcx, Prov>, (usize, mir::Local)> {
match **self {
- Place::Ptr(mplace) => Ok(MPlaceTy { mplace, layout: self.layout, align: self.align }),
- Place::Local { frame, local } => Err((frame, local)),
+ Place::Ptr(mplace) => Left(MPlaceTy { mplace, layout: self.layout, align: self.align }),
+ Place::Local { frame, local } => Right((frame, local)),
}
}
#[inline(always)]
#[cfg_attr(debug_assertions, track_caller)] // only in debug builds due to perf (see #98980)
pub fn assert_mem_place(&self) -> MPlaceTy<'tcx, Prov> {
- self.try_as_mplace().unwrap()
+ self.as_mplace_or_local().left().unwrap()
}
}
@@ -316,8 +318,7 @@ where
Ok(MPlaceTy { mplace, layout, align })
}
- /// Take an operand, representing a pointer, and dereference it to a place -- that
- /// will always be a MemPlace. Lives in `place.rs` because it creates a place.
+ /// Take an operand, representing a pointer, and dereference it to a place.
#[instrument(skip(self), level = "debug")]
pub fn deref_operand(
&self,
@@ -331,7 +332,7 @@ where
}
let mplace = self.ref_to_mplace(&val)?;
- self.check_mplace_access(mplace, CheckInAllocMsg::DerefTest)?;
+ self.check_mplace(mplace)?;
Ok(mplace)
}
@@ -340,7 +341,7 @@ where
&self,
place: &MPlaceTy<'tcx, M::Provenance>,
) -> InterpResult<'tcx, Option<AllocRef<'_, 'tcx, M::Provenance, M::AllocExtra>>> {
- assert!(!place.layout.is_unsized());
+ assert!(place.layout.is_sized());
assert!(!place.meta.has_meta());
let size = place.layout.size;
self.get_ptr_alloc(place.ptr, size, place.align)
@@ -351,24 +352,25 @@ where
&mut self,
place: &MPlaceTy<'tcx, M::Provenance>,
) -> InterpResult<'tcx, Option<AllocRefMut<'_, 'tcx, M::Provenance, M::AllocExtra>>> {
- assert!(!place.layout.is_unsized());
+ assert!(place.layout.is_sized());
assert!(!place.meta.has_meta());
let size = place.layout.size;
self.get_ptr_alloc_mut(place.ptr, size, place.align)
}
/// Check if this mplace is dereferenceable and sufficiently aligned.
- fn check_mplace_access(
- &self,
- mplace: MPlaceTy<'tcx, M::Provenance>,
- msg: CheckInAllocMsg,
- ) -> InterpResult<'tcx> {
+ pub fn check_mplace(&self, mplace: MPlaceTy<'tcx, M::Provenance>) -> InterpResult<'tcx> {
let (size, align) = self
.size_and_align_of_mplace(&mplace)?
.unwrap_or((mplace.layout.size, mplace.layout.align.abi));
assert!(mplace.align <= align, "dynamic alignment less strict than static one?");
let align = M::enforce_alignment(self).then_some(align);
- self.check_ptr_access_align(mplace.ptr, size, align.unwrap_or(Align::ONE), msg)?;
+ self.check_ptr_access_align(
+ mplace.ptr,
+ size,
+ align.unwrap_or(Align::ONE),
+ CheckInAllocMsg::DerefTest,
+ )?;
Ok(())
}
@@ -485,7 +487,7 @@ where
src: Immediate<M::Provenance>,
dest: &PlaceTy<'tcx, M::Provenance>,
) -> InterpResult<'tcx> {
- assert!(!dest.layout.is_unsized(), "Cannot write unsized data");
+ assert!(dest.layout.is_sized(), "Cannot write unsized data");
trace!("write_immediate: {:?} <- {:?}: {}", *dest, src, dest.layout.ty);
// See if we can avoid an allocation. This is the counterpart to `read_immediate_raw`,
@@ -569,9 +571,9 @@ where
}
pub fn write_uninit(&mut self, dest: &PlaceTy<'tcx, M::Provenance>) -> InterpResult<'tcx> {
- let mplace = match dest.try_as_mplace() {
- Ok(mplace) => mplace,
- Err((frame, local)) => {
+ let mplace = match dest.as_mplace_or_local() {
+ Left(mplace) => mplace,
+ Right((frame, local)) => {
match M::access_local_mut(self, frame, local)? {
Operand::Immediate(local) => {
*local = Immediate::Uninit;
@@ -639,7 +641,7 @@ where
// Let us see if the layout is simple so we take a shortcut,
// avoid force_allocation.
let src = match self.read_immediate_raw(src)? {
- Ok(src_val) => {
+ Right(src_val) => {
// FIXME(const_prop): Const-prop can possibly evaluate an
// unsized copy operation when it thinks that the type is
// actually sized, due to a trivially false where-clause
@@ -669,7 +671,7 @@ where
)
};
}
- Err(mplace) => mplace,
+ Left(mplace) => mplace,
};
// Slow path, this does not fit into an immediate. Just memcpy.
trace!("copy_op: {:?} <- {:?}: {}", *dest, src, dest.layout.ty);
@@ -746,7 +748,7 @@ where
layout: TyAndLayout<'tcx>,
kind: MemoryKind<M::MemoryKind>,
) -> InterpResult<'tcx, MPlaceTy<'tcx, M::Provenance>> {
- assert!(!layout.is_unsized());
+ assert!(layout.is_sized());
let ptr = self.allocate_ptr(layout.size, layout.align.abi, kind)?;
Ok(MPlaceTy::from_aligned_ptr(ptr.into(), layout))
}
diff --git a/compiler/rustc_const_eval/src/interpret/projection.rs b/compiler/rustc_const_eval/src/interpret/projection.rs
index 6b2e2bb8a..2ffd73eef 100644
--- a/compiler/rustc_const_eval/src/interpret/projection.rs
+++ b/compiler/rustc_const_eval/src/interpret/projection.rs
@@ -7,6 +7,8 @@
//! but we still need to do bounds checking and adjust the layout. To not duplicate that with MPlaceTy, we actually
//! implement the logic on OpTy, and MPlaceTy calls that.
+use either::{Left, Right};
+
use rustc_middle::mir;
use rustc_middle::ty;
use rustc_middle::ty::layout::LayoutOf;
@@ -84,13 +86,13 @@ where
base: &OpTy<'tcx, M::Provenance>,
field: usize,
) -> InterpResult<'tcx, OpTy<'tcx, M::Provenance>> {
- let base = match base.try_as_mplace() {
- Ok(ref mplace) => {
+ let base = match base.as_mplace_or_imm() {
+ Left(ref mplace) => {
// We can reuse the mplace field computation logic for indirect operands.
let field = self.mplace_field(mplace, field)?;
return Ok(field.into());
}
- Err(value) => value,
+ Right(value) => value,
};
let field_layout = base.layout.field(self, field);
@@ -204,8 +206,8 @@ where
}
}
- // Iterates over all fields of an array. Much more efficient than doing the
- // same by repeatedly calling `operand_index`.
+ /// Iterates over all fields of an array. Much more efficient than doing the
+ /// same by repeatedly calling `operand_index`.
pub fn operand_array_fields<'a>(
&self,
base: &'a OpTy<'tcx, Prov>,
diff --git a/compiler/rustc_const_eval/src/interpret/step.rs b/compiler/rustc_const_eval/src/interpret/step.rs
index c6e04cbfb..81b44a494 100644
--- a/compiler/rustc_const_eval/src/interpret/step.rs
+++ b/compiler/rustc_const_eval/src/interpret/step.rs
@@ -2,11 +2,13 @@
//!
//! The main entry point is the `step` method.
+use either::Either;
+
use rustc_middle::mir;
use rustc_middle::mir::interpret::{InterpResult, Scalar};
use rustc_middle::ty::layout::LayoutOf;
-use super::{InterpCx, Machine};
+use super::{ImmTy, InterpCx, Machine};
/// Classify whether an operator is "left-homogeneous", i.e., the LHS has the
/// same type as the result.
@@ -30,11 +32,6 @@ fn binop_right_homogeneous(op: mir::BinOp) -> bool {
}
impl<'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
- pub fn run(&mut self) -> InterpResult<'tcx> {
- while self.step()? {}
- Ok(())
- }
-
/// Returns `true` as long as there are more things to do.
///
/// This is used by [priroda](https://github.com/oli-obk/priroda)
@@ -46,7 +43,7 @@ impl<'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
return Ok(false);
}
- let Ok(loc) = self.frame().loc else {
+ let Either::Left(loc) = self.frame().loc else {
// We are unwinding and this fn has no cleanup code.
// Just go on unwinding.
trace!("unwinding: skipping frame");
@@ -61,7 +58,7 @@ impl<'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
// Make sure we are not updating `statement_index` of the wrong frame.
assert_eq!(old_frames, self.frame_idx());
// Advance the program counter.
- self.frame_mut().loc.as_mut().unwrap().statement_index += 1;
+ self.frame_mut().loc.as_mut().left().unwrap().statement_index += 1;
return Ok(true);
}
@@ -111,7 +108,7 @@ impl<'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
// Stacked Borrows.
Retag(kind, place) => {
let dest = self.eval_place(**place)?;
- M::retag(self, *kind, &dest)?;
+ M::retag_place_contents(self, *kind, &dest)?;
}
Intrinsic(box ref intrinsic) => self.emulate_nondiverging_intrinsic(intrinsic)?,
@@ -209,7 +206,7 @@ impl<'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
Repeat(ref operand, _) => {
let src = self.eval_operand(operand, None)?;
- assert!(!src.layout.is_unsized());
+ assert!(src.layout.is_sized());
let dest = self.force_allocation(&dest)?;
let length = dest.len(self)?;
@@ -250,10 +247,41 @@ impl<'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
self.write_scalar(Scalar::from_machine_usize(len, self), &dest)?;
}
- AddressOf(_, place) | Ref(_, _, place) => {
+ Ref(_, borrow_kind, place) => {
let src = self.eval_place(place)?;
let place = self.force_allocation(&src)?;
- self.write_immediate(place.to_ref(self), &dest)?;
+ let val = ImmTy::from_immediate(place.to_ref(self), dest.layout);
+ // A fresh reference was created, make sure it gets retagged.
+ let val = M::retag_ptr_value(
+ self,
+ if borrow_kind.allows_two_phase_borrow() {
+ mir::RetagKind::TwoPhase
+ } else {
+ mir::RetagKind::Default
+ },
+ &val,
+ )?;
+ self.write_immediate(*val, &dest)?;
+ }
+
+ AddressOf(_, place) => {
+ // Figure out whether this is an addr_of of an already raw place.
+ let place_base_raw = if place.has_deref() {
+ let ty = self.frame().body.local_decls[place.local].ty;
+ ty.is_unsafe_ptr()
+ } else {
+ // Not a deref, and thus not raw.
+ false
+ };
+
+ let src = self.eval_place(place)?;
+ let place = self.force_allocation(&src)?;
+ let mut val = ImmTy::from_immediate(place.to_ref(self), dest.layout);
+ if !place_base_raw {
+ // If this was not already raw, it needs retagging.
+ val = M::retag_ptr_value(self, mir::RetagKind::Raw, &val)?;
+ }
+ self.write_immediate(*val, &dest)?;
}
NullaryOp(null_op, ty) => {
@@ -305,7 +333,7 @@ impl<'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
self.eval_terminator(terminator)?;
if !self.stack().is_empty() {
- if let Ok(loc) = self.frame().loc {
+ if let Either::Left(loc) = self.frame().loc {
info!("// executing {:?}", loc.block);
}
}
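
In the `AddressOf` arm above, a raw-pointer retag is applied only when the place does not already start from a raw pointer: `&raw const (*p).field` with `p: *const _` keeps its existing status, while taking the address of an ordinary local gets a fresh `RetagKind::Raw` retag. The decision boils down to this predicate (standalone restatement, not rustc code):

    // true = apply RetagKind::Raw, false = the base was already a raw pointer.
    fn needs_raw_retag(place_has_deref: bool, base_local_is_raw_ptr: bool) -> bool {
        let place_base_raw = place_has_deref && base_local_is_raw_ptr;
        !place_base_raw
    }

    fn main() {
        assert!(!needs_raw_retag(true, true));  // `&raw const (*p).field`, `p: *const T`
        assert!(needs_raw_retag(false, false)); // `&raw const local.field`
        assert!(needs_raw_retag(true, false));  // deref of a reference, not a raw pointer
    }
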
diff --git a/compiler/rustc_const_eval/src/interpret/traits.rs b/compiler/rustc_const_eval/src/interpret/traits.rs
index cab23b724..fa15d466a 100644
--- a/compiler/rustc_const_eval/src/interpret/traits.rs
+++ b/compiler/rustc_const_eval/src/interpret/traits.rs
@@ -53,7 +53,7 @@ impl<'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
) -> InterpResult<'tcx, (Size, Align)> {
let (ty, _trait_ref) = self.get_ptr_vtable(vtable)?;
let layout = self.layout_of(ty)?;
- assert!(!layout.is_unsized(), "there are no vtables for unsized types");
+ assert!(layout.is_sized(), "there are no vtables for unsized types");
Ok((layout.size, layout.align.abi))
}
}
diff --git a/compiler/rustc_const_eval/src/interpret/validity.rs b/compiler/rustc_const_eval/src/interpret/validity.rs
index 8aa56c275..0e85c7d11 100644
--- a/compiler/rustc_const_eval/src/interpret/validity.rs
+++ b/compiler/rustc_const_eval/src/interpret/validity.rs
@@ -8,6 +8,8 @@ use std::convert::TryFrom;
use std::fmt::{Display, Write};
use std::num::NonZeroUsize;
+use either::{Left, Right};
+
use rustc_ast::Mutability;
use rustc_data_structures::fx::FxHashSet;
use rustc_hir as hir;
@@ -783,18 +785,10 @@ impl<'rt, 'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> ValueVisitor<'mir, 'tcx, M>
}
}
Abi::ScalarPair(a_layout, b_layout) => {
- // There is no `rustc_layout_scalar_valid_range_start` for pairs, so
- // we would validate these things as we descend into the fields,
- // but that can miss bugs in layout computation. Layout computation
- // is subtle due to enums having ScalarPair layout, where one field
- // is the discriminant.
- if cfg!(debug_assertions)
- && !a_layout.is_uninit_valid()
- && !b_layout.is_uninit_valid()
- {
- // We can only proceed if *both* scalars need to be initialized.
- // FIXME: find a way to also check ScalarPair when one side can be uninit but
- // the other must be init.
+ // We can only proceed if *both* scalars need to be initialized.
+ // FIXME: find a way to also check ScalarPair when one side can be uninit but
+ // the other must be init.
+ if !a_layout.is_uninit_valid() && !b_layout.is_uninit_valid() {
let (a, b) =
self.read_immediate(op, "initiailized scalar value")?.to_scalar_pair();
self.visit_scalar(a, a_layout)?;
@@ -852,9 +846,9 @@ impl<'rt, 'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> ValueVisitor<'mir, 'tcx, M>
return Ok(());
}
// Now that we definitely have a non-ZST array, we know it lives in memory.
- let mplace = match op.try_as_mplace() {
- Ok(mplace) => mplace,
- Err(imm) => match *imm {
+ let mplace = match op.as_mplace_or_imm() {
+ Left(mplace) => mplace,
+ Right(imm) => match *imm {
Immediate::Uninit =>
throw_validation_failure!(self.path, { "uninitialized bytes" }),
Immediate::Scalar(..) | Immediate::ScalarPair(..) =>
diff --git a/compiler/rustc_const_eval/src/interpret/visitor.rs b/compiler/rustc_const_eval/src/interpret/visitor.rs
index aee1f93b1..1a10851a9 100644
--- a/compiler/rustc_const_eval/src/interpret/visitor.rs
+++ b/compiler/rustc_const_eval/src/interpret/visitor.rs
@@ -324,7 +324,7 @@ impl<'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> ValueMut<'mir, 'tcx, M>
macro_rules! make_value_visitor {
($visitor_trait:ident, $value_trait:ident, $($mutability:ident)?) => {
- // How to traverse a value and what to do when we are at the leaves.
+ /// How to traverse a value and what to do when we are at the leaves.
pub trait $visitor_trait<'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>>: Sized {
type V: $value_trait<'mir, 'tcx, M>;
diff --git a/compiler/rustc_const_eval/src/transform/check_consts/check.rs b/compiler/rustc_const_eval/src/transform/check_consts/check.rs
index 22a61774e..54213d55a 100644
--- a/compiler/rustc_const_eval/src/transform/check_consts/check.rs
+++ b/compiler/rustc_const_eval/src/transform/check_consts/check.rs
@@ -10,14 +10,11 @@ use rustc_middle::mir::visit::{MutatingUseContext, NonMutatingUseContext, PlaceC
use rustc_middle::mir::*;
use rustc_middle::ty::subst::{GenericArgKind, InternalSubsts};
use rustc_middle::ty::{self, adjustment::PointerCast, Instance, InstanceDef, Ty, TyCtxt};
-use rustc_middle::ty::{Binder, TraitPredicate, TraitRef, TypeVisitable};
+use rustc_middle::ty::{Binder, TraitRef, TypeVisitable};
use rustc_mir_dataflow::{self, Analysis};
use rustc_span::{sym, Span, Symbol};
-use rustc_trait_selection::infer::InferCtxtExt;
use rustc_trait_selection::traits::error_reporting::TypeErrCtxtExt as _;
-use rustc_trait_selection::traits::{
- self, ObligationCauseCode, SelectionContext, TraitEngine, TraitEngineExt,
-};
+use rustc_trait_selection::traits::{self, ObligationCauseCode, ObligationCtxt, SelectionContext};
use std::mem;
use std::ops::Deref;
@@ -452,8 +449,17 @@ impl<'tcx> Visitor<'tcx> for Checker<'_, 'tcx> {
| Rvalue::CopyForDeref(..)
| Rvalue::Repeat(..)
| Rvalue::Discriminant(..)
- | Rvalue::Len(_)
- | Rvalue::Aggregate(..) => {}
+ | Rvalue::Len(_) => {}
+
+ Rvalue::Aggregate(ref kind, ..) => {
+ if let AggregateKind::Generator(def_id, ..) = kind.as_ref() {
+ if let Some(generator_kind) = self.tcx.generator_kind(def_id.to_def_id()) {
+ if matches!(generator_kind, hir::GeneratorKind::Async(..)) {
+ self.check_op(ops::Generator(generator_kind));
+ }
+ }
+ }
+ }
Rvalue::Ref(_, kind @ BorrowKind::Mut { .. }, ref place)
| Rvalue::Ref(_, kind @ BorrowKind::Unique, ref place) => {
@@ -729,13 +735,10 @@ impl<'tcx> Visitor<'tcx> for Checker<'_, 'tcx> {
}
let trait_ref = TraitRef::from_method(tcx, trait_id, substs);
- let poly_trait_pred = Binder::dummy(TraitPredicate {
- trait_ref,
- constness: ty::BoundConstness::ConstIfConst,
- polarity: ty::ImplPolarity::Positive,
- });
+ let poly_trait_pred =
+ Binder::dummy(trait_ref).with_constness(ty::BoundConstness::ConstIfConst);
let obligation =
- Obligation::new(ObligationCause::dummy(), param_env, poly_trait_pred);
+ Obligation::new(tcx, ObligationCause::dummy(), param_env, poly_trait_pred);
let implsrc = {
let infcx = tcx.infer_ctxt().build();
@@ -747,37 +750,27 @@ impl<'tcx> Visitor<'tcx> for Checker<'_, 'tcx> {
// "non-const" check. This is required for correctness here.
{
let infcx = tcx.infer_ctxt().build();
- let mut fulfill_cx = <dyn TraitEngine<'_>>::new(infcx.tcx);
+ let ocx = ObligationCtxt::new(&infcx);
+
let predicates = tcx.predicates_of(callee).instantiate(tcx, substs);
let hir_id = tcx
.hir()
.local_def_id_to_hir_id(self.body.source.def_id().expect_local());
- let cause = || {
- ObligationCause::new(
- terminator.source_info.span,
- hir_id,
- ObligationCauseCode::ItemObligation(callee),
- )
- };
- let normalized = infcx.partially_normalize_associated_types_in(
- cause(),
- param_env,
- predicates,
+ let cause = ObligationCause::new(
+ terminator.source_info.span,
+ hir_id,
+ ObligationCauseCode::ItemObligation(callee),
);
-
- for p in normalized.obligations {
- fulfill_cx.register_predicate_obligation(&infcx, p);
- }
- for obligation in traits::predicates_for_generics(
- |_, _| cause(),
+ let normalized_predicates = ocx.normalize(&cause, param_env, predicates);
+ ocx.register_obligations(traits::predicates_for_generics(
+ |_, _| cause.clone(),
self.param_env,
- normalized.value,
- ) {
- fulfill_cx.register_predicate_obligation(&infcx, obligation);
- }
- let errors = fulfill_cx.select_all_or_error(&infcx);
+ normalized_predicates,
+ ));
+
+ let errors = ocx.select_all_or_error();
if !errors.is_empty() {
- infcx.err_ctxt().report_fulfillment_errors(&errors, None, false);
+ infcx.err_ctxt().report_fulfillment_errors(&errors, None);
}
}
@@ -828,11 +821,10 @@ impl<'tcx> Visitor<'tcx> for Checker<'_, 'tcx> {
if !nonconst_call_permission {
let obligation = Obligation::new(
+ tcx,
ObligationCause::dummy_with_span(*fn_span),
param_env,
- tcx.mk_predicate(
- poly_trait_pred.map_bound(ty::PredicateKind::Trait),
- ),
+ poly_trait_pred,
);
// improve diagnostics by showing what failed. Our requirements are stricter this time
@@ -843,7 +835,6 @@ impl<'tcx> Visitor<'tcx> for Checker<'_, 'tcx> {
obligation.clone(),
&obligation,
&e,
- false,
);
}
@@ -901,14 +892,6 @@ impl<'tcx> Visitor<'tcx> for Checker<'_, 'tcx> {
return;
}
- // `async` blocks get lowered to `std::future::from_generator(/* a closure */)`.
- let is_async_block = Some(callee) == tcx.lang_items().from_generator_fn();
- if is_async_block {
- let kind = hir::GeneratorKind::Async(hir::AsyncGeneratorKind::Block);
- self.check_op(ops::Generator(kind));
- return;
- }
-
if !tcx.is_const_fn_raw(callee) {
if !tcx.is_const_default_method(callee) {
// To get to here we must have already found a const impl for the
diff --git a/compiler/rustc_const_eval/src/transform/check_consts/mod.rs b/compiler/rustc_const_eval/src/transform/check_consts/mod.rs
index 25b420bed..655ec345e 100644
--- a/compiler/rustc_const_eval/src/transform/check_consts/mod.rs
+++ b/compiler/rustc_const_eval/src/transform/check_consts/mod.rs
@@ -62,7 +62,7 @@ impl<'mir, 'tcx> ConstCx<'mir, 'tcx> {
}
fn is_async(&self) -> bool {
- self.tcx.asyncness(self.def_id()) == hir::IsAsync::Async
+ self.tcx.asyncness(self.def_id()).is_async()
}
}
@@ -75,14 +75,14 @@ pub fn rustc_allow_const_fn_unstable(
attr::rustc_allow_const_fn_unstable(&tcx.sess, attrs).any(|name| name == feature_gate)
}
-// Returns `true` if the given `const fn` is "const-stable".
-//
-// Panics if the given `DefId` does not refer to a `const fn`.
-//
-// Const-stability is only relevant for `const fn` within a `staged_api` crate. Only "const-stable"
-// functions can be called in a const-context by users of the stable compiler. "const-stable"
-// functions are subject to more stringent restrictions than "const-unstable" functions: They
-// cannot use unstable features and can only call other "const-stable" functions.
+/// Returns `true` if the given `const fn` is "const-stable".
+///
+/// Panics if the given `DefId` does not refer to a `const fn`.
+///
+/// Const-stability is only relevant for `const fn` within a `staged_api` crate. Only "const-stable"
+/// functions can be called in a const-context by users of the stable compiler. "const-stable"
+/// functions are subject to more stringent restrictions than "const-unstable" functions: They
+/// cannot use unstable features and can only call other "const-stable" functions.
pub fn is_const_stable_const_fn(tcx: TyCtxt<'_>, def_id: DefId) -> bool {
// A default body in a `#[const_trait]` is not const-stable because const
// trait fns currently cannot be const-stable. We shouldn't
diff --git a/compiler/rustc_const_eval/src/transform/check_consts/ops.rs b/compiler/rustc_const_eval/src/transform/check_consts/ops.rs
index b28d70194..b19d270e6 100644
--- a/compiler/rustc_const_eval/src/transform/check_consts/ops.rs
+++ b/compiler/rustc_const_eval/src/transform/check_consts/ops.rs
@@ -1,7 +1,7 @@
//! Concrete error types for all operations which may be invalid in a certain const context.
use hir::def_id::LocalDefId;
-use hir::ConstContext;
+use hir::{ConstContext, LangItem};
use rustc_errors::{
error_code, struct_span_err, Applicability, DiagnosticBuilder, ErrorGuaranteed,
};
@@ -13,10 +13,9 @@ use rustc_middle::mir;
use rustc_middle::ty::print::with_no_trimmed_paths;
use rustc_middle::ty::subst::{GenericArgKind, SubstsRef};
use rustc_middle::ty::{
- suggest_constraining_type_param, Adt, Closure, DefIdTree, FnDef, FnPtr, Param, TraitPredicate,
- Ty,
+ suggest_constraining_type_param, Adt, Closure, DefIdTree, FnDef, FnPtr, Param, Ty,
};
-use rustc_middle::ty::{Binder, BoundConstness, ImplPolarity, TraitRef};
+use rustc_middle::ty::{Binder, TraitRef};
use rustc_session::parse::feature_err;
use rustc_span::symbol::sym;
use rustc_span::{BytePos, Pos, Span, Symbol};
@@ -147,13 +146,10 @@ impl<'tcx> NonConstOp<'tcx> for FnCallNonConst<'tcx> {
}
Adt(..) => {
let obligation = Obligation::new(
+ tcx,
ObligationCause::dummy(),
param_env,
- Binder::dummy(TraitPredicate {
- trait_ref,
- constness: BoundConstness::NotConst,
- polarity: ImplPolarity::Positive,
- }),
+ Binder::dummy(trait_ref),
);
let infcx = tcx.infer_ctxt().build();
@@ -303,7 +299,7 @@ impl<'tcx> NonConstOp<'tcx> for FnCallNonConst<'tcx> {
err.span_note(deref_target, "deref defined here");
}
- diag_trait(&mut err, self_ty, tcx.lang_items().deref_trait().unwrap());
+ diag_trait(&mut err, self_ty, tcx.require_lang_item(LangItem::Deref, Some(span)));
err
}
_ if tcx.opt_parent(callee) == tcx.get_diagnostic_item(sym::ArgumentV1Methods) => {
@@ -380,7 +376,7 @@ impl<'tcx> NonConstOp<'tcx> for Generator {
ccx: &ConstCx<'_, 'tcx>,
span: Span,
) -> DiagnosticBuilder<'tcx, ErrorGuaranteed> {
- let msg = format!("{}s are not allowed in {}s", self.0, ccx.const_kind());
+ let msg = format!("{}s are not allowed in {}s", self.0.descr(), ccx.const_kind());
if let hir::GeneratorKind::Async(hir::AsyncGeneratorKind::Block) = self.0 {
ccx.tcx.sess.create_feature_err(
UnallowedOpInConstContext { span, msg },
@@ -690,7 +686,7 @@ impl<'tcx> NonConstOp<'tcx> for ThreadLocalAccess {
}
}
-// Types that cannot appear in the signature or locals of a `const fn`.
+/// Types that cannot appear in the signature or locals of a `const fn`.
pub mod ty {
use super::*;
diff --git a/compiler/rustc_const_eval/src/transform/check_consts/qualifs.rs b/compiler/rustc_const_eval/src/transform/check_consts/qualifs.rs
index 335992342..8ca3fdf40 100644
--- a/compiler/rustc_const_eval/src/transform/check_consts/qualifs.rs
+++ b/compiler/rustc_const_eval/src/transform/check_consts/qualifs.rs
@@ -146,25 +146,19 @@ impl Qualif for NeedsNonConstDrop {
qualifs.needs_non_const_drop
}
+ #[instrument(level = "trace", skip(cx), ret)]
fn in_any_value_of_ty<'tcx>(cx: &ConstCx<'_, 'tcx>, ty: Ty<'tcx>) -> bool {
// Avoid selecting for simple cases, such as builtin types.
if ty::util::is_trivially_const_drop(ty) {
return false;
}
- let destruct = cx.tcx.require_lang_item(LangItem::Destruct, None);
-
let obligation = Obligation::new(
- ObligationCause::dummy(),
+ cx.tcx,
+ ObligationCause::dummy_with_span(cx.body.span),
cx.param_env,
- ty::Binder::dummy(ty::TraitPredicate {
- trait_ref: ty::TraitRef {
- def_id: destruct,
- substs: cx.tcx.mk_substs_trait(ty, &[]),
- },
- constness: ty::BoundConstness::ConstIfConst,
- polarity: ty::ImplPolarity::Positive,
- }),
+ ty::Binder::dummy(cx.tcx.at(cx.body.span).mk_trait_ref(LangItem::Destruct, [ty]))
+ .with_constness(ty::BoundConstness::ConstIfConst),
);
let infcx = cx.tcx.infer_ctxt().build();
@@ -174,6 +168,8 @@ impl Qualif for NeedsNonConstDrop {
return true;
};
+ trace!(?impl_src);
+
if !matches!(
impl_src,
ImplSource::ConstDestruct(_) | ImplSource::Param(_, ty::BoundConstness::ConstIfConst)
@@ -348,7 +344,11 @@ where
// FIXME(valtrees): check whether const qualifs should behave the same
// way for type and mir constants.
let uneval = match constant.literal {
- ConstantKind::Ty(ct) if matches!(ct.kind(), ty::ConstKind::Param(_)) => None,
+ ConstantKind::Ty(ct)
+ if matches!(ct.kind(), ty::ConstKind::Param(_) | ty::ConstKind::Error(_)) =>
+ {
+ None
+ }
ConstantKind::Ty(c) => bug!("expected ConstKind::Param here, found {:?}", c),
ConstantKind::Unevaluated(uv, _) => Some(uv),
ConstantKind::Val(..) => None,
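
The `#[instrument(level = "trace", skip(cx), ret)]` attribute added above comes from the `tracing` crate: it wraps the function in a span, omits the `cx` argument from the span fields, and records the return value on exit. Not part of the patch: a minimal standalone sketch of that attribute, assuming `tracing` and `tracing-subscriber` as dependencies; the `needs_drop_check` function below is made up for illustration.

```rust
// Assumed (hypothetical) Cargo.toml entries: tracing = "0.1", tracing-subscriber = "0.3".
use tracing::instrument;

// `skip(raw)` keeps the large argument out of the recorded span fields;
// `ret` emits an event with the return value when the function exits.
#[instrument(level = "trace", skip(raw), ret)]
fn needs_drop_check(name: &str, raw: &[u8]) -> bool {
    !raw.is_empty() && name != "trivially_const_drop"
}

fn main() {
    tracing_subscriber::fmt()
        .with_max_level(tracing::Level::TRACE)
        .init();
    needs_drop_check("example", b"bytes");
}
```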
diff --git a/compiler/rustc_const_eval/src/transform/promote_consts.rs b/compiler/rustc_const_eval/src/transform/promote_consts.rs
index f3ae16da4..6777fae74 100644
--- a/compiler/rustc_const_eval/src/transform/promote_consts.rs
+++ b/compiler/rustc_const_eval/src/transform/promote_consts.rs
@@ -45,11 +45,10 @@ impl<'tcx> MirPass<'tcx> for PromoteTemps<'tcx> {
// There's not really any point in promoting errorful MIR.
//
// This does not include MIR that failed const-checking, which we still try to promote.
- if body.return_ty().references_error() {
- tcx.sess.delay_span_bug(body.span, "PromoteTemps: MIR had errors");
+ if let Err(_) = body.return_ty().error_reported() {
+ debug!("PromoteTemps: MIR had errors");
return;
}
-
if body.source.promoted.is_some() {
return;
}
@@ -319,14 +318,14 @@ impl<'tcx> Validator<'_, 'tcx> {
match elem {
ProjectionElem::Deref => {
let mut promotable = false;
+ // When a static is used by-value, that gets desugared to `*STATIC_ADDR`,
+ // and we need to be able to promote this. So check if this deref matches
+ // that specific pattern.
+
// We need to make sure this is a `Deref` of a local with no further projections.
// Discussion can be found at
// https://github.com/rust-lang/rust/pull/74945#discussion_r463063247
if let Some(local) = place_base.as_local() {
- // This is a special treatment for cases like *&STATIC where STATIC is a
- // global static variable.
- // This pattern is generated only when global static variables are directly
- // accessed and is qualified for promotion safely.
if let TempState::Defined { location, .. } = self.temps[local] {
let def_stmt = self.body[location.block]
.statements
diff --git a/compiler/rustc_const_eval/src/transform/validate.rs b/compiler/rustc_const_eval/src/transform/validate.rs
index 81b82a21f..5c9263dc5 100644
--- a/compiler/rustc_const_eval/src/transform/validate.rs
+++ b/compiler/rustc_const_eval/src/transform/validate.rs
@@ -2,7 +2,7 @@
use rustc_data_structures::fx::FxHashSet;
use rustc_index::bit_set::BitSet;
-use rustc_infer::infer::TyCtxtInferExt;
+use rustc_infer::traits::Reveal;
use rustc_middle::mir::interpret::Scalar;
use rustc_middle::mir::visit::NonUseContext::VarDebugInfo;
use rustc_middle::mir::visit::{PlaceContext, Visitor};
@@ -12,8 +12,7 @@ use rustc_middle::mir::{
ProjectionElem, RuntimePhase, Rvalue, SourceScope, Statement, StatementKind, Terminator,
TerminatorKind, UnOp, START_BLOCK,
};
-use rustc_middle::ty::fold::BottomUpFolder;
-use rustc_middle::ty::{self, InstanceDef, ParamEnv, Ty, TyCtxt, TypeFoldable, TypeVisitable};
+use rustc_middle::ty::{self, InstanceDef, ParamEnv, Ty, TyCtxt, TypeVisitable};
use rustc_mir_dataflow::impls::MaybeStorageLive;
use rustc_mir_dataflow::storage::always_storage_live_locals;
use rustc_mir_dataflow::{Analysis, ResultsCursor};
@@ -46,8 +45,11 @@ impl<'tcx> MirPass<'tcx> for Validator {
return;
}
let def_id = body.source.def_id();
- let param_env = tcx.param_env(def_id);
let mir_phase = self.mir_phase;
+ let param_env = match mir_phase.reveal() {
+ Reveal::UserFacing => tcx.param_env(def_id),
+ Reveal::All => tcx.param_env_reveal_all_normalized(def_id),
+ };
let always_live_locals = always_storage_live_locals(body);
let storage_liveness = MaybeStorageLive::new(always_live_locals)
@@ -70,44 +72,6 @@ impl<'tcx> MirPass<'tcx> for Validator {
}
}
-/// Returns whether the two types are equal up to lifetimes.
-/// All lifetimes, including higher-ranked ones, get ignored for this comparison.
-/// (This is unlike the `erasing_regions` methods, which keep higher-ranked lifetimes for soundness reasons.)
-///
-/// The point of this function is to approximate "equal up to subtyping". However,
-/// the approximation is incorrect as variance is ignored.
-pub fn equal_up_to_regions<'tcx>(
- tcx: TyCtxt<'tcx>,
- param_env: ParamEnv<'tcx>,
- src: Ty<'tcx>,
- dest: Ty<'tcx>,
-) -> bool {
- // Fast path.
- if src == dest {
- return true;
- }
-
- // Normalize lifetimes away on both sides, then compare.
- let normalize = |ty: Ty<'tcx>| {
- tcx.try_normalize_erasing_regions(param_env, ty).unwrap_or(ty).fold_with(
- &mut BottomUpFolder {
- tcx,
- // FIXME: We erase all late-bound lifetimes, but this is not fully correct.
- // If you have a type like `<for<'a> fn(&'a u32) as SomeTrait>::Assoc`,
- // this is not necessarily equivalent to `<fn(&'static u32) as SomeTrait>::Assoc`,
- // since one may have an `impl SomeTrait for fn(&32)` and
- // `impl SomeTrait for fn(&'static u32)` at the same time which
- // specify distinct values for Assoc. (See also #56105)
- lt_op: |_| tcx.lifetimes.re_erased,
- // Leave consts and types unchanged.
- ct_op: |ct| ct,
- ty_op: |ty| ty,
- },
- )
- };
- tcx.infer_ctxt().build().can_eq(param_env, normalize(src), normalize(dest)).is_ok()
-}
-
struct TypeChecker<'a, 'tcx> {
when: &'a str,
body: &'a Body<'tcx>,
@@ -121,6 +85,7 @@ struct TypeChecker<'a, 'tcx> {
}
impl<'a, 'tcx> TypeChecker<'a, 'tcx> {
+ #[track_caller]
fn fail(&self, location: Location, msg: impl AsRef<str>) {
let span = self.body.source_info(location).span;
// We use `delay_span_bug` as we might see broken MIR when other errors have already
@@ -183,22 +148,7 @@ impl<'a, 'tcx> TypeChecker<'a, 'tcx> {
return true;
}
- // Normalize projections and things like that.
- // Type-changing assignments can happen when subtyping is used. While
- // all normal lifetimes are erased, higher-ranked types with their
- // late-bound lifetimes are still around and can lead to type
- // differences. So we compare ignoring lifetimes.
-
- // First, try with reveal_all. This might not work in some cases, as the predicates
- // can be cleared in reveal_all mode. We try the reveal first anyways as it is used
- // by some other passes like inlining as well.
- let param_env = self.param_env.with_reveal_all_normalized(self.tcx);
- if equal_up_to_regions(self.tcx, param_env, src, dest) {
- return true;
- }
-
- // If this fails, we can try it without the reveal.
- equal_up_to_regions(self.tcx, self.param_env, src, dest)
+ crate::util::is_subtype(self.tcx, self.param_env, src, dest)
}
}
@@ -281,12 +231,12 @@ impl<'a, 'tcx> Visitor<'tcx> for TypeChecker<'a, 'tcx> {
let check_equal = |this: &Self, location, f_ty| {
if !this.mir_assign_valid_types(ty, f_ty) {
this.fail(
- location,
- format!(
- "Field projection `{:?}.{:?}` specified type `{:?}`, but actual type is `{:?}`",
- parent, f, ty, f_ty
+ location,
+ format!(
+ "Field projection `{:?}.{:?}` specified type `{:?}`, but actual type is `{:?}`",
+ parent, f, ty, f_ty
+ )
)
- )
}
};
diff --git a/compiler/rustc_const_eval/src/util/aggregate.rs b/compiler/rustc_const_eval/src/util/aggregate.rs
index 180a40043..c43de3368 100644
--- a/compiler/rustc_const_eval/src/util/aggregate.rs
+++ b/compiler/rustc_const_eval/src/util/aggregate.rs
@@ -9,10 +9,11 @@ use std::iter::TrustedLen;
/// Expand `lhs = Rvalue::Aggregate(kind, operands)` into assignments to the fields.
///
/// Produces something like
-///
+/// ```ignore (illustrative)
/// (lhs as Variant).field0 = arg0; // We only have a downcast if this is an enum
/// (lhs as Variant).field1 = arg1;
/// discriminant(lhs) = variant_index; // If lhs is an enum or generator.
+/// ```
pub fn expand_aggregate<'tcx>(
orig_lhs: Place<'tcx>,
operands: impl Iterator<Item = (Operand<'tcx>, Ty<'tcx>)> + TrustedLen,
diff --git a/compiler/rustc_const_eval/src/util/call_kind.rs b/compiler/rustc_const_eval/src/util/call_kind.rs
index af9d83f06..b38a6c551 100644
--- a/compiler/rustc_const_eval/src/util/call_kind.rs
+++ b/compiler/rustc_const_eval/src/util/call_kind.rs
@@ -3,7 +3,7 @@
//! context.
use rustc_hir::def_id::DefId;
-use rustc_hir::lang_items::LangItemGroup;
+use rustc_hir::{lang_items, LangItem};
use rustc_middle::ty::subst::SubstsRef;
use rustc_middle::ty::{self, AssocItemContainer, DefIdTree, Instance, ParamEnv, Ty, TyCtxt};
use rustc_span::symbol::Ident;
@@ -26,7 +26,7 @@ impl CallDesugaringKind {
match self {
Self::ForLoopIntoIter => tcx.get_diagnostic_item(sym::IntoIterator).unwrap(),
Self::QuestionBranch | Self::TryBlockFromOutput => {
- tcx.lang_items().try_trait().unwrap()
+ tcx.require_lang_item(LangItem::Try, None)
}
Self::QuestionFromResidual => tcx.get_diagnostic_item(sym::FromResidual).unwrap(),
}
@@ -74,22 +74,24 @@ pub fn call_kind<'tcx>(
}
});
- let fn_call = parent
- .and_then(|p| tcx.lang_items().group(LangItemGroup::Fn).iter().find(|did| **did == p));
+ let fn_call = parent.and_then(|p| {
+ lang_items::FN_TRAITS.iter().filter_map(|&l| tcx.lang_items().get(l)).find(|&id| id == p)
+ });
- let operator = (!from_hir_call)
- .then(|| parent)
- .flatten()
- .and_then(|p| tcx.lang_items().group(LangItemGroup::Op).iter().find(|did| **did == p));
+ let operator = if !from_hir_call && let Some(p) = parent {
+ lang_items::OPERATORS.iter().filter_map(|&l| tcx.lang_items().get(l)).find(|&id| id == p)
+ } else {
+ None
+ };
let is_deref = !from_hir_call && tcx.is_diagnostic_item(sym::deref_method, method_did);
// Check for a 'special' use of 'self' -
// an FnOnce call, an operator (e.g. `<<`), or a
// deref coercion.
- let kind = if let Some(&trait_id) = fn_call {
+ let kind = if let Some(trait_id) = fn_call {
Some(CallKind::FnCall { fn_trait_id: trait_id, self_ty: method_substs.type_at(0) })
- } else if let Some(&trait_id) = operator {
+ } else if let Some(trait_id) = operator {
Some(CallKind::Operator { self_arg, trait_id, self_ty: method_substs.type_at(0) })
} else if is_deref {
let deref_target = tcx.get_diagnostic_item(sym::deref_target).and_then(|deref_target| {
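
The rewritten lookup walks a fixed list of lang items (`FN_TRAITS`, `OPERATORS`), drops the ones that are not registered, and searches for the one equal to the parent `DefId`. Outside the compiler the same iterator shape looks like the sketch below; the names and `u32` ids are illustrative stand-ins, not rustc APIs.

```rust
// Walk a fixed list of optional ids, skip the unregistered ones (None),
// and find the one equal to `parent` -- mirroring
// `FN_TRAITS.iter().filter_map(|&l| lang_items.get(l)).find(|&id| id == p)`.
fn find_matching(parent: u32, candidates: &[Option<u32>]) -> Option<u32> {
    candidates.iter().filter_map(|&c| c).find(|&id| id == parent)
}

fn main() {
    let fn_traits = [Some(10), None, Some(12)];
    assert_eq!(find_matching(12, &fn_traits), Some(12));
    assert_eq!(find_matching(99, &fn_traits), None);
}
```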
diff --git a/compiler/rustc_const_eval/src/util/compare_types.rs b/compiler/rustc_const_eval/src/util/compare_types.rs
new file mode 100644
index 000000000..be786569c
--- /dev/null
+++ b/compiler/rustc_const_eval/src/util/compare_types.rs
@@ -0,0 +1,63 @@
+//! Routines to check for relations between fully inferred types.
+//!
+//! FIXME: Move this to a more general place. The utility of this extends to
+//! other areas of the compiler as well.
+
+use rustc_infer::infer::{DefiningAnchor, TyCtxtInferExt};
+use rustc_infer::traits::ObligationCause;
+use rustc_middle::ty::{ParamEnv, Ty, TyCtxt};
+use rustc_trait_selection::traits::ObligationCtxt;
+
+/// Returns whether the two types are equal up to subtyping.
+///
+/// This is used in case we don't know the expected subtyping direction
+/// and still want to check whether anything is broken.
+pub fn is_equal_up_to_subtyping<'tcx>(
+ tcx: TyCtxt<'tcx>,
+ param_env: ParamEnv<'tcx>,
+ src: Ty<'tcx>,
+ dest: Ty<'tcx>,
+) -> bool {
+ // Fast path.
+ if src == dest {
+ return true;
+ }
+
+ // Check for subtyping in either direction.
+ is_subtype(tcx, param_env, src, dest) || is_subtype(tcx, param_env, dest, src)
+}
+
+/// Returns whether `src` is a subtype of `dest`, i.e. `src <: dest`.
+///
+/// This mostly ignores opaque types as it can be used in constraining contexts
+/// while still computing the final underlying type.
+pub fn is_subtype<'tcx>(
+ tcx: TyCtxt<'tcx>,
+ param_env: ParamEnv<'tcx>,
+ src: Ty<'tcx>,
+ dest: Ty<'tcx>,
+) -> bool {
+ if src == dest {
+ return true;
+ }
+
+ let mut builder =
+ tcx.infer_ctxt().ignoring_regions().with_opaque_type_inference(DefiningAnchor::Bubble);
+ let infcx = builder.build();
+ let ocx = ObligationCtxt::new(&infcx);
+ let cause = ObligationCause::dummy();
+ let src = ocx.normalize(&cause, param_env, src);
+ let dest = ocx.normalize(&cause, param_env, dest);
+ match ocx.sub(&cause, param_env, src, dest) {
+ Ok(()) => {}
+ Err(_) => return false,
+ };
+ let errors = ocx.select_all_or_error();
+ // With `Reveal::All`, opaque types get normalized away, with `Reveal::UserFacing`
+ // we would get unification errors because we're unable to look into opaque types,
+ // even if they're constrained in our current function.
+ //
+ // It seems very unlikely that this hides any bugs.
+ let _ = infcx.inner.borrow_mut().opaque_type_storage.take_opaque_types();
+ errors.is_empty()
+}
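
The new `is_subtype`/`is_equal_up_to_subtyping` helpers build an inference context and an `ObligationCtxt` to decide the relation; none of that machinery is reproduced here. As a plain-Rust illustration of the relation being checked, a longer-lived reference is a subtype of a shorter-lived one, so it is accepted wherever the shorter-lived type is expected:

```rust
// Standalone illustration only: `&'static str` is a subtype of `&'a str`,
// so a 'static reference can flow into a parameter with a shorter lifetime.
fn expects_short<'a>(s: &'a str) -> usize {
    s.len()
}

fn main() {
    let long_lived: &'static str = "promoted";
    // Accepted because subtyping lets the 'static reference shrink to 'a.
    assert_eq!(expects_short(long_lived), 8);
}
```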
diff --git a/compiler/rustc_const_eval/src/util/mod.rs b/compiler/rustc_const_eval/src/util/mod.rs
index 7a05cfd23..76ea5a24e 100644
--- a/compiler/rustc_const_eval/src/util/mod.rs
+++ b/compiler/rustc_const_eval/src/util/mod.rs
@@ -2,11 +2,15 @@ pub mod aggregate;
mod alignment;
mod call_kind;
pub mod collect_writes;
+mod compare_types;
mod find_self_call;
mod might_permit_raw_init;
+mod type_name;
pub use self::aggregate::expand_aggregate;
pub use self::alignment::is_disaligned;
pub use self::call_kind::{call_kind, CallDesugaringKind, CallKind};
+pub use self::compare_types::{is_equal_up_to_subtyping, is_subtype};
pub use self::find_self_call::find_self_call;
pub use self::might_permit_raw_init::might_permit_raw_init;
+pub use self::type_name::type_name;
diff --git a/compiler/rustc_const_eval/src/interpret/intrinsics/type_name.rs b/compiler/rustc_const_eval/src/util/type_name.rs
index ffdb8de5b..14c8c8802 100644
--- a/compiler/rustc_const_eval/src/interpret/intrinsics/type_name.rs
+++ b/compiler/rustc_const_eval/src/util/type_name.rs
@@ -1,10 +1,9 @@
use rustc_data_structures::intern::Interned;
use rustc_hir::def_id::CrateNum;
use rustc_hir::definitions::DisambiguatedDefPathData;
-use rustc_middle::mir::interpret::{Allocation, ConstAllocation};
use rustc_middle::ty::{
self,
- print::{with_no_verbose_constants, PrettyPrinter, Print, Printer},
+ print::{PrettyPrinter, Print, Printer},
subst::{GenericArg, GenericArgKind},
Ty, TyCtxt,
};
@@ -74,18 +73,10 @@ impl<'tcx> Printer<'tcx> for AbsolutePathPrinter<'tcx> {
}
fn print_dyn_existential(
- mut self,
- predicates: &'tcx ty::List<ty::Binder<'tcx, ty::ExistentialPredicate<'tcx>>>,
+ self,
+ predicates: &'tcx ty::List<ty::PolyExistentialPredicate<'tcx>>,
) -> Result<Self::DynExistential, Self::Error> {
- let mut first = true;
- for p in predicates {
- if !first {
- write!(self, "+")?;
- }
- first = false;
- self = p.print(self)?;
- }
- Ok(self)
+ self.pretty_print_dyn_existential(predicates)
}
fn path_crate(mut self, cnum: CrateNum) -> Result<Self::Path, Self::Error> {
@@ -179,6 +170,11 @@ impl<'tcx> PrettyPrinter<'tcx> for AbsolutePathPrinter<'tcx> {
Ok(self)
}
+
+ fn should_print_verbose(&self) -> bool {
+ // `std::any::type_name` should never print verbose type names
+ false
+ }
}
impl Write for AbsolutePathPrinter<'_> {
@@ -188,11 +184,6 @@ impl Write for AbsolutePathPrinter<'_> {
}
}
-/// Directly returns an `Allocation` containing an absolute path representation of the given type.
-pub(crate) fn alloc_type_name<'tcx>(tcx: TyCtxt<'tcx>, ty: Ty<'tcx>) -> ConstAllocation<'tcx> {
- let path = with_no_verbose_constants!(
- AbsolutePathPrinter { tcx, path: String::new() }.print_type(ty).unwrap().path
- );
- let alloc = Allocation::from_bytes_byte_aligned_immutable(path.into_bytes());
- tcx.intern_const_alloc(alloc)
+pub fn type_name<'tcx>(tcx: TyCtxt<'tcx>, ty: Ty<'tcx>) -> String {
+ AbsolutePathPrinter { tcx, path: String::new() }.print_type(ty).unwrap().path
}
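
The relocated `type_name` builds its result by driving a printer that accumulates into an owned `String` through `std::fmt::Write`. A standalone sketch of that builder pattern, with illustrative names rather than the compiler's `AbsolutePathPrinter`:

```rust
use std::fmt::{self, Write};

// A printer that appends everything it is asked to write into `path`,
// then hands the finished String back to the caller.
struct PathPrinter {
    path: String,
}

impl Write for PathPrinter {
    fn write_str(&mut self, s: &str) -> fmt::Result {
        self.path.push_str(s);
        Ok(())
    }
}

fn type_name_of(segments: &[&str]) -> String {
    let mut p = PathPrinter { path: String::new() };
    for (i, seg) in segments.iter().enumerate() {
        if i > 0 {
            p.write_str("::").unwrap();
        }
        p.write_str(seg).unwrap();
    }
    p.path
}

fn main() {
    assert_eq!(type_name_of(&["core", "option", "Option"]), "core::option::Option");
}
```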
diff --git a/compiler/rustc_data_structures/Cargo.toml b/compiler/rustc_data_structures/Cargo.toml
index 9daa21ef6..5afce15e2 100644
--- a/compiler/rustc_data_structures/Cargo.toml
+++ b/compiler/rustc_data_structures/Cargo.toml
@@ -23,9 +23,9 @@ rustc_macros = { path = "../rustc_macros" }
rustc_serialize = { path = "../rustc_serialize" }
smallvec = { version = "1.8.1", features = ["const_generics", "union", "may_dangle"] }
stable_deref_trait = "1.0.0"
-stacker = "0.1.14"
+stacker = "0.1.15"
tempfile = "3.2"
-thin-vec = "0.2.8"
+thin-vec = "0.2.9"
tracing = "0.1"
[dependencies.parking_lot]
diff --git a/compiler/rustc_data_structures/src/fingerprint.rs b/compiler/rustc_data_structures/src/fingerprint.rs
index a39178016..d98f4e43f 100644
--- a/compiler/rustc_data_structures/src/fingerprint.rs
+++ b/compiler/rustc_data_structures/src/fingerprint.rs
@@ -140,7 +140,7 @@ impl stable_hasher::StableHasherResult for Fingerprint {
}
}
-impl_stable_hash_via_hash!(Fingerprint);
+impl_stable_traits_for_trivial_type!(Fingerprint);
impl<E: Encoder> Encodable<E> for Fingerprint {
#[inline]
diff --git a/compiler/rustc_data_structures/src/functor.rs b/compiler/rustc_data_structures/src/functor.rs
index a3d3f9883..84cb417dd 100644
--- a/compiler/rustc_data_structures/src/functor.rs
+++ b/compiler/rustc_data_structures/src/functor.rs
@@ -34,43 +34,11 @@ impl<T> IdFunctor for Vec<T> {
type Inner = T;
#[inline]
- fn try_map_id<F, E>(self, mut f: F) -> Result<Self, E>
+ fn try_map_id<F, E>(self, f: F) -> Result<Self, E>
where
F: FnMut(Self::Inner) -> Result<Self::Inner, E>,
{
- struct HoleVec<T> {
- vec: Vec<mem::ManuallyDrop<T>>,
- hole: Option<usize>,
- }
-
- impl<T> Drop for HoleVec<T> {
- fn drop(&mut self) {
- unsafe {
- for (index, slot) in self.vec.iter_mut().enumerate() {
- if self.hole != Some(index) {
- mem::ManuallyDrop::drop(slot);
- }
- }
- }
- }
- }
-
- unsafe {
- let (ptr, length, capacity) = self.into_raw_parts();
- let vec = Vec::from_raw_parts(ptr.cast(), length, capacity);
- let mut hole_vec = HoleVec { vec, hole: None };
-
- for (index, slot) in hole_vec.vec.iter_mut().enumerate() {
- hole_vec.hole = Some(index);
- let original = mem::ManuallyDrop::take(slot);
- let mapped = f(original)?;
- *slot = mem::ManuallyDrop::new(mapped);
- hole_vec.hole = None;
- }
-
- mem::forget(hole_vec);
- Ok(Vec::from_raw_parts(ptr, length, capacity))
- }
+ self.into_iter().map(f).collect()
}
}
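
The hand-rolled unsafe in-place mapping is replaced by `into_iter().map(f).collect()`, which relies on `collect::<Result<Vec<_>, _>>()` short-circuiting at the first error. A standalone sketch of those semantics (not the trait itself):

```rust
// collect() into a Result stops at the first Err, which is exactly the
// behaviour the removed HoleVec-based unsafe code implemented by hand.
fn try_map_id<T, E>(v: Vec<T>, f: impl FnMut(T) -> Result<T, E>) -> Result<Vec<T>, E> {
    v.into_iter().map(f).collect()
}

fn main() {
    let ok = try_map_id(vec![1, 2, 3], |x| Ok::<_, String>(x * 10));
    assert_eq!(ok, Ok(vec![10, 20, 30]));

    let err = try_map_id(vec![1, 2, 3], |x| if x == 2 { Err("hit 2") } else { Ok(x) });
    assert_eq!(err, Err("hit 2"));
}
```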
diff --git a/compiler/rustc_data_structures/src/intern.rs b/compiler/rustc_data_structures/src/intern.rs
index 009b5d534..ba94f3776 100644
--- a/compiler/rustc_data_structures/src/intern.rs
+++ b/compiler/rustc_data_structures/src/intern.rs
@@ -4,8 +4,6 @@ use std::hash::{Hash, Hasher};
use std::ops::Deref;
use std::ptr;
-use crate::fingerprint::Fingerprint;
-
mod private {
#[derive(Clone, Copy, Debug)]
pub struct PrivateZst;
@@ -72,7 +70,7 @@ impl<'a, T: PartialOrd> PartialOrd for Interned<'a, T> {
if ptr::eq(self.0, other.0) {
Some(Ordering::Equal)
} else {
- let res = self.0.partial_cmp(&other.0);
+ let res = self.0.partial_cmp(other.0);
debug_assert_ne!(res, Some(Ordering::Equal));
res
}
@@ -86,7 +84,7 @@ impl<'a, T: Ord> Ord for Interned<'a, T> {
if ptr::eq(self.0, other.0) {
Ordering::Equal
} else {
- let res = self.0.cmp(&other.0);
+ let res = self.0.cmp(other.0);
debug_assert_ne!(res, Ordering::Equal);
res
}
@@ -110,87 +108,5 @@ where
}
}
-/// A helper trait so that `Interned` things can cache stable hashes reproducibly.
-pub trait InternedHashingContext {
- fn with_def_path_and_no_spans(&mut self, f: impl FnOnce(&mut Self));
-}
-
-/// A helper type that you can wrap round your own type in order to automatically
-/// cache the stable hash on creation and not recompute it whenever the stable hash
-/// of the type is computed.
-/// This is only done in incremental mode. You can also opt out of caching by using
-/// StableHash::ZERO for the hash, in which case the hash gets computed each time.
-/// This is useful if you have values that you intern but never (can?) use for stable
-/// hashing.
-#[derive(Copy, Clone)]
-pub struct WithStableHash<T> {
- pub internee: T,
- pub stable_hash: Fingerprint,
-}
-
-impl<T: PartialEq> PartialEq for WithStableHash<T> {
- #[inline]
- fn eq(&self, other: &Self) -> bool {
- self.internee.eq(&other.internee)
- }
-}
-
-impl<T: Eq> Eq for WithStableHash<T> {}
-
-impl<T: Ord> PartialOrd for WithStableHash<T> {
- fn partial_cmp(&self, other: &WithStableHash<T>) -> Option<Ordering> {
- Some(self.internee.cmp(&other.internee))
- }
-}
-
-impl<T: Ord> Ord for WithStableHash<T> {
- fn cmp(&self, other: &WithStableHash<T>) -> Ordering {
- self.internee.cmp(&other.internee)
- }
-}
-
-impl<T> Deref for WithStableHash<T> {
- type Target = T;
-
- #[inline]
- fn deref(&self) -> &T {
- &self.internee
- }
-}
-
-impl<T: Hash> Hash for WithStableHash<T> {
- #[inline]
- fn hash<H: Hasher>(&self, s: &mut H) {
- self.internee.hash(s)
- }
-}
-
-impl<T: HashStable<CTX>, CTX: InternedHashingContext> HashStable<CTX> for WithStableHash<T> {
- fn hash_stable(&self, hcx: &mut CTX, hasher: &mut StableHasher) {
- if self.stable_hash == Fingerprint::ZERO || cfg!(debug_assertions) {
- // No cached hash available. This can only mean that incremental is disabled.
- // We don't cache stable hashes in non-incremental mode, because they are used
- // so rarely that the performance actually suffers.
-
- // We need to build the hash as if we cached it and then hash that hash, as
- // otherwise the hashes will differ between cached and non-cached mode.
- let stable_hash: Fingerprint = {
- let mut hasher = StableHasher::new();
- hcx.with_def_path_and_no_spans(|hcx| self.internee.hash_stable(hcx, &mut hasher));
- hasher.finish()
- };
- if cfg!(debug_assertions) && self.stable_hash != Fingerprint::ZERO {
- assert_eq!(
- stable_hash, self.stable_hash,
- "cached stable hash does not match freshly computed stable hash"
- );
- }
- stable_hash.hash_stable(hcx, hasher);
- } else {
- self.stable_hash.hash_stable(hcx, hasher);
- }
- }
-}
-
#[cfg(test)]
mod tests;
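
The `partial_cmp`/`cmp` tweaks above keep the pointer-equality fast path: interned values are unique per content, so pointer equality implies value equality and the real comparison can be skipped. A standalone sketch of that fast path (not the `Interned` type itself):

```rust
use std::cmp::Ordering;

// If both references point at the same interned allocation, the answer is
// Equal without running the potentially expensive Ord comparison.
fn cmp_interned<T: Ord>(a: &T, b: &T) -> Ordering {
    if std::ptr::eq(a, b) { Ordering::Equal } else { a.cmp(b) }
}

fn main() {
    let x = String::from("abc");
    let y = String::from("abd");
    assert_eq!(cmp_interned(&x, &x), Ordering::Equal); // pointer fast path
    assert_eq!(cmp_interned(&x, &y), Ordering::Less);  // falls back to Ord
}
```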
diff --git a/compiler/rustc_data_structures/src/memmap.rs b/compiler/rustc_data_structures/src/memmap.rs
index 917416df6..3d44e17f3 100644
--- a/compiler/rustc_data_structures/src/memmap.rs
+++ b/compiler/rustc_data_structures/src/memmap.rs
@@ -36,6 +36,12 @@ impl Deref for Mmap {
#[inline]
fn deref(&self) -> &[u8] {
+ &self.0
+ }
+}
+
+impl AsRef<[u8]> for Mmap {
+ fn as_ref(&self) -> &[u8] {
&*self.0
}
}
@@ -96,13 +102,13 @@ impl Deref for MmapMut {
#[inline]
fn deref(&self) -> &[u8] {
- &*self.0
+ &self.0
}
}
impl DerefMut for MmapMut {
#[inline]
fn deref_mut(&mut self) -> &mut [u8] {
- &mut *self.0
+ &mut self.0
}
}
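
The change gives `Mmap` an `AsRef<[u8]>` impl alongside its existing `Deref` impl, so both slice method calls and generic `AsRef`-bounded callers work. A standalone sketch of that pattern; `OwnedBytes` is an illustrative stand-in, not the real `Mmap`:

```rust
use std::ops::Deref;

// A byte-buffer newtype exposing its contents through Deref (for slice
// methods) and AsRef<[u8]> (for generic callers bounded on AsRef).
struct OwnedBytes(Vec<u8>);

impl Deref for OwnedBytes {
    type Target = [u8];

    fn deref(&self) -> &[u8] {
        &self.0
    }
}

impl AsRef<[u8]> for OwnedBytes {
    fn as_ref(&self) -> &[u8] {
        &self.0
    }
}

fn takes_bytes(b: impl AsRef<[u8]>) -> usize {
    b.as_ref().len()
}

fn main() {
    let buf = OwnedBytes(vec![1, 2, 3]);
    assert_eq!(buf.len(), 3);         // via Deref
    assert_eq!(takes_bytes(&buf), 3); // via AsRef
}
```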
diff --git a/compiler/rustc_data_structures/src/owning_ref/mod.rs b/compiler/rustc_data_structures/src/owning_ref/mod.rs
index ed5e56618..980a540cc 100644
--- a/compiler/rustc_data_structures/src/owning_ref/mod.rs
+++ b/compiler/rustc_data_structures/src/owning_ref/mod.rs
@@ -899,25 +899,25 @@ unsafe impl<O, T: ?Sized> StableAddress for OwningRef<O, T> {}
impl<O, T: ?Sized> AsRef<T> for OwningRef<O, T> {
fn as_ref(&self) -> &T {
- &*self
+ self
}
}
impl<O, T: ?Sized> AsRef<T> for OwningRefMut<O, T> {
fn as_ref(&self) -> &T {
- &*self
+ self
}
}
impl<O, T: ?Sized> AsMut<T> for OwningRefMut<O, T> {
fn as_mut(&mut self) -> &mut T {
- &mut *self
+ self
}
}
impl<O, T: ?Sized> Borrow<T> for OwningRef<O, T> {
fn borrow(&self) -> &T {
- &*self
+ self
}
}
@@ -1021,7 +1021,7 @@ where
T: PartialEq,
{
fn eq(&self, other: &Self) -> bool {
- (&*self as &T).eq(&*other as &T)
+ self.deref().eq(other.deref())
}
}
@@ -1032,7 +1032,7 @@ where
T: PartialOrd,
{
fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
- (&*self as &T).partial_cmp(&*other as &T)
+ self.deref().partial_cmp(other.deref())
}
}
@@ -1041,7 +1041,7 @@ where
T: Ord,
{
fn cmp(&self, other: &Self) -> Ordering {
- (&*self as &T).cmp(&*other as &T)
+ self.deref().cmp(other.deref())
}
}
@@ -1050,7 +1050,7 @@ where
T: Hash,
{
fn hash<H: Hasher>(&self, state: &mut H) {
- (&*self as &T).hash(state);
+ self.deref().hash(state);
}
}
@@ -1059,7 +1059,7 @@ where
T: PartialEq,
{
fn eq(&self, other: &Self) -> bool {
- (&*self as &T).eq(&*other as &T)
+ self.deref().eq(other.deref())
}
}
@@ -1070,7 +1070,7 @@ where
T: PartialOrd,
{
fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
- (&*self as &T).partial_cmp(&*other as &T)
+ self.deref().partial_cmp(other.deref())
}
}
@@ -1079,7 +1079,7 @@ where
T: Ord,
{
fn cmp(&self, other: &Self) -> Ordering {
- (&*self as &T).cmp(&*other as &T)
+ self.deref().cmp(other.deref())
}
}
@@ -1088,7 +1088,7 @@ where
T: Hash,
{
fn hash<H: Hasher>(&self, state: &mut H) {
- (&*self as &T).hash(state);
+ self.deref().hash(state);
}
}
diff --git a/compiler/rustc_data_structures/src/profiling.rs b/compiler/rustc_data_structures/src/profiling.rs
index ba1960805..aa7a01eed 100644
--- a/compiler/rustc_data_structures/src/profiling.rs
+++ b/compiler/rustc_data_structures/src/profiling.rs
@@ -192,7 +192,7 @@ impl SelfProfilerRef {
F: for<'a> FnOnce(&'a SelfProfiler) -> TimingGuard<'a>,
{
let profiler = profiler_ref.profiler.as_ref().unwrap();
- f(&**profiler)
+ f(profiler)
}
if self.event_filter_mask.contains(event_filter) {
@@ -466,7 +466,7 @@ impl SelfProfilerRef {
pub fn with_profiler(&self, f: impl FnOnce(&SelfProfiler)) {
if let Some(profiler) = &self.profiler {
- f(&profiler)
+ f(profiler)
}
}
@@ -733,7 +733,7 @@ impl Drop for VerboseTimingGuard<'_> {
if let Some((start_time, start_rss, ref message)) = self.start_and_message {
let end_rss = get_resident_set_size();
let dur = start_time.elapsed();
- print_time_passes_entry(&message, dur, start_rss, end_rss);
+ print_time_passes_entry(message, dur, start_rss, end_rss);
}
}
}
diff --git a/compiler/rustc_data_structures/src/sorted_map.rs b/compiler/rustc_data_structures/src/sorted_map.rs
index fe257e102..d13313dfd 100644
--- a/compiler/rustc_data_structures/src/sorted_map.rs
+++ b/compiler/rustc_data_structures/src/sorted_map.rs
@@ -1,4 +1,4 @@
-use crate::stable_hasher::{HashStable, StableHasher};
+use crate::stable_hasher::{HashStable, StableHasher, StableOrd};
use std::borrow::Borrow;
use std::cmp::Ordering;
use std::iter::FromIterator;
@@ -10,8 +10,8 @@ mod index_map;
pub use index_map::SortedIndexMultiMap;
/// `SortedMap` is a data structure with similar characteristics as BTreeMap but
-/// slightly different trade-offs: lookup, insertion, and removal are *O*(log(*n*))
-/// and elements can be iterated in order cheaply.
+/// slightly different trade-offs: lookup is *O*(log(*n*)), insertion and removal
+/// are *O*(*n*) but elements can be iterated in order cheaply.
///
/// `SortedMap` can be faster than a `BTreeMap` for small sizes (<50) since it
/// stores data in a more compact way. It also supports accessing contiguous
@@ -308,7 +308,7 @@ impl<K: Ord, V> FromIterator<(K, V)> for SortedMap<K, V> {
}
}
-impl<K: HashStable<CTX>, V: HashStable<CTX>, CTX> HashStable<CTX> for SortedMap<K, V> {
+impl<K: HashStable<CTX> + StableOrd, V: HashStable<CTX>, CTX> HashStable<CTX> for SortedMap<K, V> {
#[inline]
fn hash_stable(&self, ctx: &mut CTX, hasher: &mut StableHasher) {
self.data.hash_stable(ctx, hasher);
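
The doc correction above is the point of this hunk: in a sorted-`Vec` map, lookup is *O*(log *n*) but insertion and removal are *O*(*n*) because the tail of the vector must shift. A minimal standalone sketch of why, assuming a simplified map (not the real `SortedMap`):

```rust
// Minimal sorted-Vec map: lookup binary-searches (O(log n)); insertion may
// shift every later element one slot to the right (O(n)).
struct MiniSortedMap<K: Ord, V> {
    data: Vec<(K, V)>,
}

impl<K: Ord, V> MiniSortedMap<K, V> {
    fn new() -> Self {
        MiniSortedMap { data: Vec::new() }
    }

    fn get(&self, key: &K) -> Option<&V> {
        self.data
            .binary_search_by(|(k, _)| k.cmp(key))
            .ok()
            .map(|i| &self.data[i].1)
    }

    fn insert(&mut self, key: K, value: V) {
        match self.data.binary_search_by(|(k, _)| k.cmp(&key)) {
            Ok(i) => self.data[i].1 = value,
            // Vec::insert is the O(n) step: it moves the tail to make room.
            Err(i) => self.data.insert(i, (key, value)),
        }
    }
}

fn main() {
    let mut m = MiniSortedMap::new();
    m.insert(3, "c");
    m.insert(1, "a");
    m.insert(2, "b");
    assert_eq!(m.get(&2), Some(&"b"));
    assert_eq!(m.get(&9), None);
}
```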
diff --git a/compiler/rustc_data_structures/src/sorted_map/index_map.rs b/compiler/rustc_data_structures/src/sorted_map/index_map.rs
index 0ec32dc43..c2f0ae328 100644
--- a/compiler/rustc_data_structures/src/sorted_map/index_map.rs
+++ b/compiler/rustc_data_structures/src/sorted_map/index_map.rs
@@ -120,13 +120,20 @@ where
self.items.hash(hasher)
}
}
+
impl<I: Idx, K, V, C> HashStable<C> for SortedIndexMultiMap<I, K, V>
where
K: HashStable<C>,
V: HashStable<C>,
{
fn hash_stable(&self, ctx: &mut C, hasher: &mut StableHasher) {
- self.items.hash_stable(ctx, hasher)
+ let SortedIndexMultiMap {
+ items,
+ // We can ignore this field because it is not observable from the outside.
+ idx_sorted_by_item_key: _,
+ } = self;
+
+ items.hash_stable(ctx, hasher)
}
}
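
The rewritten `hash_stable` destructures the struct exhaustively so that adding a new field later breaks this impl and forces a decision about whether the field should be hashed. A general sketch of that defensive pattern using std `Hash` as a stand-in for `HashStable`; the names below are illustrative:

```rust
use std::hash::{Hash, Hasher};

struct IndexedItems {
    items: Vec<u32>,
    // Derived cache; deliberately excluded from hashing.
    idx_sorted_by_key: Vec<usize>,
}

impl Hash for IndexedItems {
    fn hash<H: Hasher>(&self, state: &mut H) {
        // Exhaustive destructuring: if a field is added later, this line
        // stops compiling until the new field is explicitly handled.
        let IndexedItems { items, idx_sorted_by_key: _ } = self;
        items.hash(state);
    }
}

fn main() {
    use std::collections::hash_map::DefaultHasher;
    let v = IndexedItems { items: vec![1, 2, 3], idx_sorted_by_key: vec![0, 1, 2] };
    let mut h = DefaultHasher::new();
    v.hash(&mut h);
    println!("hash = {:x}", h.finish());
}
```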
diff --git a/compiler/rustc_data_structures/src/stable_hasher.rs b/compiler/rustc_data_structures/src/stable_hasher.rs
index ce8591734..1a728f82f 100644
--- a/compiler/rustc_data_structures/src/stable_hasher.rs
+++ b/compiler/rustc_data_structures/src/stable_hasher.rs
@@ -219,7 +219,35 @@ pub trait ToStableHashKey<HCX> {
fn to_stable_hash_key(&self, hcx: &HCX) -> Self::KeyType;
}
-/// Implement HashStable by just calling `Hash::hash()`.
+/// Trait for marking a type as having a sort order that is
+/// stable across compilation session boundaries. More formally:
+///
+/// ```txt
+/// Ord::cmp(a1, b1) == Ord::cmp(a2, b2)
+/// where a2 = decode(encode(a1, context1), context2)
+/// b2 = decode(encode(b1, context1), context2)
+/// ```
+///
+/// i.e. the result of `Ord::cmp` is not influenced by encoding
+/// the values in one session and then decoding them in another
+/// session.
+///
+/// This is trivially true for types where encoding and decoding
+/// don't change the bytes of the values that are used during
+/// comparison and comparison only depends on these bytes (as
+/// opposed to some non-local state). Examples are u32, String,
+/// Path, etc.
+///
+/// But it is not true for:
+/// - `*const T` and `*mut T` because the values of these pointers
+/// will change between sessions.
+/// - `DefIndex`, `CrateNum`, `LocalDefId`, because their concrete
+/// values depend on state that might be different between
+/// compilation sessions.
+pub unsafe trait StableOrd: Ord {}
+
+/// Implement HashStable by just calling `Hash::hash()`. Also implement `StableOrd` for the type since
+/// that has the same requirements.
///
/// **WARNING** This is only valid for types that *really* don't need any context for fingerprinting.
/// But it is easy to misuse this macro (see [#96013](https://github.com/rust-lang/rust/issues/96013)
@@ -227,7 +255,7 @@ pub trait ToStableHashKey<HCX> {
/// here in this module.
///
/// Use `#[derive(HashStable_Generic)]` instead.
-macro_rules! impl_stable_hash_via_hash {
+macro_rules! impl_stable_traits_for_trivial_type {
($t:ty) => {
impl<CTX> $crate::stable_hasher::HashStable<CTX> for $t {
#[inline]
@@ -235,26 +263,28 @@ macro_rules! impl_stable_hash_via_hash {
::std::hash::Hash::hash(self, hasher);
}
}
+
+ unsafe impl $crate::stable_hasher::StableOrd for $t {}
};
}
-impl_stable_hash_via_hash!(i8);
-impl_stable_hash_via_hash!(i16);
-impl_stable_hash_via_hash!(i32);
-impl_stable_hash_via_hash!(i64);
-impl_stable_hash_via_hash!(isize);
+impl_stable_traits_for_trivial_type!(i8);
+impl_stable_traits_for_trivial_type!(i16);
+impl_stable_traits_for_trivial_type!(i32);
+impl_stable_traits_for_trivial_type!(i64);
+impl_stable_traits_for_trivial_type!(isize);
-impl_stable_hash_via_hash!(u8);
-impl_stable_hash_via_hash!(u16);
-impl_stable_hash_via_hash!(u32);
-impl_stable_hash_via_hash!(u64);
-impl_stable_hash_via_hash!(usize);
+impl_stable_traits_for_trivial_type!(u8);
+impl_stable_traits_for_trivial_type!(u16);
+impl_stable_traits_for_trivial_type!(u32);
+impl_stable_traits_for_trivial_type!(u64);
+impl_stable_traits_for_trivial_type!(usize);
-impl_stable_hash_via_hash!(u128);
-impl_stable_hash_via_hash!(i128);
+impl_stable_traits_for_trivial_type!(u128);
+impl_stable_traits_for_trivial_type!(i128);
-impl_stable_hash_via_hash!(char);
-impl_stable_hash_via_hash!(());
+impl_stable_traits_for_trivial_type!(char);
+impl_stable_traits_for_trivial_type!(());
impl<CTX> HashStable<CTX> for ! {
fn hash_stable(&self, _ctx: &mut CTX, _hasher: &mut StableHasher) {
@@ -366,7 +396,7 @@ impl<CTX> HashStable<CTX> for [u8] {
impl<T: HashStable<CTX>, CTX> HashStable<CTX> for Vec<T> {
#[inline]
fn hash_stable(&self, ctx: &mut CTX, hasher: &mut StableHasher) {
- (&self[..]).hash_stable(ctx, hasher);
+ self[..].hash_stable(ctx, hasher);
}
}
@@ -399,13 +429,13 @@ where
}
}
-impl<A, CTX> HashStable<CTX> for SmallVec<[A; 1]>
+impl<A, const N: usize, CTX> HashStable<CTX> for SmallVec<[A; N]>
where
A: HashStable<CTX>,
{
#[inline]
fn hash_stable(&self, ctx: &mut CTX, hasher: &mut StableHasher) {
- (&self[..]).hash_stable(ctx, hasher);
+ self[..].hash_stable(ctx, hasher);
}
}
@@ -440,10 +470,14 @@ impl<CTX> HashStable<CTX> for str {
impl<CTX> HashStable<CTX> for String {
#[inline]
fn hash_stable(&self, hcx: &mut CTX, hasher: &mut StableHasher) {
- (&self[..]).hash_stable(hcx, hasher);
+ self[..].hash_stable(hcx, hasher);
}
}
+// Safety: String comparison only depends on their contents and the
+// contents are not changed by (de-)serialization.
+unsafe impl StableOrd for String {}
+
impl<HCX> ToStableHashKey<HCX> for String {
type KeyType = String;
#[inline]
@@ -459,6 +493,9 @@ impl<CTX> HashStable<CTX> for bool {
}
}
+// Safety: sort order of bools is not changed by (de-)serialization.
+unsafe impl StableOrd for bool {}
+
impl<T, CTX> HashStable<CTX> for Option<T>
where
T: HashStable<CTX>,
@@ -474,6 +511,9 @@ where
}
}
+// Safety: the Option wrapper does not add instability to comparison.
+unsafe impl<T: StableOrd> StableOrd for Option<T> {}
+
impl<T1, T2, CTX> HashStable<CTX> for Result<T1, T2>
where
T1: HashStable<CTX>,
@@ -550,8 +590,8 @@ where
}
}
-impl_stable_hash_via_hash!(::std::path::Path);
-impl_stable_hash_via_hash!(::std::path::PathBuf);
+impl_stable_traits_for_trivial_type!(::std::path::Path);
+impl_stable_traits_for_trivial_type!(::std::path::PathBuf);
impl<K, V, R, HCX> HashStable<HCX> for ::std::collections::HashMap<K, V, R>
where
@@ -584,27 +624,26 @@ where
impl<K, V, HCX> HashStable<HCX> for ::std::collections::BTreeMap<K, V>
where
- K: ToStableHashKey<HCX>,
+ K: HashStable<HCX> + StableOrd,
V: HashStable<HCX>,
{
fn hash_stable(&self, hcx: &mut HCX, hasher: &mut StableHasher) {
- stable_hash_reduce(hcx, hasher, self.iter(), self.len(), |hasher, hcx, (key, value)| {
- let key = key.to_stable_hash_key(hcx);
- key.hash_stable(hcx, hasher);
- value.hash_stable(hcx, hasher);
- });
+ self.len().hash_stable(hcx, hasher);
+ for entry in self.iter() {
+ entry.hash_stable(hcx, hasher);
+ }
}
}
impl<K, HCX> HashStable<HCX> for ::std::collections::BTreeSet<K>
where
- K: ToStableHashKey<HCX>,
+ K: HashStable<HCX> + StableOrd,
{
fn hash_stable(&self, hcx: &mut HCX, hasher: &mut StableHasher) {
- stable_hash_reduce(hcx, hasher, self.iter(), self.len(), |hasher, hcx, key| {
- let key = key.to_stable_hash_key(hcx);
- key.hash_stable(hcx, hasher);
- });
+ self.len().hash_stable(hcx, hasher);
+ for entry in self.iter() {
+ entry.hash_stable(hcx, hasher);
+ }
}
}
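
Two related changes in this file: the renamed macro now implements both `HashStable` and the new `StableOrd` marker for "trivial" types, and `BTreeMap`/`BTreeSet` hashing switches from converting each key with `to_stable_hash_key` to hashing the length followed by the entries in iteration order, which is sound once the key type's ordering is session-stable. A plain-Rust sketch of both ideas, using std `Hash` as a stand-in for `HashStable`; all names here are stand-ins, not the compiler's:

```rust
use std::collections::BTreeMap;
use std::hash::{Hash, Hasher};

// Stand-in for `StableOrd`: a marker promising the Ord ordering survives
// (de-)serialization across sessions. `unsafe` because it cannot be checked.
unsafe trait StableOrdLike: Ord {}

// Stand-in for `impl_stable_traits_for_trivial_type!`: one macro implements
// the marker for every listed trivial type.
macro_rules! impl_stable_ord_for_trivial_type {
    ($($t:ty),*) => {
        $(unsafe impl StableOrdLike for $t {})*
    };
}

impl_stable_ord_for_trivial_type!(u8, u32, i64, char, bool, String);

// With a stable key ordering, a BTreeMap can be hashed as its length followed
// by its entries in iteration order, with no per-entry key conversion.
fn hash_btreemap<K, V, H>(map: &BTreeMap<K, V>, hasher: &mut H)
where
    K: Hash + StableOrdLike,
    V: Hash,
    H: Hasher,
{
    map.len().hash(hasher);
    for entry in map.iter() {
        entry.hash(hasher);
    }
}

fn main() {
    use std::collections::hash_map::DefaultHasher;
    let mut map = BTreeMap::new();
    map.insert(String::from("b"), 2u32);
    map.insert(String::from("a"), 1u32);
    let mut h = DefaultHasher::new();
    hash_btreemap(&map, &mut h);
    println!("hash = {:x}", h.finish());
}
```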
diff --git a/compiler/rustc_data_structures/src/sync.rs b/compiler/rustc_data_structures/src/sync.rs
index 9c0fb8265..e4f47b22a 100644
--- a/compiler/rustc_data_structures/src/sync.rs
+++ b/compiler/rustc_data_structures/src/sync.rs
@@ -201,7 +201,7 @@ cfg_if! {
#[inline(always)]
fn deref(&self) -> &T {
- &*self.0
+ &self.0
}
}
@@ -410,6 +410,7 @@ impl<T> Lock<T> {
#[cfg(parallel_compiler)]
#[inline(always)]
+ #[track_caller]
pub fn lock(&self) -> LockGuard<'_, T> {
if ERROR_CHECKING {
self.0.try_lock().expect("lock was already held")
@@ -420,21 +421,25 @@ impl<T> Lock<T> {
#[cfg(not(parallel_compiler))]
#[inline(always)]
+ #[track_caller]
pub fn lock(&self) -> LockGuard<'_, T> {
self.0.borrow_mut()
}
#[inline(always)]
+ #[track_caller]
pub fn with_lock<F: FnOnce(&mut T) -> R, R>(&self, f: F) -> R {
f(&mut *self.lock())
}
#[inline(always)]
+ #[track_caller]
pub fn borrow(&self) -> LockGuard<'_, T> {
self.lock()
}
#[inline(always)]
+ #[track_caller]
pub fn borrow_mut(&self) -> LockGuard<'_, T> {
self.lock()
}
@@ -476,6 +481,7 @@ impl<T> RwLock<T> {
#[cfg(not(parallel_compiler))]
#[inline(always)]
+ #[track_caller]
pub fn read(&self) -> ReadGuard<'_, T> {
self.0.borrow()
}
@@ -491,6 +497,7 @@ impl<T> RwLock<T> {
}
#[inline(always)]
+ #[track_caller]
pub fn with_read_lock<F: FnOnce(&T) -> R, R>(&self, f: F) -> R {
f(&*self.read())
}
@@ -509,6 +516,7 @@ impl<T> RwLock<T> {
#[cfg(not(parallel_compiler))]
#[inline(always)]
+ #[track_caller]
pub fn write(&self) -> WriteGuard<'_, T> {
self.0.borrow_mut()
}
@@ -524,16 +532,19 @@ impl<T> RwLock<T> {
}
#[inline(always)]
+ #[track_caller]
pub fn with_write_lock<F: FnOnce(&mut T) -> R, R>(&self, f: F) -> R {
f(&mut *self.write())
}
#[inline(always)]
+ #[track_caller]
pub fn borrow(&self) -> ReadGuard<'_, T> {
self.read()
}
#[inline(always)]
+ #[track_caller]
pub fn borrow_mut(&self) -> WriteGuard<'_, T> {
self.write()
}
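
`#[track_caller]` is added to the lock/borrow helpers so that a "lock was already held" or `RefCell` re-borrow panic reports the caller's location instead of the wrapper function inside `sync.rs`. A standalone sketch of what the attribute does:

```rust
use std::panic::Location;

// With #[track_caller], Location::caller() (and panic messages raised here)
// refer to the call site of this helper, not to a line inside it.
#[track_caller]
fn checked_div(a: i32, b: i32) -> i32 {
    if b == 0 {
        panic!("division by zero, called from {}", Location::caller());
    }
    a / b
}

fn main() {
    println!("{}", checked_div(10, 2));
    // checked_div(1, 0) would panic and point at *this* line, not at checked_div.
}
```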
diff --git a/compiler/rustc_driver/src/lib.rs b/compiler/rustc_driver/src/lib.rs
index cfa734c7d..f06ca5a07 100644
--- a/compiler/rustc_driver/src/lib.rs
+++ b/compiler/rustc_driver/src/lib.rs
@@ -5,7 +5,9 @@
//! This API is completely unstable and subject to change.
#![doc(html_root_url = "https://doc.rust-lang.org/nightly/nightly-rustc/")]
+#![feature(is_terminal)]
#![feature(once_cell)]
+#![feature(decl_macro)]
#![recursion_limit = "256"]
#![allow(rustc::potential_query_instability)]
#![deny(rustc::untranslatable_diagnostic)]
@@ -23,10 +25,10 @@ use rustc_data_structures::sync::SeqCst;
use rustc_errors::registry::{InvalidErrorCode, Registry};
use rustc_errors::{ErrorGuaranteed, PResult};
use rustc_feature::find_gated_cfg;
+use rustc_hir::def_id::LOCAL_CRATE;
use rustc_interface::util::{self, collect_crate_types, get_codegen_backend};
use rustc_interface::{interface, Queries};
use rustc_lint::LintStore;
-use rustc_log::stdout_isatty;
use rustc_metadata::locator;
use rustc_save_analysis as save;
use rustc_save_analysis::DumpHandler;
@@ -47,7 +49,7 @@ use std::default::Default;
use std::env;
use std::ffi::OsString;
use std::fs;
-use std::io::{self, Read, Write};
+use std::io::{self, IsTerminal, Read, Write};
use std::panic::{self, catch_unwind};
use std::path::PathBuf;
use std::process::{self, Command, Stdio};
@@ -244,10 +246,8 @@ fn run_compiler(
interface::run_compiler(config, |compiler| {
let sopts = &compiler.session().opts;
if sopts.describe_lints {
- let mut lint_store = rustc_lint::new_lint_store(
- sopts.unstable_opts.no_interleave_lints,
- compiler.session().enable_internal_lints(),
- );
+ let mut lint_store =
+ rustc_lint::new_lint_store(compiler.session().enable_internal_lints());
let registered_lints =
if let Some(register_lints) = compiler.register_lints() {
register_lints(compiler.session(), &mut lint_store);
@@ -317,7 +317,7 @@ fn run_compiler(
compiler.input(),
&*expanded_crate,
*ppm,
- compiler.output_file().as_ref().map(|p| &**p),
+ compiler.output_file().as_deref(),
);
Ok(())
})?;
@@ -328,7 +328,7 @@ fn run_compiler(
compiler.input(),
&krate,
*ppm,
- compiler.output_file().as_ref().map(|p| &**p),
+ compiler.output_file().as_deref(),
);
}
trace!("finished pretty-printing");
@@ -375,17 +375,14 @@ fn run_compiler(
queries.global_ctxt()?.peek_mut().enter(|tcx| {
let result = tcx.analysis(());
if sess.opts.unstable_opts.save_analysis {
- let crate_name = queries.crate_name()?.peek().clone();
+ let crate_name = tcx.crate_name(LOCAL_CRATE);
sess.time("save_analysis", || {
save::process_crate(
tcx,
- &crate_name,
+ crate_name,
compiler.input(),
None,
- DumpHandler::new(
- compiler.output_dir().as_ref().map(|p| &**p),
- &crate_name,
- ),
+ DumpHandler::new(compiler.output_dir().as_deref(), crate_name),
)
});
}
@@ -514,7 +511,7 @@ fn handle_explain(registry: Registry, code: &str, output: ErrorOutputType) {
}
text.push('\n');
}
- if stdout_isatty() {
+ if io::stdout().is_terminal() {
show_content_with_pager(&text);
} else {
print!("{}", text);
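
This hunk replaces `rustc_log::stdout_isatty` with the std `IsTerminal` trait (unstable at the time of this patch, hence the `#![feature(is_terminal)]` gate above; it was stabilized in a later Rust release). A sketch of the same check on a toolchain where the trait is available:

```rust
use std::io::{self, IsTerminal, Write};

fn main() -> io::Result<()> {
    let text = "some long help text...";
    if io::stdout().is_terminal() {
        // Interactive terminal: a pager or colored output would be reasonable here.
        writeln!(io::stdout(), "(tty) {}", text)?;
    } else {
        // Output is redirected to a file or pipe: print plainly.
        writeln!(io::stdout(), "{}", text)?;
    }
    Ok(())
}
```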
@@ -682,7 +679,7 @@ fn print_crate_info(
let crate_types = collect_crate_types(sess, attrs);
for &style in &crate_types {
let fname =
- rustc_session::output::filename_for_input(sess, style, &id, &t_outputs);
+ rustc_session::output::filename_for_input(sess, style, id, &t_outputs);
println!("{}", fname.file_name().unwrap().to_string_lossy());
}
}
@@ -736,26 +733,58 @@ fn print_crate_info(
// Any output here interferes with Cargo's parsing of other printed output
NativeStaticLibs => {}
LinkArgs => {}
+ SplitDebuginfo => {
+ use rustc_target::spec::SplitDebuginfo::{Off, Packed, Unpacked};
+
+ for split in &[Off, Packed, Unpacked] {
+ let stable = sess.target.options.supported_split_debuginfo.contains(split);
+ let unstable_ok = sess.unstable_options();
+ if stable || unstable_ok {
+ println!("{}", split);
+ }
+ }
+ }
}
}
Compilation::Stop
}
/// Prints version information
-pub fn version(binary: &str, matches: &getopts::Matches) {
+///
+/// NOTE: this is a macro to support drivers built at a different time than the main `rustc_driver` crate.
+pub macro version($binary: literal, $matches: expr) {
+ fn unw(x: Option<&str>) -> &str {
+ x.unwrap_or("unknown")
+ }
+ $crate::version_at_macro_invocation(
+ $binary,
+ $matches,
+ unw(option_env!("CFG_VERSION")),
+ unw(option_env!("CFG_VER_HASH")),
+ unw(option_env!("CFG_VER_DATE")),
+ unw(option_env!("CFG_RELEASE")),
+ )
+}
+
+#[doc(hidden)] // use the macro instead
+pub fn version_at_macro_invocation(
+ binary: &str,
+ matches: &getopts::Matches,
+ version: &str,
+ commit_hash: &str,
+ commit_date: &str,
+ release: &str,
+) {
let verbose = matches.opt_present("verbose");
- println!("{} {}", binary, util::version_str().unwrap_or("unknown version"));
+ println!("{} {}", binary, version);
if verbose {
- fn unw(x: Option<&str>) -> &str {
- x.unwrap_or("unknown")
- }
println!("binary: {}", binary);
- println!("commit-hash: {}", unw(util::commit_hash_str()));
- println!("commit-date: {}", unw(util::commit_date_str()));
+ println!("commit-hash: {}", commit_hash);
+ println!("commit-date: {}", commit_date);
println!("host: {}", config::host_triple());
- println!("release: {}", unw(util::release_str()));
+ println!("release: {}", release);
let debug_flags = matches.opt_strs("Z");
let backend_name = debug_flags.iter().find_map(|x| x.strip_prefix("codegen-backend="));
@@ -1071,7 +1100,7 @@ pub fn handle_options(args: &[String]) -> Option<getopts::Matches> {
}
if matches.opt_present("version") {
- version("rustc", &matches);
+ version!("rustc", &matches);
return None;
}
@@ -1200,6 +1229,7 @@ pub fn report_ice(info: &panic::PanicInfo<'_>, bug_report_url: &str) {
false,
None,
false,
+ false,
));
let handler = rustc_errors::Handler::with_emitter(true, None, emitter);
@@ -1215,7 +1245,7 @@ pub fn report_ice(info: &panic::PanicInfo<'_>, bug_report_url: &str) {
format!("we would appreciate a bug report: {}", bug_report_url).into(),
format!(
"rustc {} running on {}",
- util::version_str().unwrap_or("unknown_version"),
+ util::version_str!().unwrap_or("unknown_version"),
config::host_triple()
)
.into(),
@@ -1305,8 +1335,8 @@ mod signal_handler {
}
}
- // When an error signal (such as SIGABRT or SIGSEGV) is delivered to the
- // process, print a stack trace and then exit.
+ /// When an error signal (such as SIGABRT or SIGSEGV) is delivered to the
+ /// process, print a stack trace and then exit.
pub(super) fn install() {
unsafe {
const ALT_STACK_SIZE: usize = libc::MINSIGSTKSZ + 64 * 1024;
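
The note in the diff explains why `version` became a macro: `option_env!("CFG_VERSION")` and friends are expanded at the invocation site, so a driver built separately from `rustc_driver` bakes in its own build-time values. A `macro_rules!` sketch of the same idea (the patch itself uses the unstable `decl_macro` form; the macro and binary names below are illustrative):

```rust
// option_env! is expanded at the macro's *invocation* site, so each driver
// binary picks up the CFG_* values present in its own build environment.
macro_rules! version_banner {
    ($binary:literal) => {
        format!(
            "{} {} (commit {})",
            $binary,
            option_env!("CFG_VERSION").unwrap_or("unknown"),
            option_env!("CFG_VER_HASH").unwrap_or("unknown"),
        )
    };
}

fn main() {
    println!("{}", version_banner!("mytool"));
}
```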
diff --git a/compiler/rustc_error_codes/src/error_codes.rs b/compiler/rustc_error_codes/src/error_codes.rs
index 1e86d1596..31a709c36 100644
--- a/compiler/rustc_error_codes/src/error_codes.rs
+++ b/compiler/rustc_error_codes/src/error_codes.rs
@@ -494,6 +494,7 @@ E0786: include_str!("./error_codes/E0786.md"),
E0787: include_str!("./error_codes/E0787.md"),
E0788: include_str!("./error_codes/E0788.md"),
E0790: include_str!("./error_codes/E0790.md"),
+E0791: include_str!("./error_codes/E0791.md"),
;
// E0006, // merged with E0005
// E0008, // cannot bind by-move into a pattern guard
diff --git a/compiler/rustc_error_codes/src/error_codes/E0207.md b/compiler/rustc_error_codes/src/error_codes/E0207.md
index 8a7923ac9..95e7c9fc7 100644
--- a/compiler/rustc_error_codes/src/error_codes/E0207.md
+++ b/compiler/rustc_error_codes/src/error_codes/E0207.md
@@ -1,4 +1,5 @@
-A type parameter that is specified for `impl` is not constrained.
+A type, const or lifetime parameter that is specified for `impl` is not
+constrained.
Erroneous code example:
@@ -14,8 +15,8 @@ impl<T: Default> Foo {
}
```
-Any type parameter of an `impl` must meet at least one of
-the following criteria:
+Any type or const parameter of an `impl` must meet at least one of the
+following criteria:
- it appears in the _implementing type_ of the impl, e.g. `impl<T> Foo<T>`
- for a trait impl, it appears in the _implemented trait_, e.g.
@@ -23,6 +24,9 @@ the following criteria:
- it is bound as an associated type, e.g. `impl<T, U> SomeTrait for T
where T: AnotherTrait<AssocType=U>`
+Any unconstrained lifetime parameter of an `impl` is not supported if the
+lifetime parameter is used by an associated type.
+
### Error example 1
Suppose we have a struct `Foo` and we would like to define some methods for it.
@@ -32,7 +36,6 @@ The problem is that the parameter `T` does not appear in the implementing type
(`Foo`) of the impl. In this case, we can fix the error by moving the type
parameter from the `impl` to the method `get`:
-
```
struct Foo;
@@ -128,6 +131,70 @@ impl<T: Default> Maker<Foo<T>> for FooMaker {
}
```
+### Error example 3
+
+Suppose we have a struct `Foo` and we would like to define some methods for it.
+The following code example has a definition which leads to a compiler error:
+
+```compile_fail,E0207
+struct Foo;
+
+impl<const T: i32> Foo {
+ // error: the const parameter `T` is not constrained by the impl trait, self
+ // type, or predicates [E0207]
+ fn get(&self) -> i32 {
+ i32::default()
+ }
+}
+```
+
+The problem is that the const parameter `T` does not appear in the implementing
+type (`Foo`) of the impl. In this case, we can fix the error by moving the
+const parameter from the `impl` to the method `get`:
+
+
+```
+struct Foo;
+
+// Move the const parameter from the impl to the method
+impl Foo {
+ fn get<const T: i32>(&self) -> i32 {
+ i32::default()
+ }
+}
+```
+
+### Error example 4
+
+Suppose we have a struct `Foo` and a struct `Bar` that uses lifetime `'a`. We
+would like to implement trait `Contains` for `Foo`. The trait `Contains` has
+the associated type `B`. The following code example has a definition which
+leads to a compiler error:
+
+```compile_fail,E0207
+struct Foo;
+struct Bar<'a>;
+
+trait Contains {
+ type B;
+
+ fn get(&self) -> i32;
+}
+
+impl<'a> Contains for Foo {
+ type B = Bar<'a>;
+
+ // error: the lifetime parameter `'a` is not constrained by the impl trait,
+ // self type, or predicates [E0207]
+ fn get(&self) -> i32 {
+ i32::default()
+ }
+}
+```
+
+Please note that unconstrained lifetime parameters are not supported if they are
+being used by an associated type.
+
### Additional information
For more information, please see [RFC 447].
diff --git a/compiler/rustc_error_codes/src/error_codes/E0322.md b/compiler/rustc_error_codes/src/error_codes/E0322.md
index ccef8681d..194bbd83b 100644
--- a/compiler/rustc_error_codes/src/error_codes/E0322.md
+++ b/compiler/rustc_error_codes/src/error_codes/E0322.md
@@ -1,4 +1,5 @@
-The `Sized` trait was implemented explicitly.
+A built-in trait was implemented explicitly. All implementations of the trait
+are provided automatically by the compiler.
Erroneous code example:
diff --git a/compiler/rustc_error_codes/src/error_codes/E0382.md b/compiler/rustc_error_codes/src/error_codes/E0382.md
index d1408a062..cbc4980f8 100644
--- a/compiler/rustc_error_codes/src/error_codes/E0382.md
+++ b/compiler/rustc_error_codes/src/error_codes/E0382.md
@@ -61,7 +61,7 @@ with `#[derive(Clone)]`.
Some types have no ownership semantics at all and are trivial to duplicate. An
example is `i32` and the other number types. We don't have to call `.clone()` to
-clone them, because they are marked `Copy` in addition to `Clone`. Implicit
+clone them, because they are marked `Copy` in addition to `Clone`. Implicit
cloning is more convenient in this case. We can mark our own types `Copy` if
all their members also are marked `Copy`.
diff --git a/compiler/rustc_error_codes/src/error_codes/E0706.md b/compiler/rustc_error_codes/src/error_codes/E0706.md
index d379b8a23..fabd855a2 100644
--- a/compiler/rustc_error_codes/src/error_codes/E0706.md
+++ b/compiler/rustc_error_codes/src/error_codes/E0706.md
@@ -56,4 +56,4 @@ You might be interested in visiting the [async book] for further information.
[`async-trait` crate]: https://crates.io/crates/async-trait
[async-is-hard]: https://smallcultfollowing.com/babysteps/blog/2019/10/26/async-fn-in-traits-are-hard/
[Generic Associated Types]: https://github.com/rust-lang/rust/issues/44265
-[async book]: https://rust-lang.github.io/async-book/07_workarounds/06_async_in_traits.html
+[async book]: https://rust-lang.github.io/async-book/07_workarounds/05_async_in_traits.html
diff --git a/compiler/rustc_error_codes/src/error_codes/E0760.md b/compiler/rustc_error_codes/src/error_codes/E0760.md
index e1dcfefeb..85e5faada 100644
--- a/compiler/rustc_error_codes/src/error_codes/E0760.md
+++ b/compiler/rustc_error_codes/src/error_codes/E0760.md
@@ -1,9 +1,11 @@
+#### Note: this error code is no longer emitted by the compiler.
+
`async fn`/`impl trait` return type cannot contain a projection
or `Self` that references lifetimes from a parent scope.
Erroneous code example:
-```compile_fail,E0760,edition2018
+```compile_fail,edition2018
struct S<'a>(&'a i32);
impl<'a> S<'a> {
diff --git a/compiler/rustc_error_codes/src/error_codes/E0791.md b/compiler/rustc_error_codes/src/error_codes/E0791.md
new file mode 100644
index 000000000..61d2f511a
--- /dev/null
+++ b/compiler/rustc_error_codes/src/error_codes/E0791.md
@@ -0,0 +1,41 @@
+Static variables with the `#[linkage]` attribute within external blocks
+must have one of the following types, which are equivalent to a nullable
+pointer in C:
+
+* `*mut T` or `*const T`, where `T` may be any type.
+
+* An enumerator type with no `#[repr]` attribute and with two variants, where
+ one of the variants has no fields, and the other has a single field of one of
+ the following non-nullable types:
+ * Reference type
+ * Function pointer type
+
+ The variants can appear in either order.
+
+For example, the following declaration is invalid:
+
+```compile_fail,E0791
+#![feature(linkage)]
+
+extern "C" {
+ #[linkage = "extern_weak"]
+ static foo: i8;
+}
+```
+
+The following declarations are valid:
+
+```
+#![feature(linkage)]
+
+extern "C" {
+ #[linkage = "extern_weak"]
+ static foo: Option<unsafe extern "C" fn()>;
+
+ #[linkage = "extern_weak"]
+ static bar: Option<&'static i8>;
+
+ #[linkage = "extern_weak"]
+ static baz: *mut i8;
+}
+```
diff --git a/compiler/rustc_error_messages/Cargo.toml b/compiler/rustc_error_messages/Cargo.toml
index 9945f3379..0c705d2ec 100644
--- a/compiler/rustc_error_messages/Cargo.toml
+++ b/compiler/rustc_error_messages/Cargo.toml
@@ -9,9 +9,17 @@ edition = "2021"
fluent-bundle = "0.15.2"
fluent-syntax = "0.11"
intl-memoizer = "0.5.1"
+rustc_baked_icu_data = { path = "../rustc_baked_icu_data" }
rustc_data_structures = { path = "../rustc_data_structures" }
rustc_serialize = { path = "../rustc_serialize" }
rustc_span = { path = "../rustc_span" }
rustc_macros = { path = "../rustc_macros" }
tracing = "0.1"
unic-langid = { version = "0.9.0", features = ["macros"] }
+icu_list = "1.0.0"
+writeable = "0.5.0"
+icu_locid = "1.0.0"
+icu_provider_adapters = "1.0.0"
+
+[features]
+rustc_use_parallel_compiler = ['rustc_baked_icu_data/rustc_use_parallel_compiler']
diff --git a/compiler/rustc_error_messages/locales/en-US/borrowck.ftl b/compiler/rustc_error_messages/locales/en-US/borrowck.ftl
index 67f2156f3..2cd473322 100644
--- a/compiler/rustc_error_messages/locales/en-US/borrowck.ftl
+++ b/compiler/rustc_error_messages/locales/en-US/borrowck.ftl
@@ -24,9 +24,6 @@ borrowck_var_does_not_need_mut =
variable does not need to be mutable
.suggestion = remove this `mut`
-borrowck_const_not_used_in_type_alias =
- const parameter `{$ct}` is part of concrete type but not used in parameter list for the `impl Trait` type alias
-
borrowck_var_cannot_escape_closure =
captured variable cannot escape `FnMut` closure body
.note = `FnMut` closures only have access to their captured variables while they are executing...
@@ -58,3 +55,68 @@ borrowck_returned_lifetime_short =
borrowck_used_impl_require_static =
the used `impl` has a `'static` requirement
+
+borrowck_capture_kind_label =
+ capture is {$kind_desc} because of use here
+
+borrowck_var_borrow_by_use_place_in_generator =
+ borrow occurs due to use of {$place} in closure in generator
+
+borrowck_var_borrow_by_use_place_in_closure =
+ borrow occurs due to use of {$place} in closure
+
+borrowck_var_borrow_by_use_place =
+ borrow occurs due to use of {$place}
+
+borrowck_borrow_due_to_use_generator =
+ borrow occurs due to use in generator
+
+borrowck_use_due_to_use_generator =
+ use occurs due to use in generator
+
+borrowck_assign_due_to_use_generator =
+ assign occurs due to use in generator
+
+borrowck_assign_part_due_to_use_generator =
+ assign to part occurs due to use in generator
+
+borrowck_borrow_due_to_use_closure =
+ borrow occurs due to use in closure
+
+borrowck_use_due_to_use_closure =
+ use occurs due to use in closure
+
+borrowck_assign_due_to_use_closure =
+ assign occurs due to use in closure
+
+borrowck_assign_part_due_to_use_closure =
+ assign to part occurs due to use in closure
+
+borrowck_capture_immute =
+ capture is immutable because of use here
+
+borrowck_capture_mut =
+ capture is mutable because of use here
+
+borrowck_capture_move =
+ capture is moved because of use here
+
+borrowck_var_move_by_use_place_in_generator =
+ move occurs due to use of {$place} in generator
+
+borrowck_var_move_by_use_place_in_closure =
+ move occurs due to use of {$place} in closure
+
+borrowck_cannot_move_when_borrowed =
+ cannot move out of {$place ->
+ [value] value
+ *[other] {$place}
+ } because it is borrowed
+ .label = borrow of {$borrow_place ->
+ [value] value
+ *[other] {$borrow_place}
+ } occurs here
+ .move_label = move out of {$value_place ->
+ [value] value
+ *[other] {$value_place}
+ } occurs here
diff --git a/compiler/rustc_error_messages/locales/en-US/codegen_gcc.ftl b/compiler/rustc_error_messages/locales/en-US/codegen_gcc.ftl
index 178e1a67c..08ce51725 100644
--- a/compiler/rustc_error_messages/locales/en-US/codegen_gcc.ftl
+++ b/compiler/rustc_error_messages/locales/en-US/codegen_gcc.ftl
@@ -1,9 +1,3 @@
-codegen_gcc_ranlib_failure =
- Ranlib exited with code {$exit_code}
-
-codegen_gcc_linkage_const_or_mut_type =
- must have type `*const T` or `*mut T` due to `#[linkage]` attribute
-
codegen_gcc_unwinding_inline_asm =
GCC backend does not support unwinding from inline asm
diff --git a/compiler/rustc_error_messages/locales/en-US/codegen_llvm.ftl b/compiler/rustc_error_messages/locales/en-US/codegen_llvm.ftl
new file mode 100644
index 000000000..97198cb4b
--- /dev/null
+++ b/compiler/rustc_error_messages/locales/en-US/codegen_llvm.ftl
@@ -0,0 +1,49 @@
+codegen_llvm_unknown_ctarget_feature =
+ unknown feature specified for `-Ctarget-feature`: `{$feature}`
+ .note = it is still passed through to the codegen backend
+ .possible_feature = you might have meant: `{$rust_feature}`
+ .consider_filing_feature_request = consider filing a feature request
+
+codegen_llvm_unknown_ctarget_feature_prefix =
+ unknown feature specified for `-Ctarget-feature`: `{$feature}`
+ .note = features must begin with a `+` to enable or `-` to disable it
+
+codegen_llvm_error_creating_import_library =
+ Error creating import library for {$lib_name}: {$error}
+
+codegen_llvm_instrument_coverage_requires_llvm_12 =
+ rustc option `-C instrument-coverage` requires LLVM 12 or higher.
+
+codegen_llvm_symbol_already_defined =
+ symbol `{$symbol_name}` is already defined
+
+codegen_llvm_branch_protection_requires_aarch64 =
+ -Zbranch-protection is only supported on aarch64
+
+codegen_llvm_invalid_minimum_alignment =
+ invalid minimum global alignment: {$err}
+
+codegen_llvm_sanitizer_memtag_requires_mte =
+ `-Zsanitizer=memtag` requires `-Ctarget-feature=+mte`
+
+codegen_llvm_error_writing_def_file =
+ Error writing .DEF file: {$error}
+
+codegen_llvm_error_calling_dlltool =
+ Error calling dlltool: {$error}
+
+codegen_llvm_dlltool_fail_import_library =
+ Dlltool could not create import library: {$stdout}\n{$stderr}
+
+codegen_llvm_target_feature_disable_or_enable =
+ the target features {$features} must all be either enabled or disabled together
+
+codegen_llvm_missing_features =
+ add the missing features in a `target_feature` attribute
+
+codegen_llvm_dynamic_linking_with_lto =
+ cannot prefer dynamic linking when performing LTO
+ .note = only 'staticlib', 'bin', and 'cdylib' outputs are supported with LTO
+
+codegen_llvm_fail_parsing_target_machine_config_to_target_machine =
+ failed to parse target machine config to target machine: {$error}
diff --git a/compiler/rustc_error_messages/locales/en-US/codegen_ssa.ftl b/compiler/rustc_error_messages/locales/en-US/codegen_ssa.ftl
index 966a421bc..4d1f9c1c9 100644
--- a/compiler/rustc_error_messages/locales/en-US/codegen_ssa.ftl
+++ b/compiler/rustc_error_messages/locales/en-US/codegen_ssa.ftl
@@ -119,3 +119,76 @@ codegen_ssa_thorin_object_read = {$error}
codegen_ssa_thorin_object_write = {$error}
codegen_ssa_thorin_gimli_read = {$error}
codegen_ssa_thorin_gimli_write = {$error}
+
+codegen_ssa_link_exe_unexpected_error = `link.exe` returned an unexpected error
+
+codegen_ssa_repair_vs_build_tools = the Visual Studio build tools may need to be repaired using the Visual Studio installer
+
+codegen_ssa_missing_cpp_build_tool_component = or a necessary component may be missing from the "C++ build tools" workload
+
+codegen_ssa_select_cpp_build_tool_workload = in the Visual Studio installer, ensure the "C++ build tools" workload is selected
+
+codegen_ssa_visual_studio_not_installed = you may need to install Visual Studio build tools with the "C++ build tools" workload
+
+codegen_ssa_linker_not_found = linker `{$linker_path}` not found
+ .note = {$error}
+
+codegen_ssa_unable_to_exe_linker = could not exec the linker `{$linker_path}`
+ .note = {$error}
+ .command_note = {$command_formatted}
+
+codegen_ssa_msvc_missing_linker = the msvc targets depend on the msvc linker but `link.exe` was not found
+
+codegen_ssa_check_installed_visual_studio = please ensure that Visual Studio 2017 or later, or Build Tools for Visual Studio were installed with the Visual C++ option.
+
+codegen_ssa_unsufficient_vs_code_product = VS Code is a different product, and is not sufficient.
+
+codegen_ssa_processing_dymutil_failed = processing debug info with `dsymutil` failed: {$status}
+ .note = {$output}
+
+codegen_ssa_unable_to_run_dsymutil = unable to run `dsymutil`: {$error}
+
+codegen_ssa_stripping_debu_info_failed = stripping debug info with `{$util}` failed: {$status}
+ .note = {$output}
+
+codegen_ssa_unable_to_run = unable to run `{$util}`: {$error}
+
+codegen_ssa_linker_file_stem = couldn't extract file stem from specified linker
+
+codegen_ssa_static_library_native_artifacts = Link against the following native artifacts when linking against this static library. The order and any duplication can be significant on some platforms.
+
+codegen_ssa_native_static_libs = native-static-libs: {$arguments}
+
+codegen_ssa_link_script_unavailable = can only use link script when linking with GNU-like linker
+
+codegen_ssa_link_script_write_failure = failed to write link script to {$path}: {$error}
+
+codegen_ssa_failed_to_write = failed to write {$path}: {$error}
+
+codegen_ssa_unable_to_write_debugger_visualizer = Unable to write debugger visualizer file `{$path}`: {$error}
+
+codegen_ssa_rlib_archive_build_failure = failed to build archive from rlib: {$error}
+
+codegen_ssa_option_gcc_only = option `-Z gcc-ld` is used even though linker flavor is not gcc
+
+codegen_ssa_extract_bundled_libs_open_file = failed to open file '{$rlib}': {$error}
+codegen_ssa_extract_bundled_libs_mmap_file = failed to mmap file '{$rlib}': {$error}
+codegen_ssa_extract_bundled_libs_parse_archive = failed to parse archive '{$rlib}': {$error}
+codegen_ssa_extract_bundled_libs_read_entry = failed to read entry '{$rlib}': {$error}
+codegen_ssa_extract_bundled_libs_archive_member = failed to get data from archive member '{$rlib}': {$error}
+codegen_ssa_extract_bundled_libs_convert_name = failed to convert name '{$rlib}': {$error}
+codegen_ssa_extract_bundled_libs_write_file = failed to write file '{$rlib}': {$error}
+
+codegen_ssa_unsupported_arch = unsupported arch `{$arch}` for os `{$os}`
+
+codegen_ssa_apple_sdk_error_sdk_path = failed to get {$sdk_name} SDK path: {$error}
+
+codegen_ssa_read_file = failed to read file: {$message}
+
+codegen_ssa_unsupported_link_self_contained = option `-C link-self-contained` is not supported on this target
+
+codegen_ssa_archive_build_failure =
+ failed to build archive: {$error}
+
+codegen_ssa_unknown_archive_kind =
+ Don't know how to build archive of type: {$kind}
diff --git a/compiler/rustc_error_messages/locales/en-US/hir_analysis.ftl b/compiler/rustc_error_messages/locales/en-US/hir_analysis.ftl
index 74088f4df..a9ea161b9 100644
--- a/compiler/rustc_error_messages/locales/en-US/hir_analysis.ftl
+++ b/compiler/rustc_error_messages/locales/en-US/hir_analysis.ftl
@@ -1,8 +1,3 @@
-hir_analysis_field_multiply_specified_in_initializer =
- field `{$ident}` specified more than once
- .label = used more than once
- .previous_use_label = first use of `{$ident}`
-
hir_analysis_unrecognized_atomic_operation =
unrecognized atomic operation function: `{$op}`
.label = unrecognized atomic operation
@@ -22,6 +17,8 @@ hir_analysis_lifetimes_or_bounds_mismatch_on_trait =
lifetime parameters or bounds on {$item_kind} `{$ident}` do not match the trait declaration
.label = lifetimes do not match {$item_kind} in trait
.generics_label = lifetimes in impl do not match this {$item_kind} in trait
+ .where_label = this `where` clause might not match the one in the trait
+ .bounds_label = this bound might be missing in the impl
hir_analysis_drop_impl_on_wrong_item =
the `Drop` trait may only be implemented for local structs, enums, and unions
@@ -54,44 +51,16 @@ hir_analysis_assoc_type_binding_not_allowed =
associated type bindings are not allowed here
.label = associated type not allowed here
-hir_analysis_functional_record_update_on_non_struct =
- functional record update syntax requires a struct
-
hir_analysis_typeof_reserved_keyword_used =
`typeof` is a reserved keyword but unimplemented
.suggestion = consider replacing `typeof(...)` with an actual type
.label = reserved keyword
-hir_analysis_return_stmt_outside_of_fn_body =
- return statement outside of function body
- .encl_body_label = the return is part of this body...
- .encl_fn_label = ...not the enclosing function body
-
-hir_analysis_yield_expr_outside_of_generator =
- yield expression outside of generator literal
-
-hir_analysis_struct_expr_non_exhaustive =
- cannot create non-exhaustive {$what} using struct expression
-
-hir_analysis_method_call_on_unknown_type =
- the type of this value must be known to call a method on a raw pointer on it
-
hir_analysis_value_of_associated_struct_already_specified =
the value of the associated type `{$item_name}` (from trait `{$def_path}`) is already specified
.label = re-bound here
.previous_bound_label = `{$item_name}` bound here first
-hir_analysis_address_of_temporary_taken = cannot take address of a temporary
- .label = temporary value
-
-hir_analysis_add_return_type_add = try adding a return type
-
-hir_analysis_add_return_type_missing_here = a return type might be missing here
-
-hir_analysis_expected_default_return_type = expected `()` because of default return type
-
-hir_analysis_expected_return_type = expected `{$expected}` because of return type
-
hir_analysis_unconstrained_opaque_type = unconstrained opaque type
.note = `{$name}` must be used in combination with a concrete type within the same {$what}
@@ -134,10 +103,6 @@ hir_analysis_extern_crate_not_idiomatic =
hir_analysis_expected_used_symbol = expected `used`, `used(compiler)` or `used(linker)`
-hir_analysis_missing_parentheses_in_range = can't call method `{$method_name}` on type `{$ty_str}`
-
-hir_analysis_add_missing_parentheses_in_range = you must surround the range in parentheses to call its `{$func_name}` function
-
hir_analysis_const_impl_for_non_const_trait =
const `impl` for trait `{$trait_name}` which is not marked with `#[const_trait]`
.suggestion = mark `{$trait_name}` as const
@@ -150,3 +115,6 @@ hir_analysis_const_bound_for_non_const_trait =
hir_analysis_self_in_impl_self =
`Self` is not valid in the self type of an impl block
.note = replace `Self` with a different type
+
+hir_analysis_linkage_type =
+ invalid type for variable with `#[linkage]` attribute
diff --git a/compiler/rustc_error_messages/locales/en-US/hir_typeck.ftl b/compiler/rustc_error_messages/locales/en-US/hir_typeck.ftl
new file mode 100644
index 000000000..0612dbae0
--- /dev/null
+++ b/compiler/rustc_error_messages/locales/en-US/hir_typeck.ftl
@@ -0,0 +1,48 @@
+hir_typeck_fru_note = this expression may have been misinterpreted as a `..` range expression
+hir_typeck_fru_expr = this expression does not end in a comma...
+hir_typeck_fru_expr2 = ... so this is interpreted as a `..` range expression, instead of functional record update syntax
+hir_typeck_fru_suggestion =
+ to set the remaining fields{$expr ->
+ [NONE]{""}
+ *[other] {" "}from `{$expr}`
+ }, separate the last named field with a comma
+
+hir_typeck_field_multiply_specified_in_initializer =
+ field `{$ident}` specified more than once
+ .label = used more than once
+ .previous_use_label = first use of `{$ident}`
+
+hir_typeck_return_stmt_outside_of_fn_body =
+ return statement outside of function body
+ .encl_body_label = the return is part of this body...
+ .encl_fn_label = ...not the enclosing function body
+
+hir_typeck_yield_expr_outside_of_generator =
+ yield expression outside of generator literal
+
+hir_typeck_struct_expr_non_exhaustive =
+ cannot create non-exhaustive {$what} using struct expression
+
+hir_typeck_method_call_on_unknown_type =
+ the type of this value must be known to call a method on a raw pointer on it
+
+hir_typeck_functional_record_update_on_non_struct =
+ functional record update syntax requires a struct
+
+hir_typeck_address_of_temporary_taken = cannot take address of a temporary
+ .label = temporary value
+
+hir_typeck_add_return_type_add = try adding a return type
+
+hir_typeck_add_return_type_missing_here = a return type might be missing here
+
+hir_typeck_expected_default_return_type = expected `()` because of default return type
+
+hir_typeck_expected_return_type = expected `{$expected}` because of return type
+
+hir_typeck_missing_parentheses_in_range = can't call method `{$method_name}` on type `{$ty_str}`
+
+hir_typeck_add_missing_parentheses_in_range = you must surround the range in parentheses to call its `{$func_name}` function
+
+hir_typeck_op_trait_generic_params =
+ `{$method_name}` must not have any generic parameters
diff --git a/compiler/rustc_error_messages/locales/en-US/infer.ftl b/compiler/rustc_error_messages/locales/en-US/infer.ftl
index 18b3408b0..c9d83746d 100644
--- a/compiler/rustc_error_messages/locales/en-US/infer.ftl
+++ b/compiler/rustc_error_messages/locales/en-US/infer.ftl
@@ -126,10 +126,10 @@ infer_data_lifetime_flow = ...but data with one lifetime flows into the other he
infer_declared_multiple = this type is declared with multiple lifetimes...
infer_types_declared_different = these two types are declared with different lifetimes...
infer_data_flows = ...but data{$label_var1_exists ->
- [true] -> {" "}from `{$label_var1}`
+ [true] {" "}from `{$label_var1}`
*[false] -> {""}
} flows{$label_var2_exists ->
- [true] -> {" "}into `{$label_var2}`
+ [true] {" "}into `{$label_var2}`
*[false] -> {""}
} here
@@ -171,3 +171,4 @@ infer_msl_introduces_static = introduces a `'static` lifetime requirement
infer_msl_unmet_req = because this has an unmet lifetime requirement
infer_msl_trait_note = this has an implicit `'static` lifetime requirement
infer_msl_trait_sugg = consider relaxing the implicit `'static` requirement
+infer_suggest_add_let_for_letchains = consider adding `let`
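
In Fluent, the text after a variant key is literal, and `{" "}` / `{""}` are string-literal placeables that control whether a leading space is emitted before an optional fragment. A minimal sketch of that optional-fragment pattern, not taken from the diff (the message and argument names are made up for illustration):

    # illustrative only -- optional fragment joined with an explicit leading space
    example_flows = ...but data{$label_exists ->
            [true] {" "}from `{$label}`
           *[false] {""}
        } flows here
    # label_exists = "true", label = "x" => "...but data from `x` flows here"
    # label_exists = "false"             => "...but data flows here"
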
diff --git a/compiler/rustc_error_messages/locales/en-US/lint.ftl b/compiler/rustc_error_messages/locales/en-US/lint.ftl
index 7e28f22c0..2eb409a5d 100644
--- a/compiler/rustc_error_messages/locales/en-US/lint.ftl
+++ b/compiler/rustc_error_messages/locales/en-US/lint.ftl
@@ -350,6 +350,9 @@ lint_builtin_mutable_transmutes =
lint_builtin_unstable_features = unstable feature
+lint_ungated_async_fn_track_caller = `#[track_caller]` on async functions is a no-op
+ .label = this function will not propagate the caller location
+
lint_builtin_unreachable_pub = unreachable `pub` {$what}
.suggestion = consider restricting its visibility
.help = or consider exporting it for use by other crates
diff --git a/compiler/rustc_error_messages/locales/en-US/metadata.ftl b/compiler/rustc_error_messages/locales/en-US/metadata.ftl
index 08e553d9f..d1e1fd54d 100644
--- a/compiler/rustc_error_messages/locales/en-US/metadata.ftl
+++ b/compiler/rustc_error_messages/locales/en-US/metadata.ftl
@@ -150,12 +150,28 @@ metadata_no_multiple_global_alloc =
metadata_prev_global_alloc =
previous global allocator defined here
+metadata_no_multiple_alloc_error_handler =
+ cannot define multiple allocation error handlers
+ .label = cannot define a new allocation error handler
+
+metadata_prev_alloc_error_handler =
+ previous allocation error handler defined here
+
metadata_conflicting_global_alloc =
the `#[global_allocator]` in {$other_crate_name} conflicts with global allocator in: {$crate_name}
+metadata_conflicting_alloc_error_handler =
+ the `#[alloc_error_handler]` in {$other_crate_name} conflicts with allocation error handler in: {$crate_name}
+
metadata_global_alloc_required =
no global memory allocator found but one is required; link to std or add `#[global_allocator]` to a static item that implements the GlobalAlloc trait
+metadata_alloc_func_required =
+ `#[alloc_error_handler]` function required, but not found
+
+metadata_missing_alloc_error_handler =
+ use `#![feature(default_alloc_error_handler)]` for a default error handler
+
metadata_no_transitive_needs_dep =
the crate `{$crate_name}` cannot depend on a crate that needs {$needs_crate_name}, but it depends on `{$deps_crate_name}`
@@ -259,7 +275,7 @@ metadata_crate_location_unknown_type =
extern location for {$crate_name} is of an unknown type: {$path}
metadata_lib_filename_form =
- file name should be lib*.rlib or {dll_prefix}*.{dll_suffix}
+ file name should be lib*.rlib or {$dll_prefix}*{$dll_suffix}
metadata_multiple_import_name_type =
multiple `import_name_type` arguments in a single `#[link]` attribute
diff --git a/compiler/rustc_error_messages/locales/en-US/middle.ftl b/compiler/rustc_error_messages/locales/en-US/middle.ftl
index b9e4499d4..4f4e5c6a2 100644
--- a/compiler/rustc_error_messages/locales/en-US/middle.ftl
+++ b/compiler/rustc_error_messages/locales/en-US/middle.ftl
@@ -27,3 +27,10 @@ middle_values_too_big =
middle_cannot_be_normalized =
unable to determine layout for `{$ty}` because `{$failure_ty}` cannot be normalized
+
+middle_strict_coherence_needs_negative_coherence =
+ to use `strict_coherence` on this trait, the `with_negative_coherence` feature must be enabled
+ .label = due to this attribute
+
+middle_const_not_used_in_type_alias =
+ const parameter `{$ct}` is part of concrete type but not used in parameter list for the `impl Trait` type alias
diff --git a/compiler/rustc_error_messages/locales/en-US/monomorphize.ftl b/compiler/rustc_error_messages/locales/en-US/monomorphize.ftl
index 42c84fdd2..48ddb54b7 100644
--- a/compiler/rustc_error_messages/locales/en-US/monomorphize.ftl
+++ b/compiler/rustc_error_messages/locales/en-US/monomorphize.ftl
@@ -21,6 +21,3 @@ monomorphize_large_assignments =
moving {$size} bytes
.label = value moved from here
.note = The current maximum size is {$limit}, but it can be customized with the move_size_limit attribute: `#![move_size_limit = "..."]`
-
-monomorphize_requires_lang_item =
- requires `{$lang_item}` lang_item
diff --git a/compiler/rustc_error_messages/locales/en-US/parse.ftl b/compiler/rustc_error_messages/locales/en-US/parse.ftl
new file mode 100644
index 000000000..114b7ec16
--- /dev/null
+++ b/compiler/rustc_error_messages/locales/en-US/parse.ftl
@@ -0,0 +1,364 @@
+parse_struct_literal_body_without_path =
+ struct literal body without path
+ .suggestion = you might have forgotten to add the struct literal inside the block
+
+parse_maybe_report_ambiguous_plus =
+ ambiguous `+` in a type
+ .suggestion = use parentheses to disambiguate
+
+parse_maybe_recover_from_bad_type_plus =
+ expected a path on the left-hand side of `+`, not `{$ty}`
+
+parse_add_paren = try adding parentheses
+
+parse_forgot_paren = perhaps you forgot parentheses?
+
+parse_expect_path = expected a path
+
+parse_maybe_recover_from_bad_qpath_stage_2 =
+ missing angle brackets in associated item path
+ .suggestion = try: `{$ty}`
+
+parse_incorrect_semicolon =
+ expected item, found `;`
+ .suggestion = remove this semicolon
+ .help = {$name} declarations are not followed by a semicolon
+
+parse_incorrect_use_of_await =
+ incorrect use of `await`
+ .parentheses_suggestion = `await` is not a method call, remove the parentheses
+ .postfix_suggestion = `await` is a postfix operation
+
+parse_in_in_typo =
+ expected iterable, found keyword `in`
+ .suggestion = remove the duplicated `in`
+
+parse_invalid_variable_declaration =
+ invalid variable declaration
+
+parse_switch_mut_let_order =
+ switch the order of `mut` and `let`
+parse_missing_let_before_mut = missing keyword
+parse_use_let_not_auto = write `let` instead of `auto` to introduce a new variable
+parse_use_let_not_var = write `let` instead of `var` to introduce a new variable
+
+parse_invalid_comparison_operator = invalid comparison operator `{$invalid}`
+ .use_instead = `{$invalid}` is not a valid comparison operator, use `{$correct}`
+ .spaceship_operator_invalid = `<=>` is not a valid comparison operator, use `std::cmp::Ordering`
+
+parse_invalid_logical_operator = `{$incorrect}` is not a logical operator
+ .note = unlike in e.g., Python and PHP, `&&` and `||` are used for logical operators
+ .use_amp_amp_for_conjunction = use `&&` to perform logical conjunction
+ .use_pipe_pipe_for_disjunction = use `||` to perform logical disjunction
+
+parse_tilde_is_not_unary_operator = `~` cannot be used as a unary operator
+ .suggestion = use `!` to perform bitwise not
+
+parse_unexpected_if_with_if = unexpected `if` in the condition expression
+ .suggestion = remove the `if`
+
+parse_unexpected_token_after_not = unexpected {$negated_desc} after identifier
+parse_unexpected_token_after_not_bitwise = use `!` to perform bitwise not
+parse_unexpected_token_after_not_logical = use `!` to perform logical negation
+parse_unexpected_token_after_not_default = use `!` to perform logical negation or bitwise not
+
+parse_malformed_loop_label = malformed loop label
+ .suggestion = use the correct loop label format
+
+parse_lifetime_in_borrow_expression = borrow expressions cannot be annotated with lifetimes
+ .suggestion = remove the lifetime annotation
+ .label = annotated with lifetime here
+
+parse_field_expression_with_generic = field expressions cannot have generic arguments
+
+parse_macro_invocation_with_qualified_path = macros cannot use qualified paths
+
+parse_unexpected_token_after_label = expected `while`, `for`, `loop` or `{"{"}` after a label
+ .suggestion_remove_label = consider removing the label
+ .suggestion_enclose_in_block = consider enclosing expression in a block
+
+parse_require_colon_after_labeled_expression = labeled expression must be followed by `:`
+ .note = labels are used before loops and blocks, allowing e.g., `break 'label` to them
+ .label = the label
+ .suggestion = add `:` after the label
+
+parse_do_catch_syntax_removed = found removed `do catch` syntax
+ .note = following RFC #2388, the new non-placeholder syntax is `try`
+ .suggestion = replace with the new syntax
+
+parse_float_literal_requires_integer_part = float literals must have an integer part
+ .suggestion = must have an integer part
+
+parse_missing_semicolon_before_array = expected `;`, found `[`
+ .suggestion = consider adding `;` here
+
+parse_invalid_block_macro_segment = cannot use a `block` macro fragment here
+ .label = the `block` fragment is within this context
+
+parse_expect_dotdot_not_dotdotdot = expected `..`, found `...`
+ .suggestion = use `..` to fill in the rest of the fields
+
+parse_if_expression_missing_then_block = this `if` expression is missing a block after the condition
+ .add_then_block = add a block here
+ .condition_possibly_unfinished = this binary operation is possibly unfinished
+
+parse_if_expression_missing_condition = missing condition for `if` expression
+ .condition_label = expected condition here
+ .block_label = if this block is the condition of the `if` expression, then it must be followed by another block
+
+parse_expected_expression_found_let = expected expression, found `let` statement
+
+parse_expect_eq_instead_of_eqeq = expected `=`, found `==`
+ .suggestion = consider using `=` here
+
+parse_expected_else_block = expected `{"{"}`, found {$first_tok}
+ .label = expected an `if` or a block after this `else`
+ .suggestion = add an `if` if this is the condition of a chained `else if` statement
+
+parse_outer_attribute_not_allowed_on_if_else = outer attributes are not allowed on `if` and `else` branches
+ .branch_label = the attributes are attached to this branch
+ .ctx_label = the branch belongs to this `{$ctx}`
+ .suggestion = remove the attributes
+
+parse_missing_in_in_for_loop = missing `in` in `for` loop
+ .use_in_not_of = try using `in` here instead
+ .add_in = try adding `in` here
+
+parse_missing_comma_after_match_arm = expected `,` following `match` arm
+ .suggestion = missing a comma here to end this `match` arm
+
+parse_catch_after_try = keyword `catch` cannot follow a `try` block
+ .help = try using `match` on the result of the `try` block instead
+
+parse_comma_after_base_struct = cannot use a comma after the base struct
+ .note = the base struct must always be the last field
+ .suggestion = remove this comma
+
+parse_eq_field_init = expected `:`, found `=`
+ .suggestion = replace equals symbol with a colon
+
+parse_dotdotdot = unexpected token: `...`
+ .suggest_exclusive_range = use `..` for an exclusive range
+ .suggest_inclusive_range = or `..=` for an inclusive range
+
+parse_left_arrow_operator = unexpected token: `<-`
+ .suggestion = if you meant to write a comparison against a negative value, add a space in between `<` and `-`
+
+parse_remove_let = expected pattern, found `let`
+ .suggestion = remove the unnecessary `let` keyword
+
+parse_use_eq_instead = unexpected `==`
+ .suggestion = try using `=` instead
+
+parse_use_empty_block_not_semi = expected { "`{}`" }, found `;`
+ .suggestion = try using { "`{}`" } instead
+
+parse_comparison_interpreted_as_generic =
+ `<` is interpreted as a start of generic arguments for `{$type}`, not a comparison
+ .label_args = interpreted as generic arguments
+ .label_comparison = not interpreted as comparison
+ .suggestion = try comparing the cast value
+
+parse_shift_interpreted_as_generic =
+ `<<` is interpreted as a start of generic arguments for `{$type}`, not a shift
+ .label_args = interpreted as generic arguments
+ .label_comparison = not interpreted as shift
+ .suggestion = try shifting the cast value
+
+parse_found_expr_would_be_stmt = expected expression, found `{$token}`
+ .label = expected expression
+
+parse_leading_plus_not_supported = leading `+` is not supported
+ .label = unexpected `+`
+ .suggestion_remove_plus = try removing the `+`
+
+parse_parentheses_with_struct_fields = invalid `struct` delimiters or `fn` call arguments
+ .suggestion_braces_for_struct = if `{$type}` is a struct, use braces as delimiters
+ .suggestion_no_fields_for_fn = if `{$type}` is a function, use the arguments directly
+
+parse_labeled_loop_in_break = parentheses are required around this expression to avoid confusion with a labeled break expression
+
+parse_sugg_wrap_expression_in_parentheses = wrap the expression in parentheses
+
+parse_array_brackets_instead_of_braces = this is a block expression, not an array
+ .suggestion = to make an array, use square brackets instead of curly braces
+
+parse_match_arm_body_without_braces = `match` arm body without braces
+ .label_statements = {$num_statements ->
+ [one] this statement is not surrounded by a body
+ *[other] these statements are not surrounded by a body
+ }
+ .label_arrow = while parsing the `match` arm starting here
+ .suggestion_add_braces = surround the {$num_statements ->
+ [one] statement
+ *[other] statements
+ } with a body
+ .suggestion_use_comma_not_semicolon = use a comma to end a `match` arm expression
+
+parse_struct_literal_not_allowed_here = struct literals are not allowed here
+ .suggestion = surround the struct literal with parentheses
+
+parse_invalid_interpolated_expression = invalid interpolated expression
+
+parse_invalid_literal_suffix_on_tuple_index = suffixes on a tuple index are invalid
+ .label = invalid suffix `{$suffix}`
+ .tuple_exception_line_1 = `{$suffix}` is *temporarily* accepted on tuple index fields as it was incorrectly accepted on stable for a few releases
+ .tuple_exception_line_2 = on proc macros, you'll want to use `syn::Index::from` or `proc_macro::Literal::*_unsuffixed` for code that will desugar to tuple field access
+ .tuple_exception_line_3 = see issue #60210 <https://github.com/rust-lang/rust/issues/60210> for more information
+
+parse_non_string_abi_literal = non-string ABI literal
+ .suggestion = specify the ABI with a string literal
+
+parse_mismatched_closing_delimiter = mismatched closing delimiter: `{$delimiter}`
+ .label_unmatched = mismatched closing delimiter
+ .label_opening_candidate = closing delimiter possibly meant for this
+ .label_unclosed = unclosed delimiter
+
+parse_incorrect_visibility_restriction = incorrect visibility restriction
+ .help = some possible visibility restrictions are:
+ `pub(crate)`: visible only on the current crate
+ `pub(super)`: visible only in the current module's parent
+ `pub(in path::to::module)`: visible only on the specified path
+ .suggestion = make this visible only to module `{$inner_str}` with `in`
+
+parse_assignment_else_not_allowed = <assignment> ... else {"{"} ... {"}"} is not allowed
+
+parse_expected_statement_after_outer_attr = expected statement after outer attribute
+
+parse_doc_comment_does_not_document_anything = found a documentation comment that doesn't document anything
+ .help = doc comments must come before what they document, if a comment was intended use `//`
+ .suggestion = missing comma here
+
+parse_const_let_mutually_exclusive = `const` and `let` are mutually exclusive
+ .suggestion = remove `let`
+
+parse_invalid_expression_in_let_else = a `{$operator}` expression cannot be directly assigned in `let...else`
+parse_invalid_curly_in_let_else = right curly brace `{"}"}` before `else` in a `let...else` statement not allowed
+
+parse_compound_assignment_expression_in_let = can't reassign to an uninitialized variable
+ .suggestion = initialize the variable
+ .help = if you meant to overwrite, remove the `let` binding
+
+parse_suffixed_literal_in_attribute = suffixed literals are not allowed in attributes
+ .help = instead of using a suffixed literal (`1u8`, `1.0f32`, etc.), use an unsuffixed version (`1`, `1.0`, etc.)
+
+parse_invalid_meta_item = expected unsuffixed literal or identifier, found `{$token}`
+
+parse_label_inner_attr_does_not_annotate_this = the inner attribute doesn't annotate this {$item}
+parse_sugg_change_inner_attr_to_outer = to annotate the {$item}, change the attribute from inner to outer style
+
+parse_inner_attr_not_permitted_after_outer_doc_comment = an inner attribute is not permitted following an outer doc comment
+ .label_attr = not permitted following an outer doc comment
+ .label_prev_doc_comment = previous doc comment
+ .label_does_not_annotate_this = {parse_label_inner_attr_does_not_annotate_this}
+ .sugg_change_inner_to_outer = {parse_sugg_change_inner_attr_to_outer}
+
+parse_inner_attr_not_permitted_after_outer_attr = an inner attribute is not permitted following an outer attribute
+ .label_attr = not permitted following an outer attribute
+ .label_prev_attr = previous outer attribute
+ .label_does_not_annotate_this = {parse_label_inner_attr_does_not_annotate_this}
+ .sugg_change_inner_to_outer = {parse_sugg_change_inner_attr_to_outer}
+
+parse_inner_attr_not_permitted = an inner attribute is not permitted in this context
+ .label_does_not_annotate_this = {parse_label_inner_attr_does_not_annotate_this}
+ .sugg_change_inner_to_outer = {parse_sugg_change_inner_attr_to_outer}
+
+parse_inner_attr_explanation = inner attributes, like `#![no_std]`, annotate the item enclosing them, and are usually found at the beginning of source files
+parse_outer_attr_explanation = outer attributes, like `#[test]`, annotate the item following them
+
+parse_inner_doc_comment_not_permitted = expected outer doc comment
+ .note = inner doc comments like this (starting with `//!` or `/*!`) can only appear before items
+ .suggestion = you might have meant to write a regular comment
+ .label_does_not_annotate_this = the inner doc comment doesn't annotate this {$item}
+ .sugg_change_inner_to_outer = to annotate the {$item}, change the doc comment from inner to outer style
+
+parse_expected_identifier_found_reserved_identifier_str = expected identifier, found reserved identifier `{$token}`
+parse_expected_identifier_found_keyword_str = expected identifier, found keyword `{$token}`
+parse_expected_identifier_found_reserved_keyword_str = expected identifier, found reserved keyword `{$token}`
+parse_expected_identifier_found_doc_comment_str = expected identifier, found doc comment `{$token}`
+parse_expected_identifier_found_str = expected identifier, found `{$token}`
+
+parse_expected_identifier_found_reserved_identifier = expected identifier, found reserved identifier
+parse_expected_identifier_found_keyword = expected identifier, found keyword
+parse_expected_identifier_found_reserved_keyword = expected identifier, found reserved keyword
+parse_expected_identifier_found_doc_comment = expected identifier, found doc comment
+parse_expected_identifier = expected identifier
+
+parse_sugg_escape_to_use_as_identifier = escape `{$ident_name}` to use it as an identifier
+
+parse_sugg_remove_comma = remove this comma
+
+parse_expected_semi_found_reserved_identifier_str = expected `;`, found reserved identifier `{$token}`
+parse_expected_semi_found_keyword_str = expected `;`, found keyword `{$token}`
+parse_expected_semi_found_reserved_keyword_str = expected `;`, found reserved keyword `{$token}`
+parse_expected_semi_found_doc_comment_str = expected `;`, found doc comment `{$token}`
+parse_expected_semi_found_str = expected `;`, found `{$token}`
+
+parse_sugg_change_this_to_semi = change this to `;`
+parse_sugg_add_semi = add `;` here
+parse_label_unexpected_token = unexpected token
+
+parse_unmatched_angle_brackets = {$num_extra_brackets ->
+ [one] unmatched angle bracket
+ *[other] unmatched angle brackets
+ }
+ .suggestion = {$num_extra_brackets ->
+ [one] remove extra angle bracket
+ *[other] remove extra angle brackets
+ }
+
+parse_generic_parameters_without_angle_brackets = generic parameters without surrounding angle brackets
+ .suggestion = surround the type parameters with angle brackets
+
+parse_comparison_operators_cannot_be_chained = comparison operators cannot be chained
+ .sugg_parentheses_for_function_args = or use `(...)` if you meant to specify fn arguments
+ .sugg_split_comparison = split the comparison into two
+ .sugg_parenthesize = parenthesize the comparison
+parse_sugg_turbofish_syntax = use `::<...>` instead of `<...>` to specify lifetime, type, or const arguments
+
+parse_question_mark_in_type = invalid `?` in type
+ .label = `?` is only allowed on expressions, not types
+ .suggestion = if you meant to express that the type might not contain a value, use the `Option` wrapper type
+
+parse_unexpected_parentheses_in_for_head = unexpected parentheses surrounding `for` loop head
+ .suggestion = remove parentheses in `for` loop
+
+parse_doc_comment_on_param_type = documentation comments cannot be applied to a function parameter's type
+ .label = doc comments are not allowed here
+
+parse_attribute_on_param_type = attributes cannot be applied to a function parameter's type
+ .label = attributes are not allowed here
+
+parse_pattern_method_param_without_body = patterns aren't allowed in methods without bodies
+ .suggestion = give this argument a name or use an underscore to ignore it
+
+parse_self_param_not_first = unexpected `self` parameter in function
+ .label = must be the first parameter of an associated function
+
+parse_const_generic_without_braces = expressions must be enclosed in braces to be used as const generic arguments
+ .suggestion = enclose the `const` expression in braces
+
+parse_unexpected_const_param_declaration = unexpected `const` parameter declaration
+ .label = expected a `const` expression, not a parameter declaration
+ .suggestion = `const` parameters must be declared for the `impl`
+
+parse_unexpected_const_in_generic_param = expected lifetime, type, or constant, found keyword `const`
+ .suggestion = the `const` keyword is only needed in the definition of the type
+
+parse_async_move_order_incorrect = the order of `move` and `async` is incorrect
+ .suggestion = try switching the order
+
+parse_double_colon_in_bound = expected `:` followed by trait or lifetime
+ .suggestion = use single colon
+
+parse_fn_ptr_with_generics = function pointer types may not have generic parameters
+ .suggestion = consider moving the lifetime {$arity ->
+ [one] parameter
+ *[other] parameters
+ } to {$for_param_list_exists ->
+ [true] the
+ *[false] a
+ } `for` parameter list
+
+parse_invalid_identifier_with_leading_number = expected identifier, found number literal
+ .label = identifiers cannot start with a number
diff --git a/compiler/rustc_error_messages/locales/en-US/parser.ftl b/compiler/rustc_error_messages/locales/en-US/parser.ftl
deleted file mode 100644
index 13c368d1c..000000000
--- a/compiler/rustc_error_messages/locales/en-US/parser.ftl
+++ /dev/null
@@ -1,371 +0,0 @@
-parser_struct_literal_body_without_path =
- struct literal body without path
- .suggestion = you might have forgotten to add the struct literal inside the block
-
-parser_maybe_report_ambiguous_plus =
- ambiguous `+` in a type
- .suggestion = use parentheses to disambiguate
-
-parser_maybe_recover_from_bad_type_plus =
- expected a path on the left-hand side of `+`, not `{$ty}`
-
-parser_add_paren = try adding parentheses
-
-parser_forgot_paren = perhaps you forgot parentheses?
-
-parser_expect_path = expected a path
-
-parser_maybe_recover_from_bad_qpath_stage_2 =
- missing angle brackets in associated item path
- .suggestion = try: `{$ty}`
-
-parser_incorrect_semicolon =
- expected item, found `;`
- .suggestion = remove this semicolon
- .help = {$name} declarations are not followed by a semicolon
-
-parser_incorrect_use_of_await =
- incorrect use of `await`
- .parentheses_suggestion = `await` is not a method call, remove the parentheses
- .postfix_suggestion = `await` is a postfix operation
-
-parser_in_in_typo =
- expected iterable, found keyword `in`
- .suggestion = remove the duplicated `in`
-
-parser_invalid_variable_declaration =
- invalid variable declaration
-
-parser_switch_mut_let_order =
- switch the order of `mut` and `let`
-parser_missing_let_before_mut = missing keyword
-parser_use_let_not_auto = write `let` instead of `auto` to introduce a new variable
-parser_use_let_not_var = write `let` instead of `var` to introduce a new variable
-
-parser_invalid_comparison_operator = invalid comparison operator `{$invalid}`
- .use_instead = `{$invalid}` is not a valid comparison operator, use `{$correct}`
- .spaceship_operator_invalid = `<=>` is not a valid comparison operator, use `std::cmp::Ordering`
-
-parser_invalid_logical_operator = `{$incorrect}` is not a logical operator
- .note = unlike in e.g., Python and PHP, `&&` and `||` are used for logical operators
- .use_amp_amp_for_conjunction = use `&&` to perform logical conjunction
- .use_pipe_pipe_for_disjunction = use `||` to perform logical disjunction
-
-parser_tilde_is_not_unary_operator = `~` cannot be used as a unary operator
- .suggestion = use `!` to perform bitwise not
-
-parser_unexpected_token_after_not = unexpected {$negated_desc} after identifier
-parser_unexpected_token_after_not_bitwise = use `!` to perform bitwise not
-parser_unexpected_token_after_not_logical = use `!` to perform logical negation
-parser_unexpected_token_after_not_default = use `!` to perform logical negation or bitwise not
-
-parser_malformed_loop_label = malformed loop label
- .suggestion = use the correct loop label format
-
-parser_lifetime_in_borrow_expression = borrow expressions cannot be annotated with lifetimes
- .suggestion = remove the lifetime annotation
- .label = annotated with lifetime here
-
-parser_field_expression_with_generic = field expressions cannot have generic arguments
-
-parser_macro_invocation_with_qualified_path = macros cannot use qualified paths
-
-parser_unexpected_token_after_label = expected `while`, `for`, `loop` or `{"{"}` after a label
- .suggestion_remove_label = consider removing the label
- .suggestion_enclose_in_block = consider enclosing expression in a block
-
-parser_require_colon_after_labeled_expression = labeled expression must be followed by `:`
- .note = labels are used before loops and blocks, allowing e.g., `break 'label` to them
- .label = the label
- .suggestion = add `:` after the label
-
-parser_do_catch_syntax_removed = found removed `do catch` syntax
- .note = following RFC #2388, the new non-placeholder syntax is `try`
- .suggestion = replace with the new syntax
-
-parser_float_literal_requires_integer_part = float literals must have an integer part
- .suggestion = must have an integer part
-
-parser_invalid_int_literal_width = invalid width `{$width}` for integer literal
- .help = valid widths are 8, 16, 32, 64 and 128
-
-parser_invalid_num_literal_base_prefix = invalid base prefix for number literal
- .note = base prefixes (`0xff`, `0b1010`, `0o755`) are lowercase
- .suggestion = try making the prefix lowercase
-
-parser_invalid_num_literal_suffix = invalid suffix `{$suffix}` for number literal
- .label = invalid suffix `{$suffix}`
- .help = the suffix must be one of the numeric types (`u32`, `isize`, `f32`, etc.)
-
-parser_invalid_float_literal_width = invalid width `{$width}` for float literal
- .help = valid widths are 32 and 64
-
-parser_invalid_float_literal_suffix = invalid suffix `{$suffix}` for float literal
- .label = invalid suffix `{$suffix}`
- .help = valid suffixes are `f32` and `f64`
-
-parser_int_literal_too_large = integer literal is too large
-
-parser_missing_semicolon_before_array = expected `;`, found `[`
- .suggestion = consider adding `;` here
-
-parser_invalid_block_macro_segment = cannot use a `block` macro fragment here
- .label = the `block` fragment is within this context
-
-parser_if_expression_missing_then_block = this `if` expression is missing a block after the condition
- .add_then_block = add a block here
- .condition_possibly_unfinished = this binary operation is possibly unfinished
-
-parser_if_expression_missing_condition = missing condition for `if` expression
- .condition_label = expected condition here
- .block_label = if this block is the condition of the `if` expression, then it must be followed by another block
-
-parser_expected_expression_found_let = expected expression, found `let` statement
-
-parser_expected_else_block = expected `{"{"}`, found {$first_tok}
- .label = expected an `if` or a block after this `else`
- .suggestion = add an `if` if this is the condition of a chained `else if` statement
-
-parser_outer_attribute_not_allowed_on_if_else = outer attributes are not allowed on `if` and `else` branches
- .branch_label = the attributes are attached to this branch
- .ctx_label = the branch belongs to this `{$ctx}`
- .suggestion = remove the attributes
-
-parser_missing_in_in_for_loop = missing `in` in `for` loop
- .use_in_not_of = try using `in` here instead
- .add_in = try adding `in` here
-
-parser_missing_comma_after_match_arm = expected `,` following `match` arm
- .suggestion = missing a comma here to end this `match` arm
-
-parser_catch_after_try = keyword `catch` cannot follow a `try` block
- .help = try using `match` on the result of the `try` block instead
-
-parser_comma_after_base_struct = cannot use a comma after the base struct
- .note = the base struct must always be the last field
- .suggestion = remove this comma
-
-parser_eq_field_init = expected `:`, found `=`
- .suggestion = replace equals symbol with a colon
-
-parser_dotdotdot = unexpected token: `...`
- .suggest_exclusive_range = use `..` for an exclusive range
- .suggest_inclusive_range = or `..=` for an inclusive range
-
-parser_left_arrow_operator = unexpected token: `<-`
- .suggestion = if you meant to write a comparison against a negative value, add a space in between `<` and `-`
-
-parser_remove_let = expected pattern, found `let`
- .suggestion = remove the unnecessary `let` keyword
-
-parser_use_eq_instead = unexpected `==`
- .suggestion = try using `=` instead
-
-parser_use_empty_block_not_semi = expected { "`{}`" }, found `;`
- .suggestion = try using { "`{}`" } instead
-
-parser_comparison_interpreted_as_generic =
- `<` is interpreted as a start of generic arguments for `{$type}`, not a comparison
- .label_args = interpreted as generic arguments
- .label_comparison = not interpreted as comparison
- .suggestion = try comparing the cast value
-
-parser_shift_interpreted_as_generic =
- `<<` is interpreted as a start of generic arguments for `{$type}`, not a shift
- .label_args = interpreted as generic arguments
- .label_comparison = not interpreted as shift
- .suggestion = try shifting the cast value
-
-parser_found_expr_would_be_stmt = expected expression, found `{$token}`
- .label = expected expression
-
-parser_leading_plus_not_supported = leading `+` is not supported
- .label = unexpected `+`
- .suggestion_remove_plus = try removing the `+`
-
-parser_parentheses_with_struct_fields = invalid `struct` delimiters or `fn` call arguments
- .suggestion_braces_for_struct = if `{$type}` is a struct, use braces as delimiters
- .suggestion_no_fields_for_fn = if `{$type}` is a function, use the arguments directly
-
-parser_labeled_loop_in_break = parentheses are required around this expression to avoid confusion with a labeled break expression
-
-parser_sugg_wrap_expression_in_parentheses = wrap the expression in parentheses
-
-parser_array_brackets_instead_of_braces = this is a block expression, not an array
- .suggestion = to make an array, use square brackets instead of curly braces
-
-parser_match_arm_body_without_braces = `match` arm body without braces
- .label_statements = {$num_statements ->
- [one] this statement is not surrounded by a body
- *[other] these statements are not surrounded by a body
- }
- .label_arrow = while parsing the `match` arm starting here
- .suggestion_add_braces = surround the {$num_statements ->
- [one] statement
- *[other] statements
- } with a body
- .suggestion_use_comma_not_semicolon = use a comma to end a `match` arm expression
-
-parser_struct_literal_not_allowed_here = struct literals are not allowed here
- .suggestion = surround the struct literal with parentheses
-
-parser_invalid_interpolated_expression = invalid interpolated expression
-
-parser_hexadecimal_float_literal_not_supported = hexadecimal float literal is not supported
-parser_octal_float_literal_not_supported = octal float literal is not supported
-parser_binary_float_literal_not_supported = binary float literal is not supported
-parser_not_supported = not supported
-
-parser_invalid_literal_suffix = suffixes on {$kind} literals are invalid
- .label = invalid suffix `{$suffix}`
-
-parser_invalid_literal_suffix_on_tuple_index = suffixes on a tuple index are invalid
- .label = invalid suffix `{$suffix}`
- .tuple_exception_line_1 = `{$suffix}` is *temporarily* accepted on tuple index fields as it was incorrectly accepted on stable for a few releases
- .tuple_exception_line_2 = on proc macros, you'll want to use `syn::Index::from` or `proc_macro::Literal::*_unsuffixed` for code that will desugar to tuple field access
- .tuple_exception_line_3 = see issue #60210 <https://github.com/rust-lang/rust/issues/60210> for more information
-
-parser_non_string_abi_literal = non-string ABI literal
- .suggestion = specify the ABI with a string literal
-
-parser_mismatched_closing_delimiter = mismatched closing delimiter: `{$delimiter}`
- .label_unmatched = mismatched closing delimiter
- .label_opening_candidate = closing delimiter possibly meant for this
- .label_unclosed = unclosed delimiter
-
-parser_incorrect_visibility_restriction = incorrect visibility restriction
- .help = some possible visibility restrictions are:
- `pub(crate)`: visible only on the current crate
- `pub(super)`: visible only in the current module's parent
- `pub(in path::to::module)`: visible only on the specified path
- .suggestion = make this visible only to module `{$inner_str}` with `in`
-
-parser_assignment_else_not_allowed = <assignment> ... else {"{"} ... {"}"} is not allowed
-
-parser_expected_statement_after_outer_attr = expected statement after outer attribute
-
-parser_doc_comment_does_not_document_anything = found a documentation comment that doesn't document anything
- .help = doc comments must come before what they document, if a comment was intended use `//`
- .suggestion = missing comma here
-
-parser_const_let_mutually_exclusive = `const` and `let` are mutually exclusive
- .suggestion = remove `let`
-
-parser_invalid_expression_in_let_else = a `{$operator}` expression cannot be directly assigned in `let...else`
-parser_invalid_curly_in_let_else = right curly brace `{"}"}` before `else` in a `let...else` statement not allowed
-
-parser_compound_assignment_expression_in_let = can't reassign to an uninitialized variable
- .suggestion = initialize the variable
- .help = if you meant to overwrite, remove the `let` binding
-
-parser_suffixed_literal_in_attribute = suffixed literals are not allowed in attributes
- .help = instead of using a suffixed literal (`1u8`, `1.0f32`, etc.), use an unsuffixed version (`1`, `1.0`, etc.)
-
-parser_invalid_meta_item = expected unsuffixed literal or identifier, found `{$token}`
-
-parser_label_inner_attr_does_not_annotate_this = the inner attribute doesn't annotate this {$item}
-parser_sugg_change_inner_attr_to_outer = to annotate the {$item}, change the attribute from inner to outer style
-
-parser_inner_attr_not_permitted_after_outer_doc_comment = an inner attribute is not permitted following an outer doc comment
- .label_attr = not permitted following an outer doc comment
- .label_prev_doc_comment = previous doc comment
- .label_does_not_annotate_this = {parser_label_inner_attr_does_not_annotate_this}
- .sugg_change_inner_to_outer = {parser_sugg_change_inner_attr_to_outer}
-
-parser_inner_attr_not_permitted_after_outer_attr = an inner attribute is not permitted following an outer attribute
- .label_attr = not permitted following an outer attribute
- .label_prev_attr = previous outer attribute
- .label_does_not_annotate_this = {parser_label_inner_attr_does_not_annotate_this}
- .sugg_change_inner_to_outer = {parser_sugg_change_inner_attr_to_outer}
-
-parser_inner_attr_not_permitted = an inner attribute is not permitted in this context
- .label_does_not_annotate_this = {parser_label_inner_attr_does_not_annotate_this}
- .sugg_change_inner_to_outer = {parser_sugg_change_inner_attr_to_outer}
-
-parser_inner_attr_explanation = inner attributes, like `#![no_std]`, annotate the item enclosing them, and are usually found at the beginning of source files
-parser_outer_attr_explanation = outer attributes, like `#[test]`, annotate the item following them
-
-parser_inner_doc_comment_not_permitted = expected outer doc comment
- .note = inner doc comments like this (starting with `//!` or `/*!`) can only appear before items
- .suggestion = you might have meant to write a regular comment
- .label_does_not_annotate_this = the inner doc comment doesn't annotate this {$item}
- .sugg_change_inner_to_outer = to annotate the {$item}, change the doc comment from inner to outer style
-
-parser_expected_identifier_found_reserved_identifier_str = expected identifier, found reserved identifier `{$token}`
-parser_expected_identifier_found_keyword_str = expected identifier, found keyword `{$token}`
-parser_expected_identifier_found_reserved_keyword_str = expected identifier, found reserved keyword `{$token}`
-parser_expected_identifier_found_doc_comment_str = expected identifier, found doc comment `{$token}`
-parser_expected_identifier_found_str = expected identifier, found `{$token}`
-
-parser_expected_identifier_found_reserved_identifier = expected identifier, found reserved identifier
-parser_expected_identifier_found_keyword = expected identifier, found keyword
-parser_expected_identifier_found_reserved_keyword = expected identifier, found reserved keyword
-parser_expected_identifier_found_doc_comment = expected identifier, found doc comment
-parser_expected_identifier = expected identifier
-
-parser_sugg_escape_to_use_as_identifier = escape `{$ident_name}` to use it as an identifier
-
-parser_sugg_remove_comma = remove this comma
-
-parser_expected_semi_found_reserved_identifier_str = expected `;`, found reserved identifier `{$token}`
-parser_expected_semi_found_keyword_str = expected `;`, found keyword `{$token}`
-parser_expected_semi_found_reserved_keyword_str = expected `;`, found reserved keyword `{$token}`
-parser_expected_semi_found_doc_comment_str = expected `;`, found doc comment `{$token}`
-parser_expected_semi_found_str = expected `;`, found `{$token}`
-
-parser_sugg_change_this_to_semi = change this to `;`
-parser_sugg_add_semi = add `;` here
-parser_label_unexpected_token = unexpected token
-
-parser_unmatched_angle_brackets = {$num_extra_brackets ->
- [one] unmatched angle bracket
- *[other] unmatched angle brackets
- }
- .suggestion = {$num_extra_brackets ->
- [one] remove extra angle bracket
- *[other] remove extra angle brackets
- }
-
-parser_generic_parameters_without_angle_brackets = generic parameters without surrounding angle brackets
- .suggestion = surround the type parameters with angle brackets
-
-parser_comparison_operators_cannot_be_chained = comparison operators cannot be chained
- .sugg_parentheses_for_function_args = or use `(...)` if you meant to specify fn arguments
- .sugg_split_comparison = split the comparison into two
- .sugg_parenthesize = parenthesize the comparison
-parser_sugg_turbofish_syntax = use `::<...>` instead of `<...>` to specify lifetime, type, or const arguments
-
-parser_question_mark_in_type = invalid `?` in type
- .label = `?` is only allowed on expressions, not types
- .suggestion = if you meant to express that the type might not contain a value, use the `Option` wrapper type
-
-parser_unexpected_parentheses_in_for_head = unexpected parentheses surrounding `for` loop head
- .suggestion = remove parentheses in `for` loop
-
-parser_doc_comment_on_param_type = documentation comments cannot be applied to a function parameter's type
- .label = doc comments are not allowed here
-
-parser_attribute_on_param_type = attributes cannot be applied to a function parameter's type
- .label = attributes are not allowed here
-
-parser_pattern_method_param_without_body = patterns aren't allowed in methods without bodies
- .suggestion = give this argument a name or use an underscore to ignore it
-
-parser_self_param_not_first = unexpected `self` parameter in function
- .label = must be the first parameter of an associated function
-
-parser_const_generic_without_braces = expressions must be enclosed in braces to be used as const generic arguments
- .suggestion = enclose the `const` expression in braces
-
-parser_unexpected_const_param_declaration = unexpected `const` parameter declaration
- .label = expected a `const` expression, not a parameter declaration
- .suggestion = `const` parameters must be declared for the `impl`
-
-parser_unexpected_const_in_generic_param = expected lifetime, type, or constant, found keyword `const`
- .suggestion = the `const` keyword is only needed in the definition of the type
-
-parser_async_move_order_incorrect = the order of `move` and `async` is incorrect
- .suggestion = try switching the order
-
-parser_double_colon_in_bound = expected `:` followed by trait or lifetime
- .suggestion = use single colon
diff --git a/compiler/rustc_error_messages/locales/en-US/passes.ftl b/compiler/rustc_error_messages/locales/en-US/passes.ftl
index 4bc6bd9fb..001e53d1d 100644
--- a/compiler/rustc_error_messages/locales/en-US/passes.ftl
+++ b/compiler/rustc_error_messages/locales/en-US/passes.ftl
@@ -47,7 +47,10 @@ passes_no_coverage_not_coverable =
passes_should_be_applied_to_fn =
attribute should be applied to a function definition
- .label = not a function definition
+ .label = {$on_crate ->
+ [true] cannot be applied to crates
+ *[false] not a function definition
+ }
passes_naked_tracked_caller =
cannot use `#[track_caller]` with `#[naked]`
@@ -367,12 +370,6 @@ passes_unknown_external_lang_item =
passes_missing_panic_handler =
`#[panic_handler]` function required, but not found
-passes_alloc_func_required =
- `#[alloc_error_handler]` function required, but not found
-
-passes_missing_alloc_error_handler =
- use `#![feature(default_alloc_error_handler)]` for a default error handler
-
passes_missing_lang_item =
language item required, but not found: `{$name}`
.note = this can occur when a binary crate with `#![no_std]` is compiled for a target where `{$name}` is defined in the standard library
@@ -457,8 +454,14 @@ passes_break_inside_async_block =
.async_block_label = enclosing `async` block
passes_outside_loop =
- `{$name}` outside of a loop
- .label = cannot `{$name}` outside of a loop
+ `{$name}` outside of a loop{$is_break ->
+ [true] {" or labeled block"}
+ *[false] {""}
+ }
+ .label = cannot `{$name}` outside of a loop{$is_break ->
+ [true] {" or labeled block"}
+ *[false] {""}
+ }
passes_unlabeled_in_labeled_block =
unlabeled `{$cf_type}` inside of a labeled block
@@ -671,3 +674,36 @@ passes_missing_const_err =
attributes `#[rustc_const_unstable]` and `#[rustc_const_stable]` require the function or method to be `const`
.help = make the function or method const
.label = attribute specified here
+
+passes_dead_codes =
+ { $multiple ->
+ *[true] multiple {$descr}s are
+ [false] { $num ->
+ [one] {$descr} {$name_list} is
+ *[other] {$descr}s {$name_list} are
+ }
+ } never {$participle}
+
+passes_change_fields_to_be_of_unit_type =
+ consider changing the { $num ->
+ [one] field
+ *[other] fields
+ } to be of unit type to suppress this warning while preserving the field numbering, or remove the { $num ->
+ [one] field
+ *[other] fields
+ }
+
+passes_parent_info =
+ {$num ->
+ [one] {$descr}
+ *[other] {$descr}s
+ } in this {$parent_descr}
+
+passes_ignored_derived_impls =
+ `{$name}` has {$trait_list_len ->
+ [one] a derived impl
+ *[other] derived impls
+ } for the {$trait_list_len ->
+ [one] trait {$trait_list}, but this is
+ *[other] traits {$trait_list}, but these are
+ } intentionally ignored during dead code analysis
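
When the selector in a Fluent select expression is a number, variant keys such as `[one]` and `*[other]` are matched against CLDR plural categories, which is how `passes_dead_codes` and `passes_parent_info` above pick singular or plural wording. A minimal sketch of that plural pattern, not part of the diff (the message name is made up for illustration):

    # illustrative only -- plural selection on a numeric argument
    example_never_used = {$num ->
            [one] {$descr} {$name_list} is
           *[other] {$descr}s {$name_list} are
        } never used
    # num = 1, descr = "field", name_list = "`a`"      => "field `a` is never used"
    # num = 2, descr = "field", name_list = "`a`, `b`" => "fields `a`, `b` are never used"
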
diff --git a/compiler/rustc_error_messages/locales/en-US/resolve.ftl b/compiler/rustc_error_messages/locales/en-US/resolve.ftl
new file mode 100644
index 000000000..817bb83ed
--- /dev/null
+++ b/compiler/rustc_error_messages/locales/en-US/resolve.ftl
@@ -0,0 +1,211 @@
+resolve_parent_module_reset_for_binding =
+ parent module is reset for binding
+
+resolve_ampersand_used_without_explicit_lifetime_name =
+ `&` without an explicit lifetime name cannot be used here
+ .note = explicit lifetime name needed here
+
+resolve_underscore_lifetime_name_cannot_be_used_here =
+ `'_` cannot be used here
+ .note = `'_` is a reserved lifetime name
+
+resolve_crate_may_not_be_imported =
+ `$crate` may not be imported
+
+resolve_crate_root_imports_must_be_named_explicitly =
+ crate root imports need to be explicitly named: `use crate as name;`
+
+resolve_generic_params_from_outer_function =
+ can't use generic parameters from outer function
+ .label = use of generic parameter from outer function
+ .suggestion = try using a local generic parameter instead
+
+resolve_self_type_implicitly_declared_by_impl =
+ `Self` type implicitly declared here, by this `impl`
+
+resolve_cannot_use_self_type_here =
+ can't use `Self` here
+
+resolve_use_a_type_here_instead =
+ use a type here instead
+
+resolve_type_param_from_outer_fn =
+ type parameter from outer function
+
+resolve_const_param_from_outer_fn =
+ const parameter from outer function
+
+resolve_try_using_local_generic_parameter =
+ try using a local generic parameter instead
+
+resolve_try_adding_local_generic_param_on_method =
+ try adding a local generic parameter in this method instead
+
+resolve_help_try_using_local_generic_param =
+ try using a local generic parameter instead
+
+resolve_name_is_already_used_as_generic_parameter =
+ the name `{$name}` is already used for a generic parameter in this item's generic parameters
+ .label = already used
+ .first_use_of_name = first use of `{$name}`
+
+resolve_method_not_member_of_trait =
+ method `{$method}` is not a member of trait `{$trait_}`
+ .label = not a member of trait `{$trait_}`
+
+resolve_associated_fn_with_similar_name_exists =
+ there is an associated function with a similar name
+
+resolve_type_not_member_of_trait =
+ type `{$type_}` is not a member of trait `{$trait_}`
+ .label = not a member of trait `{$trait_}`
+
+resolve_associated_type_with_similar_name_exists =
+ there is an associated type with a similar name
+
+resolve_const_not_member_of_trait =
+ const `{$const_}` is not a member of trait `{$trait_}`
+ .label = not a member of trait `{$trait_}`
+
+resolve_associated_const_with_similar_name_exists =
+ there is an associated constant with a similar name
+
+resolve_variable_bound_with_different_mode =
+ variable `{$variable_name}` is bound inconsistently across alternatives separated by `|`
+ .label = bound in different ways
+ .first_binding_span = first binding
+
+resolve_ident_bound_more_than_once_in_parameter_list =
+ identifier `{$identifier}` is bound more than once in this parameter list
+ .label = used as parameter more than once
+
+resolve_ident_bound_more_than_once_in_same_pattern =
+ identifier `{$identifier}` is bound more than once in the same pattern
+ .label = used in a pattern more than once
+
+resolve_undeclared_label =
+ use of undeclared label `{$name}`
+ .label = undeclared label `{$name}`
+
+resolve_label_with_similar_name_reachable =
+ a label with a similar name is reachable
+
+resolve_try_using_similarly_named_label =
+ try using similarly named label
+
+resolve_unreachable_label_with_similar_name_exists =
+ a label with a similar name exists but is unreachable
+
+resolve_self_import_can_only_appear_once_in_the_list =
+ `self` import can only appear once in an import list
+ .label = can only appear once in an import list
+
+resolve_self_import_only_in_import_list_with_non_empty_prefix =
+ `self` import can only appear in an import list with a non-empty prefix
+ .label = can only appear in an import list with a non-empty prefix
+
+resolve_cannot_capture_dynamic_environment_in_fn_item =
+ can't capture dynamic environment in a fn item
+ .help = use the `|| {"{"} ... {"}"}` closure form instead
+
+resolve_attempt_to_use_non_constant_value_in_constant =
+ attempt to use a non-constant value in a constant
+
+resolve_attempt_to_use_non_constant_value_in_constant_with_suggestion =
+ consider using `{$suggestion}` instead of `{$current}`
+
+resolve_attempt_to_use_non_constant_value_in_constant_label_with_suggestion =
+ non-constant value
+
+resolve_attempt_to_use_non_constant_value_in_constant_without_suggestion =
+ this would need to be a `{$suggestion}`
+
+resolve_self_imports_only_allowed_within =
+ `self` imports are only allowed within a {"{"} {"}"} list
+
+resolve_self_imports_only_allowed_within_suggestion =
+ consider importing the module directly
+
+resolve_self_imports_only_allowed_within_multipart_suggestion =
+ alternatively, use the multi-path `use` syntax to import `self`
+
+resolve_binding_shadows_something_unacceptable =
+ {$shadowing_binding}s cannot shadow {$shadowed_binding}s
+ .label = cannot be named the same as {$article} {$shadowed_binding}
+ .label_shadowed_binding = the {$shadowed_binding} `{$name}` is {$participle} here
+
+resolve_binding_shadows_something_unacceptable_suggestion =
+ try specifying the pattern arguments
+
+resolve_forward_declared_generic_param =
+ generic parameters with a default cannot use forward declared identifiers
+ .label = defaulted generic parameters cannot be forward declared
+
+resolve_param_in_ty_of_const_param =
+ the type of const parameters must not depend on other generic parameters
+ .label = the type must not depend on the parameter `{$name}`
+
+resolve_self_in_generic_param_default =
+ generic parameters cannot use `Self` in their defaults
+ .label = `Self` in generic parameter default
+
+resolve_param_in_non_trivial_anon_const =
+ generic parameters may not be used in const operations
+ .label = cannot perform const operation using `{$name}`
+
+resolve_param_in_non_trivial_anon_const_help =
+ use `#![feature(generic_const_exprs)]` to allow generic const expressions
+
+resolve_param_in_non_trivial_anon_const_sub_type =
+ type parameters may not be used in const expressions
+
+resolve_param_in_non_trivial_anon_const_sub_non_type =
+ const parameters may only be used as standalone arguments, i.e. `{$name}`
+
+resolve_unreachable_label =
+ use of unreachable label `{$name}`
+ .label = unreachable label `{$name}`
+ .label_definition_span = unreachable label defined here
+ .note = labels are unreachable through functions, closures, async blocks and modules
+
+resolve_unreachable_label_suggestion_use_similarly_named =
+ try using similarly named label
+
+resolve_unreachable_label_similar_name_reachable =
+ a label with a similar name is reachable
+
+resolve_unreachable_label_similar_name_unreachable =
+ a label with a similar name exists but is also unreachable
+
+resolve_trait_impl_mismatch =
+ item `{$name}` is an associated {$kind}, which doesn't match its trait `{$trait_path}`
+ .label = does not match trait
+ .label_trait_item = item in trait
+
+resolve_invalid_asm_sym =
+ invalid `sym` operand
+ .label = is a local variable
+ .help = `sym` operands must refer to either a function or a static
+
+resolve_trait_impl_duplicate =
+ duplicate definitions with name `{$name}`:
+ .label = duplicate definition
+ .old_span_label = previous definition here
+ .trait_item_span = item in trait
+
+resolve_relative_2018 =
+ relative paths are not supported in visibilities in 2018 edition or later
+ .suggestion = try
+
+resolve_ancestor_only =
+ visibilities can only be restricted to ancestor modules
+
+resolve_expected_found =
+ expected module, found {$res} `{$path_str}`
+ .label = not a module
+
+resolve_indeterminate =
+ cannot determine resolution for the visibility
+
+resolve_module_only =
+ visibility must resolve to a module
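
Each slug's `.label`, `.suggestion`, and `.note` lines become Fluent attributes on the message: rustc formats the message value as the diagnostic's primary text and the attributes as span labels or suggestions. A hedged standalone sketch of looking one up through the public fluent-bundle API (the attribute/value split mirrors how the file above is consumed; the snippet itself only exercises fluent-bundle):

use fluent_bundle::{FluentBundle, FluentResource};
use unic_langid::LanguageIdentifier;

fn main() {
    let ftl = r#"resolve_generic_params_from_outer_function =
    can't use generic parameters from outer function
    .label = use of generic parameter from outer function
    .suggestion = try using a local generic parameter instead
"#;
    let res = FluentResource::try_new(ftl.to_string()).expect("valid FTL");
    let langid: LanguageIdentifier = "en-US".parse().expect("valid language identifier");
    let mut bundle = FluentBundle::new(vec![langid]);
    bundle.set_use_isolating(false);
    bundle.add_resource(res).expect("no duplicate message ids");

    let msg = bundle.get_message("resolve_generic_params_from_outer_function").unwrap();
    let mut errors = vec![];
    // Message value -> the diagnostic's main text.
    let title = bundle.format_pattern(msg.value().unwrap(), None, &mut errors);
    // `.label` attribute -> the text attached to the primary span.
    let label = msg.get_attribute("label").expect("label attribute exists");
    let label_text = bundle.format_pattern(label.value(), None, &mut errors);
    println!("error: {title}");
    println!(" label: {label_text}");
}
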
diff --git a/compiler/rustc_error_messages/locales/en-US/session.ftl b/compiler/rustc_error_messages/locales/en-US/session.ftl
index e22779230..983eb9262 100644
--- a/compiler/rustc_error_messages/locales/en-US/session.ftl
+++ b/compiler/rustc_error_messages/locales/en-US/session.ftl
@@ -58,3 +58,31 @@ session_expr_parentheses_needed = parentheses are required to parse this as an e
session_skipping_const_checks = skipping const checks
session_unleashed_feature_help_named = skipping check for `{$gate}` feature
session_unleashed_feature_help_unnamed = skipping check that does not even have a feature gate
+
+session_hexadecimal_float_literal_not_supported = hexadecimal float literal is not supported
+session_octal_float_literal_not_supported = octal float literal is not supported
+session_binary_float_literal_not_supported = binary float literal is not supported
+session_not_supported = not supported
+
+session_invalid_literal_suffix = suffixes on {$kind} literals are invalid
+ .label = invalid suffix `{$suffix}`
+
+session_invalid_num_literal_base_prefix = invalid base prefix for number literal
+ .note = base prefixes (`0xff`, `0b1010`, `0o755`) are lowercase
+ .suggestion = try making the prefix lowercase
+
+session_invalid_num_literal_suffix = invalid suffix `{$suffix}` for number literal
+ .label = invalid suffix `{$suffix}`
+ .help = the suffix must be one of the numeric types (`u32`, `isize`, `f32`, etc.)
+
+session_invalid_float_literal_width = invalid width `{$width}` for float literal
+ .help = valid widths are 32 and 64
+
+session_invalid_float_literal_suffix = invalid suffix `{$suffix}` for float literal
+ .label = invalid suffix `{$suffix}`
+ .help = valid suffixes are `f32` and `f64`
+
+session_int_literal_too_large = integer literal is too large
+
+session_invalid_int_literal_width = invalid width `{$width}` for integer literal
+ .help = valid widths are 8, 16, 32, 64 and 128
diff --git a/compiler/rustc_error_messages/src/lib.rs b/compiler/rustc_error_messages/src/lib.rs
index 9465051dd..418ba3c74 100644
--- a/compiler/rustc_error_messages/src/lib.rs
+++ b/compiler/rustc_error_messages/src/lib.rs
@@ -10,6 +10,7 @@ extern crate tracing;
use fluent_bundle::FluentResource;
use fluent_syntax::parser::ParserError;
+use icu_provider_adapters::fallback::{LocaleFallbackProvider, LocaleFallbacker};
use rustc_data_structures::sync::Lrc;
use rustc_macros::{fluent_messages, Decodable, Encodable};
use rustc_span::Span;
@@ -30,7 +31,7 @@ use intl_memoizer::concurrent::IntlLangMemoizer;
#[cfg(not(parallel_compiler))]
use intl_memoizer::IntlLangMemoizer;
-pub use fluent_bundle::{FluentArgs, FluentError, FluentValue};
+pub use fluent_bundle::{self, types::FluentType, FluentArgs, FluentError, FluentValue};
pub use unic_langid::{langid, LanguageIdentifier};
// Generates `DEFAULT_LOCALE_RESOURCES` static and `fluent_generated` module.
@@ -42,6 +43,7 @@ fluent_messages! {
borrowck => "../locales/en-US/borrowck.ftl",
builtin_macros => "../locales/en-US/builtin_macros.ftl",
codegen_gcc => "../locales/en-US/codegen_gcc.ftl",
+ codegen_llvm => "../locales/en-US/codegen_llvm.ftl",
codegen_ssa => "../locales/en-US/codegen_ssa.ftl",
compiletest => "../locales/en-US/compiletest.ftl",
const_eval => "../locales/en-US/const_eval.ftl",
@@ -49,6 +51,7 @@ fluent_messages! {
errors => "../locales/en-US/errors.ftl",
expand => "../locales/en-US/expand.ftl",
hir_analysis => "../locales/en-US/hir_analysis.ftl",
+ hir_typeck => "../locales/en-US/hir_typeck.ftl",
infer => "../locales/en-US/infer.ftl",
interface => "../locales/en-US/interface.ftl",
lint => "../locales/en-US/lint.ftl",
@@ -56,11 +59,12 @@ fluent_messages! {
middle => "../locales/en-US/middle.ftl",
mir_dataflow => "../locales/en-US/mir_dataflow.ftl",
monomorphize => "../locales/en-US/monomorphize.ftl",
- parser => "../locales/en-US/parser.ftl",
+ parse => "../locales/en-US/parse.ftl",
passes => "../locales/en-US/passes.ftl",
plugin_impl => "../locales/en-US/plugin_impl.ftl",
privacy => "../locales/en-US/privacy.ftl",
query_system => "../locales/en-US/query_system.ftl",
+ resolve => "../locales/en-US/resolve.ftl",
save_analysis => "../locales/en-US/save_analysis.ftl",
session => "../locales/en-US/session.ftl",
symbol_mangling => "../locales/en-US/symbol_mangling.ftl",
@@ -539,3 +543,92 @@ impl From<Vec<Span>> for MultiSpan {
MultiSpan::from_spans(spans)
}
}
+
+fn icu_locale_from_unic_langid(lang: LanguageIdentifier) -> Option<icu_locid::Locale> {
+ icu_locid::Locale::try_from_bytes(lang.to_string().as_bytes()).ok()
+}
+
+pub fn fluent_value_from_str_list_sep_by_and<'source>(
+ l: Vec<Cow<'source, str>>,
+) -> FluentValue<'source> {
+ // Fluent requires 'static value here for its AnyEq usages.
+ #[derive(Clone, PartialEq, Debug)]
+ struct FluentStrListSepByAnd(Vec<String>);
+
+ impl FluentType for FluentStrListSepByAnd {
+ fn duplicate(&self) -> Box<dyn FluentType + Send> {
+ Box::new(self.clone())
+ }
+
+ fn as_string(&self, intls: &intl_memoizer::IntlLangMemoizer) -> Cow<'static, str> {
+ let result = intls
+ .with_try_get::<MemoizableListFormatter, _, _>((), |list_formatter| {
+ list_formatter.format_to_string(self.0.iter())
+ })
+ .unwrap();
+ Cow::Owned(result)
+ }
+
+ #[cfg(not(parallel_compiler))]
+ fn as_string_threadsafe(
+ &self,
+ _intls: &intl_memoizer::concurrent::IntlLangMemoizer,
+ ) -> Cow<'static, str> {
+ unreachable!("`as_string_threadsafe` is not used in non-parallel rustc")
+ }
+
+ #[cfg(parallel_compiler)]
+ fn as_string_threadsafe(
+ &self,
+ intls: &intl_memoizer::concurrent::IntlLangMemoizer,
+ ) -> Cow<'static, str> {
+ let result = intls
+ .with_try_get::<MemoizableListFormatter, _, _>((), |list_formatter| {
+ list_formatter.format_to_string(self.0.iter())
+ })
+ .unwrap();
+ Cow::Owned(result)
+ }
+ }
+
+ struct MemoizableListFormatter(icu_list::ListFormatter);
+
+ impl std::ops::Deref for MemoizableListFormatter {
+ type Target = icu_list::ListFormatter;
+ fn deref(&self) -> &Self::Target {
+ &self.0
+ }
+ }
+
+ impl intl_memoizer::Memoizable for MemoizableListFormatter {
+ type Args = ();
+ type Error = ();
+
+ fn construct(lang: LanguageIdentifier, _args: Self::Args) -> Result<Self, Self::Error>
+ where
+ Self: Sized,
+ {
+ let baked_data_provider = rustc_baked_icu_data::baked_data_provider();
+ let locale_fallbacker =
+ LocaleFallbacker::try_new_with_any_provider(&baked_data_provider)
+ .expect("Failed to create fallback provider");
+ let data_provider =
+ LocaleFallbackProvider::new_with_fallbacker(baked_data_provider, locale_fallbacker);
+ let locale = icu_locale_from_unic_langid(lang)
+ .unwrap_or_else(|| rustc_baked_icu_data::supported_locales::EN);
+ let list_formatter =
+ icu_list::ListFormatter::try_new_and_with_length_with_any_provider(
+ &data_provider,
+ &locale.into(),
+ icu_list::ListLength::Wide,
+ )
+ .expect("Failed to create list formatter");
+
+ Ok(MemoizableListFormatter(list_formatter))
+ }
+ }
+
+ let l = l.into_iter().map(|x| x.into_owned()).collect();
+
+ FluentValue::Custom(Box::new(FluentStrListSepByAnd(l)))
+}
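
fluent_value_from_str_list_sep_by_and wraps the list in a FluentValue::Custom whose FluentType impl is only stringified when a message actually interpolates it; the real implementation formats via ICU4X's locale-aware icu_list::ListFormatter through the intl memoizer. The sketch below keeps the same trait surface but joins with a plain " and " so it runs without ICU data; fluent-bundle, intl-memoizer, and unic-langid are assumed dependencies.

use std::borrow::Cow;

use fluent_bundle::types::FluentType;
use fluent_bundle::{FluentArgs, FluentBundle, FluentResource, FluentValue};
use intl_memoizer::IntlLangMemoizer;
use unic_langid::LanguageIdentifier;

// Stand-in for rustc's FluentStrListSepByAnd: same trait methods,
// but a plain join instead of icu_list's ListFormatter.
#[derive(Clone, PartialEq, Debug)]
struct StrListSepByAnd(Vec<String>);

impl FluentType for StrListSepByAnd {
    fn duplicate(&self) -> Box<dyn FluentType + Send> {
        Box::new(self.clone())
    }
    fn as_string(&self, _intls: &IntlLangMemoizer) -> Cow<'static, str> {
        Cow::Owned(self.0.join(" and "))
    }
    fn as_string_threadsafe(
        &self,
        _intls: &intl_memoizer::concurrent::IntlLangMemoizer,
    ) -> Cow<'static, str> {
        Cow::Owned(self.0.join(" and "))
    }
}

fn main() {
    let ftl = "passes_dead_codes = fields {$name_list} are never read".to_string();
    let res = FluentResource::try_new(ftl).expect("valid FTL");
    let langid: LanguageIdentifier = "en-US".parse().expect("valid language identifier");
    let mut bundle = FluentBundle::new(vec![langid]);
    bundle.set_use_isolating(false);
    bundle.add_resource(res).expect("no duplicate message ids");

    let list = StrListSepByAnd(vec!["`a`".into(), "`b`".into(), "`c`".into()]);
    let mut args = FluentArgs::new();
    args.set("name_list", FluentValue::Custom(Box::new(list)));

    let msg = bundle.get_message("passes_dead_codes").unwrap();
    let mut errors = vec![];
    let text = bundle.format_pattern(msg.value().unwrap(), Some(&args), &mut errors);
    println!("{text}"); // fields `a` and `b` and `c` are never read
}
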
diff --git a/compiler/rustc_errors/Cargo.toml b/compiler/rustc_errors/Cargo.toml
index 7803a0792..dee7a31ec 100644
--- a/compiler/rustc_errors/Cargo.toml
+++ b/compiler/rustc_errors/Cargo.toml
@@ -18,7 +18,6 @@ rustc_target = { path = "../rustc_target" }
rustc_hir = { path = "../rustc_hir" }
rustc_lint_defs = { path = "../rustc_lint_defs" }
unicode-width = "0.1.4"
-atty = "0.2"
termcolor = "1.0"
annotate-snippets = "0.9"
termize = "0.1.1"
@@ -27,3 +26,6 @@ serde_json = "1.0.59"
[target.'cfg(windows)'.dependencies]
winapi = { version = "0.3", features = [ "handleapi", "synchapi", "winbase" ] }
+
+[features]
+rustc_use_parallel_compiler = ['rustc_error_messages/rustc_use_parallel_compiler']
diff --git a/compiler/rustc_errors/src/annotate_snippet_emitter_writer.rs b/compiler/rustc_errors/src/annotate_snippet_emitter_writer.rs
index f14b8ee32..d8879bf70 100644
--- a/compiler/rustc_errors/src/annotate_snippet_emitter_writer.rs
+++ b/compiler/rustc_errors/src/annotate_snippet_emitter_writer.rs
@@ -39,7 +39,7 @@ impl Translate for AnnotateSnippetEmitterWriter {
}
fn fallback_fluent_bundle(&self) -> &FluentBundle {
- &**self.fallback_bundle
+ &self.fallback_bundle
}
}
@@ -49,10 +49,9 @@ impl Emitter for AnnotateSnippetEmitterWriter {
let fluent_args = to_fluent_args(diag.args());
let mut children = diag.children.clone();
- let (mut primary_span, suggestions) = self.primary_span_formatted(&diag, &fluent_args);
+ let (mut primary_span, suggestions) = self.primary_span_formatted(diag, &fluent_args);
self.fix_multispans_in_extern_macros_and_render_macro_backtrace(
- &self.source_map,
&mut primary_span,
&mut children,
&diag.level,
@@ -66,7 +65,7 @@ impl Emitter for AnnotateSnippetEmitterWriter {
&diag.code,
&primary_span,
&children,
- &suggestions,
+ suggestions,
);
}
diff --git a/compiler/rustc_errors/src/diagnostic.rs b/compiler/rustc_errors/src/diagnostic.rs
index 23f29a24f..06bb5edc0 100644
--- a/compiler/rustc_errors/src/diagnostic.rs
+++ b/compiler/rustc_errors/src/diagnostic.rs
@@ -4,6 +4,7 @@ use crate::{
SubdiagnosticMessage, Substitution, SubstitutionPart, SuggestionStyle,
};
use rustc_data_structures::fx::FxHashMap;
+use rustc_error_messages::fluent_value_from_str_list_sep_by_and;
use rustc_error_messages::FluentValue;
use rustc_lint_defs::{Applicability, LintExpectationId};
use rustc_span::edition::LATEST_STABLE_EDITION;
@@ -12,6 +13,7 @@ use rustc_span::{Span, DUMMY_SP};
use std::borrow::Cow;
use std::fmt;
use std::hash::{Hash, Hasher};
+use std::panic::Location;
/// Error type for `Diagnostic`'s `suggestions` field, indicating that
/// `.disable_suggestions()` was called on the `Diagnostic`.
@@ -33,6 +35,7 @@ pub type DiagnosticArgName<'source> = Cow<'source, str>;
pub enum DiagnosticArgValue<'source> {
Str(Cow<'source, str>),
Number(usize),
+ StrListSepByAnd(Vec<Cow<'source, str>>),
}
/// Converts a value of a type into a `DiagnosticArg` (typically a field of an `IntoDiagnostic`
@@ -43,19 +46,31 @@ pub trait IntoDiagnosticArg {
fn into_diagnostic_arg(self) -> DiagnosticArgValue<'static>;
}
+impl<'source> IntoDiagnosticArg for DiagnosticArgValue<'source> {
+ fn into_diagnostic_arg(self) -> DiagnosticArgValue<'static> {
+ match self {
+ DiagnosticArgValue::Str(s) => DiagnosticArgValue::Str(Cow::Owned(s.into_owned())),
+ DiagnosticArgValue::Number(n) => DiagnosticArgValue::Number(n),
+ DiagnosticArgValue::StrListSepByAnd(l) => DiagnosticArgValue::StrListSepByAnd(
+ l.into_iter().map(|s| Cow::Owned(s.into_owned())).collect(),
+ ),
+ }
+ }
+}
+
impl<'source> Into<FluentValue<'source>> for DiagnosticArgValue<'source> {
fn into(self) -> FluentValue<'source> {
match self {
DiagnosticArgValue::Str(s) => From::from(s),
DiagnosticArgValue::Number(n) => From::from(n),
+ DiagnosticArgValue::StrListSepByAnd(l) => fluent_value_from_str_list_sep_by_and(l),
}
}
}
/// Trait implemented by error types. This should not be implemented manually. Instead, use
/// `#[derive(Subdiagnostic)]` -- see [rustc_macros::Subdiagnostic].
-#[cfg_attr(bootstrap, rustc_diagnostic_item = "AddSubdiagnostic")]
-#[cfg_attr(not(bootstrap), rustc_diagnostic_item = "AddToDiagnostic")]
+#[rustc_diagnostic_item = "AddToDiagnostic"]
pub trait AddToDiagnostic
where
Self: Sized,
@@ -107,6 +122,31 @@ pub struct Diagnostic {
/// If diagnostic is from Lint, custom hash function ignores notes
/// otherwise hash is based on the all the fields
pub is_lint: bool,
+
+ /// With `-Ztrack_diagnostics` enabled,
+ /// we print where in rustc this error was emitted.
+ pub emitted_at: DiagnosticLocation,
+}
+
+#[derive(Clone, Debug, Encodable, Decodable)]
+pub struct DiagnosticLocation {
+ file: Cow<'static, str>,
+ line: u32,
+ col: u32,
+}
+
+impl DiagnosticLocation {
+ #[track_caller]
+ fn caller() -> Self {
+ let loc = Location::caller();
+ DiagnosticLocation { file: loc.file().into(), line: loc.line(), col: loc.column() }
+ }
+}
+
+impl fmt::Display for DiagnosticLocation {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ write!(f, "{}:{}:{}", self.file, self.line, self.col)
+ }
}
#[derive(Clone, Debug, PartialEq, Eq, Hash, Encodable, Decodable)]
@@ -173,10 +213,28 @@ impl StringPart {
}
impl Diagnostic {
+ #[track_caller]
pub fn new<M: Into<DiagnosticMessage>>(level: Level, message: M) -> Self {
Diagnostic::new_with_code(level, None, message)
}
+ #[track_caller]
+ pub fn new_with_messages(level: Level, messages: Vec<(DiagnosticMessage, Style)>) -> Self {
+ Diagnostic {
+ level,
+ message: messages,
+ code: None,
+ span: MultiSpan::new(),
+ children: vec![],
+ suggestions: Ok(vec![]),
+ args: Default::default(),
+ sort_span: DUMMY_SP,
+ is_lint: false,
+ emitted_at: DiagnosticLocation::caller(),
+ }
+ }
+
+ #[track_caller]
pub fn new_with_code<M: Into<DiagnosticMessage>>(
level: Level,
code: Option<DiagnosticId>,
@@ -192,6 +250,7 @@ impl Diagnostic {
args: Default::default(),
sort_span: DUMMY_SP,
is_lint: false,
+ emitted_at: DiagnosticLocation::caller(),
}
}
@@ -233,7 +292,7 @@ impl Diagnostic {
let lint_index = expectation_id.get_lint_index();
expectation_id.set_lint_index(None);
let mut stable_id = unstable_to_stable
- .get(&expectation_id)
+ .get(expectation_id)
.expect("each unstable `LintExpectationId` must have a matching stable id")
.normalize();
@@ -688,7 +747,7 @@ impl Diagnostic {
&mut self,
sp: Span,
msg: impl Into<SubdiagnosticMessage>,
- suggestions: impl Iterator<Item = String>,
+ suggestions: impl IntoIterator<Item = String>,
applicability: Applicability,
) -> &mut Self {
self.span_suggestions_with_style(
@@ -705,11 +764,11 @@ impl Diagnostic {
&mut self,
sp: Span,
msg: impl Into<SubdiagnosticMessage>,
- suggestions: impl Iterator<Item = String>,
+ suggestions: impl IntoIterator<Item = String>,
applicability: Applicability,
style: SuggestionStyle,
) -> &mut Self {
- let mut suggestions: Vec<_> = suggestions.collect();
+ let mut suggestions: Vec<_> = suggestions.into_iter().collect();
suggestions.sort();
debug_assert!(
@@ -736,10 +795,10 @@ impl Diagnostic {
pub fn multipart_suggestions(
&mut self,
msg: impl Into<SubdiagnosticMessage>,
- suggestions: impl Iterator<Item = Vec<(Span, String)>>,
+ suggestions: impl IntoIterator<Item = Vec<(Span, String)>>,
applicability: Applicability,
) -> &mut Self {
- let suggestions: Vec<_> = suggestions.collect();
+ let suggestions: Vec<_> = suggestions.into_iter().collect();
debug_assert!(
!(suggestions
.iter()
@@ -902,6 +961,13 @@ impl Diagnostic {
self
}
+ pub fn replace_args(
+ &mut self,
+ args: FxHashMap<DiagnosticArgName<'static>, DiagnosticArgValue<'static>>,
+ ) {
+ self.args = args;
+ }
+
pub fn styled_message(&self) -> &[(DiagnosticMessage, Style)] {
&self.message
}
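
The new emitted_at: DiagnosticLocation field is filled in through #[track_caller] and std::panic::Location::caller(), which is why the constructors above (and the struct_*/span_* helpers further down) all gain #[track_caller]: each attributed frame pushes the reported location one level closer to the code that actually created the diagnostic. A minimal sketch of that propagation in plain std Rust (names are illustrative, not rustc's):

use std::panic::Location;

#[derive(Debug)]
struct DiagnosticLocation {
    file: &'static str,
    line: u32,
    col: u32,
}

impl DiagnosticLocation {
    // Without #[track_caller], Location::caller() would report this line;
    // with it, it reports wherever caller() itself was invoked from.
    #[track_caller]
    fn caller() -> Self {
        let loc = Location::caller();
        DiagnosticLocation { file: loc.file(), line: loc.line(), col: loc.column() }
    }
}

// Also #[track_caller]: the location keeps propagating upward, so the value
// recorded is the call site of new_diagnostic(), i.e. inside main().
#[track_caller]
fn new_diagnostic() -> DiagnosticLocation {
    DiagnosticLocation::caller()
}

fn main() {
    let at = new_diagnostic();
    println!("-Ztrack-diagnostics would report: created at {}:{}:{}", at.file, at.line, at.col);
}
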
diff --git a/compiler/rustc_errors/src/diagnostic_builder.rs b/compiler/rustc_errors/src/diagnostic_builder.rs
index 9b41234dc..a2ed98864 100644
--- a/compiler/rustc_errors/src/diagnostic_builder.rs
+++ b/compiler/rustc_errors/src/diagnostic_builder.rs
@@ -16,8 +16,7 @@ use std::thread::panicking;
/// Trait implemented by error types. This should not be implemented manually. Instead, use
/// `#[derive(Diagnostic)]` -- see [rustc_macros::Diagnostic].
-#[cfg_attr(bootstrap, rustc_diagnostic_item = "SessionDiagnostic")]
-#[cfg_attr(not(bootstrap), rustc_diagnostic_item = "IntoDiagnostic")]
+#[rustc_diagnostic_item = "IntoDiagnostic"]
pub trait IntoDiagnostic<'a, T: EmissionGuarantee = ErrorGuaranteed> {
/// Write out as a diagnostic out of `Handler`.
#[must_use]
@@ -133,6 +132,7 @@ mod sealed_level_is_error {
impl<'a> DiagnosticBuilder<'a, ErrorGuaranteed> {
/// Convenience function for internal use, clients should use one of the
/// `struct_*` methods on [`Handler`].
+ #[track_caller]
pub(crate) fn new_guaranteeing_error<M: Into<DiagnosticMessage>, const L: Level>(
handler: &'a Handler,
message: M,
@@ -196,6 +196,7 @@ impl EmissionGuarantee for ErrorGuaranteed {
}
}
+ #[track_caller]
fn make_diagnostic_builder(
handler: &Handler,
msg: impl Into<DiagnosticMessage>,
@@ -209,6 +210,7 @@ impl EmissionGuarantee for ErrorGuaranteed {
impl<'a> DiagnosticBuilder<'a, ()> {
/// Convenience function for internal use, clients should use one of the
/// `struct_*` methods on [`Handler`].
+ #[track_caller]
pub(crate) fn new<M: Into<DiagnosticMessage>>(
handler: &'a Handler,
level: Level,
@@ -220,6 +222,7 @@ impl<'a> DiagnosticBuilder<'a, ()> {
/// Creates a new `DiagnosticBuilder` with an already constructed
/// diagnostic.
+ #[track_caller]
pub(crate) fn new_diagnostic(handler: &'a Handler, diagnostic: Diagnostic) -> Self {
debug!("Created new diagnostic");
Self {
@@ -308,6 +311,7 @@ impl EmissionGuarantee for Noted {
impl<'a> DiagnosticBuilder<'a, !> {
/// Convenience function for internal use, clients should use one of the
/// `struct_*` methods on [`Handler`].
+ #[track_caller]
pub(crate) fn new_fatal(handler: &'a Handler, message: impl Into<DiagnosticMessage>) -> Self {
let diagnostic = Diagnostic::new_with_code(Level::Fatal, None, message);
Self::new_diagnostic_fatal(handler, diagnostic)
@@ -477,9 +481,9 @@ impl<'a, G: EmissionGuarantee> DiagnosticBuilder<'a, G> {
/// In the meantime, though, callsites are required to deal with the "bug"
/// locally in whichever way makes the most sense.
#[track_caller]
- pub fn delay_as_bug(&mut self) {
+ pub fn delay_as_bug(&mut self) -> G {
self.downgrade_to_delayed_bug();
- self.emit();
+ self.emit()
}
forward!(
@@ -594,13 +598,13 @@ impl<'a, G: EmissionGuarantee> DiagnosticBuilder<'a, G> {
&mut self,
sp: Span,
msg: impl Into<SubdiagnosticMessage>,
- suggestions: impl Iterator<Item = String>,
+ suggestions: impl IntoIterator<Item = String>,
applicability: Applicability,
) -> &mut Self);
forward!(pub fn multipart_suggestions(
&mut self,
msg: impl Into<SubdiagnosticMessage>,
- suggestions: impl Iterator<Item = Vec<(Span, String)>>,
+ suggestions: impl IntoIterator<Item = Vec<(Span, String)>>,
applicability: Applicability,
) -> &mut Self);
forward!(pub fn span_suggestion_short(
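
The span_suggestions/multipart_suggestions signatures (here and in diagnostic.rs above) loosen `impl Iterator<Item = …>` to `impl IntoIterator<Item = …>`, so callers can hand over a Vec or an array without a trailing `.into_iter()`. A small standalone illustration of the difference (the function name is borrowed for flavor, not rustc's actual code):

// Accepting IntoIterator instead of Iterator widens the set of call sites:
// anything iterable works, and the body just calls .into_iter() once.
fn span_suggestions(suggestions: impl IntoIterator<Item = String>) -> Vec<String> {
    let mut suggestions: Vec<_> = suggestions.into_iter().collect();
    suggestions.sort();
    suggestions
}

fn main() {
    // A Vec can now be passed directly...
    let a = span_suggestions(vec!["b".to_string(), "a".to_string()]);
    // ...and an iterator chain still works exactly as before.
    let b = span_suggestions(["y", "x"].iter().map(|s| s.to_string()));
    println!("{a:?} {b:?}");
}
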
diff --git a/compiler/rustc_errors/src/diagnostic_impls.rs b/compiler/rustc_errors/src/diagnostic_impls.rs
index 7640b2919..7155db32e 100644
--- a/compiler/rustc_errors/src/diagnostic_impls.rs
+++ b/compiler/rustc_errors/src/diagnostic_impls.rs
@@ -13,6 +13,7 @@ use std::borrow::Cow;
use std::fmt;
use std::num::ParseIntError;
use std::path::{Path, PathBuf};
+use std::process::ExitStatus;
pub struct DiagnosticArgFromDisplay<'a>(pub &'a dyn fmt::Display);
@@ -58,6 +59,7 @@ into_diagnostic_arg_using_display!(
i128,
u128,
std::io::Error,
+ std::boxed::Box<dyn std::error::Error>,
std::num::NonZeroU32,
hir::Target,
Edition,
@@ -66,7 +68,8 @@ into_diagnostic_arg_using_display!(
ParseIntError,
StackProtector,
&TargetTriple,
- SplitDebuginfo
+ SplitDebuginfo,
+ ExitStatus,
);
impl IntoDiagnosticArg for bool {
@@ -103,6 +106,12 @@ impl IntoDiagnosticArg for String {
}
}
+impl<'a> IntoDiagnosticArg for Cow<'a, str> {
+ fn into_diagnostic_arg(self) -> DiagnosticArgValue<'static> {
+ DiagnosticArgValue::Str(Cow::Owned(self.into_owned()))
+ }
+}
+
impl<'a> IntoDiagnosticArg for &'a Path {
fn into_diagnostic_arg(self) -> DiagnosticArgValue<'static> {
DiagnosticArgValue::Str(Cow::Owned(self.display().to_string()))
@@ -170,6 +179,29 @@ impl IntoDiagnosticArg for Level {
}
}
+#[derive(Clone)]
+pub struct DiagnosticSymbolList(Vec<Symbol>);
+
+impl From<Vec<Symbol>> for DiagnosticSymbolList {
+ fn from(v: Vec<Symbol>) -> Self {
+ DiagnosticSymbolList(v)
+ }
+}
+
+impl IntoDiagnosticArg for DiagnosticSymbolList {
+ fn into_diagnostic_arg(self) -> DiagnosticArgValue<'static> {
+ DiagnosticArgValue::StrListSepByAnd(
+ self.0.into_iter().map(|sym| Cow::Owned(format!("`{sym}`"))).collect(),
+ )
+ }
+}
+
+impl<Id> IntoDiagnosticArg for hir::def::Res<Id> {
+ fn into_diagnostic_arg(self) -> DiagnosticArgValue<'static> {
+ DiagnosticArgValue::Str(Cow::Borrowed(self.descr()))
+ }
+}
+
impl IntoDiagnostic<'_, !> for TargetDataLayoutErrors<'_> {
fn into_diagnostic(self, handler: &Handler) -> DiagnosticBuilder<'_, !> {
let mut diag;
diff --git a/compiler/rustc_errors/src/emitter.rs b/compiler/rustc_errors/src/emitter.rs
index cd6413bc3..4df2198fb 100644
--- a/compiler/rustc_errors/src/emitter.rs
+++ b/compiler/rustc_errors/src/emitter.rs
@@ -16,20 +16,20 @@ use crate::snippet::{Annotation, AnnotationType, Line, MultilineAnnotation, Styl
use crate::styled_buffer::StyledBuffer;
use crate::translation::{to_fluent_args, Translate};
use crate::{
- CodeSuggestion, Diagnostic, DiagnosticId, DiagnosticMessage, FluentBundle, Handler,
- LazyFallbackBundle, Level, MultiSpan, SubDiagnostic, SubstitutionHighlight, SuggestionStyle,
+ diagnostic::DiagnosticLocation, CodeSuggestion, Diagnostic, DiagnosticId, DiagnosticMessage,
+ FluentBundle, Handler, LazyFallbackBundle, Level, MultiSpan, SubDiagnostic,
+ SubstitutionHighlight, SuggestionStyle,
};
-
use rustc_lint_defs::pluralize;
use rustc_data_structures::fx::{FxHashMap, FxIndexMap};
use rustc_data_structures::sync::Lrc;
-use rustc_error_messages::FluentArgs;
+use rustc_error_messages::{FluentArgs, SpanLabel};
use rustc_span::hygiene::{ExpnKind, MacroKind};
use std::borrow::Cow;
use std::cmp::{max, min, Reverse};
-use std::io;
use std::io::prelude::*;
+use std::io::{self, IsTerminal};
use std::iter;
use std::path::Path;
use termcolor::{Ansi, BufferWriter, ColorChoice, ColorSpec, StandardStream};
@@ -64,6 +64,7 @@ impl HumanReadableErrorType {
teach: bool,
diagnostic_width: Option<usize>,
macro_backtrace: bool,
+ track_diagnostics: bool,
) -> EmitterWriter {
let (short, color_config) = self.unzip();
let color = color_config.suggests_using_colors();
@@ -77,6 +78,7 @@ impl HumanReadableErrorType {
color,
diagnostic_width,
macro_backtrace,
+ track_diagnostics,
)
}
}
@@ -246,7 +248,7 @@ pub trait Emitter: Translate {
fluent_args: &FluentArgs<'_>,
) -> (MultiSpan, &'a [CodeSuggestion]) {
let mut primary_span = diag.span.clone();
- let suggestions = diag.suggestions.as_ref().map_or(&[][..], |suggestions| &suggestions[..]);
+ let suggestions = diag.suggestions.as_deref().unwrap_or(&[]);
if let Some((sugg, rest)) = suggestions.split_first() {
let msg = self.translate_message(&sugg.msg, fluent_args);
if rest.is_empty() &&
@@ -281,7 +283,7 @@ pub trait Emitter: Translate {
if self
.source_map()
.map(|sm| is_case_difference(
- &**sm,
+ sm,
substitution,
sugg.substitutions[0].parts[0].span,
))
@@ -312,7 +314,6 @@ pub trait Emitter: Translate {
fn fix_multispans_in_extern_macros_and_render_macro_backtrace(
&self,
- source_map: &Option<Lrc<SourceMap>>,
span: &mut MultiSpan,
children: &mut Vec<SubDiagnostic>,
level: &Level,
@@ -338,7 +339,7 @@ pub trait Emitter: Translate {
.collect();
if !backtrace {
- self.fix_multispans_in_extern_macros(source_map, span, children);
+ self.fix_multispans_in_extern_macros(span, children);
}
self.render_multispans_macro_backtrace(span, children, backtrace);
@@ -478,15 +479,13 @@ pub trait Emitter: Translate {
// this will change the span to point at the use site.
fn fix_multispans_in_extern_macros(
&self,
- source_map: &Option<Lrc<SourceMap>>,
span: &mut MultiSpan,
children: &mut Vec<SubDiagnostic>,
) {
- let Some(source_map) = source_map else { return };
debug!("fix_multispans_in_extern_macros: before: span={:?} children={:?}", span, children);
- self.fix_multispan_in_extern_macros(source_map, span);
+ self.fix_multispan_in_extern_macros(span);
for child in children.iter_mut() {
- self.fix_multispan_in_extern_macros(source_map, &mut child.span);
+ self.fix_multispan_in_extern_macros(&mut child.span);
}
debug!("fix_multispans_in_extern_macros: after: span={:?} children={:?}", span, children);
}
@@ -494,7 +493,8 @@ pub trait Emitter: Translate {
// This "fixes" MultiSpans that contain `Span`s pointing to locations inside of external macros.
// Since these locations are often difficult to read,
// we move these spans from the external macros to their corresponding use site.
- fn fix_multispan_in_extern_macros(&self, source_map: &Lrc<SourceMap>, span: &mut MultiSpan) {
+ fn fix_multispan_in_extern_macros(&self, span: &mut MultiSpan) {
+ let Some(source_map) = self.source_map() else { return };
// First, find all the spans in external macros and point instead at their use site.
let replacements: Vec<(Span, Span)> = span
.primary_spans()
@@ -525,7 +525,7 @@ impl Translate for EmitterWriter {
}
fn fallback_fluent_bundle(&self) -> &FluentBundle {
- &**self.fallback_bundle
+ &self.fallback_bundle
}
}
@@ -538,11 +538,10 @@ impl Emitter for EmitterWriter {
let fluent_args = to_fluent_args(diag.args());
let mut children = diag.children.clone();
- let (mut primary_span, suggestions) = self.primary_span_formatted(&diag, &fluent_args);
+ let (mut primary_span, suggestions) = self.primary_span_formatted(diag, &fluent_args);
debug!("emit_diagnostic: suggestions={:?}", suggestions);
self.fix_multispans_in_extern_macros_and_render_macro_backtrace(
- &self.sm,
&mut primary_span,
&mut children,
&diag.level,
@@ -556,7 +555,8 @@ impl Emitter for EmitterWriter {
&diag.code,
&primary_span,
&children,
- &suggestions,
+ suggestions,
+ self.track_diagnostics.then_some(&diag.emitted_at),
);
}
@@ -619,14 +619,14 @@ impl ColorConfig {
fn to_color_choice(self) -> ColorChoice {
match self {
ColorConfig::Always => {
- if atty::is(atty::Stream::Stderr) {
+ if io::stderr().is_terminal() {
ColorChoice::Always
} else {
ColorChoice::AlwaysAnsi
}
}
ColorConfig::Never => ColorChoice::Never,
- ColorConfig::Auto if atty::is(atty::Stream::Stderr) => ColorChoice::Auto,
+ ColorConfig::Auto if io::stderr().is_terminal() => ColorChoice::Auto,
ColorConfig::Auto => ColorChoice::Never,
}
}
@@ -650,6 +650,7 @@ pub struct EmitterWriter {
diagnostic_width: Option<usize>,
macro_backtrace: bool,
+ track_diagnostics: bool,
}
#[derive(Debug)]
@@ -669,6 +670,7 @@ impl EmitterWriter {
teach: bool,
diagnostic_width: Option<usize>,
macro_backtrace: bool,
+ track_diagnostics: bool,
) -> EmitterWriter {
let dst = Destination::from_stderr(color_config);
EmitterWriter {
@@ -681,6 +683,7 @@ impl EmitterWriter {
ui_testing: false,
diagnostic_width,
macro_backtrace,
+ track_diagnostics,
}
}
@@ -694,6 +697,7 @@ impl EmitterWriter {
colored: bool,
diagnostic_width: Option<usize>,
macro_backtrace: bool,
+ track_diagnostics: bool,
) -> EmitterWriter {
EmitterWriter {
dst: Raw(dst, colored),
@@ -705,6 +709,7 @@ impl EmitterWriter {
ui_testing: false,
diagnostic_width,
macro_backtrace,
+ track_diagnostics,
}
}
@@ -768,6 +773,7 @@ impl EmitterWriter {
draw_col_separator_no_space(buffer, line_offset, width_offset - 2);
}
+ #[instrument(level = "trace", skip(self), ret)]
fn render_source_line(
&self,
buffer: &mut StyledBuffer,
@@ -796,9 +802,10 @@ impl EmitterWriter {
}
let source_string = match file.get_line(line.line_index - 1) {
- Some(s) => normalize_whitespace(&*s),
+ Some(s) => normalize_whitespace(&s),
None => return Vec::new(),
};
+ trace!(?source_string);
let line_offset = buffer.num_lines();
@@ -1143,7 +1150,7 @@ impl EmitterWriter {
(pos + 2, annotation.start_col.saturating_sub(left))
};
if let Some(ref label) = annotation.label {
- buffer.puts(line_offset + pos, code_offset + col, &label, style);
+ buffer.puts(line_offset + pos, code_offset + col, label, style);
}
}
@@ -1318,6 +1325,7 @@ impl EmitterWriter {
}
}
+ #[instrument(level = "trace", skip(self, args), ret)]
fn emit_message_default(
&mut self,
msp: &MultiSpan,
@@ -1327,6 +1335,7 @@ impl EmitterWriter {
level: &Level,
max_line_num_len: usize,
is_secondary: bool,
+ emitted_at: Option<&DiagnosticLocation>,
) -> io::Result<()> {
let mut buffer = StyledBuffer::new();
@@ -1352,7 +1361,7 @@ impl EmitterWriter {
// only render error codes, not lint codes
if let Some(DiagnosticId::Error(ref code)) = *code {
buffer.append(0, "[", Style::Level(*level));
- buffer.append(0, &code, Style::Level(*level));
+ buffer.append(0, code, Style::Level(*level));
buffer.append(0, "]", Style::Level(*level));
label_width += 2 + code.len();
}
@@ -1377,24 +1386,16 @@ impl EmitterWriter {
}
}
}
-
let mut annotated_files = FileWithAnnotatedLines::collect_annotations(self, args, msp);
+ trace!("{annotated_files:#?}");
// Make sure our primary file comes first
- let (primary_lo, sm) = if let (Some(sm), Some(ref primary_span)) =
- (self.sm.as_ref(), msp.primary_span().as_ref())
- {
- if !primary_span.is_dummy() {
- (sm.lookup_char_pos(primary_span.lo()), sm)
- } else {
- emit_to_destination(&buffer.render(), level, &mut self.dst, self.short_message)?;
- return Ok(());
- }
- } else {
+ let primary_span = msp.primary_span().unwrap_or_default();
+ let (Some(sm), false) = (self.sm.as_ref(), primary_span.is_dummy()) else {
// If we don't have span information, emit and exit
- emit_to_destination(&buffer.render(), level, &mut self.dst, self.short_message)?;
- return Ok(());
+ return emit_to_destination(&buffer.render(), level, &mut self.dst, self.short_message);
};
+ let primary_lo = sm.lookup_char_pos(primary_span.lo());
if let Ok(pos) =
annotated_files.binary_search_by(|x| x.file.name.cmp(&primary_lo.file.name))
{
@@ -1405,6 +1406,54 @@ impl EmitterWriter {
for annotated_file in annotated_files {
// we can't annotate anything if the source is unavailable.
if !sm.ensure_source_file_source_present(annotated_file.file.clone()) {
+ if !self.short_message {
+ // We'll just print an unannotated message.
+ for (annotation_id, line) in annotated_file.lines.into_iter().enumerate() {
+ let mut annotations = line.annotations.clone();
+ annotations.sort_by_key(|a| Reverse(a.start_col));
+ let mut line_idx = buffer.num_lines();
+ buffer.append(
+ line_idx,
+ &format!(
+ "{}:{}:{}",
+ sm.filename_for_diagnostics(&annotated_file.file.name),
+ sm.doctest_offset_line(&annotated_file.file.name, line.line_index),
+ annotations[0].start_col + 1,
+ ),
+ Style::LineAndColumn,
+ );
+ if annotation_id == 0 {
+ buffer.prepend(line_idx, "--> ", Style::LineNumber);
+ for _ in 0..max_line_num_len {
+ buffer.prepend(line_idx, " ", Style::NoStyle);
+ }
+ line_idx += 1;
+ };
+ for (i, annotation) in annotations.into_iter().enumerate() {
+ if let Some(label) = &annotation.label {
+ let style = if annotation.is_primary {
+ Style::LabelPrimary
+ } else {
+ Style::LabelSecondary
+ };
+ if annotation_id == 0 {
+ buffer.prepend(line_idx, " |", Style::LineNumber);
+ for _ in 0..max_line_num_len {
+ buffer.prepend(line_idx, " ", Style::NoStyle);
+ }
+ line_idx += 1;
+ buffer.append(line_idx + i, " = note: ", style);
+ for _ in 0..max_line_num_len {
+ buffer.prepend(line_idx, " ", Style::NoStyle);
+ }
+ } else {
+ buffer.append(line_idx + i, ": ", style);
+ }
+ buffer.append(line_idx + i, label, style);
+ }
+ }
+ }
+ }
continue;
}
@@ -1651,6 +1700,13 @@ impl EmitterWriter {
multilines.extend(&to_add);
}
}
+ trace!("buffer: {:#?}", buffer.render());
+ }
+
+ if let Some(tracked) = emitted_at {
+ let track = format!("-Ztrack-diagnostics: created at {tracked}");
+ let len = buffer.num_lines();
+ buffer.append(len, &track, Style::NoStyle);
}
// final step: take our styled buffer, render it, then output it
@@ -1672,7 +1728,7 @@ impl EmitterWriter {
};
// Render the replacements for each suggestion
- let suggestions = suggestion.splice_lines(&**sm);
+ let suggestions = suggestion.splice_lines(sm);
debug!("emit_suggestion_default: suggestions={:?}", suggestions);
if suggestions.is_empty() {
@@ -1773,7 +1829,7 @@ impl EmitterWriter {
buffer.puts(
row_num - 1 + line - line_start,
max_line_num_len + 3,
- &normalize_whitespace(&*file_lines.file.get_line(line - 1).unwrap()),
+ &normalize_whitespace(&file_lines.file.get_line(line - 1).unwrap()),
Style::Removal,
);
}
@@ -1915,7 +1971,7 @@ impl EmitterWriter {
buffer.putc(
row_num,
(padding as isize + p) as usize,
- if part.is_addition(&sm) { '+' } else { '~' },
+ if part.is_addition(sm) { '+' } else { '~' },
Style::Addition,
);
}
@@ -1962,12 +2018,13 @@ impl EmitterWriter {
buffer.puts(row_num, max_line_num_len + 3, &msg, Style::NoStyle);
} else if notice_capitalization {
let msg = "notice the capitalization difference";
- buffer.puts(row_num, max_line_num_len + 3, &msg, Style::NoStyle);
+ buffer.puts(row_num, max_line_num_len + 3, msg, Style::NoStyle);
}
emit_to_destination(&buffer.render(), level, &mut self.dst, self.short_message)?;
Ok(())
}
+ #[instrument(level = "trace", skip(self, args, code, children, suggestions))]
fn emit_messages_default(
&mut self,
level: &Level,
@@ -1977,6 +2034,7 @@ impl EmitterWriter {
span: &MultiSpan,
children: &[SubDiagnostic],
suggestions: &[CodeSuggestion],
+ emitted_at: Option<&DiagnosticLocation>,
) {
let max_line_num_len = if self.ui_testing {
ANONYMIZED_LINE_NUM.len()
@@ -1985,7 +2043,16 @@ impl EmitterWriter {
num_decimal_digits(n)
};
- match self.emit_message_default(span, message, args, code, level, max_line_num_len, false) {
+ match self.emit_message_default(
+ span,
+ message,
+ args,
+ code,
+ level,
+ max_line_num_len,
+ false,
+ emitted_at,
+ ) {
Ok(()) => {
if !children.is_empty()
|| suggestions.iter().any(|s| s.style != SuggestionStyle::CompletelyHidden)
@@ -2007,13 +2074,14 @@ impl EmitterWriter {
for child in children {
let span = child.render_span.as_ref().unwrap_or(&child.span);
if let Err(err) = self.emit_message_default(
- &span,
+ span,
&child.message,
args,
&None,
&child.level,
max_line_num_len,
true,
+ None,
) {
panic!("failed to emit error: {}", err);
}
@@ -2030,6 +2098,7 @@ impl EmitterWriter {
&Level::Help,
max_line_num_len,
true,
+ None,
) {
panic!("failed to emit error: {}", e);
}
@@ -2090,7 +2159,7 @@ impl EmitterWriter {
*row_num - 1,
max_line_num_len + 3,
&normalize_whitespace(
- &*file_lines.file.get_line(file_lines.lines[line_pos].line_index).unwrap(),
+ &file_lines.file.get_line(file_lines.lines[line_pos].line_index).unwrap(),
),
Style::NoStyle,
);
@@ -2186,13 +2255,16 @@ impl FileWithAnnotatedLines {
let mut multiline_annotations = vec![];
if let Some(ref sm) = emitter.source_map() {
- for span_label in msp.span_labels() {
- if span_label.span.is_dummy() {
- continue;
- }
+ for SpanLabel { span, is_primary, label } in msp.span_labels() {
+ // If we don't have a useful span, pick the primary span if that exists.
+ // Worst case we'll just print an error at the top of the main file.
+ let span = match (span.is_dummy(), msp.primary_span()) {
+ (_, None) | (false, _) => span,
+ (true, Some(span)) => span,
+ };
- let lo = sm.lookup_char_pos(span_label.span.lo());
- let mut hi = sm.lookup_char_pos(span_label.span.hi());
+ let lo = sm.lookup_char_pos(span.lo());
+ let mut hi = sm.lookup_char_pos(span.hi());
// Watch out for "empty spans". If we get a span like 6..6, we
// want to just display a `^` at 6, so convert that to
@@ -2204,6 +2276,8 @@ impl FileWithAnnotatedLines {
hi.col_display += 1;
}
+ let label = label.as_ref().map(|m| emitter.translate_message(m, args).to_string());
+
if lo.line != hi.line {
let ml = MultilineAnnotation {
depth: 1,
@@ -2211,11 +2285,8 @@ impl FileWithAnnotatedLines {
line_end: hi.line,
start_col: lo.col_display,
end_col: hi.col_display,
- is_primary: span_label.is_primary,
- label: span_label
- .label
- .as_ref()
- .map(|m| emitter.translate_message(m, args).to_string()),
+ is_primary,
+ label,
overlaps_exactly: false,
};
multiline_annotations.push((lo.file, ml));
@@ -2223,11 +2294,8 @@ impl FileWithAnnotatedLines {
let ann = Annotation {
start_col: lo.col_display,
end_col: hi.col_display,
- is_primary: span_label.is_primary,
- label: span_label
- .label
- .as_ref()
- .map(|m| emitter.translate_message(m, args).to_string()),
+ is_primary,
+ label,
annotation_type: AnnotationType::Singleline,
};
add_annotation_to_file(&mut output, lo.file, lo.line, ann);
@@ -2236,7 +2304,7 @@ impl FileWithAnnotatedLines {
}
// Find overlapping multiline annotations, put them at different depths
- multiline_annotations.sort_by_key(|&(_, ref ml)| (ml.line_start, ml.line_end));
+ multiline_annotations.sort_by_key(|&(_, ref ml)| (ml.line_start, usize::MAX - ml.line_end));
for (_, ann) in multiline_annotations.clone() {
for (_, a) in multiline_annotations.iter_mut() {
// Move all other multiline annotations overlapping with this one
@@ -2254,8 +2322,14 @@ impl FileWithAnnotatedLines {
}
let mut max_depth = 0; // max overlapping multiline spans
- for (file, ann) in multiline_annotations {
+ for (_, ann) in &multiline_annotations {
max_depth = max(max_depth, ann.depth);
+ }
+ // Change order of multispan depth to minimize the number of overlaps in the ASCII art.
+ for (_, a) in multiline_annotations.iter_mut() {
+ a.depth = max_depth - a.depth + 1;
+ }
+ for (file, ann) in multiline_annotations {
let mut end_ann = ann.as_end();
if !ann.overlaps_exactly {
// avoid output like
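
The color handling drops the external atty crate in favor of std::io::IsTerminal, which was still nightly-gated at the time (hence the #![feature(is_terminal)] added to rustc_errors below) and was later stabilized. A minimal std-only sketch of the check that now backs ColorConfig::Auto:

use std::io::{self, IsTerminal};

fn main() {
    // Mirrors ColorConfig::Auto above: color only when stderr is a real
    // terminal, never when it is redirected to a file or a pipe.
    if io::stderr().is_terminal() {
        eprintln!("stderr is a terminal: Auto color");
    } else {
        eprintln!("stderr is redirected: no color");
    }
}
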
diff --git a/compiler/rustc_errors/src/json.rs b/compiler/rustc_errors/src/json.rs
index 4cc7be47f..a37073d8f 100644
--- a/compiler/rustc_errors/src/json.rs
+++ b/compiler/rustc_errors/src/json.rs
@@ -45,6 +45,7 @@ pub struct JsonEmitter {
json_rendered: HumanReadableErrorType,
diagnostic_width: Option<usize>,
macro_backtrace: bool,
+ track_diagnostics: bool,
}
impl JsonEmitter {
@@ -57,6 +58,7 @@ impl JsonEmitter {
json_rendered: HumanReadableErrorType,
diagnostic_width: Option<usize>,
macro_backtrace: bool,
+ track_diagnostics: bool,
) -> JsonEmitter {
JsonEmitter {
dst: Box::new(io::BufWriter::new(io::stderr())),
@@ -69,6 +71,7 @@ impl JsonEmitter {
json_rendered,
diagnostic_width,
macro_backtrace,
+ track_diagnostics,
}
}
@@ -79,6 +82,7 @@ impl JsonEmitter {
fallback_bundle: LazyFallbackBundle,
diagnostic_width: Option<usize>,
macro_backtrace: bool,
+ track_diagnostics: bool,
) -> JsonEmitter {
let file_path_mapping = FilePathMapping::empty();
JsonEmitter::stderr(
@@ -90,6 +94,7 @@ impl JsonEmitter {
json_rendered,
diagnostic_width,
macro_backtrace,
+ track_diagnostics,
)
}
@@ -103,6 +108,7 @@ impl JsonEmitter {
json_rendered: HumanReadableErrorType,
diagnostic_width: Option<usize>,
macro_backtrace: bool,
+ track_diagnostics: bool,
) -> JsonEmitter {
JsonEmitter {
dst,
@@ -115,6 +121,7 @@ impl JsonEmitter {
json_rendered,
diagnostic_width,
macro_backtrace,
+ track_diagnostics,
}
}
@@ -129,7 +136,7 @@ impl Translate for JsonEmitter {
}
fn fallback_fluent_bundle(&self) -> &FluentBundle {
- &**self.fallback_bundle
+ &self.fallback_bundle
}
}
@@ -350,6 +357,7 @@ impl Diagnostic {
false,
je.diagnostic_width,
je.macro_backtrace,
+ je.track_diagnostics,
)
.ui_testing(je.ui_testing)
.emit_diagnostic(diag);
diff --git a/compiler/rustc_errors/src/json/tests.rs b/compiler/rustc_errors/src/json/tests.rs
index d940d14e1..f13146897 100644
--- a/compiler/rustc_errors/src/json/tests.rs
+++ b/compiler/rustc_errors/src/json/tests.rs
@@ -59,6 +59,7 @@ fn test_positions(code: &str, span: (u32, u32), expected_output: SpanTestData) {
HumanReadableErrorType::Short(ColorConfig::Never),
None,
false,
+ false,
);
let span = Span::with_root_ctxt(BytePos(span.0), BytePos(span.1));
diff --git a/compiler/rustc_errors/src/lib.rs b/compiler/rustc_errors/src/lib.rs
index 0963ea71f..eb0506c45 100644
--- a/compiler/rustc_errors/src/lib.rs
+++ b/compiler/rustc_errors/src/lib.rs
@@ -5,6 +5,7 @@
#![doc(html_root_url = "https://doc.rust-lang.org/nightly/nightly-rustc/")]
#![feature(drain_filter)]
#![feature(if_let_guard)]
+#![feature(is_terminal)]
#![feature(adt_const_params)]
#![feature(let_chains)]
#![feature(never_type)]
@@ -376,7 +377,7 @@ pub use diagnostic::{
DiagnosticStyledString, IntoDiagnosticArg, SubDiagnostic,
};
pub use diagnostic_builder::{DiagnosticBuilder, EmissionGuarantee, Noted};
-pub use diagnostic_impls::DiagnosticArgFromDisplay;
+pub use diagnostic_impls::{DiagnosticArgFromDisplay, DiagnosticSymbolList};
use std::backtrace::Backtrace;
/// A handler deals with errors and other compiler output.
@@ -466,6 +467,10 @@ pub enum StashKey {
/// When an invalid lifetime e.g. `'2` should be reinterpreted
/// as a char literal in the parser
LifetimeIsChar,
+ /// Maybe there was a typo where a comma was forgotten before
+ /// FRU syntax
+ MaybeFruTypo,
+ CallAssocMethod,
}
fn default_track_diagnostic(_: &Diagnostic) {}
@@ -492,6 +497,8 @@ pub struct HandlerFlags {
pub macro_backtrace: bool,
/// If true, identical diagnostics are reported only once.
pub deduplicate_diagnostics: bool,
+ /// Track where errors are created. Enabled with `-Ztrack-diagnostics`.
+ pub track_diagnostics: bool,
}
impl Drop for HandlerInner {
@@ -559,6 +566,7 @@ impl Handler {
false,
None,
flags.macro_backtrace,
+ flags.track_diagnostics,
));
Self::with_emitter_and_flags(emitter, flags)
}
@@ -641,13 +649,14 @@ impl Handler {
inner.stashed_diagnostics = Default::default();
}
- /// Stash a given diagnostic with the given `Span` and `StashKey` as the key for later stealing.
+ /// Stash a given diagnostic with the given `Span` and [`StashKey`] as the key.
+ /// Retrieve a stashed diagnostic with `steal_diagnostic`.
pub fn stash_diagnostic(&self, span: Span, key: StashKey, diag: Diagnostic) {
let mut inner = self.inner.borrow_mut();
inner.stash((span, key), diag);
}
- /// Steal a previously stashed diagnostic with the given `Span` and `StashKey` as the key.
+ /// Steal a previously stashed diagnostic with the given `Span` and [`StashKey`] as the key.
pub fn steal_diagnostic(&self, span: Span, key: StashKey) -> Option<DiagnosticBuilder<'_, ()>> {
let mut inner = self.inner.borrow_mut();
inner.steal((span, key)).map(|diag| DiagnosticBuilder::new_diagnostic(self, diag))
@@ -664,6 +673,7 @@ impl Handler {
/// Construct a builder with the `msg` at the level appropriate for the specific `EmissionGuarantee`.
#[rustc_lint_diagnostics]
+ #[track_caller]
pub fn struct_diagnostic<G: EmissionGuarantee>(
&self,
msg: impl Into<DiagnosticMessage>,
@@ -677,6 +687,7 @@ impl Handler {
/// * `can_emit_warnings` is `true`
/// * `is_force_warn` was set in `DiagnosticId::Lint`
#[rustc_lint_diagnostics]
+ #[track_caller]
pub fn struct_span_warn(
&self,
span: impl Into<MultiSpan>,
@@ -693,6 +704,7 @@ impl Handler {
/// Attempting to `.emit()` the builder will only emit if either:
/// * `can_emit_warnings` is `true`
/// * `is_force_warn` was set in `DiagnosticId::Lint`
+ #[track_caller]
pub fn struct_span_warn_with_expectation(
&self,
span: impl Into<MultiSpan>,
@@ -706,6 +718,7 @@ impl Handler {
/// Construct a builder at the `Allow` level at the given `span` and with the `msg`.
#[rustc_lint_diagnostics]
+ #[track_caller]
pub fn struct_span_allow(
&self,
span: impl Into<MultiSpan>,
@@ -719,6 +732,7 @@ impl Handler {
/// Construct a builder at the `Warning` level at the given `span` and with the `msg`.
/// Also include a code.
#[rustc_lint_diagnostics]
+ #[track_caller]
pub fn struct_span_warn_with_code(
&self,
span: impl Into<MultiSpan>,
@@ -736,6 +750,7 @@ impl Handler {
/// * `can_emit_warnings` is `true`
/// * `is_force_warn` was set in `DiagnosticId::Lint`
#[rustc_lint_diagnostics]
+ #[track_caller]
pub fn struct_warn(&self, msg: impl Into<DiagnosticMessage>) -> DiagnosticBuilder<'_, ()> {
DiagnosticBuilder::new(self, Level::Warning(None), msg)
}
@@ -746,6 +761,7 @@ impl Handler {
/// Attempting to `.emit()` the builder will only emit if either:
/// * `can_emit_warnings` is `true`
/// * `is_force_warn` was set in `DiagnosticId::Lint`
+ #[track_caller]
pub fn struct_warn_with_expectation(
&self,
msg: impl Into<DiagnosticMessage>,
@@ -756,12 +772,14 @@ impl Handler {
/// Construct a builder at the `Allow` level with the `msg`.
#[rustc_lint_diagnostics]
+ #[track_caller]
pub fn struct_allow(&self, msg: impl Into<DiagnosticMessage>) -> DiagnosticBuilder<'_, ()> {
DiagnosticBuilder::new(self, Level::Allow, msg)
}
/// Construct a builder at the `Expect` level with the `msg`.
#[rustc_lint_diagnostics]
+ #[track_caller]
pub fn struct_expect(
&self,
msg: impl Into<DiagnosticMessage>,
@@ -772,6 +790,7 @@ impl Handler {
/// Construct a builder at the `Error` level at the given `span` and with the `msg`.
#[rustc_lint_diagnostics]
+ #[track_caller]
pub fn struct_span_err(
&self,
span: impl Into<MultiSpan>,
@@ -784,6 +803,7 @@ impl Handler {
/// Construct a builder at the `Error` level at the given `span`, with the `msg`, and `code`.
#[rustc_lint_diagnostics]
+ #[track_caller]
pub fn struct_span_err_with_code(
&self,
span: impl Into<MultiSpan>,
@@ -798,6 +818,7 @@ impl Handler {
/// Construct a builder at the `Error` level with the `msg`.
// FIXME: This method should be removed (every error should have an associated error code).
#[rustc_lint_diagnostics]
+ #[track_caller]
pub fn struct_err(
&self,
msg: impl Into<DiagnosticMessage>,
@@ -807,12 +828,14 @@ impl Handler {
/// This should only be used by `rustc_middle::lint::struct_lint_level`. Do not use it for hard errors.
#[doc(hidden)]
+ #[track_caller]
pub fn struct_err_lint(&self, msg: impl Into<DiagnosticMessage>) -> DiagnosticBuilder<'_, ()> {
DiagnosticBuilder::new(self, Level::Error { lint: true }, msg)
}
/// Construct a builder at the `Error` level with the `msg` and the `code`.
#[rustc_lint_diagnostics]
+ #[track_caller]
pub fn struct_err_with_code(
&self,
msg: impl Into<DiagnosticMessage>,
@@ -825,6 +848,7 @@ impl Handler {
/// Construct a builder at the `Warn` level with the `msg` and the `code`.
#[rustc_lint_diagnostics]
+ #[track_caller]
pub fn struct_warn_with_code(
&self,
msg: impl Into<DiagnosticMessage>,
@@ -837,6 +861,7 @@ impl Handler {
/// Construct a builder at the `Fatal` level at the given `span` and with the `msg`.
#[rustc_lint_diagnostics]
+ #[track_caller]
pub fn struct_span_fatal(
&self,
span: impl Into<MultiSpan>,
@@ -849,6 +874,7 @@ impl Handler {
/// Construct a builder at the `Fatal` level at the given `span`, with the `msg`, and `code`.
#[rustc_lint_diagnostics]
+ #[track_caller]
pub fn struct_span_fatal_with_code(
&self,
span: impl Into<MultiSpan>,
@@ -862,6 +888,7 @@ impl Handler {
/// Construct a builder at the `Error` level with the `msg`.
#[rustc_lint_diagnostics]
+ #[track_caller]
pub fn struct_fatal(&self, msg: impl Into<DiagnosticMessage>) -> DiagnosticBuilder<'_, !> {
DiagnosticBuilder::new_fatal(self, msg)
}
@@ -874,6 +901,7 @@ impl Handler {
/// Construct a builder at the `Note` level with the `msg`.
#[rustc_lint_diagnostics]
+ #[track_caller]
pub fn struct_note_without_error(
&self,
msg: impl Into<DiagnosticMessage>,
@@ -882,12 +910,14 @@ impl Handler {
}
#[rustc_lint_diagnostics]
+ #[track_caller]
pub fn span_fatal(&self, span: impl Into<MultiSpan>, msg: impl Into<DiagnosticMessage>) -> ! {
self.emit_diag_at_span(Diagnostic::new(Fatal, msg), span);
FatalError.raise()
}
#[rustc_lint_diagnostics]
+ #[track_caller]
pub fn span_fatal_with_code(
&self,
span: impl Into<MultiSpan>,
@@ -899,6 +929,7 @@ impl Handler {
}
#[rustc_lint_diagnostics]
+ #[track_caller]
pub fn span_err(
&self,
span: impl Into<MultiSpan>,
@@ -908,6 +939,7 @@ impl Handler {
}
#[rustc_lint_diagnostics]
+ #[track_caller]
pub fn span_err_with_code(
&self,
span: impl Into<MultiSpan>,
@@ -921,11 +953,13 @@ impl Handler {
}
#[rustc_lint_diagnostics]
+ #[track_caller]
pub fn span_warn(&self, span: impl Into<MultiSpan>, msg: impl Into<DiagnosticMessage>) {
self.emit_diag_at_span(Diagnostic::new(Warning(None), msg), span);
}
#[rustc_lint_diagnostics]
+ #[track_caller]
pub fn span_warn_with_code(
&self,
span: impl Into<MultiSpan>,
@@ -954,10 +988,12 @@ impl Handler {
self.inner.borrow_mut().delay_good_path_bug(msg)
}
+ #[track_caller]
pub fn span_bug_no_panic(&self, span: impl Into<MultiSpan>, msg: impl Into<DiagnosticMessage>) {
self.emit_diag_at_span(Diagnostic::new(Bug, msg), span);
}
+ #[track_caller]
pub fn span_note_without_error(
&self,
span: impl Into<MultiSpan>,
@@ -966,6 +1002,7 @@ impl Handler {
self.emit_diag_at_span(Diagnostic::new(Note, msg), span);
}
+ #[track_caller]
pub fn span_note_diag(
&self,
span: Span,
@@ -1008,13 +1045,24 @@ impl Handler {
}
pub fn has_errors_or_lint_errors(&self) -> Option<ErrorGuaranteed> {
if self.inner.borrow().has_errors_or_lint_errors() {
- Some(ErrorGuaranteed(()))
+ Some(ErrorGuaranteed::unchecked_claim_error_was_emitted())
} else {
None
}
}
- pub fn has_errors_or_delayed_span_bugs(&self) -> bool {
- self.inner.borrow().has_errors_or_delayed_span_bugs()
+ pub fn has_errors_or_delayed_span_bugs(&self) -> Option<ErrorGuaranteed> {
+ if self.inner.borrow().has_errors_or_delayed_span_bugs() {
+ Some(ErrorGuaranteed::unchecked_claim_error_was_emitted())
+ } else {
+ None
+ }
+ }
+ pub fn is_compilation_going_to_fail(&self) -> Option<ErrorGuaranteed> {
+ if self.inner.borrow().is_compilation_going_to_fail() {
+ Some(ErrorGuaranteed::unchecked_claim_error_was_emitted())
+ } else {
+ None
+ }
}
pub fn print_error_count(&self, registry: &Registry) {
@@ -1223,6 +1271,10 @@ impl HandlerInner {
}
if diagnostic.has_future_breakage() {
+ // Future breakages aren't emitted if they're Level::Allowed,
+ // but they still need to be constructed and stashed below,
+ // so they'll trigger the good-path bug check.
+ self.suppressed_expected_diag = true;
self.future_breakage_diagnostics.push(diagnostic.clone());
}
@@ -1277,7 +1329,7 @@ impl HandlerInner {
diagnostic.children.drain_filter(already_emitted_sub).for_each(|_| {});
- self.emitter.emit_diagnostic(&diagnostic);
+ self.emitter.emit_diagnostic(diagnostic);
if diagnostic.is_error() {
self.deduplicated_err_count += 1;
} else if let Warning(_) = diagnostic.level {
@@ -1444,6 +1496,10 @@ impl HandlerInner {
self.err_count() > 0 || self.lint_err_count > 0 || self.warn_count > 0
}
+ fn is_compilation_going_to_fail(&self) -> bool {
+ self.has_errors() || self.lint_err_count > 0 || !self.delayed_span_bugs.is_empty()
+ }
+
fn abort_if_errors(&mut self) {
self.emit_stashed_diagnostics();
@@ -1452,6 +1508,7 @@ impl HandlerInner {
}
}
+ #[track_caller]
fn span_bug(&mut self, sp: impl Into<MultiSpan>, msg: impl Into<DiagnosticMessage>) -> ! {
self.emit_diag_at_span(Diagnostic::new(Bug, msg), sp);
panic::panic_any(ExplicitBug);
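The `#[track_caller]` attributes added to these `Handler` methods make the recorded caller location point at the code that invoked the diagnostic method rather than at the method body, so internal debugging output can name the real call site. A minimal, standalone sketch of the mechanism, using only the standard library (the `Handler` here is a stand-in, not rustc's):

use std::panic::Location;

struct Handler;

impl Handler {
    // Because of `#[track_caller]`, `Location::caller()` observes the call site
    // of `span_warn`, not this function body.
    #[track_caller]
    fn span_warn(&self, msg: &str) {
        println!("warning: {msg} (reported at {})", Location::caller());
    }
}

fn main() {
    // The printed location is this line, not the body of `span_warn`.
    Handler.span_warn("something looks off");
}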
diff --git a/compiler/rustc_errors/src/translation.rs b/compiler/rustc_errors/src/translation.rs
index a7737b467..afd660ff1 100644
--- a/compiler/rustc_errors/src/translation.rs
+++ b/compiler/rustc_errors/src/translation.rs
@@ -1,7 +1,10 @@
use crate::snippet::Style;
use crate::{DiagnosticArg, DiagnosticMessage, FluentBundle};
use rustc_data_structures::sync::Lrc;
-use rustc_error_messages::FluentArgs;
+use rustc_error_messages::{
+ fluent_bundle::resolver::errors::{ReferenceKind, ResolverError},
+ FluentArgs, FluentError,
+};
use std::borrow::Cow;
/// Convert diagnostic arguments (a rustc internal type that exists to implement
@@ -56,13 +59,13 @@ pub trait Translate {
trace!(?message, ?args);
let (identifier, attr) = match message {
DiagnosticMessage::Str(msg) | DiagnosticMessage::Eager(msg) => {
- return Cow::Borrowed(&msg);
+ return Cow::Borrowed(msg);
}
DiagnosticMessage::FluentIdentifier(identifier, attr) => (identifier, attr),
};
let translate_with_bundle = |bundle: &'a FluentBundle| -> Option<(Cow<'_, str>, Vec<_>)> {
- let message = bundle.get_message(&identifier)?;
+ let message = bundle.get_message(identifier)?;
let value = match attr {
Some(attr) => message.get_attribute(attr)?.value(),
None => message.value()?,
@@ -70,7 +73,7 @@ pub trait Translate {
debug!(?message, ?value);
let mut errs = vec![];
- let translated = bundle.format_pattern(value, Some(&args), &mut errs);
+ let translated = bundle.format_pattern(value, Some(args), &mut errs);
debug!(?translated, ?errs);
Some((translated, errs))
};
@@ -102,14 +105,31 @@ pub trait Translate {
.or_else(|| translate_with_bundle(self.fallback_fluent_bundle()))
.map(|(translated, errs)| {
// Always bail out for errors with the fallback bundle.
- assert!(
- errs.is_empty(),
- "identifier: {:?}, attr: {:?}, args: {:?}, errors: {:?}",
- identifier,
- attr,
- args,
- errs
- );
+
+ let mut help_messages = vec![];
+
+ if !errs.is_empty() {
+ for error in &errs {
+ match error {
+ FluentError::ResolverError(ResolverError::Reference(
+ ReferenceKind::Message { id, .. },
+ )) if args.iter().any(|(arg_id, _)| arg_id == id) => {
+ help_messages.push(format!("Argument `{id}` exists but was not referenced correctly. Try using `{{${id}}}` instead"));
+ }
+ _ => {}
+ }
+ }
+
+ panic!(
+ "Encountered errors while formatting message for `{identifier}`\n\
+ help: {}\n\
+ attr: `{attr:?}`\n\
+ args: `{args:?}`\n\
+ errors: `{errs:?}`",
+ help_messages.join("\nhelp: ")
+ );
+ }
+
translated
})
.expect("failed to find message in primary or fallback fluent bundles")
diff --git a/compiler/rustc_expand/Cargo.toml b/compiler/rustc_expand/Cargo.toml
index 4ee7b6c42..192f54171 100644
--- a/compiler/rustc_expand/Cargo.toml
+++ b/compiler/rustc_expand/Cargo.toml
@@ -8,20 +8,21 @@ build = false
doctest = false
[dependencies]
-rustc_serialize = { path = "../rustc_serialize" }
-tracing = "0.1"
-rustc_span = { path = "../rustc_span" }
-rustc_ast_pretty = { path = "../rustc_ast_pretty" }
+crossbeam-channel = "0.5.0"
rustc_ast_passes = { path = "../rustc_ast_passes" }
+rustc_ast = { path = "../rustc_ast" }
+rustc_ast_pretty = { path = "../rustc_ast_pretty" }
rustc_attr = { path = "../rustc_attr" }
rustc_data_structures = { path = "../rustc_data_structures" }
rustc_errors = { path = "../rustc_errors" }
rustc_feature = { path = "../rustc_feature" }
+rustc_lexer = { path = "../rustc_lexer" }
rustc_lint_defs = { path = "../rustc_lint_defs" }
rustc_macros = { path = "../rustc_macros" }
-rustc_lexer = { path = "../rustc_lexer" }
rustc_parse = { path = "../rustc_parse" }
+rustc_serialize = { path = "../rustc_serialize" }
rustc_session = { path = "../rustc_session" }
+rustc_span = { path = "../rustc_span" }
smallvec = { version = "1.8.1", features = ["union", "may_dangle"] }
-rustc_ast = { path = "../rustc_ast" }
-crossbeam-channel = "0.5.0"
+thin-vec = "0.2.8"
+tracing = "0.1"
diff --git a/compiler/rustc_expand/src/base.rs b/compiler/rustc_expand/src/base.rs
index c8de60ccb..9d6a4f9a1 100644
--- a/compiler/rustc_expand/src/base.rs
+++ b/compiler/rustc_expand/src/base.rs
@@ -16,6 +16,7 @@ use rustc_errors::{
use rustc_lint_defs::builtin::PROC_MACRO_BACK_COMPAT;
use rustc_lint_defs::{BufferedEarlyLint, BuiltinLintDiagnostics};
use rustc_parse::{self, parser, MACRO_ARGUMENTS};
+use rustc_session::errors::report_lit_error;
use rustc_session::{parse::ParseSess, Limit, Session};
use rustc_span::def_id::{CrateNum, DefId, LocalDefId};
use rustc_span::edition::Edition;
@@ -242,14 +243,15 @@ pub enum ExpandResult<T, U> {
Retry(U),
}
-// `meta_item` is the attribute, and `item` is the item being modified.
pub trait MultiItemModifier {
+ /// `meta_item` is the attribute, and `item` is the item being modified.
fn expand(
&self,
ecx: &mut ExtCtxt<'_>,
span: Span,
meta_item: &ast::MetaItem,
item: Annotatable,
+ is_derive_const: bool,
) -> ExpandResult<Vec<Annotatable>, Annotatable>;
}
@@ -263,6 +265,7 @@ where
span: Span,
meta_item: &ast::MetaItem,
item: Annotatable,
+ _is_derive_const: bool,
) -> ExpandResult<Vec<Annotatable>, Annotatable> {
ExpandResult::Ready(self(ecx, span, meta_item, item))
}
@@ -505,7 +508,7 @@ impl MacResult for MacEager {
return Some(p);
}
if let Some(e) = self.expr {
- if let ast::ExprKind::Lit(_) = e.kind {
+ if matches!(e.kind, ast::ExprKind::Lit(_) | ast::ExprKind::IncludedBytes(_)) {
return Some(P(ast::Pat {
id: ast::DUMMY_NODE_ID,
span: e.span,
@@ -674,8 +677,13 @@ pub enum SyntaxExtensionKind {
/// A token-based derive macro.
Derive(
- /// An expander with signature TokenStream -> TokenStream (not yet).
+ /// An expander with signature TokenStream -> TokenStream.
    /// The produced TokenStream is appended to the input TokenStream.
+ ///
+ /// FIXME: The text above describes how this should work. Currently it
+ /// is handled identically to `LegacyDerive`. It should be migrated to
+ /// a token-based representation like `Bang` and `Attr`, instead of
+ /// using `MultiItemModifier`.
Box<dyn MultiItemModifier + sync::Sync + sync::Send>,
),
@@ -873,7 +881,7 @@ impl SyntaxExtension {
/// Error type that denotes indeterminacy.
pub struct Indeterminate;
-pub type DeriveResolutions = Vec<(ast::Path, Annotatable, Option<Lrc<SyntaxExtension>>)>;
+pub type DeriveResolutions = Vec<(ast::Path, Annotatable, Option<Lrc<SyntaxExtension>>, bool)>;
pub trait ResolverExpand {
fn next_node_id(&mut self) -> NodeId;
@@ -952,7 +960,7 @@ pub trait LintStoreExpand {
node_id: NodeId,
attrs: &[Attribute],
items: &[P<Item>],
- name: &str,
+ name: Symbol,
);
}
@@ -1224,10 +1232,10 @@ pub fn expr_to_spanned_string<'a>(
let expr = cx.expander().fully_expand_fragment(AstFragment::Expr(expr)).make_expr();
Err(match expr.kind {
- ast::ExprKind::Lit(ref l) => match l.kind {
- ast::LitKind::Str(s, style) => return Ok((s, style, expr.span)),
- ast::LitKind::ByteStr(_) => {
- let mut err = cx.struct_span_err(l.span, err_msg);
+ ast::ExprKind::Lit(token_lit) => match ast::LitKind::from_token_lit(token_lit) {
+ Ok(ast::LitKind::Str(s, style)) => return Ok((s, style, expr.span)),
+ Ok(ast::LitKind::ByteStr(_)) => {
+ let mut err = cx.struct_span_err(expr.span, err_msg);
let span = expr.span.shrink_to_lo();
err.span_suggestion(
span.with_hi(span.lo() + BytePos(1)),
@@ -1237,8 +1245,12 @@ pub fn expr_to_spanned_string<'a>(
);
Some((err, true))
}
- ast::LitKind::Err => None,
- _ => Some((cx.struct_span_err(l.span, err_msg), false)),
+ Ok(ast::LitKind::Err) => None,
+ Err(err) => {
+ report_lit_error(&cx.sess.parse_sess, err, token_lit, expr.span);
+ None
+ }
+ _ => Some((cx.struct_span_err(expr.span, err_msg), false)),
},
ast::ExprKind::Err => None,
_ => Some((cx.struct_span_err(expr.span, err_msg), false)),
@@ -1433,7 +1445,7 @@ fn pretty_printing_compatibility_hack(item: &Item, sess: &ParseSess) -> bool {
let crate_matches = if c.starts_with("allsorts-rental") {
true
} else {
- let mut version = c.trim_start_matches("rental-").split(".");
+ let mut version = c.trim_start_matches("rental-").split('.');
version.next() == Some("0")
&& version.next() == Some("5")
&& version
diff --git a/compiler/rustc_expand/src/build.rs b/compiler/rustc_expand/src/build.rs
index 0952e65cf..4812bdd9d 100644
--- a/compiler/rustc_expand/src/build.rs
+++ b/compiler/rustc_expand/src/build.rs
@@ -1,13 +1,12 @@
use crate::base::ExtCtxt;
-
use rustc_ast::attr;
use rustc_ast::ptr::P;
use rustc_ast::{self as ast, AttrVec, BlockCheckMode, Expr, LocalKind, PatKind, UnOp};
use rustc_data_structures::sync::Lrc;
use rustc_span::source_map::Spanned;
use rustc_span::symbol::{kw, sym, Ident, Symbol};
-
use rustc_span::Span;
+use thin_vec::ThinVec;
impl<'a> ExtCtxt<'a> {
pub fn path(&self, span: Span, strs: Vec<Ident>) -> ast::Path {
@@ -28,7 +27,7 @@ impl<'a> ExtCtxt<'a> {
) -> ast::Path {
assert!(!idents.is_empty());
let add_root = global && !idents[0].is_path_segment_keyword();
- let mut segments = Vec::with_capacity(idents.len() + add_root as usize);
+ let mut segments = ThinVec::with_capacity(idents.len() + add_root as usize);
if add_root {
segments.push(ast::PathSegment::path_root(span));
}
@@ -194,7 +193,7 @@ impl<'a> ExtCtxt<'a> {
self.stmt_local(local, sp)
}
- // Generates `let _: Type;`, which is usually used for type assertions.
+ /// Generates `let _: Type;`, which is usually used for type assertions.
pub fn stmt_let_type_only(&self, span: Span, ty: P<ast::Ty>) -> ast::Stmt {
let local = P(ast::Local {
pat: self.pat_wild(span),
@@ -334,8 +333,8 @@ impl<'a> ExtCtxt<'a> {
}
fn expr_lit(&self, span: Span, lit_kind: ast::LitKind) -> P<ast::Expr> {
- let lit = ast::Lit::from_lit_kind(lit_kind, span);
- self.expr(span, ast::ExprKind::Lit(lit))
+ let token_lit = lit_kind.to_token_lit();
+ self.expr(span, ast::ExprKind::Lit(token_lit))
}
pub fn expr_usize(&self, span: Span, i: usize) -> P<ast::Expr> {
@@ -532,15 +531,18 @@ impl<'a> ExtCtxt<'a> {
// here, but that's not entirely clear.
self.expr(
span,
- ast::ExprKind::Closure(
- ast::ClosureBinder::NotPresent,
- ast::CaptureBy::Ref,
- ast::Async::No,
- ast::Movability::Movable,
+ ast::ExprKind::Closure(Box::new(ast::Closure {
+ binder: ast::ClosureBinder::NotPresent,
+ capture_clause: ast::CaptureBy::Ref,
+ asyncness: ast::Async::No,
+ movability: ast::Movability::Movable,
fn_decl,
body,
- span,
- ),
+ fn_decl_span: span,
+ // FIXME(SarthakSingh31): This points to the start of the declaration block and
+ // not the span of the argument block.
+ fn_arg_span: span,
+ })),
)
}
@@ -580,8 +582,6 @@ impl<'a> ExtCtxt<'a> {
attrs: ast::AttrVec,
kind: ast::ItemKind,
) -> P<ast::Item> {
- // FIXME: Would be nice if our generated code didn't violate
- // Rust coding conventions
P(ast::Item {
ident: name,
attrs,
@@ -619,11 +619,23 @@ impl<'a> ExtCtxt<'a> {
self.item(span, name, AttrVec::new(), ast::ItemKind::Const(def, ty, Some(expr)))
}
- pub fn attribute(&self, mi: ast::MetaItem) -> ast::Attribute {
- attr::mk_attr_outer(&self.sess.parse_sess.attr_id_generator, mi)
+ // Builds `#[name]`.
+ pub fn attr_word(&self, name: Symbol, span: Span) -> ast::Attribute {
+ let g = &self.sess.parse_sess.attr_id_generator;
+ attr::mk_attr_word(g, ast::AttrStyle::Outer, name, span)
+ }
+
+ // Builds `#[name = val]`.
+ //
+ // Note: `span` is used for both the identifier and the value.
+ pub fn attr_name_value_str(&self, name: Symbol, val: Symbol, span: Span) -> ast::Attribute {
+ let g = &self.sess.parse_sess.attr_id_generator;
+ attr::mk_attr_name_value_str(g, ast::AttrStyle::Outer, name, val, span)
}
- pub fn meta_word(&self, sp: Span, w: Symbol) -> ast::MetaItem {
- attr::mk_word_item(Ident::new(w, sp))
+ // Builds `#[outer(inner)]`.
+ pub fn attr_nested_word(&self, outer: Symbol, inner: Symbol, span: Span) -> ast::Attribute {
+ let g = &self.sess.parse_sess.attr_id_generator;
+ attr::mk_attr_nested_word(g, ast::AttrStyle::Outer, outer, inner, span)
}
}
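The `build.rs` hunk above replaces the generic `attribute`/`meta_word` builders with purpose-built constructors (`attr_word`, `attr_name_value_str`, `attr_nested_word`). A hedged sketch of how a code generator might call them; the helper name `make_test_attrs` is invented for illustration, and the types are rustc-internal, so this only builds inside the compiler tree:

fn make_test_attrs(cx: &rustc_expand::base::ExtCtxt<'_>, span: rustc_span::Span) -> Vec<rustc_ast::Attribute> {
    use rustc_span::symbol::sym;
    vec![
        // Builds `#[test]`.
        cx.attr_word(sym::test, span),
        // Builds `#[allow(dead_code)]`.
        cx.attr_nested_word(sym::allow, sym::dead_code, span),
    ]
}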
diff --git a/compiler/rustc_expand/src/config.rs b/compiler/rustc_expand/src/config.rs
index 1d2b1298a..2510795c2 100644
--- a/compiler/rustc_expand/src/config.rs
+++ b/compiler/rustc_expand/src/config.rs
@@ -200,7 +200,7 @@ fn get_features(
features
}
-// `cfg_attr`-process the crate's attributes and compute the crate's features.
+/// `cfg_attr`-process the crate's attributes and compute the crate's features.
pub fn features(
sess: &Session,
mut krate: ast::Crate,
diff --git a/compiler/rustc_expand/src/expand.rs b/compiler/rustc_expand/src/expand.rs
index 57713fb3c..1014ec220 100644
--- a/compiler/rustc_expand/src/expand.rs
+++ b/compiler/rustc_expand/src/expand.rs
@@ -1,7 +1,7 @@
use crate::base::*;
use crate::config::StripUnconfigured;
use crate::hygiene::SyntaxContext;
-use crate::mbe::macro_rules::annotate_err_with_kind;
+use crate::mbe::diagnostics::annotate_err_with_kind;
use crate::module::{mod_dir_path, parse_external_mod, DirOwnership, ParsedExternalMod};
use crate::placeholders::{placeholder, PlaceholderExpander};
@@ -11,9 +11,9 @@ use rustc_ast::ptr::P;
use rustc_ast::token::{self, Delimiter};
use rustc_ast::tokenstream::TokenStream;
use rustc_ast::visit::{self, AssocCtxt, Visitor};
-use rustc_ast::{AssocItemKind, AstNodeWrapper, AttrStyle, AttrVec, ExprKind, ForeignItemKind};
-use rustc_ast::{HasAttrs, HasNodeId};
-use rustc_ast::{Inline, ItemKind, MacArgs, MacStmtStyle, MetaItemKind, ModKind};
+use rustc_ast::{AssocItemKind, AstNodeWrapper, AttrArgs, AttrStyle, AttrVec, ExprKind};
+use rustc_ast::{ForeignItemKind, HasAttrs, HasNodeId};
+use rustc_ast::{Inline, ItemKind, MacStmtStyle, MetaItemKind, ModKind};
use rustc_ast::{NestedMetaItem, NodeId, PatKind, StmtKind, TyKind};
use rustc_ast_pretty::pprust;
use rustc_data_structures::map_in_place::MapInPlace;
@@ -337,6 +337,7 @@ pub enum InvocationKind {
},
Derive {
path: ast::Path,
+ is_const: bool,
item: Annotatable,
},
}
@@ -400,7 +401,7 @@ impl<'a, 'b> MacroExpander<'a, 'b> {
krate
}
- // Recursively expand all macro invocations in this AST fragment.
+ /// Recursively expand all macro invocations in this AST fragment.
pub fn fully_expand_fragment(&mut self, input_fragment: AstFragment) -> AstFragment {
let orig_expansion_data = self.cx.current_expansion.clone();
let orig_force_mode = self.cx.force_mode;
@@ -478,13 +479,13 @@ impl<'a, 'b> MacroExpander<'a, 'b> {
derive_invocations.reserve(derives.len());
derives
.into_iter()
- .map(|(path, item, _exts)| {
+ .map(|(path, item, _exts, is_const)| {
// FIXME: Consider using the derive resolutions (`_exts`)
// instead of enqueuing the derives to be resolved again later.
let expn_id = LocalExpnId::fresh_empty();
derive_invocations.push((
Invocation {
- kind: InvocationKind::Derive { path, item },
+ kind: InvocationKind::Derive { path, item, is_const },
fragment_kind,
expansion_data: ExpansionData {
id: expn_id,
@@ -653,7 +654,7 @@ impl<'a, 'b> MacroExpander<'a, 'b> {
ExpandResult::Ready(match invoc.kind {
InvocationKind::Bang { mac, .. } => match ext {
SyntaxExtensionKind::Bang(expander) => {
- let Ok(tok_result) = expander.expand(self.cx, span, mac.args.inner_tokens()) else {
+ let Ok(tok_result) = expander.expand(self.cx, span, mac.args.tokens.clone()) else {
return ExpandResult::Ready(fragment_kind.dummy(span));
};
self.parse_ast_fragment(tok_result, fragment_kind, &mac.path, span)
@@ -661,7 +662,7 @@ impl<'a, 'b> MacroExpander<'a, 'b> {
SyntaxExtensionKind::LegacyBang(expander) => {
let prev = self.cx.current_expansion.prior_type_ascription;
self.cx.current_expansion.prior_type_ascription = mac.prior_type_ascription;
- let tok_result = expander.expand(self.cx, span, mac.args.inner_tokens());
+ let tok_result = expander.expand(self.cx, span, mac.args.tokens.clone());
let result = if let Some(result) = fragment_kind.make_from(tok_result) {
result
} else {
@@ -705,7 +706,7 @@ impl<'a, 'b> MacroExpander<'a, 'b> {
_ => item.to_tokens(),
};
let attr_item = attr.unwrap_normal_item();
- if let MacArgs::Eq(..) = attr_item.args {
+ if let AttrArgs::Eq(..) = attr_item.args {
self.cx.span_err(span, "key-value macro attributes are not supported");
}
let inner_tokens = attr_item.args.inner_tokens();
@@ -717,7 +718,7 @@ impl<'a, 'b> MacroExpander<'a, 'b> {
SyntaxExtensionKind::LegacyAttr(expander) => {
match validate_attr::parse_meta(&self.cx.sess.parse_sess, &attr) {
Ok(meta) => {
- let items = match expander.expand(self.cx, span, &meta, item) {
+ let items = match expander.expand(self.cx, span, &meta, item, false) {
ExpandResult::Ready(items) => items,
ExpandResult::Retry(item) => {
// Reassemble the original invocation for retrying.
@@ -749,19 +750,19 @@ impl<'a, 'b> MacroExpander<'a, 'b> {
}
_ => unreachable!(),
},
- InvocationKind::Derive { path, item } => match ext {
+ InvocationKind::Derive { path, item, is_const } => match ext {
SyntaxExtensionKind::Derive(expander)
| SyntaxExtensionKind::LegacyDerive(expander) => {
if let SyntaxExtensionKind::Derive(..) = ext {
self.gate_proc_macro_input(&item);
}
let meta = ast::MetaItem { kind: MetaItemKind::Word, span, path };
- let items = match expander.expand(self.cx, span, &meta, item) {
+ let items = match expander.expand(self.cx, span, &meta, item, is_const) {
ExpandResult::Ready(items) => items,
ExpandResult::Retry(item) => {
// Reassemble the original invocation for retrying.
return ExpandResult::Retry(Invocation {
- kind: InvocationKind::Derive { path: meta.path, item },
+ kind: InvocationKind::Derive { path: meta.path, item, is_const },
..invoc
});
}
@@ -1121,7 +1122,7 @@ impl InvocationCollectorNode for P<ast::Item> {
ecx.current_expansion.lint_node_id,
&attrs,
&items,
- ident.name.as_str(),
+ ident.name,
);
}
@@ -1643,7 +1644,7 @@ impl<'a, 'b> InvocationCollector<'a, 'b> {
let mut span: Option<Span> = None;
while let Some(attr) = attrs.next() {
rustc_ast_passes::feature_gate::check_attribute(attr, self.cx.sess, features);
- validate_attr::check_meta(&self.cx.sess.parse_sess, attr);
+ validate_attr::check_attr(&self.cx.sess.parse_sess, attr);
let current_span = if let Some(sp) = span { sp.to(attr.span) } else { attr.span };
span = Some(current_span);
@@ -1930,9 +1931,12 @@ pub struct ExpansionConfig<'feat> {
pub features: Option<&'feat Features>,
pub recursion_limit: Limit,
pub trace_mac: bool,
- pub should_test: bool, // If false, strip `#[test]` nodes
- pub span_debug: bool, // If true, use verbose debugging for `proc_macro::Span`
- pub proc_macro_backtrace: bool, // If true, show backtraces for proc-macro panics
+ /// If false, strip `#[test]` nodes
+ pub should_test: bool,
+ /// If true, use verbose debugging for `proc_macro::Span`
+ pub span_debug: bool,
+ /// If true, show backtraces for proc-macro panics
+ pub proc_macro_backtrace: bool,
}
impl<'feat> ExpansionConfig<'feat> {
diff --git a/compiler/rustc_expand/src/mbe.rs b/compiler/rustc_expand/src/mbe.rs
index f42576b16..a43b2a001 100644
--- a/compiler/rustc_expand/src/mbe.rs
+++ b/compiler/rustc_expand/src/mbe.rs
@@ -3,6 +3,7 @@
//! why we call this module `mbe`. For external documentation, prefer the
//! official terminology: "declarative macros".
+pub(crate) mod diagnostics;
pub(crate) mod macro_check;
pub(crate) mod macro_parser;
pub(crate) mod macro_rules;
@@ -52,7 +53,7 @@ impl KleeneToken {
/// A Kleene-style [repetition operator](https://en.wikipedia.org/wiki/Kleene_star)
/// for token sequences.
#[derive(Clone, PartialEq, Encodable, Decodable, Debug, Copy)]
-enum KleeneOp {
+pub(crate) enum KleeneOp {
/// Kleene star (`*`) for zero or more repetitions
ZeroOrMore,
/// Kleene plus (`+`) for one or more repetitions
diff --git a/compiler/rustc_expand/src/mbe/diagnostics.rs b/compiler/rustc_expand/src/mbe/diagnostics.rs
new file mode 100644
index 000000000..197f05691
--- /dev/null
+++ b/compiler/rustc_expand/src/mbe/diagnostics.rs
@@ -0,0 +1,257 @@
+use std::borrow::Cow;
+
+use crate::base::{DummyResult, ExtCtxt, MacResult};
+use crate::expand::{parse_ast_fragment, AstFragmentKind};
+use crate::mbe::{
+ macro_parser::{MatcherLoc, NamedParseResult, ParseResult::*, TtParser},
+ macro_rules::{try_match_macro, Tracker},
+};
+use rustc_ast::token::{self, Token};
+use rustc_ast::tokenstream::TokenStream;
+use rustc_ast_pretty::pprust;
+use rustc_errors::{Applicability, Diagnostic, DiagnosticBuilder, DiagnosticMessage};
+use rustc_parse::parser::{Parser, Recovery};
+use rustc_span::source_map::SourceMap;
+use rustc_span::symbol::Ident;
+use rustc_span::Span;
+
+use super::macro_rules::{parser_from_cx, NoopTracker};
+
+pub(super) fn failed_to_match_macro<'cx>(
+ cx: &'cx mut ExtCtxt<'_>,
+ sp: Span,
+ def_span: Span,
+ name: Ident,
+ arg: TokenStream,
+ lhses: &[Vec<MatcherLoc>],
+) -> Box<dyn MacResult + 'cx> {
+ let sess = &cx.sess.parse_sess;
+
+ // An error occurred; try the expansion again, tracking it closely for better diagnostics.
+ let mut tracker = CollectTrackerAndEmitter::new(cx, sp);
+
+ let try_success_result = try_match_macro(sess, name, &arg, lhses, &mut tracker);
+
+ if try_success_result.is_ok() {
+ // Nonterminal parser recovery might turn failed matches into successful ones,
+ // but for that it must have emitted an error already
+ tracker.cx.sess.delay_span_bug(sp, "Macro matching returned a success on the second try");
+ }
+
+ if let Some(result) = tracker.result {
+ // An irrecoverable error occurred and has been emitted.
+ return result;
+ }
+
+ let Some((token, label, remaining_matcher)) = tracker.best_failure else {
+ return DummyResult::any(sp);
+ };
+
+ let span = token.span.substitute_dummy(sp);
+
+ let mut err = cx.struct_span_err(span, &parse_failure_msg(&token));
+ err.span_label(span, label);
+ if !def_span.is_dummy() && !cx.source_map().is_imported(def_span) {
+ err.span_label(cx.source_map().guess_head_span(def_span), "when calling this macro");
+ }
+
+ annotate_doc_comment(&mut err, sess.source_map(), span);
+
+ if let Some(span) = remaining_matcher.span() {
+ err.span_note(span, format!("while trying to match {remaining_matcher}"));
+ } else {
+ err.note(format!("while trying to match {remaining_matcher}"));
+ }
+
+ // Check whether there's a missing comma in this macro call, like `println!("{}" a);`
+ if let Some((arg, comma_span)) = arg.add_comma() {
+ for lhs in lhses {
+ let parser = parser_from_cx(sess, arg.clone(), Recovery::Allowed);
+ let mut tt_parser = TtParser::new(name);
+
+ if let Success(_) =
+ tt_parser.parse_tt(&mut Cow::Borrowed(&parser), lhs, &mut NoopTracker)
+ {
+ if comma_span.is_dummy() {
+ err.note("you might be missing a comma");
+ } else {
+ err.span_suggestion_short(
+ comma_span,
+ "missing comma here",
+ ", ",
+ Applicability::MachineApplicable,
+ );
+ }
+ }
+ }
+ }
+ err.emit();
+ cx.trace_macros_diag();
+ DummyResult::any(sp)
+}
+
+/// The tracker used for the slow error path that collects useful info for diagnostics.
+struct CollectTrackerAndEmitter<'a, 'cx, 'matcher> {
+ cx: &'a mut ExtCtxt<'cx>,
+ remaining_matcher: Option<&'matcher MatcherLoc>,
+ /// Which arm's failure should we report? (the one furthest along)
+ best_failure: Option<(Token, &'static str, MatcherLoc)>,
+ root_span: Span,
+ result: Option<Box<dyn MacResult + 'cx>>,
+}
+
+impl<'a, 'cx, 'matcher> Tracker<'matcher> for CollectTrackerAndEmitter<'a, 'cx, 'matcher> {
+ fn before_match_loc(&mut self, parser: &TtParser, matcher: &'matcher MatcherLoc) {
+ if self.remaining_matcher.is_none()
+ || (parser.has_no_remaining_items_for_step() && *matcher != MatcherLoc::Eof)
+ {
+ self.remaining_matcher = Some(matcher);
+ }
+ }
+
+ fn after_arm(&mut self, result: &NamedParseResult) {
+ match result {
+ Success(_) => {
+ // Nonterminal parser recovery might turn failed matches into successful ones,
+ // but for that it must have emitted an error already
+ self.cx.sess.delay_span_bug(
+ self.root_span,
+ "should not collect detailed info for successful macro match",
+ );
+ }
+ Failure(token, msg) => match self.best_failure {
+ Some((ref best_token, _, _)) if best_token.span.lo() >= token.span.lo() => {}
+ _ => {
+ self.best_failure = Some((
+ token.clone(),
+ msg,
+ self.remaining_matcher
+ .expect("must have collected matcher already")
+ .clone(),
+ ))
+ }
+ },
+ Error(err_sp, msg) => {
+ let span = err_sp.substitute_dummy(self.root_span);
+ self.cx.struct_span_err(span, msg).emit();
+ self.result = Some(DummyResult::any(span));
+ }
+ ErrorReported(_) => self.result = Some(DummyResult::any(self.root_span)),
+ }
+ }
+
+ fn description() -> &'static str {
+ "detailed"
+ }
+
+ fn recovery() -> Recovery {
+ Recovery::Allowed
+ }
+}
+
+impl<'a, 'cx> CollectTrackerAndEmitter<'a, 'cx, '_> {
+ fn new(cx: &'a mut ExtCtxt<'cx>, root_span: Span) -> Self {
+ Self { cx, remaining_matcher: None, best_failure: None, root_span, result: None }
+ }
+}
+
+pub(super) fn emit_frag_parse_err(
+ mut e: DiagnosticBuilder<'_, rustc_errors::ErrorGuaranteed>,
+ parser: &Parser<'_>,
+ orig_parser: &mut Parser<'_>,
+ site_span: Span,
+ arm_span: Span,
+ kind: AstFragmentKind,
+) {
+ // FIXME(davidtwco): avoid depending on the error message text
+ if parser.token == token::Eof
+ && let DiagnosticMessage::Str(message) = &e.message[0].0
+ && message.ends_with(", found `<eof>`")
+ {
+ let msg = &e.message[0];
+ e.message[0] = (
+ DiagnosticMessage::Str(format!(
+ "macro expansion ends with an incomplete expression: {}",
+ message.replace(", found `<eof>`", ""),
+ )),
+ msg.1,
+ );
+ if !e.span.is_dummy() {
+ // early end of macro arm (#52866)
+ e.replace_span_with(parser.token.span.shrink_to_hi());
+ }
+ }
+ if e.span.is_dummy() {
+ // Get around lack of span in error (#30128)
+ e.replace_span_with(site_span);
+ if !parser.sess.source_map().is_imported(arm_span) {
+ e.span_label(arm_span, "in this macro arm");
+ }
+ } else if parser.sess.source_map().is_imported(parser.token.span) {
+ e.span_label(site_span, "in this macro invocation");
+ }
+ match kind {
+ // Try a statement if an expression is wanted but failed and suggest adding `;` to call.
+ AstFragmentKind::Expr => match parse_ast_fragment(orig_parser, AstFragmentKind::Stmts) {
+ Err(err) => err.cancel(),
+ Ok(_) => {
+ e.note(
+ "the macro call doesn't expand to an expression, but it can expand to a statement",
+ );
+ e.span_suggestion_verbose(
+ site_span.shrink_to_hi(),
+ "add `;` to interpret the expansion as a statement",
+ ";",
+ Applicability::MaybeIncorrect,
+ );
+ }
+ },
+ _ => annotate_err_with_kind(&mut e, kind, site_span),
+ };
+ e.emit();
+}
+
+pub(crate) fn annotate_err_with_kind(err: &mut Diagnostic, kind: AstFragmentKind, span: Span) {
+ match kind {
+ AstFragmentKind::Ty => {
+ err.span_label(span, "this macro call doesn't expand to a type");
+ }
+ AstFragmentKind::Pat => {
+ err.span_label(span, "this macro call doesn't expand to a pattern");
+ }
+ _ => {}
+ };
+}
+
+#[derive(Subdiagnostic)]
+enum ExplainDocComment {
+ #[label(expand_explain_doc_comment_inner)]
+ Inner {
+ #[primary_span]
+ span: Span,
+ },
+ #[label(expand_explain_doc_comment_outer)]
+ Outer {
+ #[primary_span]
+ span: Span,
+ },
+}
+
+pub(super) fn annotate_doc_comment(err: &mut Diagnostic, sm: &SourceMap, span: Span) {
+ if let Ok(src) = sm.span_to_snippet(span) {
+ if src.starts_with("///") || src.starts_with("/**") {
+ err.subdiagnostic(ExplainDocComment::Outer { span });
+ } else if src.starts_with("//!") || src.starts_with("/*!") {
+ err.subdiagnostic(ExplainDocComment::Inner { span });
+ }
+ }
+}
+
+/// Generates an appropriate parsing failure message. For EOF, this is "unexpected end...". For
+/// other tokens, this is "no rules expected the token...".
+pub(super) fn parse_failure_msg(tok: &Token) -> String {
+ match tok.kind {
+ token::Eof => "unexpected end of macro invocation".to_string(),
+ _ => format!("no rules expected the token `{}`", pprust::token_to_string(tok),),
+ }
+}
diff --git a/compiler/rustc_expand/src/mbe/macro_parser.rs b/compiler/rustc_expand/src/mbe/macro_parser.rs
index c8bdc3931..d161868ed 100644
--- a/compiler/rustc_expand/src/mbe/macro_parser.rs
+++ b/compiler/rustc_expand/src/mbe/macro_parser.rs
@@ -73,19 +73,21 @@
pub(crate) use NamedMatch::*;
pub(crate) use ParseResult::*;
-use crate::mbe::{KleeneOp, TokenTree};
+use crate::mbe::{macro_rules::Tracker, KleeneOp, TokenTree};
use rustc_ast::token::{self, DocComment, Nonterminal, NonterminalKind, Token};
+use rustc_ast_pretty::pprust;
+use rustc_data_structures::fx::FxHashMap;
+use rustc_data_structures::sync::Lrc;
+use rustc_errors::ErrorGuaranteed;
use rustc_lint_defs::pluralize;
use rustc_parse::parser::{NtOrTt, Parser};
+use rustc_span::symbol::Ident;
use rustc_span::symbol::MacroRulesNormalizedIdent;
use rustc_span::Span;
-
-use rustc_data_structures::fx::FxHashMap;
-use rustc_data_structures::sync::Lrc;
-use rustc_span::symbol::Ident;
use std::borrow::Cow;
use std::collections::hash_map::Entry::{Occupied, Vacant};
+use std::fmt::Display;
/// A unit within a matcher that a `MatcherPos` can refer to. Similar to (and derived from)
/// `mbe::TokenTree`, but designed specifically for fast and easy traversal during matching.
@@ -96,7 +98,8 @@ use std::collections::hash_map::Entry::{Occupied, Vacant};
///
/// This means a matcher can be represented by `&[MatcherLoc]`, and traversal mostly involves
/// simply incrementing the current matcher position index by one.
-pub(super) enum MatcherLoc {
+#[derive(Debug, PartialEq, Clone)]
+pub(crate) enum MatcherLoc {
Token {
token: Token,
},
@@ -128,6 +131,46 @@ pub(super) enum MatcherLoc {
Eof,
}
+impl MatcherLoc {
+ pub(super) fn span(&self) -> Option<Span> {
+ match self {
+ MatcherLoc::Token { token } => Some(token.span),
+ MatcherLoc::Delimited => None,
+ MatcherLoc::Sequence { .. } => None,
+ MatcherLoc::SequenceKleeneOpNoSep { .. } => None,
+ MatcherLoc::SequenceSep { .. } => None,
+ MatcherLoc::SequenceKleeneOpAfterSep { .. } => None,
+ MatcherLoc::MetaVarDecl { span, .. } => Some(*span),
+ MatcherLoc::Eof => None,
+ }
+ }
+}
+
+impl Display for MatcherLoc {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ match self {
+ MatcherLoc::Token { token } | MatcherLoc::SequenceSep { separator: token } => {
+ write!(f, "`{}`", pprust::token_to_string(token))
+ }
+ MatcherLoc::MetaVarDecl { bind, kind, .. } => {
+ write!(f, "meta-variable `${bind}")?;
+ if let Some(kind) = kind {
+ write!(f, ":{}", kind)?;
+ }
+ write!(f, "`")?;
+ Ok(())
+ }
+ MatcherLoc::Eof => f.write_str("end of macro"),
+
+ // These are not printed in the diagnostic
+ MatcherLoc::Delimited => f.write_str("delimiter"),
+ MatcherLoc::Sequence { .. } => f.write_str("sequence start"),
+ MatcherLoc::SequenceKleeneOpNoSep { .. } => f.write_str("sequence end"),
+ MatcherLoc::SequenceKleeneOpAfterSep { .. } => f.write_str("sequence end"),
+ }
+ }
+}
+
pub(super) fn compute_locs(matcher: &[TokenTree]) -> Vec<MatcherLoc> {
fn inner(
tts: &[TokenTree],
@@ -270,13 +313,17 @@ pub(crate) enum ParseResult<T> {
Failure(Token, &'static str),
/// Fatal error (malformed macro?). Abort compilation.
Error(rustc_span::Span, String),
- ErrorReported,
+ ErrorReported(ErrorGuaranteed),
}
/// A `ParseResult` where the `Success` variant contains a mapping of
/// `MacroRulesNormalizedIdent`s to `NamedMatch`es. This represents the mapping
/// of metavars to the token trees they bind to.
-pub(crate) type NamedParseResult = ParseResult<FxHashMap<MacroRulesNormalizedIdent, NamedMatch>>;
+pub(crate) type NamedParseResult = ParseResult<NamedMatches>;
+
+/// Contains a mapping of `MacroRulesNormalizedIdent`s to `NamedMatch`es.
+/// This represents the mapping of metavars to the token trees they bind to.
+pub(crate) type NamedMatches = FxHashMap<MacroRulesNormalizedIdent, NamedMatch>;
/// Count how many metavars declarations are in `matcher`.
pub(super) fn count_metavar_decls(matcher: &[TokenTree]) -> usize {
@@ -393,6 +440,10 @@ impl TtParser {
}
}
+ pub(super) fn has_no_remaining_items_for_step(&self) -> bool {
+ self.cur_mps.is_empty()
+ }
+
/// Process the matcher positions of `cur_mps` until it is empty. In the process, this will
/// produce more mps in `next_mps` and `bb_mps`.
///
@@ -400,17 +451,21 @@ impl TtParser {
///
/// `Some(result)` if everything is finished, `None` otherwise. Note that matches are kept
/// track of through the mps generated.
- fn parse_tt_inner(
+ fn parse_tt_inner<'matcher, T: Tracker<'matcher>>(
&mut self,
- matcher: &[MatcherLoc],
+ matcher: &'matcher [MatcherLoc],
token: &Token,
+ track: &mut T,
) -> Option<NamedParseResult> {
// Matcher positions that would be valid if the macro invocation was over now. Only
// modified if `token == Eof`.
let mut eof_mps = EofMatcherPositions::None;
while let Some(mut mp) = self.cur_mps.pop() {
- match &matcher[mp.idx] {
+ let matcher_loc = &matcher[mp.idx];
+ track.before_match_loc(self, matcher_loc);
+
+ match matcher_loc {
MatcherLoc::Token { token: t } => {
// If it's a doc comment, we just ignore it and move on to the next tt in the
// matcher. This is a bug, but #95267 showed that existing programs rely on
@@ -450,7 +505,7 @@ impl TtParser {
// Try zero matches of this sequence, by skipping over it.
self.cur_mps.push(MatcherPos {
idx: idx_first_after,
- matches: mp.matches.clone(), // a cheap clone
+ matches: Lrc::clone(&mp.matches),
});
}
@@ -463,8 +518,8 @@ impl TtParser {
// sequence. If that's not possible, `ending_mp` will fail quietly when it is
// processed next time around the loop.
let ending_mp = MatcherPos {
- idx: mp.idx + 1, // +1 skips the Kleene op
- matches: mp.matches.clone(), // a cheap clone
+ idx: mp.idx + 1, // +1 skips the Kleene op
+ matches: Lrc::clone(&mp.matches),
};
self.cur_mps.push(ending_mp);
@@ -479,8 +534,8 @@ impl TtParser {
// separator yet. Try ending the sequence. If that's not possible, `ending_mp`
// will fail quietly when it is processed next time around the loop.
let ending_mp = MatcherPos {
- idx: mp.idx + 2, // +2 skips the separator and the Kleene op
- matches: mp.matches.clone(), // a cheap clone
+ idx: mp.idx + 2, // +2 skips the separator and the Kleene op
+ matches: Lrc::clone(&mp.matches),
};
self.cur_mps.push(ending_mp);
@@ -552,10 +607,11 @@ impl TtParser {
}
/// Match the token stream from `parser` against `matcher`.
- pub(super) fn parse_tt(
+ pub(super) fn parse_tt<'matcher, T: Tracker<'matcher>>(
&mut self,
parser: &mut Cow<'_, Parser<'_>>,
- matcher: &[MatcherLoc],
+ matcher: &'matcher [MatcherLoc],
+ track: &mut T,
) -> NamedParseResult {
// A queue of possible matcher positions. We initialize it with the matcher position in
// which the "dot" is before the first token of the first token tree in `matcher`.
@@ -571,7 +627,8 @@ impl TtParser {
// Process `cur_mps` until either we have finished the input or we need to get some
// parsing from the black-box parser done.
- if let Some(res) = self.parse_tt_inner(matcher, &parser.token) {
+ let res = self.parse_tt_inner(matcher, &parser.token, track);
+ if let Some(res) = res {
return res;
}
@@ -612,14 +669,14 @@ impl TtParser {
// edition-specific matching behavior for non-terminals.
let nt = match parser.to_mut().parse_nonterminal(kind) {
Err(mut err) => {
- err.span_label(
+ let guarantee = err.span_label(
span,
format!(
"while parsing argument for this `{kind}` macro fragment"
),
)
.emit();
- return ErrorReported;
+ return ErrorReported(guarantee);
}
Ok(nt) => nt,
};
diff --git a/compiler/rustc_expand/src/mbe/macro_rules.rs b/compiler/rustc_expand/src/mbe/macro_rules.rs
index f6fe38174..2dbb90e21 100644
--- a/compiler/rustc_expand/src/mbe/macro_rules.rs
+++ b/compiler/rustc_expand/src/mbe/macro_rules.rs
@@ -2,6 +2,7 @@ use crate::base::{DummyResult, ExtCtxt, MacResult, TTMacroExpander};
use crate::base::{SyntaxExtension, SyntaxExtensionKind};
use crate::expand::{ensure_complete_parse, parse_ast_fragment, AstFragment, AstFragmentKind};
use crate::mbe;
+use crate::mbe::diagnostics::{annotate_doc_comment, parse_failure_msg};
use crate::mbe::macro_check;
use crate::mbe::macro_parser::{Error, ErrorReported, Failure, Success, TtParser};
use crate::mbe::macro_parser::{MatchedSeq, MatchedTokenTree, MatcherLoc};
@@ -14,18 +15,17 @@ use rustc_ast::{NodeId, DUMMY_NODE_ID};
use rustc_ast_pretty::pprust;
use rustc_attr::{self as attr, TransparencyError};
use rustc_data_structures::fx::{FxHashMap, FxIndexMap};
-use rustc_errors::{Applicability, Diagnostic, DiagnosticBuilder, DiagnosticMessage};
+use rustc_errors::{Applicability, ErrorGuaranteed};
use rustc_feature::Features;
use rustc_lint_defs::builtin::{
RUST_2021_INCOMPATIBLE_OR_PATTERNS, SEMICOLON_IN_EXPRESSIONS_FROM_MACROS,
};
use rustc_lint_defs::BuiltinLintDiagnostics;
-use rustc_parse::parser::Parser;
+use rustc_parse::parser::{Parser, Recovery};
use rustc_session::parse::ParseSess;
use rustc_session::Session;
use rustc_span::edition::Edition;
use rustc_span::hygiene::Transparency;
-use rustc_span::source_map::SourceMap;
use rustc_span::symbol::{kw, sym, Ident, MacroRulesNormalizedIdent};
use rustc_span::Span;
@@ -33,6 +33,9 @@ use std::borrow::Cow;
use std::collections::hash_map::Entry;
use std::{mem, slice};
+use super::diagnostics;
+use super::macro_parser::{NamedMatches, NamedParseResult};
+
pub(crate) struct ParserAnyMacro<'a> {
parser: Parser<'a>,
@@ -47,74 +50,6 @@ pub(crate) struct ParserAnyMacro<'a> {
is_local: bool,
}
-pub(crate) fn annotate_err_with_kind(err: &mut Diagnostic, kind: AstFragmentKind, span: Span) {
- match kind {
- AstFragmentKind::Ty => {
- err.span_label(span, "this macro call doesn't expand to a type");
- }
- AstFragmentKind::Pat => {
- err.span_label(span, "this macro call doesn't expand to a pattern");
- }
- _ => {}
- };
-}
-
-fn emit_frag_parse_err(
- mut e: DiagnosticBuilder<'_, rustc_errors::ErrorGuaranteed>,
- parser: &Parser<'_>,
- orig_parser: &mut Parser<'_>,
- site_span: Span,
- arm_span: Span,
- kind: AstFragmentKind,
-) {
- // FIXME(davidtwco): avoid depending on the error message text
- if parser.token == token::Eof
- && let DiagnosticMessage::Str(message) = &e.message[0].0
- && message.ends_with(", found `<eof>`")
- {
- let msg = &e.message[0];
- e.message[0] = (
- DiagnosticMessage::Str(format!(
- "macro expansion ends with an incomplete expression: {}",
- message.replace(", found `<eof>`", ""),
- )),
- msg.1,
- );
- if !e.span.is_dummy() {
- // early end of macro arm (#52866)
- e.replace_span_with(parser.token.span.shrink_to_hi());
- }
- }
- if e.span.is_dummy() {
- // Get around lack of span in error (#30128)
- e.replace_span_with(site_span);
- if !parser.sess.source_map().is_imported(arm_span) {
- e.span_label(arm_span, "in this macro arm");
- }
- } else if parser.sess.source_map().is_imported(parser.token.span) {
- e.span_label(site_span, "in this macro invocation");
- }
- match kind {
- // Try a statement if an expression is wanted but failed and suggest adding `;` to call.
- AstFragmentKind::Expr => match parse_ast_fragment(orig_parser, AstFragmentKind::Stmts) {
- Err(err) => err.cancel(),
- Ok(_) => {
- e.note(
- "the macro call doesn't expand to an expression, but it can expand to a statement",
- );
- e.span_suggestion_verbose(
- site_span.shrink_to_hi(),
- "add `;` to interpret the expansion as a statement",
- ";",
- Applicability::MaybeIncorrect,
- );
- }
- },
- _ => annotate_err_with_kind(&mut e, kind, site_span),
- };
- e.emit();
-}
-
impl<'a> ParserAnyMacro<'a> {
pub(crate) fn make(mut self: Box<ParserAnyMacro<'a>>, kind: AstFragmentKind) -> AstFragment {
let ParserAnyMacro {
@@ -130,7 +65,7 @@ impl<'a> ParserAnyMacro<'a> {
let fragment = match parse_ast_fragment(parser, kind) {
Ok(f) => f,
Err(err) => {
- emit_frag_parse_err(err, parser, snapshot, site_span, arm_span, kind);
+ diagnostics::emit_frag_parse_err(err, parser, snapshot, site_span, arm_span, kind);
return kind.dummy(site_span);
}
};
@@ -205,8 +140,37 @@ fn trace_macros_note(cx_expansions: &mut FxIndexMap<Span, Vec<String>>, sp: Span
cx_expansions.entry(sp).or_default().push(message);
}
+pub(super) trait Tracker<'matcher> {
+ /// This is called before trying to match the next MatcherLoc on the current token.
+ fn before_match_loc(&mut self, parser: &TtParser, matcher: &'matcher MatcherLoc);
+
+ /// This is called after an arm has been parsed, either successfully or unsuccessfully. By the time this is
+ /// called, `before_match_loc` has been called at least once (with a `MatcherLoc::Eof`).
+ fn after_arm(&mut self, result: &NamedParseResult);
+
+ /// For tracing.
+ fn description() -> &'static str;
+
+ fn recovery() -> Recovery;
+}
+
+/// A noop tracker that is used in the hot path of the expansion; it has zero overhead thanks to monomorphization.
+pub(super) struct NoopTracker;
+
+impl<'matcher> Tracker<'matcher> for NoopTracker {
+ fn before_match_loc(&mut self, _: &TtParser, _: &'matcher MatcherLoc) {}
+ fn after_arm(&mut self, _: &NamedParseResult) {}
+ fn description() -> &'static str {
+ "none"
+ }
+ fn recovery() -> Recovery {
+ Recovery::Forbidden
+ }
+}
+
/// Expands the rules based macro defined by `lhses` and `rhses` for a given
/// input `arg`.
+#[instrument(skip(cx, transparency, arg, lhses, rhses))]
fn expand_macro<'cx>(
cx: &'cx mut ExtCtxt<'_>,
sp: Span,
@@ -228,9 +192,96 @@ fn expand_macro<'cx>(
trace_macros_note(&mut cx.expansions, sp, msg);
}
- // Which arm's failure should we report? (the one furthest along)
- let mut best_failure: Option<(Token, &str)> = None;
+ // Track nothing for the best performance.
+ let try_success_result = try_match_macro(sess, name, &arg, lhses, &mut NoopTracker);
+
+ match try_success_result {
+ Ok((i, named_matches)) => {
+ let (rhs, rhs_span): (&mbe::Delimited, DelimSpan) = match &rhses[i] {
+ mbe::TokenTree::Delimited(span, delimited) => (&delimited, *span),
+ _ => cx.span_bug(sp, "malformed macro rhs"),
+ };
+ let arm_span = rhses[i].span();
+
+ let rhs_spans = rhs.tts.iter().map(|t| t.span()).collect::<Vec<_>>();
+ // rhs has holes (`$id` and `$(...)`) that need to be filled
+ let mut tts = match transcribe(cx, &named_matches, &rhs, rhs_span, transparency) {
+ Ok(tts) => tts,
+ Err(mut err) => {
+ err.emit();
+ return DummyResult::any(arm_span);
+ }
+ };
+
+ // Replace all the tokens for the corresponding positions in the macro, to maintain
+ // proper positions in error reporting, while maintaining the macro_backtrace.
+ if rhs_spans.len() == tts.len() {
+ tts = tts.map_enumerated(|i, tt| {
+ let mut tt = tt.clone();
+ let mut sp = rhs_spans[i];
+ sp = sp.with_ctxt(tt.span().ctxt());
+ tt.set_span(sp);
+ tt
+ });
+ }
+
+ if cx.trace_macros() {
+ let msg = format!("to `{}`", pprust::tts_to_string(&tts));
+ trace_macros_note(&mut cx.expansions, sp, msg);
+ }
+
+ let mut p = Parser::new(sess, tts, false, None);
+ p.last_type_ascription = cx.current_expansion.prior_type_ascription;
+
+ if is_local {
+ cx.resolver.record_macro_rule_usage(node_id, i);
+ }
+
+ // Let the context choose how to interpret the result.
+ // Weird, but useful for X-macros.
+ return Box::new(ParserAnyMacro {
+ parser: p,
+
+ // Pass along the original expansion site and the name of the macro
+ // so we can print a useful error message if the parse of the expanded
+ // macro leaves unparsed tokens.
+ site_span: sp,
+ macro_ident: name,
+ lint_node_id: cx.current_expansion.lint_node_id,
+ is_trailing_mac: cx.current_expansion.is_trailing_mac,
+ arm_span,
+ is_local,
+ });
+ }
+ Err(CanRetry::No(_)) => {
+ debug!("Will not retry matching as an error was emitted already");
+ return DummyResult::any(sp);
+ }
+ Err(CanRetry::Yes) => {
+ // Retry and emit a better error below.
+ }
+ }
+
+ diagnostics::failed_to_match_macro(cx, sp, def_span, name, arg, lhses)
+}
+
+pub(super) enum CanRetry {
+ Yes,
+ /// We are not allowed to retry macro expansion as a fatal error has been emitted already.
+ No(ErrorGuaranteed),
+}
+/// Try expanding the macro. Returns the index of the successful arm and its `named_matches` if expansion
+/// succeeded, and nothing if it failed. On failure, it's the caller's job to use `track` accordingly to record
+/// all errors correctly.
+#[instrument(level = "debug", skip(sess, arg, lhses, track), fields(tracking = %T::description()))]
+pub(super) fn try_match_macro<'matcher, T: Tracker<'matcher>>(
+ sess: &ParseSess,
+ name: Ident,
+ arg: &TokenStream,
+ lhses: &'matcher [Vec<MatcherLoc>],
+ track: &mut T,
+) -> Result<(usize, NamedMatches), CanRetry> {
// We create a base parser that can be used for the "black box" parts.
// Every iteration needs a fresh copy of that parser. However, the parser
// is not mutated on many of the iterations, particularly when dealing with
@@ -250,127 +301,53 @@ fn expand_macro<'cx>(
// hacky, but speeds up the `html5ever` benchmark significantly. (Issue
// 68836 suggests a more comprehensive but more complex change to deal with
// this situation.)
- // FIXME(Nilstrieb): Stop recovery from happening on this parser and retry later with recovery if the macro failed to match.
- let parser = parser_from_cx(sess, arg.clone());
-
+ let parser = parser_from_cx(sess, arg.clone(), T::recovery());
// Try each arm's matchers.
let mut tt_parser = TtParser::new(name);
for (i, lhs) in lhses.iter().enumerate() {
+ let _tracing_span = trace_span!("Matching arm", %i);
+
// Take a snapshot of the state of pre-expansion gating at this point.
// This is used so that if a matcher is not `Success(..)`ful,
// then the spans which became gated when parsing the unsuccessful matcher
// are not recorded. On the first `Success(..)`ful matcher, the spans are merged.
let mut gated_spans_snapshot = mem::take(&mut *sess.gated_spans.spans.borrow_mut());
- match tt_parser.parse_tt(&mut Cow::Borrowed(&parser), lhs) {
+ let result = tt_parser.parse_tt(&mut Cow::Borrowed(&parser), lhs, track);
+
+ track.after_arm(&result);
+
+ match result {
Success(named_matches) => {
+ debug!("Parsed arm successfully");
// The matcher was `Success(..)`ful.
// Merge the gated spans from parsing the matcher with the pre-existing ones.
sess.gated_spans.merge(gated_spans_snapshot);
- let (rhs, rhs_span): (&mbe::Delimited, DelimSpan) = match &rhses[i] {
- mbe::TokenTree::Delimited(span, delimited) => (&delimited, *span),
- _ => cx.span_bug(sp, "malformed macro rhs"),
- };
- let arm_span = rhses[i].span();
-
- let rhs_spans = rhs.tts.iter().map(|t| t.span()).collect::<Vec<_>>();
- // rhs has holes ( `$id` and `$(...)` that need filled)
- let mut tts = match transcribe(cx, &named_matches, &rhs, rhs_span, transparency) {
- Ok(tts) => tts,
- Err(mut err) => {
- err.emit();
- return DummyResult::any(arm_span);
- }
- };
-
- // Replace all the tokens for the corresponding positions in the macro, to maintain
- // proper positions in error reporting, while maintaining the macro_backtrace.
- if rhs_spans.len() == tts.len() {
- tts = tts.map_enumerated(|i, tt| {
- let mut tt = tt.clone();
- let mut sp = rhs_spans[i];
- sp = sp.with_ctxt(tt.span().ctxt());
- tt.set_span(sp);
- tt
- });
- }
-
- if cx.trace_macros() {
- let msg = format!("to `{}`", pprust::tts_to_string(&tts));
- trace_macros_note(&mut cx.expansions, sp, msg);
- }
-
- let mut p = Parser::new(sess, tts, false, None);
- p.last_type_ascription = cx.current_expansion.prior_type_ascription;
-
- if is_local {
- cx.resolver.record_macro_rule_usage(node_id, i);
- }
-
- // Let the context choose how to interpret the result.
- // Weird, but useful for X-macros.
- return Box::new(ParserAnyMacro {
- parser: p,
-
- // Pass along the original expansion site and the name of the macro
- // so we can print a useful error message if the parse of the expanded
- // macro leaves unparsed tokens.
- site_span: sp,
- macro_ident: name,
- lint_node_id: cx.current_expansion.lint_node_id,
- is_trailing_mac: cx.current_expansion.is_trailing_mac,
- arm_span,
- is_local,
- });
+ return Ok((i, named_matches));
}
- Failure(token, msg) => match best_failure {
- Some((ref best_token, _)) if best_token.span.lo() >= token.span.lo() => {}
- _ => best_failure = Some((token, msg)),
- },
- Error(err_sp, ref msg) => {
- let span = err_sp.substitute_dummy(sp);
- cx.struct_span_err(span, &msg).emit();
- return DummyResult::any(span);
+ Failure(_, _) => {
+ trace!("Failed to match arm, trying the next one");
+ // Try the next arm.
+ }
+ Error(_, _) => {
+ debug!("Fatal error occurred during matching");
+ // We haven't emitted an error yet, so we can retry.
+ return Err(CanRetry::Yes);
+ }
+ ErrorReported(guarantee) => {
+ debug!("Fatal error occurred and was reported during matching");
+ // An error has been reported already, we cannot retry as that would cause duplicate errors.
+ return Err(CanRetry::No(guarantee));
}
- ErrorReported => return DummyResult::any(sp),
}
// The matcher was not `Success(..)`ful.
// Restore to the state before snapshotting and maybe try again.
mem::swap(&mut gated_spans_snapshot, &mut sess.gated_spans.spans.borrow_mut());
}
- drop(parser);
-
- let (token, label) = best_failure.expect("ran no matchers");
- let span = token.span.substitute_dummy(sp);
- let mut err = cx.struct_span_err(span, &parse_failure_msg(&token));
- err.span_label(span, label);
- if !def_span.is_dummy() && !cx.source_map().is_imported(def_span) {
- err.span_label(cx.source_map().guess_head_span(def_span), "when calling this macro");
- }
- annotate_doc_comment(&mut err, sess.source_map(), span);
- // Check whether there's a missing comma in this macro call, like `println!("{}" a);`
- if let Some((arg, comma_span)) = arg.add_comma() {
- for lhs in lhses {
- let parser = parser_from_cx(sess, arg.clone());
- if let Success(_) = tt_parser.parse_tt(&mut Cow::Borrowed(&parser), lhs) {
- if comma_span.is_dummy() {
- err.note("you might be missing a comma");
- } else {
- err.span_suggestion_short(
- comma_span,
- "missing comma here",
- ", ",
- Applicability::MachineApplicable,
- );
- }
- }
- }
- }
- err.emit();
- cx.trace_macros_diag();
- DummyResult::any(sp)
+
+ Err(CanRetry::Yes)
}
// Note that macro-by-example's input is also matched against a token tree:
@@ -406,7 +383,7 @@ pub fn compile_declarative_macro(
// Parse the macro_rules! invocation
let (macro_rules, body) = match &def.kind {
- ast::ItemKind::MacroDef(def) => (def.macro_rules, def.body.inner_tokens()),
+ ast::ItemKind::MacroDef(def) => (def.macro_rules, def.body.tokens.clone()),
_ => unreachable!(),
};
@@ -452,28 +429,29 @@ pub fn compile_declarative_macro(
let parser = Parser::new(&sess.parse_sess, body, true, rustc_parse::MACRO_ARGUMENTS);
let mut tt_parser =
TtParser::new(Ident::with_dummy_span(if macro_rules { kw::MacroRules } else { kw::Macro }));
- let argument_map = match tt_parser.parse_tt(&mut Cow::Borrowed(&parser), &argument_gram) {
- Success(m) => m,
- Failure(token, msg) => {
- let s = parse_failure_msg(&token);
- let sp = token.span.substitute_dummy(def.span);
- let mut err = sess.parse_sess.span_diagnostic.struct_span_err(sp, &s);
- err.span_label(sp, msg);
- annotate_doc_comment(&mut err, sess.source_map(), sp);
- err.emit();
- return dummy_syn_ext();
- }
- Error(sp, msg) => {
- sess.parse_sess
- .span_diagnostic
- .struct_span_err(sp.substitute_dummy(def.span), &msg)
- .emit();
- return dummy_syn_ext();
- }
- ErrorReported => {
- return dummy_syn_ext();
- }
- };
+ let argument_map =
+ match tt_parser.parse_tt(&mut Cow::Owned(parser), &argument_gram, &mut NoopTracker) {
+ Success(m) => m,
+ Failure(token, msg) => {
+ let s = parse_failure_msg(&token);
+ let sp = token.span.substitute_dummy(def.span);
+ let mut err = sess.parse_sess.span_diagnostic.struct_span_err(sp, &s);
+ err.span_label(sp, msg);
+ annotate_doc_comment(&mut err, sess.source_map(), sp);
+ err.emit();
+ return dummy_syn_ext();
+ }
+ Error(sp, msg) => {
+ sess.parse_sess
+ .span_diagnostic
+ .struct_span_err(sp.substitute_dummy(def.span), &msg)
+ .emit();
+ return dummy_syn_ext();
+ }
+ ErrorReported(_) => {
+ return dummy_syn_ext();
+ }
+ };
let mut valid = true;
@@ -597,30 +575,6 @@ pub fn compile_declarative_macro(
(mk_syn_ext(expander), rule_spans)
}
-#[derive(Subdiagnostic)]
-enum ExplainDocComment {
- #[label(expand_explain_doc_comment_inner)]
- Inner {
- #[primary_span]
- span: Span,
- },
- #[label(expand_explain_doc_comment_outer)]
- Outer {
- #[primary_span]
- span: Span,
- },
-}
-
-fn annotate_doc_comment(err: &mut Diagnostic, sm: &SourceMap, span: Span) {
- if let Ok(src) = sm.span_to_snippet(span) {
- if src.starts_with("///") || src.starts_with("/**") {
- err.subdiagnostic(ExplainDocComment::Outer { span });
- } else if src.starts_with("//!") || src.starts_with("/*!") {
- err.subdiagnostic(ExplainDocComment::Inner { span });
- }
- }
-}
-
fn check_lhs_nt_follows(sess: &ParseSess, def: &ast::Item, lhs: &mbe::TokenTree) -> bool {
// lhs is going to be like TokenTree::Delimited(...), where the
// entire lhs is those tts. Or, it can be a "bare sequence", not wrapped in parens.
@@ -1405,15 +1359,6 @@ fn quoted_tt_to_string(tt: &mbe::TokenTree) -> String {
}
}
-fn parser_from_cx(sess: &ParseSess, tts: TokenStream) -> Parser<'_> {
- Parser::new(sess, tts, true, rustc_parse::MACRO_ARGUMENTS)
-}
-
-/// Generates an appropriate parsing failure message. For EOF, this is "unexpected end...". For
-/// other tokens, this is "unexpected token...".
-fn parse_failure_msg(tok: &Token) -> String {
- match tok.kind {
- token::Eof => "unexpected end of macro invocation".to_string(),
- _ => format!("no rules expected the token `{}`", pprust::token_to_string(tok),),
- }
+pub(super) fn parser_from_cx(sess: &ParseSess, tts: TokenStream, recovery: Recovery) -> Parser<'_> {
+ Parser::new(sess, tts, true, rustc_parse::MACRO_ARGUMENTS).recovery(recovery)
}
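The rewritten `expand_macro`/`try_match_macro` above uses a two-pass strategy: match with the zero-overhead `NoopTracker` first, and only retry with the diagnostics-collecting tracker (in `mbe::diagnostics`) when matching fails. A self-contained toy sketch of that pattern; the types and data here are invented for illustration and are not rustc's:

trait Tracker {
    // Called after each arm is tried, mirroring `Tracker::after_arm` above.
    fn after_arm(&mut self, arm: usize, result: &Result<(), String>);
    fn description() -> &'static str;
}

struct NoopTracker;

impl Tracker for NoopTracker {
    fn after_arm(&mut self, _: usize, _: &Result<(), String>) {}
    fn description() -> &'static str {
        "none"
    }
}

struct CollectTracker {
    failures: Vec<(usize, String)>,
}

impl Tracker for CollectTracker {
    fn after_arm(&mut self, arm: usize, result: &Result<(), String>) {
        if let Err(msg) = result {
            self.failures.push((arm, msg.clone()));
        }
    }
    fn description() -> &'static str {
        "detailed"
    }
}

// Try each "arm" in turn, reporting every attempt to the tracker.
fn try_match<T: Tracker>(input: &str, arms: &[&str], track: &mut T) -> Option<usize> {
    for (i, arm) in arms.iter().enumerate() {
        let result = if input == *arm { Ok(()) } else { Err(format!("expected `{arm}`")) };
        track.after_arm(i, &result);
        if result.is_ok() {
            return Some(i);
        }
    }
    None
}

fn main() {
    let arms = ["foo", "bar"];
    // Fast path: the tracking calls monomorphize to no-ops.
    if try_match("baz", &arms, &mut NoopTracker).is_none() {
        // Slow path: retry only to collect better diagnostics.
        let mut tracker = CollectTracker { failures: Vec::new() };
        try_match("baz", &arms, &mut tracker);
        for (arm, msg) in tracker.failures {
            eprintln!("[{} tracking] arm {arm} failed: {msg}", CollectTracker::description());
        }
    }
}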
diff --git a/compiler/rustc_expand/src/parse/tests.rs b/compiler/rustc_expand/src/parse/tests.rs
index a3c631d33..e49f112bf 100644
--- a/compiler/rustc_expand/src/parse/tests.rs
+++ b/compiler/rustc_expand/src/parse/tests.rs
@@ -291,7 +291,7 @@ fn ttdelim_span() {
.unwrap();
let tts: Vec<_> = match expr.kind {
- ast::ExprKind::MacCall(ref mac) => mac.args.inner_tokens().into_trees().collect(),
+ ast::ExprKind::MacCall(ref mac) => mac.args.tokens.clone().into_trees().collect(),
_ => panic!("not a macro"),
};
diff --git a/compiler/rustc_expand/src/placeholders.rs b/compiler/rustc_expand/src/placeholders.rs
index faaf3b3fe..03bb5c1df 100644
--- a/compiler/rustc_expand/src/placeholders.rs
+++ b/compiler/rustc_expand/src/placeholders.rs
@@ -1,14 +1,12 @@
use crate::expand::{AstFragment, AstFragmentKind};
-
use rustc_ast as ast;
use rustc_ast::mut_visit::*;
use rustc_ast::ptr::P;
+use rustc_data_structures::fx::FxHashMap;
use rustc_span::source_map::DUMMY_SP;
use rustc_span::symbol::Ident;
-
use smallvec::{smallvec, SmallVec};
-
-use rustc_data_structures::fx::FxHashMap;
+use thin_vec::ThinVec;
pub fn placeholder(
kind: AstFragmentKind,
@@ -17,8 +15,12 @@ pub fn placeholder(
) -> AstFragment {
fn mac_placeholder() -> P<ast::MacCall> {
P(ast::MacCall {
- path: ast::Path { span: DUMMY_SP, segments: Vec::new(), tokens: None },
- args: P(ast::MacArgs::Empty),
+ path: ast::Path { span: DUMMY_SP, segments: ThinVec::new(), tokens: None },
+ args: P(ast::DelimArgs {
+ dspan: ast::tokenstream::DelimSpan::dummy(),
+ delim: ast::MacDelimiter::Parenthesis,
+ tokens: ast::tokenstream::TokenStream::new(Vec::new()),
+ }),
prior_type_ascription: None,
})
}
diff --git a/compiler/rustc_expand/src/proc_macro.rs b/compiler/rustc_expand/src/proc_macro.rs
index 1a2ab9d19..e9a691920 100644
--- a/compiler/rustc_expand/src/proc_macro.rs
+++ b/compiler/rustc_expand/src/proc_macro.rs
@@ -112,6 +112,7 @@ impl MultiItemModifier for DeriveProcMacro {
span: Span,
_meta_item: &ast::MetaItem,
item: Annotatable,
+ _is_derive_const: bool,
) -> ExpandResult<Vec<Annotatable>, Annotatable> {
// We need special handling for statement items
// (e.g. `fn foo() { #[derive(Debug)] struct Bar; }`)
diff --git a/compiler/rustc_expand/src/proc_macro_server.rs b/compiler/rustc_expand/src/proc_macro_server.rs
index cc2858d3f..761657961 100644
--- a/compiler/rustc_expand/src/proc_macro_server.rs
+++ b/compiler/rustc_expand/src/proc_macro_server.rs
@@ -516,26 +516,27 @@ impl server::TokenStream for Rustc<'_, '_> {
// We don't use `TokenStream::from_ast` as the tokenstream currently cannot
// be recovered in the general case.
match &expr.kind {
- ast::ExprKind::Lit(l) if l.token_lit.kind == token::Bool => {
+ ast::ExprKind::Lit(token_lit) if token_lit.kind == token::Bool => {
Ok(tokenstream::TokenStream::token_alone(
- token::Ident(l.token_lit.symbol, false),
- l.span,
+ token::Ident(token_lit.symbol, false),
+ expr.span,
))
}
- ast::ExprKind::Lit(l) => {
- Ok(tokenstream::TokenStream::token_alone(token::Literal(l.token_lit), l.span))
+ ast::ExprKind::Lit(token_lit) => {
+ Ok(tokenstream::TokenStream::token_alone(token::Literal(*token_lit), expr.span))
+ }
+ ast::ExprKind::IncludedBytes(bytes) => {
+ let lit = ast::LitKind::ByteStr(bytes.clone()).to_token_lit();
+ Ok(tokenstream::TokenStream::token_alone(token::TokenKind::Literal(lit), expr.span))
}
ast::ExprKind::Unary(ast::UnOp::Neg, e) => match &e.kind {
- ast::ExprKind::Lit(l) => match l.token_lit {
+ ast::ExprKind::Lit(token_lit) => match token_lit {
token::Lit { kind: token::Integer | token::Float, .. } => {
Ok(Self::TokenStream::from_iter([
// FIXME: The span of the `-` token is lost when
// parsing, so we cannot faithfully recover it here.
tokenstream::TokenTree::token_alone(token::BinOp(token::Minus), e.span),
- tokenstream::TokenTree::token_alone(
- token::Literal(l.token_lit),
- l.span,
- ),
+ tokenstream::TokenTree::token_alone(token::Literal(*token_lit), e.span),
]))
}
_ => Err(()),
diff --git a/compiler/rustc_expand/src/tests.rs b/compiler/rustc_expand/src/tests.rs
index e44f06081..539b04535 100644
--- a/compiler/rustc_expand/src/tests.rs
+++ b/compiler/rustc_expand/src/tests.rs
@@ -151,6 +151,7 @@ fn test_harness(file_text: &str, span_labels: Vec<SpanLabel>, expected_output: &
false,
None,
false,
+ false,
);
let handler = Handler::with_emitter(true, None, Box::new(emitter));
handler.span_err(msp, "foo");
@@ -271,13 +272,13 @@ error: foo
--> test.rs:3:3
|
3 | X0 Y0
- | ____^__-
- | | ___|
+ | ___^__-
+ | |___|
| ||
4 | || X1 Y1
5 | || X2 Y2
| ||____^__- `Y` is a good letter too
- | |____|
+ | |_____|
| `X` is a good letter
"#,
@@ -310,12 +311,12 @@ error: foo
--> test.rs:3:3
|
3 | X0 Y0
- | ____^__-
- | | ___|
+ | ___^__-
+ | |___|
| ||
4 | || Y1 X1
| ||____-__^ `X` is a good letter
- | |_____|
+ | |____|
| `Y` is a good letter too
"#,
@@ -350,13 +351,13 @@ error: foo
--> test.rs:3:6
|
3 | X0 Y0 Z0
- | ______^
-4 | | X1 Y1 Z1
- | |_________-
+ | _______^
+4 | | X1 Y1 Z1
+ | | _________-
5 | || X2 Y2 Z2
| ||____^ `X` is a good letter
-6 | | X3 Y3 Z3
- | |_____- `Y` is a good letter too
+6 | | X3 Y3 Z3
+ | |____- `Y` is a good letter too
"#,
);
@@ -394,15 +395,15 @@ error: foo
--> test.rs:3:3
|
3 | X0 Y0 Z0
- | _____^__-__-
- | | ____|__|
- | || ___|
+ | ___^__-__-
+ | |___|__|
+ | ||___|
| |||
4 | ||| X1 Y1 Z1
5 | ||| X2 Y2 Z2
| |||____^__-__- `Z` label
- | ||____|__|
- | |____| `Y` is a good letter too
+ | ||_____|__|
+ | |______| `Y` is a good letter too
| `X` is a good letter
"#,
@@ -486,17 +487,17 @@ error: foo
--> test.rs:3:6
|
3 | X0 Y0 Z0
- | ______^
-4 | | X1 Y1 Z1
- | |____^_-
+ | _______^
+4 | | X1 Y1 Z1
+ | | ____^_-
| ||____|
- | | `X` is a good letter
-5 | | X2 Y2 Z2
- | |____-______- `Y` is a good letter too
- | ____|
- | |
-6 | | X3 Y3 Z3
- | |________- `Z`
+ | | `X` is a good letter
+5 | | X2 Y2 Z2
+ | |___-______- `Y` is a good letter too
+ | ___|
+ | |
+6 | | X3 Y3 Z3
+ | |_______- `Z`
"#,
);
@@ -569,14 +570,14 @@ error: foo
--> test.rs:3:6
|
3 | X0 Y0 Z0
- | ______^
-4 | | X1 Y1 Z1
- | |____^____-
+ | _______^
+4 | | X1 Y1 Z1
+ | | ____^____-
| ||____|
- | | `X` is a good letter
-5 | | X2 Y2 Z2
-6 | | X3 Y3 Z3
- | |___________- `Y` is a good letter too
+ | | `X` is a good letter
+5 | | X2 Y2 Z2
+6 | | X3 Y3 Z3
+ | |__________- `Y` is a good letter too
"#,
);
@@ -940,18 +941,18 @@ error: foo
--> test.rs:3:6
|
3 | X0 Y0 Z0
- | ______^
-4 | | X1 Y1 Z1
- | |____^____-
+ | _______^
+4 | | X1 Y1 Z1
+ | | ____^____-
| ||____|
- | | `X` is a good letter
-5 | | 1
-6 | | 2
-7 | | 3
-... |
-15 | | X2 Y2 Z2
-16 | | X3 Y3 Z3
- | |___________- `Y` is a good letter too
+ | | `X` is a good letter
+5 | | 1
+6 | | 2
+7 | | 3
+... |
+15 | | X2 Y2 Z2
+16 | | X3 Y3 Z3
+ | |__________- `Y` is a good letter too
"#,
);
@@ -995,21 +996,21 @@ error: foo
--> test.rs:3:6
|
3 | X0 Y0 Z0
- | ______^
-4 | | 1
-5 | | 2
-6 | | 3
-7 | | X1 Y1 Z1
- | |_________-
+ | _______^
+4 | | 1
+5 | | 2
+6 | | 3
+7 | | X1 Y1 Z1
+ | | _________-
8 | || 4
9 | || 5
10 | || 6
11 | || X2 Y2 Z2
| ||__________- `Z` is a good letter too
-... |
-15 | | 10
-16 | | X3 Y3 Z3
- | |_______^ `Y` is a good letter
+... |
+15 | | 10
+16 | | X3 Y3 Z3
+ | |________^ `Y` is a good letter
"#,
);
diff --git a/compiler/rustc_feature/src/accepted.rs b/compiler/rustc_feature/src/accepted.rs
index 80448605c..62ef40a9d 100644
--- a/compiler/rustc_feature/src/accepted.rs
+++ b/compiler/rustc_feature/src/accepted.rs
@@ -191,6 +191,8 @@ declare_features! (
(accepted, infer_outlives_requirements, "1.30.0", Some(44493), None),
/// Allows irrefutable patterns in `if let` and `while let` statements (RFC 2086).
(accepted, irrefutable_let_patterns, "1.33.0", Some(44495), None),
+ /// Allows `#[instruction_set(_)]` attribute.
+ (accepted, isa_attribute, "1.67.0", Some(74727), None),
/// Allows some increased flexibility in the name resolution rules,
/// especially around globs and shadowing (RFC 1560).
(accepted, item_like_imports, "1.15.0", Some(35120), None),
@@ -235,6 +237,8 @@ declare_features! (
(accepted, native_link_modifiers, "1.61.0", Some(81490), None),
/// Allows specifying the bundle link modifier
(accepted, native_link_modifiers_bundle, "1.63.0", Some(81490), None),
+ /// Allows specifying the verbatim link modifier
+ (accepted, native_link_modifiers_verbatim, "1.67.0", Some(81490), None),
/// Allows specifying the whole-archive link modifier
(accepted, native_link_modifiers_whole_archive, "1.61.0", Some(81490), None),
/// Allows using non lexical lifetimes (RFC 2094).
diff --git a/compiler/rustc_feature/src/active.rs b/compiler/rustc_feature/src/active.rs
index 647ccdec7..8e708a009 100644
--- a/compiler/rustc_feature/src/active.rs
+++ b/compiler/rustc_feature/src/active.rs
@@ -152,6 +152,8 @@ declare_features! (
(active, anonymous_lifetime_in_impl_trait, "1.63.0", None, None),
/// Allows identifying the `compiler_builtins` crate.
(active, compiler_builtins, "1.13.0", None, None),
+ /// Allows writing custom MIR
+ (active, custom_mir, "1.65.0", None, None),
/// Outputs useful `assert!` messages
(active, generic_assert, "1.63.0", None, None),
/// Allows using the `rust-intrinsic`'s "ABI".
@@ -388,6 +390,9 @@ declare_features! (
(active, exclusive_range_pattern, "1.11.0", Some(37854), None),
/// Allows exhaustive pattern matching on types that contain uninhabited types.
(active, exhaustive_patterns, "1.13.0", Some(51085), None),
+ /// Allows using `efiapi`, `sysv64` and `win64` as calling convention
+ /// for functions with varargs.
+ (active, extended_varargs_abi_support, "1.65.0", Some(100189), None),
/// Allows defining `extern type`s.
(active, extern_types, "1.23.0", Some(43467), None),
/// Allows the use of `#[ffi_const]` on foreign functions.
@@ -412,6 +417,10 @@ declare_features! (
(active, half_open_range_patterns_in_slices, "1.66.0", Some(67264), None),
/// Allows `if let` guard in match arms.
(active, if_let_guard, "1.47.0", Some(51114), None),
+ /// Allows `impl Trait` as output type in `Fn` traits in return position of functions.
+ (active, impl_trait_in_fn_trait_return, "1.64.0", Some(99697), None),
+ /// Allows referencing `Self` and projections in impl-trait.
+ (active, impl_trait_projections, "1.67.0", Some(103532), None),
/// Allows using imported `main` function
(active, imported_main, "1.53.0", Some(28937), None),
/// Allows associated types in inherent impls.
@@ -422,8 +431,6 @@ declare_features! (
(incomplete, inline_const_pat, "1.58.0", Some(76001), None),
/// Allows using `pointer` and `reference` in intra-doc links
(active, intra_doc_pointers, "1.51.0", Some(80896), None),
- /// Allows `#[instruction_set(_)]` attribute
- (active, isa_attribute, "1.48.0", Some(74727), None),
// Allows setting the threshold for the `large_assignments` lint.
(active, large_assignments, "1.52.0", Some(83518), None),
/// Allows `if/while p && let q = r && ...` chains.
@@ -448,8 +455,6 @@ declare_features! (
(active, naked_functions, "1.9.0", Some(32408), None),
/// Allows specifying the as-needed link modifier
(active, native_link_modifiers_as_needed, "1.53.0", Some(81490), None),
- /// Allows specifying the verbatim link modifier
- (active, native_link_modifiers_verbatim, "1.53.0", Some(81490), None),
/// Allow negative trait implementations.
(active, negative_impls, "1.44.0", Some(68318), None),
/// Allows the `!` type. Does not imply 'exhaustive_patterns' (below) any more.
@@ -501,15 +506,17 @@ declare_features! (
(active, stmt_expr_attributes, "1.6.0", Some(15701), None),
/// Allows lints part of the strict provenance effort.
(active, strict_provenance, "1.61.0", Some(95228), None),
+ /// Allows string patterns to dereference values to match them.
+ (active, string_deref_patterns, "1.67.0", Some(87121), None),
/// Allows the use of `#[target_feature]` on safe functions.
(active, target_feature_11, "1.45.0", Some(69098), None),
/// Allows using `#[thread_local]` on `static` items.
(active, thread_local, "1.0.0", Some(29594), None),
/// Allows defining `trait X = A + B;` alias items.
(active, trait_alias, "1.24.0", Some(41517), None),
- /// Allows upcasting trait objects via supertraits.
- /// Trait upcasting is casting, e.g., `dyn Foo -> dyn Bar` where `Foo: Bar`.
- (incomplete, trait_upcasting, "1.56.0", Some(65991), None),
+ /// Allows dyn upcasting trait objects via supertraits.
+ /// Dyn upcasting is casting, e.g., `dyn Foo -> dyn Bar` where `Foo: Bar`.
+ (active, trait_upcasting, "1.56.0", Some(65991), None),
/// Allows #[repr(transparent)] on unions (RFC 2645).
(active, transparent_unions, "1.37.0", Some(60405), None),
/// Allows inconsistent bounds in where clauses.
diff --git a/compiler/rustc_feature/src/builtin_attrs.rs b/compiler/rustc_feature/src/builtin_attrs.rs
index 2ead3c2c8..4b6e068db 100644
--- a/compiler/rustc_feature/src/builtin_attrs.rs
+++ b/compiler/rustc_feature/src/builtin_attrs.rs
@@ -147,7 +147,7 @@ pub enum AttributeDuplicates {
FutureWarnPreceding,
}
-/// A conveniece macro to deal with `$($expr)?`.
+/// A convenience macro to deal with `$($expr)?`.
macro_rules! or_default {
($default:expr,) => {
$default
@@ -391,6 +391,7 @@ pub const BUILTIN_ATTRIBUTES: &[BuiltinAttribute] = &[
DuplicatesOk, @only_local: true,
),
ungated!(track_caller, Normal, template!(Word), WarnFollowing),
+ ungated!(instruction_set, Normal, template!(List: "set"), ErrorPreceding),
gated!(
no_sanitize, Normal,
template!(List: "address, memory, thread"), DuplicatesOk,
@@ -452,11 +453,6 @@ pub const BUILTIN_ATTRIBUTES: &[BuiltinAttribute] = &[
optimize, Normal, template!(List: "size|speed"), ErrorPreceding, optimize_attribute,
experimental!(optimize),
),
- // RFC 2867
- gated!(
- instruction_set, Normal, template!(List: "set"), ErrorPreceding,
- isa_attribute, experimental!(instruction_set)
- ),
gated!(
ffi_returns_twice, Normal, template!(Word), WarnFollowing, experimental!(ffi_returns_twice)
@@ -555,10 +551,6 @@ pub const BUILTIN_ATTRIBUTES: &[BuiltinAttribute] = &[
rustc_attr!(rustc_deallocator, Normal, template!(Word), WarnFollowing, IMPL_DETAIL),
rustc_attr!(rustc_allocator_zeroed, Normal, template!(Word), WarnFollowing, IMPL_DETAIL),
gated!(
- alloc_error_handler, Normal, template!(Word), WarnFollowing,
- experimental!(alloc_error_handler)
- ),
- gated!(
default_lib_allocator, Normal, template!(Word), WarnFollowing, allocator_internals,
experimental!(default_lib_allocator),
),
@@ -700,6 +692,10 @@ pub const BUILTIN_ATTRIBUTES: &[BuiltinAttribute] = &[
"#[rustc_allow_incoherent_impl] has to be added to all impl items of an incoherent inherent impl."
),
rustc_attr!(
+ rustc_deny_explicit_impl, AttributeType::Normal, template!(Word), ErrorFollowing, @only_local: false,
+ "#[rustc_deny_explicit_impl] enforces that a trait can have no user-provided impls"
+ ),
+ rustc_attr!(
rustc_has_incoherent_inherent_impls, AttributeType::Normal, template!(Word), ErrorFollowing,
"#[rustc_has_incoherent_inherent_impls] allows the addition of incoherent inherent impls for \
the given type by annotating all impl items with #[rustc_allow_incoherent_impl]."
@@ -818,6 +814,10 @@ pub const BUILTIN_ATTRIBUTES: &[BuiltinAttribute] = &[
rustc_attr!(TEST, rustc_polymorphize_error, Normal, template!(Word), WarnFollowing),
rustc_attr!(TEST, rustc_def_path, Normal, template!(Word), WarnFollowing),
rustc_attr!(TEST, rustc_mir, Normal, template!(List: "arg1, arg2, ..."), DuplicatesOk),
+ gated!(
+ custom_mir, Normal, template!(List: r#"dialect = "...", phase = "...""#),
+ ErrorFollowing, "the `#[custom_mir]` attribute is just used for the Rust test suite",
+ ),
rustc_attr!(TEST, rustc_dump_program_clauses, Normal, template!(Word), WarnFollowing),
rustc_attr!(TEST, rustc_dump_env_program_clauses, Normal, template!(Word), WarnFollowing),
rustc_attr!(TEST, rustc_object_lifetime_default, Normal, template!(Word), WarnFollowing),
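
With `isa_attribute` accepted (see the `rustc_feature` hunks above), `#[instruction_set(..)]` moves from the `gated!` table to `ungated!`. A hedged usage sketch of the now-stable attribute; it is only meaningful on an ARM target that supports both encodings, so treat it as a target-specific illustration rather than code from this patch:

```rust
// Selects the instruction encoding used for these functions (RFC 2867).
// Accepted on stable from Rust 1.67; on a non-ARM target the attribute is
// rejected, so this sketch assumes an ARM target.
#[instruction_set(arm::a32)]
fn foo_a32() {}

#[instruction_set(arm::t32)]
fn foo_t32() {}

fn main() {
    foo_a32();
    foo_t32();
}
```
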
diff --git a/compiler/rustc_fs_util/src/lib.rs b/compiler/rustc_fs_util/src/lib.rs
index 63998bb6b..a7dfce3b9 100644
--- a/compiler/rustc_fs_util/src/lib.rs
+++ b/compiler/rustc_fs_util/src/lib.rs
@@ -65,7 +65,7 @@ pub enum LinkOrCopy {
pub fn link_or_copy<P: AsRef<Path>, Q: AsRef<Path>>(p: P, q: Q) -> io::Result<LinkOrCopy> {
let p = p.as_ref();
let q = q.as_ref();
- match fs::remove_file(&q) {
+ match fs::remove_file(q) {
Ok(()) => (),
Err(err) if err.kind() == io::ErrorKind::NotFound => (),
Err(err) => return Err(err),
diff --git a/compiler/rustc_graphviz/src/lib.rs b/compiler/rustc_graphviz/src/lib.rs
index 3c1bb5532..1f8268cc1 100644
--- a/compiler/rustc_graphviz/src/lib.rs
+++ b/compiler/rustc_graphviz/src/lib.rs
@@ -410,7 +410,7 @@ impl<'a> Id<'a> {
}
pub fn as_slice(&'a self) -> &'a str {
- &*self.name
+ &self.name
}
}
@@ -471,7 +471,11 @@ pub trait Labeller<'a> {
/// Escape tags in such a way that it is suitable for inclusion in a
/// Graphviz HTML label.
pub fn escape_html(s: &str) -> String {
- s.replace('&', "&amp;").replace('\"', "&quot;").replace('<', "&lt;").replace('>', "&gt;")
+ s.replace('&', "&amp;")
+ .replace('\"', "&quot;")
+ .replace('<', "&lt;")
+ .replace('>', "&gt;")
+ .replace('\n', "<br align=\"left\"/>")
}
impl<'a> LabelText<'a> {
@@ -511,7 +515,7 @@ impl<'a> LabelText<'a> {
pub fn to_dot_string(&self) -> String {
match *self {
LabelStr(ref s) => format!("\"{}\"", s.escape_default()),
- EscStr(ref s) => format!("\"{}\"", LabelText::escape_str(&s)),
+ EscStr(ref s) => format!("\"{}\"", LabelText::escape_str(s)),
HtmlStr(ref s) => format!("<{}>", s),
}
}
@@ -525,7 +529,7 @@ impl<'a> LabelText<'a> {
EscStr(s) => s,
LabelStr(s) => {
if s.contains('\\') {
- (&*s).escape_default().to_string().into()
+ s.escape_default().to_string().into()
} else {
s
}
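
The `escape_html` change additionally rewrites newlines into Graphviz's left-aligned `<br/>` form, so multi-line labels survive HTML escaping. A standalone re-implementation of the same replacement chain, just to show the resulting string (assumption: this mirrors the patched function; the real one lives in `rustc_graphviz`):

```rust
/// Mirror of the patched `escape_html`: escape HTML metacharacters, then
/// turn newlines into Graphviz left-aligned line breaks.
fn escape_html(s: &str) -> String {
    s.replace('&', "&amp;")
        .replace('\"', "&quot;")
        .replace('<', "&lt;")
        .replace('>', "&gt;")
        .replace('\n', "<br align=\"left\"/>")
}

fn main() {
    let label = "a < b & \"c\"\nsecond line";
    assert_eq!(
        escape_html(label),
        "a &lt; b &amp; &quot;c&quot;<br align=\"left\"/>second line"
    );
    println!("{}", escape_html(label));
}
```
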
diff --git a/compiler/rustc_hir/src/arena.rs b/compiler/rustc_hir/src/arena.rs
index 44335b7f4..c89e7eb75 100644
--- a/compiler/rustc_hir/src/arena.rs
+++ b/compiler/rustc_hir/src/arena.rs
@@ -39,6 +39,7 @@ macro_rules! arena_types {
[] param: rustc_hir::Param<'tcx>,
[] pat: rustc_hir::Pat<'tcx>,
[] path: rustc_hir::Path<'tcx>,
+ [] use_path: rustc_hir::UsePath<'tcx>,
[] path_segment: rustc_hir::PathSegment<'tcx>,
[] poly_trait_ref: rustc_hir::PolyTraitRef<'tcx>,
[] qpath: rustc_hir::QPath<'tcx>,
diff --git a/compiler/rustc_hir/src/def.rs b/compiler/rustc_hir/src/def.rs
index 4ef4aad90..149cf4ece 100644
--- a/compiler/rustc_hir/src/def.rs
+++ b/compiler/rustc_hir/src/def.rs
@@ -28,8 +28,6 @@ pub enum CtorKind {
Fn,
/// Constructor constant automatically created by a unit struct/variant.
Const,
- /// Unusable name in value namespace created by a struct variant.
- Fictive,
}
/// An attribute that is not a macro; e.g., `#[inline]` or `#[rustfmt::skip]`.
@@ -132,13 +130,9 @@ impl DefKind {
DefKind::Variant => "variant",
DefKind::Ctor(CtorOf::Variant, CtorKind::Fn) => "tuple variant",
DefKind::Ctor(CtorOf::Variant, CtorKind::Const) => "unit variant",
- DefKind::Ctor(CtorOf::Variant, CtorKind::Fictive) => "struct variant",
DefKind::Struct => "struct",
DefKind::Ctor(CtorOf::Struct, CtorKind::Fn) => "tuple struct",
DefKind::Ctor(CtorOf::Struct, CtorKind::Const) => "unit struct",
- DefKind::Ctor(CtorOf::Struct, CtorKind::Fictive) => {
- panic!("impossible struct constructor")
- }
DefKind::OpaqueTy => "opaque type",
DefKind::ImplTraitPlaceholder => "opaque type in trait",
DefKind::TyAlias => "type alias",
@@ -562,19 +556,11 @@ impl<T> PerNS<Option<T>> {
}
impl CtorKind {
- pub fn from_ast(vdata: &ast::VariantData) -> CtorKind {
- match *vdata {
- ast::VariantData::Tuple(..) => CtorKind::Fn,
- ast::VariantData::Unit(..) => CtorKind::Const,
- ast::VariantData::Struct(..) => CtorKind::Fictive,
- }
- }
-
- pub fn from_hir(vdata: &hir::VariantData<'_>) -> CtorKind {
+ pub fn from_ast(vdata: &ast::VariantData) -> Option<(CtorKind, NodeId)> {
match *vdata {
- hir::VariantData::Tuple(..) => CtorKind::Fn,
- hir::VariantData::Unit(..) => CtorKind::Const,
- hir::VariantData::Struct(..) => CtorKind::Fictive,
+ ast::VariantData::Tuple(_, node_id) => Some((CtorKind::Fn, node_id)),
+ ast::VariantData::Unit(node_id) => Some((CtorKind::Const, node_id)),
+ ast::VariantData::Struct(..) => None,
}
}
}
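
With `CtorKind::Fictive` removed, `CtorKind::from_ast` now answers "does this variant data have a constructor at all?" by returning `Option<(CtorKind, NodeId)>`, and struct variants simply yield `None` (the HIR side gains `VariantData::ctor` instead, see the `hir.rs` hunks below). A simplified, self-contained sketch of the new calling pattern, with stand-in types rather than the real `rustc_ast` ones:

```rust
// Stand-ins for ast::VariantData / NodeId, just to show the new shape.
type NodeId = u32;

enum VariantData {
    Struct,        // braced variant: no constructor in the value namespace
    Tuple(NodeId), // tuple variant: `Fn` constructor
    Unit(NodeId),  // unit variant: `Const` constructor
}

#[derive(Debug, Clone, Copy, PartialEq)]
enum CtorKind {
    Fn,
    Const,
}

/// New-style `from_ast`: struct variants have no ctor, instead of the old
/// `CtorKind::Fictive` marker value.
fn ctor_from_ast(vdata: &VariantData) -> Option<(CtorKind, NodeId)> {
    match *vdata {
        VariantData::Tuple(node_id) => Some((CtorKind::Fn, node_id)),
        VariantData::Unit(node_id) => Some((CtorKind::Const, node_id)),
        VariantData::Struct => None,
    }
}

fn main() {
    assert_eq!(ctor_from_ast(&VariantData::Tuple(7)), Some((CtorKind::Fn, 7)));
    assert_eq!(ctor_from_ast(&VariantData::Unit(8)), Some((CtorKind::Const, 8)));
    assert!(ctor_from_ast(&VariantData::Struct).is_none());
}
```
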
diff --git a/compiler/rustc_hir/src/definitions.rs b/compiler/rustc_hir/src/definitions.rs
index d85ac960f..dd37efb69 100644
--- a/compiler/rustc_hir/src/definitions.rs
+++ b/compiler/rustc_hir/src/definitions.rs
@@ -368,10 +368,6 @@ impl Definitions {
LocalDefId { local_def_index: self.table.allocate(key, def_path_hash) }
}
- pub fn iter_local_def_id(&self) -> impl Iterator<Item = LocalDefId> + '_ {
- self.table.def_path_hashes.indices().map(|local_def_index| LocalDefId { local_def_index })
- }
-
#[inline(always)]
pub fn local_def_path_hash_to_def_id(
&self,
@@ -389,6 +385,10 @@ impl Definitions {
pub fn def_path_hash_to_def_index_map(&self) -> &DefPathHashMap {
&self.table.def_path_hash_to_index
}
+
+ pub fn num_definitions(&self) -> usize {
+ self.table.def_path_hashes.len()
+ }
}
#[derive(Copy, Clone, PartialEq, Debug)]
diff --git a/compiler/rustc_hir/src/hir.rs b/compiler/rustc_hir/src/hir.rs
index ef00c1ffc..8bc022e1e 100644
--- a/compiler/rustc_hir/src/hir.rs
+++ b/compiler/rustc_hir/src/hir.rs
@@ -29,15 +29,16 @@ use std::fmt;
#[derive(Debug, Copy, Clone, Encodable, HashStable_Generic)]
pub struct Lifetime {
pub hir_id: HirId,
- pub span: Span,
/// Either "`'a`", referring to a named lifetime definition,
- /// or "``" (i.e., `kw::Empty`), for elision placeholders.
+ /// `'_` referring to an anonymous lifetime (either explicitly `'_` or `&type`),
+ /// or "``" (i.e., `kw::Empty`) when appearing in path.
///
- /// HIR lowering inserts these placeholders in type paths that
- /// refer to type definitions needing lifetime parameters,
- /// `&T` and `&mut T`, and trait objects without `... + 'a`.
- pub name: LifetimeName,
+ /// See `Lifetime::suggestion_position` for practical use.
+ pub ident: Ident,
+
+ /// Semantics of this lifetime.
+ pub res: LifetimeName,
}
#[derive(Debug, Clone, PartialEq, Eq, Encodable, Hash, Copy)]
@@ -88,7 +89,7 @@ impl ParamName {
#[derive(HashStable_Generic)]
pub enum LifetimeName {
/// User-given names or fresh (synthetic) names.
- Param(LocalDefId, ParamName),
+ Param(LocalDefId),
/// Implicit lifetime in a context like `dyn Foo`. This is
/// distinguished from implicit lifetimes elsewhere because the
@@ -116,25 +117,6 @@ pub enum LifetimeName {
}
impl LifetimeName {
- pub fn ident(&self) -> Ident {
- match *self {
- LifetimeName::ImplicitObjectLifetimeDefault | LifetimeName::Error => Ident::empty(),
- LifetimeName::Infer => Ident::with_dummy_span(kw::UnderscoreLifetime),
- LifetimeName::Static => Ident::with_dummy_span(kw::StaticLifetime),
- LifetimeName::Param(_, param_name) => param_name.ident(),
- }
- }
-
- pub fn is_anonymous(&self) -> bool {
- match *self {
- LifetimeName::ImplicitObjectLifetimeDefault
- | LifetimeName::Infer
- | LifetimeName::Param(_, ParamName::Fresh)
- | LifetimeName::Error => true,
- LifetimeName::Static | LifetimeName::Param(..) => false,
- }
- }
-
pub fn is_elided(&self) -> bool {
match self {
LifetimeName::ImplicitObjectLifetimeDefault | LifetimeName::Infer => true,
@@ -146,34 +128,54 @@ impl LifetimeName {
LifetimeName::Error | LifetimeName::Param(..) | LifetimeName::Static => false,
}
}
-
- fn is_static(&self) -> bool {
- self == &LifetimeName::Static
- }
-
- pub fn normalize_to_macros_2_0(&self) -> LifetimeName {
- match *self {
- LifetimeName::Param(def_id, param_name) => {
- LifetimeName::Param(def_id, param_name.normalize_to_macros_2_0())
- }
- lifetime_name => lifetime_name,
- }
- }
}
impl fmt::Display for Lifetime {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
- self.name.ident().fmt(f)
+ if self.ident.name != kw::Empty { self.ident.name.fmt(f) } else { "'_".fmt(f) }
}
}
+pub enum LifetimeSuggestionPosition {
+ /// The user wrote `'a` or `'_`.
+ Normal,
+ /// The user wrote `&type` or `&mut type`.
+ Ampersand,
+ /// The user wrote `Path` and omitted the `<'_>`.
+ ElidedPath,
+ /// The user wrote `Path<T>`, and omitted the `'_,`.
+ ElidedPathArgument,
+ /// The user wrote `dyn Trait` and omitted the `+ '_`.
+ ObjectDefault,
+}
+
impl Lifetime {
pub fn is_elided(&self) -> bool {
- self.name.is_elided()
+ self.res.is_elided()
+ }
+
+ pub fn is_anonymous(&self) -> bool {
+ self.ident.name == kw::Empty || self.ident.name == kw::UnderscoreLifetime
+ }
+
+ pub fn suggestion_position(&self) -> (LifetimeSuggestionPosition, Span) {
+ if self.ident.name == kw::Empty {
+ if self.ident.span.is_empty() {
+ (LifetimeSuggestionPosition::ElidedPathArgument, self.ident.span)
+ } else {
+ (LifetimeSuggestionPosition::ElidedPath, self.ident.span.shrink_to_hi())
+ }
+ } else if self.res == LifetimeName::ImplicitObjectLifetimeDefault {
+ (LifetimeSuggestionPosition::ObjectDefault, self.ident.span)
+ } else if self.ident.span.is_empty() {
+ (LifetimeSuggestionPosition::Ampersand, self.ident.span)
+ } else {
+ (LifetimeSuggestionPosition::Normal, self.ident.span)
+ }
}
pub fn is_static(&self) -> bool {
- self.name.is_static()
+ self.res == LifetimeName::Static
}
}
@@ -181,14 +183,17 @@ impl Lifetime {
/// `std::cmp::PartialEq`. It's represented as a sequence of identifiers,
/// along with a bunch of supporting information.
#[derive(Debug, HashStable_Generic)]
-pub struct Path<'hir> {
+pub struct Path<'hir, R = Res> {
pub span: Span,
/// The resolution for the path.
- pub res: Res,
+ pub res: R,
/// The segments in the path: the things separated by `::`.
pub segments: &'hir [PathSegment<'hir>],
}
+/// Up to three resolutions for type, value and macro namespaces.
+pub type UsePath<'hir> = Path<'hir, SmallVec<[Res; 3]>>;
+
impl Path<'_> {
pub fn is_global(&self) -> bool {
!self.segments.is_empty() && self.segments[0].ident.name == kw::PathRoot
@@ -267,7 +272,7 @@ pub enum GenericArg<'hir> {
impl GenericArg<'_> {
pub fn span(&self) -> Span {
match self {
- GenericArg::Lifetime(l) => l.span,
+ GenericArg::Lifetime(l) => l.ident.span,
GenericArg::Type(t) => t.span,
GenericArg::Const(c) => c.span,
GenericArg::Infer(i) => i.span,
@@ -284,7 +289,7 @@ impl GenericArg<'_> {
}
pub fn is_synthetic(&self) -> bool {
- matches!(self, GenericArg::Lifetime(lifetime) if lifetime.name.ident() == Ident::empty())
+ matches!(self, GenericArg::Lifetime(lifetime) if lifetime.ident == Ident::empty())
}
pub fn descr(&self) -> &'static str {
@@ -388,6 +393,8 @@ impl<'hir> GenericArgs<'hir> {
}
#[inline]
+ /// This function returns the number of type and const generic params.
+ /// It should only be used for diagnostics.
pub fn num_generic_params(&self) -> usize {
self.args.iter().filter(|arg| !matches!(arg, GenericArg::Lifetime(_))).count()
}
@@ -444,7 +451,7 @@ impl GenericBound<'_> {
match self {
GenericBound::Trait(t, ..) => t.span,
GenericBound::LangItemTrait(_, span, ..) => *span,
- GenericBound::Outlives(l) => l.span,
+ GenericBound::Outlives(l) => l.ident.span,
}
}
}
@@ -485,6 +492,7 @@ pub enum GenericParamKind<'hir> {
#[derive(Debug, HashStable_Generic)]
pub struct GenericParam<'hir> {
pub hir_id: HirId,
+ pub def_id: LocalDefId,
pub name: ParamName,
pub span: Span,
pub pure_wrt_drop: bool,
@@ -557,6 +565,19 @@ impl<'hir> Generics<'hir> {
}
/// If there are generic parameters, return where to introduce a new one.
+ pub fn span_for_lifetime_suggestion(&self) -> Option<Span> {
+ if let Some(first) = self.params.first()
+ && self.span.contains(first.span)
+ {
+ // `fn foo<A>(t: impl Trait)`
+ // ^ suggest `'a, ` here
+ Some(first.span.shrink_to_lo())
+ } else {
+ None
+ }
+ }
+
+ /// If there are generic parameters, return where to introduce a new one.
pub fn span_for_param_suggestion(&self) -> Option<Span> {
if self.params.iter().any(|p| self.span.contains(p.span)) {
// `fn foo<A>(t: impl Trait)`
@@ -762,10 +783,7 @@ pub struct WhereRegionPredicate<'hir> {
impl<'hir> WhereRegionPredicate<'hir> {
/// Returns `true` if `param_def_id` matches the `lifetime` of this predicate.
pub fn is_param_bound(&self, param_def_id: LocalDefId) -> bool {
- match self.lifetime.name {
- LifetimeName::Param(id, _) => id == param_def_id,
- _ => false,
- }
+ self.lifetime.res == LifetimeName::Param(param_def_id)
}
}
@@ -809,7 +827,7 @@ impl<'tcx> AttributeMap<'tcx> {
pub struct OwnerNodes<'tcx> {
/// Pre-computed hash of the full HIR.
pub hash_including_bodies: Fingerprint,
- /// Pre-computed hash of the item signature, sithout recursing into the body.
+ /// Pre-computed hash of the item signature, without recursing into the body.
pub hash_without_bodies: Fingerprint,
/// Full HIR for the current owner.
// The zeroth node's parent should never be accessed: the owner's parent is computed by the
@@ -919,12 +937,16 @@ pub struct Crate<'hir> {
#[derive(Debug, HashStable_Generic)]
pub struct Closure<'hir> {
+ pub def_id: LocalDefId,
pub binder: ClosureBinder,
pub capture_clause: CaptureBy,
pub bound_generic_params: &'hir [GenericParam<'hir>],
pub fn_decl: &'hir FnDecl<'hir>,
pub body: BodyId,
+ /// The span of the declaration block: 'move |...| -> ...'
pub fn_decl_span: Span,
+ /// The span of the argument block `|...|`
+ pub fn_arg_span: Option<Span>,
pub movability: Option<Movability>,
}
@@ -965,8 +987,8 @@ pub struct Pat<'hir> {
pub hir_id: HirId,
pub kind: PatKind<'hir>,
pub span: Span,
- // Whether to use default binding modes.
- // At present, this is false only for destructuring assignment.
+ /// Whether to use default binding modes.
+ /// At present, this is false only for destructuring assignment.
pub default_binding_modes: bool,
}
@@ -1074,7 +1096,7 @@ impl fmt::Display for RangeEnd {
pub struct DotDotPos(u32);
impl DotDotPos {
- // Panics if n >= u32::MAX.
+ /// Panics if n >= u32::MAX.
pub fn new(n: Option<usize>) -> Self {
match n {
Some(n) => {
@@ -1510,9 +1532,9 @@ pub enum AsyncGeneratorKind {
impl fmt::Display for AsyncGeneratorKind {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
f.write_str(match self {
- AsyncGeneratorKind::Block => "`async` block",
- AsyncGeneratorKind::Closure => "`async` closure body",
- AsyncGeneratorKind::Fn => "`async fn` body",
+ AsyncGeneratorKind::Block => "async block",
+ AsyncGeneratorKind::Closure => "async closure body",
+ AsyncGeneratorKind::Fn => "async fn body",
})
}
}
@@ -1613,7 +1635,7 @@ pub enum ArrayLen {
impl ArrayLen {
pub fn hir_id(&self) -> HirId {
match self {
- &ArrayLen::Infer(hir_id, _) | &ArrayLen::Body(AnonConst { hir_id, body: _ }) => hir_id,
+ &ArrayLen::Infer(hir_id, _) | &ArrayLen::Body(AnonConst { hir_id, .. }) => hir_id,
}
}
}
@@ -1625,10 +1647,11 @@ impl ArrayLen {
/// explicit discriminant values for enum variants.
///
/// You can check if this anon const is a default in a const param
-/// `const N: usize = { ... }` with `tcx.hir().opt_const_param_default_param_hir_id(..)`
+/// `const N: usize = { ... }` with `tcx.hir().opt_const_param_default_param_def_id(..)`
#[derive(Copy, Clone, PartialEq, Eq, Encodable, Debug, HashStable_Generic)]
pub struct AnonConst {
pub hir_id: HirId,
+ pub def_id: LocalDefId,
pub body: BodyId,
}
@@ -1677,10 +1700,10 @@ impl Expr<'_> {
}
}
- // Whether this looks like a place expr, without checking for deref
- // adjustments.
- // This will return `true` in some potentially surprising cases such as
- // `CONSTANT.field`.
+ /// Whether this looks like a place expr, without checking for deref
+ /// adjustments.
+ /// This will return `true` in some potentially surprising cases such as
+ /// `CONSTANT.field`.
pub fn is_syntactic_place_expr(&self) -> bool {
self.is_place_expr(|_| true)
}
@@ -1821,7 +1844,7 @@ impl Expr<'_> {
}
}
- // To a first-order approximation, is this a pattern
+ /// To a first-order approximation, is this a pattern?
pub fn is_approximately_pattern(&self) -> bool {
match &self.kind {
ExprKind::Box(_)
@@ -2143,11 +2166,11 @@ impl fmt::Display for LoopIdError {
#[derive(Copy, Clone, Encodable, Debug, HashStable_Generic)]
pub struct Destination {
- // This is `Some(_)` iff there is an explicit user-specified `label
+ /// This is `Some(_)` iff there is an explicit user-specified 'label
pub label: Option<Label>,
- // These errors are caught and then reported during the diagnostics pass in
- // librustc_passes/loops.rs
+ /// These errors are caught and then reported during the diagnostics pass in
+ /// `librustc_passes/loops.rs`
pub target_id: Result<HirId, LoopIdError>,
}
@@ -2318,7 +2341,7 @@ pub enum ImplItemKind<'hir> {
Type(&'hir Ty<'hir>),
}
-// The name of the associated type for `Fn` return types.
+/// The name of the associated type for `Fn` return types.
pub const FN_OUTPUT_NAME: Symbol = sym::Output;
/// Bind a type to an associated type (i.e., `A = Foo`).
@@ -2414,7 +2437,7 @@ impl<'hir> Ty<'hir> {
pub fn peel_refs(&self) -> &Self {
let mut final_ty = self;
while let TyKind::Rptr(_, MutTy { ty, .. }) = &final_ty.kind {
- final_ty = &ty;
+ final_ty = ty;
}
final_ty
}
@@ -2683,6 +2706,8 @@ pub struct FnDecl<'hir> {
pub c_variadic: bool,
/// Does the function have an implicit self?
pub implicit_self: ImplicitSelfKind,
+ /// Is lifetime elision allowed.
+ pub lifetime_elision_allowed: bool,
}
/// Represents what type of implicit self a function has, if any.
@@ -2715,6 +2740,12 @@ pub enum IsAsync {
NotAsync,
}
+impl IsAsync {
+ pub fn is_async(self) -> bool {
+ self == IsAsync::Async
+ }
+}
+
#[derive(Copy, Clone, PartialEq, Eq, Hash, Debug, Encodable, Decodable, HashStable_Generic)]
pub enum Defaultness {
Default { has_value: bool },
@@ -2796,7 +2827,8 @@ pub struct Variant<'hir> {
/// Name of the variant.
pub ident: Ident,
/// Id of the variant (not the constructor, see `VariantData::ctor_hir_id()`).
- pub id: HirId,
+ pub hir_id: HirId,
+ pub def_id: LocalDefId,
/// Fields and constructor id of the variant.
pub data: VariantData<'hir>,
/// Explicit discriminant (e.g., `Foo = 1`).
@@ -2863,6 +2895,7 @@ pub struct FieldDef<'hir> {
pub vis_span: Span,
pub ident: Ident,
pub hir_id: HirId,
+ pub def_id: LocalDefId,
pub ty: &'hir Ty<'hir>,
}
@@ -2884,11 +2917,11 @@ pub enum VariantData<'hir> {
/// A tuple variant.
///
/// E.g., `Bar(..)` as in `enum Foo { Bar(..) }`.
- Tuple(&'hir [FieldDef<'hir>], HirId),
+ Tuple(&'hir [FieldDef<'hir>], HirId, LocalDefId),
/// A unit variant.
///
/// E.g., `Bar = ..` as in `enum Foo { Bar = .. }`.
- Unit(HirId),
+ Unit(HirId, LocalDefId),
}
impl<'hir> VariantData<'hir> {
@@ -2900,13 +2933,30 @@ impl<'hir> VariantData<'hir> {
}
}
- /// Return the `HirId` of this variant's constructor, if it has one.
- pub fn ctor_hir_id(&self) -> Option<HirId> {
+ pub fn ctor(&self) -> Option<(CtorKind, HirId, LocalDefId)> {
match *self {
- VariantData::Struct(_, _) => None,
- VariantData::Tuple(_, hir_id) | VariantData::Unit(hir_id) => Some(hir_id),
+ VariantData::Tuple(_, hir_id, def_id) => Some((CtorKind::Fn, hir_id, def_id)),
+ VariantData::Unit(hir_id, def_id) => Some((CtorKind::Const, hir_id, def_id)),
+ VariantData::Struct(..) => None,
}
}
+
+ #[inline]
+ pub fn ctor_kind(&self) -> Option<CtorKind> {
+ self.ctor().map(|(kind, ..)| kind)
+ }
+
+ /// Return the `HirId` of this variant's constructor, if it has one.
+ #[inline]
+ pub fn ctor_hir_id(&self) -> Option<HirId> {
+ self.ctor().map(|(_, hir_id, _)| hir_id)
+ }
+
+ /// Return the `LocalDefId` of this variant's constructor, if it has one.
+ #[inline]
+ pub fn ctor_def_id(&self) -> Option<LocalDefId> {
+ self.ctor().map(|(.., def_id)| def_id)
+ }
}
// The bodies for items are stored "out of line", in a separate
@@ -3024,7 +3074,7 @@ pub enum ItemKind<'hir> {
/// or just
///
/// `use foo::bar::baz;` (with `as baz` implicitly on the right).
- Use(&'hir Path<'hir>, UseKind),
+ Use(&'hir UsePath<'hir>, UseKind),
/// A `static` item.
Static(&'hir Ty<'hir>, Mutability, BodyId),
@@ -3217,7 +3267,7 @@ pub enum ForeignItemKind<'hir> {
/// A variable captured by a closure.
#[derive(Debug, Copy, Clone, Encodable, HashStable_Generic)]
pub struct Upvar {
- // First span where it is accessed (there can be multiple).
+ /// First span where it is accessed (there can be multiple).
pub span: Span,
}
@@ -3423,7 +3473,7 @@ impl<'hir> Node<'hir> {
| Node::Variant(Variant { ident, .. })
| Node::Item(Item { ident, .. })
| Node::PathSegment(PathSegment { ident, .. }) => Some(*ident),
- Node::Lifetime(lt) => Some(lt.name.ident()),
+ Node::Lifetime(lt) => Some(lt.ident),
Node::GenericParam(p) => Some(p.name.ident()),
Node::TypeBinding(b) => Some(b.ident),
Node::Param(..)
@@ -3530,7 +3580,7 @@ impl<'hir> Node<'hir> {
/// Get the fields for the tuple-constructor,
/// if this node is a tuple constructor, otherwise None
pub fn tuple_fields(&self) -> Option<&'hir [FieldDef<'hir>]> {
- if let Node::Ctor(&VariantData::Tuple(fields, _)) = self { Some(fields) } else { None }
+ if let Node::Ctor(&VariantData::Tuple(fields, _, _)) = self { Some(fields) } else { None }
}
}
@@ -3546,7 +3596,7 @@ mod size_asserts {
static_assert_size!(FnDecl<'_>, 40);
static_assert_size!(ForeignItem<'_>, 72);
static_assert_size!(ForeignItemKind<'_>, 40);
- static_assert_size!(GenericArg<'_>, 24);
+ static_assert_size!(GenericArg<'_>, 32);
static_assert_size!(GenericBound<'_>, 48);
static_assert_size!(Generics<'_>, 56);
static_assert_size!(Impl<'_>, 80);
@@ -3564,9 +3614,16 @@ mod size_asserts {
static_assert_size!(Res, 12);
static_assert_size!(Stmt<'_>, 32);
static_assert_size!(StmtKind<'_>, 16);
+ // tidy-alphabetical-end
+ // FIXME: move the tidy directive to the end after the next bootstrap bump
+ #[cfg(bootstrap)]
static_assert_size!(TraitItem<'_>, 88);
+ #[cfg(not(bootstrap))]
+ static_assert_size!(TraitItem<'_>, 80);
+ #[cfg(bootstrap)]
static_assert_size!(TraitItemKind<'_>, 48);
+ #[cfg(not(bootstrap))]
+ static_assert_size!(TraitItemKind<'_>, 40);
static_assert_size!(Ty<'_>, 48);
static_assert_size!(TyKind<'_>, 32);
- // tidy-alphabetical-end
}
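
The `hir::Lifetime` rework above splits the lifetime into the user-written `ident` and a semantic `res`, with the `Display` impl printing `'_` when the identifier is empty. A toy model of just that display rule (stand-in types; `kw::Empty` is modelled here as the empty string, not the real interned symbol):

```rust
use std::fmt;

/// Toy model of the new `hir::Lifetime`: only the written identifier matters
/// for display purposes.
struct Lifetime {
    ident: String,
}

impl fmt::Display for Lifetime {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        // Mirrors the patched impl: fall back to `'_` for elided lifetimes.
        if !self.ident.is_empty() { self.ident.fmt(f) } else { "'_".fmt(f) }
    }
}

fn main() {
    let named = Lifetime { ident: "'a".to_string() };
    let elided = Lifetime { ident: String::new() };
    assert_eq!(named.to_string(), "'a");
    assert_eq!(elided.to_string(), "'_");
    println!("{named} vs {elided}");
}
```
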
diff --git a/compiler/rustc_hir/src/hir_id.rs b/compiler/rustc_hir/src/hir_id.rs
index 752f760ea..060f40919 100644
--- a/compiler/rustc_hir/src/hir_id.rs
+++ b/compiler/rustc_hir/src/hir_id.rs
@@ -1,5 +1,5 @@
-use crate::def_id::{DefId, LocalDefId, CRATE_DEF_ID};
-use rustc_data_structures::stable_hasher::{HashStable, StableHasher, ToStableHashKey};
+use crate::def_id::{DefId, DefIndex, LocalDefId, CRATE_DEF_ID};
+use rustc_data_structures::stable_hasher::{HashStable, StableHasher, StableOrd, ToStableHashKey};
use rustc_span::{def_id::DefPathHash, HashStableContext};
use std::fmt;
@@ -22,6 +22,18 @@ impl OwnerId {
}
}
+impl rustc_index::vec::Idx for OwnerId {
+ #[inline]
+ fn new(idx: usize) -> Self {
+ OwnerId { def_id: LocalDefId { local_def_index: DefIndex::from_usize(idx) } }
+ }
+
+ #[inline]
+ fn index(self) -> usize {
+ self.def_id.local_def_index.as_usize()
+ }
+}
+
impl<CTX: HashStableContext> HashStable<CTX> for OwnerId {
#[inline]
fn hash_stable(&self, hcx: &mut CTX, hasher: &mut StableHasher) {
@@ -104,7 +116,7 @@ impl Ord for HirId {
impl PartialOrd for HirId {
fn partial_cmp(&self, other: &Self) -> Option<std::cmp::Ordering> {
- Some(self.cmp(&other))
+ Some(self.cmp(other))
}
}
@@ -134,6 +146,10 @@ impl ItemLocalId {
pub const INVALID: ItemLocalId = ItemLocalId::MAX;
}
+// Safety: Ord is implement as just comparing the LocalItemId's numerical
+// values and these are not changed by (de-)serialization.
+unsafe impl StableOrd for ItemLocalId {}
+
/// The `HirId` corresponding to `CRATE_NODE_ID` and `CRATE_DEF_ID`.
pub const CRATE_HIR_ID: HirId =
HirId { owner: OwnerId { def_id: CRATE_DEF_ID }, local_id: ItemLocalId::from_u32(0) };
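
Implementing `rustc_index::vec::Idx` for `OwnerId` lets per-owner tables be keyed by `OwnerId` directly (e.g. in an `IndexVec`). A generic sketch of the same `new`/`index` contract with a homemade index type and a minimal index-keyed vector, since the real `rustc_index` crate is internal to the compiler:

```rust
/// The contract `rustc_index::vec::Idx` requires: a round trip through `usize`.
trait Idx: Copy {
    fn new(idx: usize) -> Self;
    fn index(self) -> usize;
}

/// Stand-in for `OwnerId`: a newtype over a dense index.
#[derive(Copy, Clone, Debug, PartialEq)]
struct OwnerId(usize);

impl Idx for OwnerId {
    fn new(idx: usize) -> Self {
        OwnerId(idx)
    }
    fn index(self) -> usize {
        self.0
    }
}

/// Minimal `IndexVec`-like wrapper: a Vec addressed by a typed index.
struct IndexVec<I: Idx, T> {
    raw: Vec<T>,
    _marker: std::marker::PhantomData<I>,
}

impl<I: Idx, T> IndexVec<I, T> {
    fn new() -> Self {
        IndexVec { raw: Vec::new(), _marker: std::marker::PhantomData }
    }
    fn push(&mut self, value: T) -> I {
        let idx = I::new(self.raw.len());
        self.raw.push(value);
        idx
    }
    fn get(&self, idx: I) -> Option<&T> {
        self.raw.get(idx.index())
    }
}

fn main() {
    let mut names: IndexVec<OwnerId, &str> = IndexVec::new();
    let a = names.push("fn main");
    let b = names.push("struct Foo");
    assert_eq!(names.get(a), Some(&"fn main"));
    assert_eq!(names.get(b), Some(&"struct Foo"));
}
```
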
diff --git a/compiler/rustc_hir/src/intravisit.rs b/compiler/rustc_hir/src/intravisit.rs
index be77e6fd3..938ace2c7 100644
--- a/compiler/rustc_hir/src/intravisit.rs
+++ b/compiler/rustc_hir/src/intravisit.rs
@@ -358,13 +358,16 @@ pub trait Visitor<'v>: Sized {
fn visit_where_predicate(&mut self, predicate: &'v WherePredicate<'v>) {
walk_where_predicate(self, predicate)
}
+ fn visit_fn_ret_ty(&mut self, ret_ty: &'v FnRetTy<'v>) {
+ walk_fn_ret_ty(self, ret_ty)
+ }
fn visit_fn_decl(&mut self, fd: &'v FnDecl<'v>) {
walk_fn_decl(self, fd)
}
fn visit_fn(&mut self, fk: FnKind<'v>, fd: &'v FnDecl<'v>, b: BodyId, _: Span, id: HirId) {
walk_fn(self, fk, fd, b, id)
}
- fn visit_use(&mut self, path: &'v Path<'v>, hir_id: HirId) {
+ fn visit_use(&mut self, path: &'v UsePath<'v>, hir_id: HirId) {
walk_use(self, path, hir_id)
}
fn visit_trait_item(&mut self, ti: &'v TraitItem<'v>) {
@@ -410,12 +413,7 @@ pub trait Visitor<'v>: Sized {
walk_inf(self, inf);
}
fn visit_generic_arg(&mut self, generic_arg: &'v GenericArg<'v>) {
- match generic_arg {
- GenericArg::Lifetime(lt) => self.visit_lifetime(lt),
- GenericArg::Type(ty) => self.visit_ty(ty),
- GenericArg::Const(ct) => self.visit_anon_const(&ct.value),
- GenericArg::Infer(inf) => self.visit_infer(inf),
- }
+ walk_generic_arg(self, generic_arg);
}
fn visit_lifetime(&mut self, lifetime: &'v Lifetime) {
walk_lifetime(self, lifetime)
@@ -424,7 +422,7 @@ pub trait Visitor<'v>: Sized {
fn visit_qpath(&mut self, qpath: &'v QPath<'v>, id: HirId, _span: Span) {
walk_qpath(self, qpath, id)
}
- fn visit_path(&mut self, path: &'v Path<'v>, _id: HirId) {
+ fn visit_path(&mut self, path: &Path<'v>, _id: HirId) {
walk_path(self, path)
}
fn visit_path_segment(&mut self, path_segment: &'v PathSegment<'v>) {
@@ -448,66 +446,9 @@ pub trait Visitor<'v>: Sized {
}
}
-pub fn walk_mod<'v, V: Visitor<'v>>(visitor: &mut V, module: &'v Mod<'v>, mod_hir_id: HirId) {
- visitor.visit_id(mod_hir_id);
- for &item_id in module.item_ids {
- visitor.visit_nested_item(item_id);
- }
-}
-
-pub fn walk_body<'v, V: Visitor<'v>>(visitor: &mut V, body: &'v Body<'v>) {
- walk_list!(visitor, visit_param, body.params);
- visitor.visit_expr(&body.value);
-}
-
-pub fn walk_local<'v, V: Visitor<'v>>(visitor: &mut V, local: &'v Local<'v>) {
- // Intentionally visiting the expr first - the initialization expr
- // dominates the local's definition.
- walk_list!(visitor, visit_expr, &local.init);
- visitor.visit_id(local.hir_id);
- visitor.visit_pat(&local.pat);
- if let Some(els) = local.els {
- visitor.visit_block(els);
- }
- walk_list!(visitor, visit_ty, &local.ty);
-}
-
-pub fn walk_ident<'v, V: Visitor<'v>>(visitor: &mut V, ident: Ident) {
- visitor.visit_name(ident.name);
-}
-
-pub fn walk_label<'v, V: Visitor<'v>>(visitor: &mut V, label: &'v Label) {
- visitor.visit_ident(label.ident);
-}
-
-pub fn walk_lifetime<'v, V: Visitor<'v>>(visitor: &mut V, lifetime: &'v Lifetime) {
- visitor.visit_id(lifetime.hir_id);
- match lifetime.name {
- LifetimeName::Param(_, ParamName::Plain(ident)) => {
- visitor.visit_ident(ident);
- }
- LifetimeName::Param(_, ParamName::Fresh)
- | LifetimeName::Param(_, ParamName::Error)
- | LifetimeName::Static
- | LifetimeName::Error
- | LifetimeName::ImplicitObjectLifetimeDefault
- | LifetimeName::Infer => {}
- }
-}
-
-pub fn walk_poly_trait_ref<'v, V: Visitor<'v>>(visitor: &mut V, trait_ref: &'v PolyTraitRef<'v>) {
- walk_list!(visitor, visit_generic_param, trait_ref.bound_generic_params);
- visitor.visit_trait_ref(&trait_ref.trait_ref);
-}
-
-pub fn walk_trait_ref<'v, V: Visitor<'v>>(visitor: &mut V, trait_ref: &'v TraitRef<'v>) {
- visitor.visit_id(trait_ref.hir_ref_id);
- visitor.visit_path(&trait_ref.path, trait_ref.hir_ref_id)
-}
-
pub fn walk_param<'v, V: Visitor<'v>>(visitor: &mut V, param: &'v Param<'v>) {
visitor.visit_id(param.hir_id);
- visitor.visit_pat(&param.pat);
+ visitor.visit_pat(param.pat);
}
pub fn walk_item<'v, V: Visitor<'v>>(visitor: &mut V, item: &'v Item<'v>) {
@@ -529,7 +470,7 @@ pub fn walk_item<'v, V: Visitor<'v>>(visitor: &mut V, item: &'v Item<'v>) {
}
ItemKind::Fn(ref sig, ref generics, body_id) => visitor.visit_fn(
FnKind::ItemFn(item.ident, generics, sig.header),
- &sig.decl,
+ sig.decl,
body_id,
item.span,
item.hir_id(),
@@ -601,142 +542,80 @@ pub fn walk_item<'v, V: Visitor<'v>>(visitor: &mut V, item: &'v Item<'v>) {
}
}
-pub fn walk_inline_asm<'v, V: Visitor<'v>>(visitor: &mut V, asm: &'v InlineAsm<'v>, id: HirId) {
- for (op, op_sp) in asm.operands {
- match op {
- InlineAsmOperand::In { expr, .. } | InlineAsmOperand::InOut { expr, .. } => {
- visitor.visit_expr(expr)
- }
- InlineAsmOperand::Out { expr, .. } => {
- if let Some(expr) = expr {
- visitor.visit_expr(expr);
- }
- }
- InlineAsmOperand::SplitInOut { in_expr, out_expr, .. } => {
- visitor.visit_expr(in_expr);
- if let Some(out_expr) = out_expr {
- visitor.visit_expr(out_expr);
- }
- }
- InlineAsmOperand::Const { anon_const, .. }
- | InlineAsmOperand::SymFn { anon_const, .. } => visitor.visit_anon_const(anon_const),
- InlineAsmOperand::SymStatic { path, .. } => visitor.visit_qpath(path, id, *op_sp),
- }
- }
-}
-
-pub fn walk_use<'v, V: Visitor<'v>>(visitor: &mut V, path: &'v Path<'v>, hir_id: HirId) {
- visitor.visit_id(hir_id);
- visitor.visit_path(path, hir_id);
+pub fn walk_body<'v, V: Visitor<'v>>(visitor: &mut V, body: &'v Body<'v>) {
+ walk_list!(visitor, visit_param, body.params);
+ visitor.visit_expr(body.value);
}
-pub fn walk_enum_def<'v, V: Visitor<'v>>(
- visitor: &mut V,
- enum_definition: &'v EnumDef<'v>,
- item_id: HirId,
-) {
- visitor.visit_id(item_id);
- walk_list!(visitor, visit_variant, enum_definition.variants);
+pub fn walk_ident<'v, V: Visitor<'v>>(visitor: &mut V, ident: Ident) {
+ visitor.visit_name(ident.name);
}
-pub fn walk_variant<'v, V: Visitor<'v>>(visitor: &mut V, variant: &'v Variant<'v>) {
- visitor.visit_ident(variant.ident);
- visitor.visit_id(variant.id);
- visitor.visit_variant_data(&variant.data);
- walk_list!(visitor, visit_anon_const, &variant.disr_expr);
+pub fn walk_mod<'v, V: Visitor<'v>>(visitor: &mut V, module: &'v Mod<'v>, mod_hir_id: HirId) {
+ visitor.visit_id(mod_hir_id);
+ for &item_id in module.item_ids {
+ visitor.visit_nested_item(item_id);
+ }
}
-pub fn walk_ty<'v, V: Visitor<'v>>(visitor: &mut V, typ: &'v Ty<'v>) {
- visitor.visit_id(typ.hir_id);
+pub fn walk_foreign_item<'v, V: Visitor<'v>>(visitor: &mut V, foreign_item: &'v ForeignItem<'v>) {
+ visitor.visit_id(foreign_item.hir_id());
+ visitor.visit_ident(foreign_item.ident);
- match typ.kind {
- TyKind::Slice(ref ty) => visitor.visit_ty(ty),
- TyKind::Ptr(ref mutable_type) => visitor.visit_ty(&mutable_type.ty),
- TyKind::Rptr(ref lifetime, ref mutable_type) => {
- visitor.visit_lifetime(lifetime);
- visitor.visit_ty(&mutable_type.ty)
- }
- TyKind::Never => {}
- TyKind::Tup(tuple_element_types) => {
- walk_list!(visitor, visit_ty, tuple_element_types);
- }
- TyKind::BareFn(ref function_declaration) => {
- walk_list!(visitor, visit_generic_param, function_declaration.generic_params);
- visitor.visit_fn_decl(&function_declaration.decl);
- }
- TyKind::Path(ref qpath) => {
- visitor.visit_qpath(qpath, typ.hir_id, typ.span);
- }
- TyKind::OpaqueDef(item_id, lifetimes, _in_trait) => {
- visitor.visit_nested_item(item_id);
- walk_list!(visitor, visit_generic_arg, lifetimes);
- }
- TyKind::Array(ref ty, ref length) => {
- visitor.visit_ty(ty);
- visitor.visit_array_length(length)
- }
- TyKind::TraitObject(bounds, ref lifetime, _syntax) => {
- for bound in bounds {
- visitor.visit_poly_trait_ref(bound);
+ match foreign_item.kind {
+ ForeignItemKind::Fn(ref function_declaration, param_names, ref generics) => {
+ visitor.visit_generics(generics);
+ visitor.visit_fn_decl(function_declaration);
+ for &param_name in param_names {
+ visitor.visit_ident(param_name);
}
- visitor.visit_lifetime(lifetime);
}
- TyKind::Typeof(ref expression) => visitor.visit_anon_const(expression),
- TyKind::Infer | TyKind::Err => {}
+ ForeignItemKind::Static(ref typ, _) => visitor.visit_ty(typ),
+ ForeignItemKind::Type => (),
}
}
-pub fn walk_inf<'v, V: Visitor<'v>>(visitor: &mut V, inf: &'v InferArg) {
- visitor.visit_id(inf.hir_id);
-}
-
-pub fn walk_qpath<'v, V: Visitor<'v>>(visitor: &mut V, qpath: &'v QPath<'v>, id: HirId) {
- match *qpath {
- QPath::Resolved(ref maybe_qself, ref path) => {
- walk_list!(visitor, visit_ty, maybe_qself);
- visitor.visit_path(path, id)
- }
- QPath::TypeRelative(ref qself, ref segment) => {
- visitor.visit_ty(qself);
- visitor.visit_path_segment(segment);
- }
- QPath::LangItem(..) => {}
+pub fn walk_local<'v, V: Visitor<'v>>(visitor: &mut V, local: &'v Local<'v>) {
+ // Intentionally visiting the expr first - the initialization expr
+ // dominates the local's definition.
+ walk_list!(visitor, visit_expr, &local.init);
+ visitor.visit_id(local.hir_id);
+ visitor.visit_pat(local.pat);
+ if let Some(els) = local.els {
+ visitor.visit_block(els);
}
+ walk_list!(visitor, visit_ty, &local.ty);
}
-pub fn walk_path<'v, V: Visitor<'v>>(visitor: &mut V, path: &'v Path<'v>) {
- for segment in path.segments {
- visitor.visit_path_segment(segment);
- }
+pub fn walk_block<'v, V: Visitor<'v>>(visitor: &mut V, block: &'v Block<'v>) {
+ visitor.visit_id(block.hir_id);
+ walk_list!(visitor, visit_stmt, block.stmts);
+ walk_list!(visitor, visit_expr, &block.expr);
}
-pub fn walk_path_segment<'v, V: Visitor<'v>>(visitor: &mut V, segment: &'v PathSegment<'v>) {
- visitor.visit_ident(segment.ident);
- visitor.visit_id(segment.hir_id);
- if let Some(ref args) = segment.args {
- visitor.visit_generic_args(args);
+pub fn walk_stmt<'v, V: Visitor<'v>>(visitor: &mut V, statement: &'v Stmt<'v>) {
+ visitor.visit_id(statement.hir_id);
+ match statement.kind {
+ StmtKind::Local(ref local) => visitor.visit_local(local),
+ StmtKind::Item(item) => visitor.visit_nested_item(item),
+ StmtKind::Expr(ref expression) | StmtKind::Semi(ref expression) => {
+ visitor.visit_expr(expression)
+ }
}
}
-pub fn walk_generic_args<'v, V: Visitor<'v>>(visitor: &mut V, generic_args: &'v GenericArgs<'v>) {
- walk_list!(visitor, visit_generic_arg, generic_args.args);
- walk_list!(visitor, visit_assoc_type_binding, generic_args.bindings);
-}
-
-pub fn walk_assoc_type_binding<'v, V: Visitor<'v>>(
- visitor: &mut V,
- type_binding: &'v TypeBinding<'v>,
-) {
- visitor.visit_id(type_binding.hir_id);
- visitor.visit_ident(type_binding.ident);
- visitor.visit_generic_args(type_binding.gen_args);
- match type_binding.kind {
- TypeBindingKind::Equality { ref term } => match term {
- Term::Ty(ref ty) => visitor.visit_ty(ty),
- Term::Const(ref c) => visitor.visit_anon_const(c),
- },
- TypeBindingKind::Constraint { bounds } => walk_list!(visitor, visit_param_bound, bounds),
+pub fn walk_arm<'v, V: Visitor<'v>>(visitor: &mut V, arm: &'v Arm<'v>) {
+ visitor.visit_id(arm.hir_id);
+ visitor.visit_pat(arm.pat);
+ if let Some(ref g) = arm.guard {
+ match g {
+ Guard::If(ref e) => visitor.visit_expr(e),
+ Guard::IfLet(ref l) => {
+ visitor.visit_let_expr(l);
+ }
+ }
}
+ visitor.visit_expr(arm.body);
}
pub fn walk_pat<'v, V: Visitor<'v>>(visitor: &mut V, pattern: &'v Pat<'v>) {
@@ -781,36 +660,186 @@ pub fn walk_pat<'v, V: Visitor<'v>>(visitor: &mut V, pattern: &'v Pat<'v>) {
pub fn walk_pat_field<'v, V: Visitor<'v>>(visitor: &mut V, field: &'v PatField<'v>) {
visitor.visit_id(field.hir_id);
visitor.visit_ident(field.ident);
- visitor.visit_pat(&field.pat)
+ visitor.visit_pat(field.pat)
}
-pub fn walk_foreign_item<'v, V: Visitor<'v>>(visitor: &mut V, foreign_item: &'v ForeignItem<'v>) {
- visitor.visit_id(foreign_item.hir_id());
- visitor.visit_ident(foreign_item.ident);
+pub fn walk_array_len<'v, V: Visitor<'v>>(visitor: &mut V, len: &'v ArrayLen) {
+ match len {
+ &ArrayLen::Infer(hir_id, _span) => visitor.visit_id(hir_id),
+ ArrayLen::Body(c) => visitor.visit_anon_const(c),
+ }
+}
- match foreign_item.kind {
- ForeignItemKind::Fn(ref function_declaration, param_names, ref generics) => {
- visitor.visit_generics(generics);
- visitor.visit_fn_decl(function_declaration);
- for &param_name in param_names {
- visitor.visit_ident(param_name);
- }
+pub fn walk_anon_const<'v, V: Visitor<'v>>(visitor: &mut V, constant: &'v AnonConst) {
+ visitor.visit_id(constant.hir_id);
+ visitor.visit_nested_body(constant.body);
+}
+
+pub fn walk_expr<'v, V: Visitor<'v>>(visitor: &mut V, expression: &'v Expr<'v>) {
+ visitor.visit_id(expression.hir_id);
+ match expression.kind {
+ ExprKind::Box(ref subexpression) => visitor.visit_expr(subexpression),
+ ExprKind::Array(subexpressions) => {
+ walk_list!(visitor, visit_expr, subexpressions);
}
- ForeignItemKind::Static(ref typ, _) => visitor.visit_ty(typ),
- ForeignItemKind::Type => (),
+ ExprKind::ConstBlock(ref anon_const) => visitor.visit_anon_const(anon_const),
+ ExprKind::Repeat(ref element, ref count) => {
+ visitor.visit_expr(element);
+ visitor.visit_array_length(count)
+ }
+ ExprKind::Struct(ref qpath, fields, ref optional_base) => {
+ visitor.visit_qpath(qpath, expression.hir_id, expression.span);
+ walk_list!(visitor, visit_expr_field, fields);
+ walk_list!(visitor, visit_expr, optional_base);
+ }
+ ExprKind::Tup(subexpressions) => {
+ walk_list!(visitor, visit_expr, subexpressions);
+ }
+ ExprKind::Call(ref callee_expression, arguments) => {
+ visitor.visit_expr(callee_expression);
+ walk_list!(visitor, visit_expr, arguments);
+ }
+ ExprKind::MethodCall(ref segment, receiver, arguments, _) => {
+ visitor.visit_path_segment(segment);
+ visitor.visit_expr(receiver);
+ walk_list!(visitor, visit_expr, arguments);
+ }
+ ExprKind::Binary(_, ref left_expression, ref right_expression) => {
+ visitor.visit_expr(left_expression);
+ visitor.visit_expr(right_expression)
+ }
+ ExprKind::AddrOf(_, _, ref subexpression) | ExprKind::Unary(_, ref subexpression) => {
+ visitor.visit_expr(subexpression)
+ }
+ ExprKind::Cast(ref subexpression, ref typ) | ExprKind::Type(ref subexpression, ref typ) => {
+ visitor.visit_expr(subexpression);
+ visitor.visit_ty(typ)
+ }
+ ExprKind::DropTemps(ref subexpression) => {
+ visitor.visit_expr(subexpression);
+ }
+ ExprKind::Let(ref let_expr) => visitor.visit_let_expr(let_expr),
+ ExprKind::If(ref cond, ref then, ref else_opt) => {
+ visitor.visit_expr(cond);
+ visitor.visit_expr(then);
+ walk_list!(visitor, visit_expr, else_opt);
+ }
+ ExprKind::Loop(ref block, ref opt_label, _, _) => {
+ walk_list!(visitor, visit_label, opt_label);
+ visitor.visit_block(block);
+ }
+ ExprKind::Match(ref subexpression, arms, _) => {
+ visitor.visit_expr(subexpression);
+ walk_list!(visitor, visit_arm, arms);
+ }
+ ExprKind::Closure(&Closure {
+ def_id: _,
+ binder: _,
+ bound_generic_params,
+ fn_decl,
+ body,
+ capture_clause: _,
+ fn_decl_span: _,
+ fn_arg_span: _,
+ movability: _,
+ }) => {
+ walk_list!(visitor, visit_generic_param, bound_generic_params);
+ visitor.visit_fn(FnKind::Closure, fn_decl, body, expression.span, expression.hir_id)
+ }
+ ExprKind::Block(ref block, ref opt_label) => {
+ walk_list!(visitor, visit_label, opt_label);
+ visitor.visit_block(block);
+ }
+ ExprKind::Assign(ref lhs, ref rhs, _) => {
+ visitor.visit_expr(rhs);
+ visitor.visit_expr(lhs)
+ }
+ ExprKind::AssignOp(_, ref left_expression, ref right_expression) => {
+ visitor.visit_expr(right_expression);
+ visitor.visit_expr(left_expression);
+ }
+ ExprKind::Field(ref subexpression, ident) => {
+ visitor.visit_expr(subexpression);
+ visitor.visit_ident(ident);
+ }
+ ExprKind::Index(ref main_expression, ref index_expression) => {
+ visitor.visit_expr(main_expression);
+ visitor.visit_expr(index_expression)
+ }
+ ExprKind::Path(ref qpath) => {
+ visitor.visit_qpath(qpath, expression.hir_id, expression.span);
+ }
+ ExprKind::Break(ref destination, ref opt_expr) => {
+ walk_list!(visitor, visit_label, &destination.label);
+ walk_list!(visitor, visit_expr, opt_expr);
+ }
+ ExprKind::Continue(ref destination) => {
+ walk_list!(visitor, visit_label, &destination.label);
+ }
+ ExprKind::Ret(ref optional_expression) => {
+ walk_list!(visitor, visit_expr, optional_expression);
+ }
+ ExprKind::InlineAsm(ref asm) => {
+ visitor.visit_inline_asm(asm, expression.hir_id);
+ }
+ ExprKind::Yield(ref subexpression, _) => {
+ visitor.visit_expr(subexpression);
+ }
+ ExprKind::Lit(_) | ExprKind::Err => {}
}
}
-pub fn walk_param_bound<'v, V: Visitor<'v>>(visitor: &mut V, bound: &'v GenericBound<'v>) {
- match *bound {
- GenericBound::Trait(ref typ, _modifier) => {
- visitor.visit_poly_trait_ref(typ);
+pub fn walk_let_expr<'v, V: Visitor<'v>>(visitor: &mut V, let_expr: &'v Let<'v>) {
+ // match the visit order in walk_local
+ visitor.visit_expr(let_expr.init);
+ visitor.visit_id(let_expr.hir_id);
+ visitor.visit_pat(let_expr.pat);
+ walk_list!(visitor, visit_ty, let_expr.ty);
+}
+
+pub fn walk_expr_field<'v, V: Visitor<'v>>(visitor: &mut V, field: &'v ExprField<'v>) {
+ visitor.visit_id(field.hir_id);
+ visitor.visit_ident(field.ident);
+ visitor.visit_expr(field.expr)
+}
+
+pub fn walk_ty<'v, V: Visitor<'v>>(visitor: &mut V, typ: &'v Ty<'v>) {
+ visitor.visit_id(typ.hir_id);
+
+ match typ.kind {
+ TyKind::Slice(ref ty) => visitor.visit_ty(ty),
+ TyKind::Ptr(ref mutable_type) => visitor.visit_ty(mutable_type.ty),
+ TyKind::Rptr(ref lifetime, ref mutable_type) => {
+ visitor.visit_lifetime(lifetime);
+ visitor.visit_ty(mutable_type.ty)
}
- GenericBound::LangItemTrait(_, _span, hir_id, args) => {
- visitor.visit_id(hir_id);
- visitor.visit_generic_args(args);
+ TyKind::Never => {}
+ TyKind::Tup(tuple_element_types) => {
+ walk_list!(visitor, visit_ty, tuple_element_types);
}
- GenericBound::Outlives(ref lifetime) => visitor.visit_lifetime(lifetime),
+ TyKind::BareFn(ref function_declaration) => {
+ walk_list!(visitor, visit_generic_param, function_declaration.generic_params);
+ visitor.visit_fn_decl(function_declaration.decl);
+ }
+ TyKind::Path(ref qpath) => {
+ visitor.visit_qpath(qpath, typ.hir_id, typ.span);
+ }
+ TyKind::OpaqueDef(item_id, lifetimes, _in_trait) => {
+ visitor.visit_nested_item(item_id);
+ walk_list!(visitor, visit_generic_arg, lifetimes);
+ }
+ TyKind::Array(ref ty, ref length) => {
+ visitor.visit_ty(ty);
+ visitor.visit_array_length(length)
+ }
+ TyKind::TraitObject(bounds, ref lifetime, _syntax) => {
+ for bound in bounds {
+ visitor.visit_poly_trait_ref(bound);
+ }
+ visitor.visit_lifetime(lifetime);
+ }
+ TyKind::Typeof(ref expression) => visitor.visit_anon_const(expression),
+ TyKind::Infer | TyKind::Err => {}
}
}
@@ -875,25 +904,16 @@ pub fn walk_where_predicate<'v, V: Visitor<'v>>(
}
}
-pub fn walk_fn_ret_ty<'v, V: Visitor<'v>>(visitor: &mut V, ret_ty: &'v FnRetTy<'v>) {
- if let FnRetTy::Return(ref output_ty) = *ret_ty {
- visitor.visit_ty(output_ty)
- }
-}
-
pub fn walk_fn_decl<'v, V: Visitor<'v>>(visitor: &mut V, function_declaration: &'v FnDecl<'v>) {
for ty in function_declaration.inputs {
visitor.visit_ty(ty)
}
- walk_fn_ret_ty(visitor, &function_declaration.output)
+ visitor.visit_fn_ret_ty(&function_declaration.output)
}
-pub fn walk_fn_kind<'v, V: Visitor<'v>>(visitor: &mut V, function_kind: FnKind<'v>) {
- match function_kind {
- FnKind::ItemFn(_, generics, ..) => {
- visitor.visit_generics(generics);
- }
- FnKind::Closure | FnKind::Method(..) => {}
+pub fn walk_fn_ret_ty<'v, V: Visitor<'v>>(visitor: &mut V, ret_ty: &'v FnRetTy<'v>) {
+ if let FnRetTy::Return(ref output_ty) = *ret_ty {
+ visitor.visit_ty(output_ty)
}
}
@@ -910,13 +930,30 @@ pub fn walk_fn<'v, V: Visitor<'v>>(
visitor.visit_nested_body(body_id)
}
+pub fn walk_fn_kind<'v, V: Visitor<'v>>(visitor: &mut V, function_kind: FnKind<'v>) {
+ match function_kind {
+ FnKind::ItemFn(_, generics, ..) => {
+ visitor.visit_generics(generics);
+ }
+ FnKind::Closure | FnKind::Method(..) => {}
+ }
+}
+
+pub fn walk_use<'v, V: Visitor<'v>>(visitor: &mut V, path: &'v UsePath<'v>, hir_id: HirId) {
+ visitor.visit_id(hir_id);
+ let UsePath { segments, ref res, span } = *path;
+ for &res in res {
+ visitor.visit_path(&Path { segments, res, span }, hir_id);
+ }
+}
+
pub fn walk_trait_item<'v, V: Visitor<'v>>(visitor: &mut V, trait_item: &'v TraitItem<'v>) {
// N.B., deliberately force a compilation error if/when new fields are added.
let TraitItem { ident, generics, ref defaultness, ref kind, span, owner_id: _ } = *trait_item;
let hir_id = trait_item.hir_id();
visitor.visit_ident(ident);
- visitor.visit_generics(&generics);
- visitor.visit_defaultness(&defaultness);
+ visitor.visit_generics(generics);
+ visitor.visit_defaultness(defaultness);
match *kind {
TraitItemKind::Const(ref ty, default) => {
visitor.visit_id(hir_id);
@@ -925,13 +962,13 @@ pub fn walk_trait_item<'v, V: Visitor<'v>>(visitor: &mut V, trait_item: &'v Trai
}
TraitItemKind::Fn(ref sig, TraitFn::Required(param_names)) => {
visitor.visit_id(hir_id);
- visitor.visit_fn_decl(&sig.decl);
+ visitor.visit_fn_decl(sig.decl);
for &param_name in param_names {
visitor.visit_ident(param_name);
}
}
TraitItemKind::Fn(ref sig, TraitFn::Provided(body_id)) => {
- visitor.visit_fn(FnKind::Method(ident, sig), &sig.decl, body_id, span, hir_id);
+ visitor.visit_fn(FnKind::Method(ident, sig), sig.decl, body_id, span, hir_id);
}
TraitItemKind::Type(bounds, ref default) => {
visitor.visit_id(hir_id);
@@ -973,7 +1010,7 @@ pub fn walk_impl_item<'v, V: Visitor<'v>>(visitor: &mut V, impl_item: &'v ImplIt
ImplItemKind::Fn(ref sig, body_id) => {
visitor.visit_fn(
FnKind::Method(impl_item.ident, sig),
- &sig.decl,
+ sig.decl,
body_id,
impl_item.span,
impl_item.hir_id(),
@@ -1004,6 +1041,29 @@ pub fn walk_impl_item_ref<'v, V: Visitor<'v>>(visitor: &mut V, impl_item_ref: &'
visitor.visit_associated_item_kind(kind);
}
+pub fn walk_trait_ref<'v, V: Visitor<'v>>(visitor: &mut V, trait_ref: &'v TraitRef<'v>) {
+ visitor.visit_id(trait_ref.hir_ref_id);
+ visitor.visit_path(trait_ref.path, trait_ref.hir_ref_id)
+}
+
+pub fn walk_param_bound<'v, V: Visitor<'v>>(visitor: &mut V, bound: &'v GenericBound<'v>) {
+ match *bound {
+ GenericBound::Trait(ref typ, _modifier) => {
+ visitor.visit_poly_trait_ref(typ);
+ }
+ GenericBound::LangItemTrait(_, _span, hir_id, args) => {
+ visitor.visit_id(hir_id);
+ visitor.visit_generic_args(args);
+ }
+ GenericBound::Outlives(ref lifetime) => visitor.visit_lifetime(lifetime),
+ }
+}
+
+pub fn walk_poly_trait_ref<'v, V: Visitor<'v>>(visitor: &mut V, trait_ref: &'v PolyTraitRef<'v>) {
+ walk_list!(visitor, visit_generic_param, trait_ref.bound_generic_params);
+ visitor.visit_trait_ref(&trait_ref.trait_ref);
+}
+
pub fn walk_struct_def<'v, V: Visitor<'v>>(
visitor: &mut V,
struct_definition: &'v VariantData<'v>,
@@ -1015,176 +1075,94 @@ pub fn walk_struct_def<'v, V: Visitor<'v>>(
pub fn walk_field_def<'v, V: Visitor<'v>>(visitor: &mut V, field: &'v FieldDef<'v>) {
visitor.visit_id(field.hir_id);
visitor.visit_ident(field.ident);
- visitor.visit_ty(&field.ty);
+ visitor.visit_ty(field.ty);
}
-pub fn walk_block<'v, V: Visitor<'v>>(visitor: &mut V, block: &'v Block<'v>) {
- visitor.visit_id(block.hir_id);
- walk_list!(visitor, visit_stmt, block.stmts);
- walk_list!(visitor, visit_expr, &block.expr);
+pub fn walk_enum_def<'v, V: Visitor<'v>>(
+ visitor: &mut V,
+ enum_definition: &'v EnumDef<'v>,
+ item_id: HirId,
+) {
+ visitor.visit_id(item_id);
+ walk_list!(visitor, visit_variant, enum_definition.variants);
}
-pub fn walk_stmt<'v, V: Visitor<'v>>(visitor: &mut V, statement: &'v Stmt<'v>) {
- visitor.visit_id(statement.hir_id);
- match statement.kind {
- StmtKind::Local(ref local) => visitor.visit_local(local),
- StmtKind::Item(item) => visitor.visit_nested_item(item),
- StmtKind::Expr(ref expression) | StmtKind::Semi(ref expression) => {
- visitor.visit_expr(expression)
- }
- }
+pub fn walk_variant<'v, V: Visitor<'v>>(visitor: &mut V, variant: &'v Variant<'v>) {
+ visitor.visit_ident(variant.ident);
+ visitor.visit_id(variant.hir_id);
+ visitor.visit_variant_data(&variant.data);
+ walk_list!(visitor, visit_anon_const, &variant.disr_expr);
}
-pub fn walk_array_len<'v, V: Visitor<'v>>(visitor: &mut V, len: &'v ArrayLen) {
- match len {
- &ArrayLen::Infer(hir_id, _span) => visitor.visit_id(hir_id),
- ArrayLen::Body(c) => visitor.visit_anon_const(c),
- }
+pub fn walk_label<'v, V: Visitor<'v>>(visitor: &mut V, label: &'v Label) {
+ visitor.visit_ident(label.ident);
}
-pub fn walk_anon_const<'v, V: Visitor<'v>>(visitor: &mut V, constant: &'v AnonConst) {
- visitor.visit_id(constant.hir_id);
- visitor.visit_nested_body(constant.body);
+pub fn walk_inf<'v, V: Visitor<'v>>(visitor: &mut V, inf: &'v InferArg) {
+ visitor.visit_id(inf.hir_id);
}
-pub fn walk_let_expr<'v, V: Visitor<'v>>(visitor: &mut V, let_expr: &'v Let<'v>) {
- // match the visit order in walk_local
- visitor.visit_expr(let_expr.init);
- visitor.visit_id(let_expr.hir_id);
- visitor.visit_pat(let_expr.pat);
- walk_list!(visitor, visit_ty, let_expr.ty);
+pub fn walk_generic_arg<'v, V: Visitor<'v>>(visitor: &mut V, generic_arg: &'v GenericArg<'v>) {
+ match generic_arg {
+ GenericArg::Lifetime(lt) => visitor.visit_lifetime(lt),
+ GenericArg::Type(ty) => visitor.visit_ty(ty),
+ GenericArg::Const(ct) => visitor.visit_anon_const(&ct.value),
+ GenericArg::Infer(inf) => visitor.visit_infer(inf),
+ }
}
-pub fn walk_expr_field<'v, V: Visitor<'v>>(visitor: &mut V, field: &'v ExprField<'v>) {
- visitor.visit_id(field.hir_id);
- visitor.visit_ident(field.ident);
- visitor.visit_expr(&field.expr)
+pub fn walk_lifetime<'v, V: Visitor<'v>>(visitor: &mut V, lifetime: &'v Lifetime) {
+ visitor.visit_id(lifetime.hir_id);
+ visitor.visit_ident(lifetime.ident);
}
-pub fn walk_expr<'v, V: Visitor<'v>>(visitor: &mut V, expression: &'v Expr<'v>) {
- visitor.visit_id(expression.hir_id);
- match expression.kind {
- ExprKind::Box(ref subexpression) => visitor.visit_expr(subexpression),
- ExprKind::Array(subexpressions) => {
- walk_list!(visitor, visit_expr, subexpressions);
- }
- ExprKind::ConstBlock(ref anon_const) => visitor.visit_anon_const(anon_const),
- ExprKind::Repeat(ref element, ref count) => {
- visitor.visit_expr(element);
- visitor.visit_array_length(count)
- }
- ExprKind::Struct(ref qpath, fields, ref optional_base) => {
- visitor.visit_qpath(qpath, expression.hir_id, expression.span);
- walk_list!(visitor, visit_expr_field, fields);
- walk_list!(visitor, visit_expr, optional_base);
- }
- ExprKind::Tup(subexpressions) => {
- walk_list!(visitor, visit_expr, subexpressions);
- }
- ExprKind::Call(ref callee_expression, arguments) => {
- visitor.visit_expr(callee_expression);
- walk_list!(visitor, visit_expr, arguments);
+pub fn walk_qpath<'v, V: Visitor<'v>>(visitor: &mut V, qpath: &'v QPath<'v>, id: HirId) {
+ match *qpath {
+ QPath::Resolved(ref maybe_qself, ref path) => {
+ walk_list!(visitor, visit_ty, maybe_qself);
+ visitor.visit_path(path, id)
}
- ExprKind::MethodCall(ref segment, receiver, arguments, _) => {
+ QPath::TypeRelative(ref qself, ref segment) => {
+ visitor.visit_ty(qself);
visitor.visit_path_segment(segment);
- visitor.visit_expr(receiver);
- walk_list!(visitor, visit_expr, arguments);
- }
- ExprKind::Binary(_, ref left_expression, ref right_expression) => {
- visitor.visit_expr(left_expression);
- visitor.visit_expr(right_expression)
- }
- ExprKind::AddrOf(_, _, ref subexpression) | ExprKind::Unary(_, ref subexpression) => {
- visitor.visit_expr(subexpression)
- }
- ExprKind::Cast(ref subexpression, ref typ) | ExprKind::Type(ref subexpression, ref typ) => {
- visitor.visit_expr(subexpression);
- visitor.visit_ty(typ)
- }
- ExprKind::DropTemps(ref subexpression) => {
- visitor.visit_expr(subexpression);
- }
- ExprKind::Let(ref let_expr) => visitor.visit_let_expr(let_expr),
- ExprKind::If(ref cond, ref then, ref else_opt) => {
- visitor.visit_expr(cond);
- visitor.visit_expr(then);
- walk_list!(visitor, visit_expr, else_opt);
- }
- ExprKind::Loop(ref block, ref opt_label, _, _) => {
- walk_list!(visitor, visit_label, opt_label);
- visitor.visit_block(block);
- }
- ExprKind::Match(ref subexpression, arms, _) => {
- visitor.visit_expr(subexpression);
- walk_list!(visitor, visit_arm, arms);
- }
- ExprKind::Closure(&Closure {
- binder: _,
- bound_generic_params,
- fn_decl,
- body,
- capture_clause: _,
- fn_decl_span: _,
- movability: _,
- }) => {
- walk_list!(visitor, visit_generic_param, bound_generic_params);
- visitor.visit_fn(FnKind::Closure, fn_decl, body, expression.span, expression.hir_id)
- }
- ExprKind::Block(ref block, ref opt_label) => {
- walk_list!(visitor, visit_label, opt_label);
- visitor.visit_block(block);
- }
- ExprKind::Assign(ref lhs, ref rhs, _) => {
- visitor.visit_expr(rhs);
- visitor.visit_expr(lhs)
- }
- ExprKind::AssignOp(_, ref left_expression, ref right_expression) => {
- visitor.visit_expr(right_expression);
- visitor.visit_expr(left_expression);
- }
- ExprKind::Field(ref subexpression, ident) => {
- visitor.visit_expr(subexpression);
- visitor.visit_ident(ident);
- }
- ExprKind::Index(ref main_expression, ref index_expression) => {
- visitor.visit_expr(main_expression);
- visitor.visit_expr(index_expression)
- }
- ExprKind::Path(ref qpath) => {
- visitor.visit_qpath(qpath, expression.hir_id, expression.span);
- }
- ExprKind::Break(ref destination, ref opt_expr) => {
- walk_list!(visitor, visit_label, &destination.label);
- walk_list!(visitor, visit_expr, opt_expr);
- }
- ExprKind::Continue(ref destination) => {
- walk_list!(visitor, visit_label, &destination.label);
- }
- ExprKind::Ret(ref optional_expression) => {
- walk_list!(visitor, visit_expr, optional_expression);
}
- ExprKind::InlineAsm(ref asm) => {
- visitor.visit_inline_asm(asm, expression.hir_id);
- }
- ExprKind::Yield(ref subexpression, _) => {
- visitor.visit_expr(subexpression);
- }
- ExprKind::Lit(_) | ExprKind::Err => {}
+ QPath::LangItem(..) => {}
}
}
-pub fn walk_arm<'v, V: Visitor<'v>>(visitor: &mut V, arm: &'v Arm<'v>) {
- visitor.visit_id(arm.hir_id);
- visitor.visit_pat(&arm.pat);
- if let Some(ref g) = arm.guard {
- match g {
- Guard::If(ref e) => visitor.visit_expr(e),
- Guard::IfLet(ref l) => {
- visitor.visit_let_expr(l);
- }
- }
+pub fn walk_path<'v, V: Visitor<'v>>(visitor: &mut V, path: &Path<'v>) {
+ for segment in path.segments {
+ visitor.visit_path_segment(segment);
+ }
+}
+
+pub fn walk_path_segment<'v, V: Visitor<'v>>(visitor: &mut V, segment: &'v PathSegment<'v>) {
+ visitor.visit_ident(segment.ident);
+ visitor.visit_id(segment.hir_id);
+ if let Some(ref args) = segment.args {
+ visitor.visit_generic_args(args);
+ }
+}
+
+pub fn walk_generic_args<'v, V: Visitor<'v>>(visitor: &mut V, generic_args: &'v GenericArgs<'v>) {
+ walk_list!(visitor, visit_generic_arg, generic_args.args);
+ walk_list!(visitor, visit_assoc_type_binding, generic_args.bindings);
+}
+
+pub fn walk_assoc_type_binding<'v, V: Visitor<'v>>(
+ visitor: &mut V,
+ type_binding: &'v TypeBinding<'v>,
+) {
+ visitor.visit_id(type_binding.hir_id);
+ visitor.visit_ident(type_binding.ident);
+ visitor.visit_generic_args(type_binding.gen_args);
+ match type_binding.kind {
+ TypeBindingKind::Equality { ref term } => match term {
+ Term::Ty(ref ty) => visitor.visit_ty(ty),
+ Term::Const(ref c) => visitor.visit_anon_const(c),
+ },
+ TypeBindingKind::Constraint { bounds } => walk_list!(visitor, visit_param_bound, bounds),
}
- visitor.visit_expr(&arm.body);
}
pub fn walk_associated_item_kind<'v, V: Visitor<'v>>(_: &mut V, _: &'v AssocItemKind) {
@@ -1198,3 +1176,27 @@ pub fn walk_defaultness<'v, V: Visitor<'v>>(_: &mut V, _: &'v Defaultness) {
// the right thing to do, should content be added in the future,
// would be to walk it.
}
+
+pub fn walk_inline_asm<'v, V: Visitor<'v>>(visitor: &mut V, asm: &'v InlineAsm<'v>, id: HirId) {
+ for (op, op_sp) in asm.operands {
+ match op {
+ InlineAsmOperand::In { expr, .. } | InlineAsmOperand::InOut { expr, .. } => {
+ visitor.visit_expr(expr)
+ }
+ InlineAsmOperand::Out { expr, .. } => {
+ if let Some(expr) = expr {
+ visitor.visit_expr(expr);
+ }
+ }
+ InlineAsmOperand::SplitInOut { in_expr, out_expr, .. } => {
+ visitor.visit_expr(in_expr);
+ if let Some(out_expr) = out_expr {
+ visitor.visit_expr(out_expr);
+ }
+ }
+ InlineAsmOperand::Const { anon_const, .. }
+ | InlineAsmOperand::SymFn { anon_const, .. } => visitor.visit_anon_const(anon_const),
+ InlineAsmOperand::SymStatic { path, .. } => visitor.visit_qpath(path, id, *op_sp),
+ }
+ }
+}
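
A minimal, self-contained sketch of the visit/walk double dispatch used throughout the hunks above: overridable `visit_*` hooks default to the free `walk_*` functions, and the reordered `walk_fn_decl` now recurses through `visitor.visit_fn_ret_ty` so overrides of that hook are respected. The types below are simplified stand-ins, not the real HIR.

// Stand-in HIR: just enough structure to show the traversal.
#[allow(dead_code)]
enum FnRetTy {
    DefaultReturn,
    Return(Ty),
}

enum Ty {
    Unit,
    Array(Box<Ty>),
}

trait Visitor: Sized {
    // Overridable hooks; defaults delegate to the walk functions.
    fn visit_ty(&mut self, ty: &Ty) {
        walk_ty(self, ty)
    }
    fn visit_fn_ret_ty(&mut self, ret: &FnRetTy) {
        walk_fn_ret_ty(self, ret)
    }
}

// Walk functions recurse into children through the visitor's hooks,
// so an override sees every nested node.
fn walk_ty<V: Visitor>(visitor: &mut V, ty: &Ty) {
    if let Ty::Array(elem) = ty {
        visitor.visit_ty(elem);
    }
}

fn walk_fn_ret_ty<V: Visitor>(visitor: &mut V, ret: &FnRetTy) {
    if let FnRetTy::Return(ty) = ret {
        visitor.visit_ty(ty);
    }
}

struct TyCounter(usize);

impl Visitor for TyCounter {
    fn visit_ty(&mut self, ty: &Ty) {
        self.0 += 1;
        walk_ty(self, ty); // keep the default traversal after doing our work
    }
}

fn main() {
    let ret = FnRetTy::Return(Ty::Array(Box::new(Ty::Unit)));
    let mut counter = TyCounter(0);
    counter.visit_fn_ret_ty(&ret);
    assert_eq!(counter.0, 2); // the array type and its element
}
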
diff --git a/compiler/rustc_hir/src/lang_items.rs b/compiler/rustc_hir/src/lang_items.rs
index ca615a491..038509031 100644
--- a/compiler/rustc_hir/src/lang_items.rs
+++ b/compiler/rustc_hir/src/lang_items.rs
@@ -12,35 +12,56 @@ use crate::errors::LangItemError;
use crate::{MethodKind, Target};
use rustc_ast as ast;
-use rustc_data_structures::fx::FxIndexMap;
use rustc_data_structures::stable_hasher::{HashStable, StableHasher};
use rustc_macros::HashStable_Generic;
use rustc_span::symbol::{kw, sym, Symbol};
use rustc_span::Span;
-use std::sync::LazyLock;
-
-pub enum LangItemGroup {
- Op,
- Fn,
+/// All of the language items, defined or not.
+/// Defined lang items can come from the current crate or its dependencies.
+#[derive(HashStable_Generic, Debug)]
+pub struct LanguageItems {
+ /// Mappings from lang items to their possibly found [`DefId`]s.
+ /// The index corresponds to the order in [`LangItem`].
+ items: [Option<DefId>; std::mem::variant_count::<LangItem>()],
+ /// Lang items that were not found during collection.
+ pub missing: Vec<LangItem>,
}
-const NUM_GROUPS: usize = 2;
+impl LanguageItems {
+ /// Construct an empty collection of lang items and no missing ones.
+ pub fn new() -> Self {
+ Self { items: [None; std::mem::variant_count::<LangItem>()], missing: Vec::new() }
+ }
+
+ pub fn get(&self, item: LangItem) -> Option<DefId> {
+ self.items[item as usize]
+ }
+
+ pub fn set(&mut self, item: LangItem, def_id: DefId) {
+ self.items[item as usize] = Some(def_id);
+ }
+
+ /// Requires that a given `LangItem` was bound and returns the corresponding `DefId`.
+ /// If it wasn't bound, e.g. due to a missing `#[lang = "<it.name()>"]`,
+ /// returns an error encapsulating the `LangItem`.
+ pub fn require(&self, it: LangItem) -> Result<DefId, LangItemError> {
+ self.get(it).ok_or_else(|| LangItemError(it))
+ }
-macro_rules! expand_group {
- () => {
- None
- };
- ($group:expr) => {
- Some($group)
- };
+ pub fn iter<'a>(&'a self) -> impl Iterator<Item = (LangItem, DefId)> + 'a {
+ self.items
+ .iter()
+ .enumerate()
+ .filter_map(|(i, id)| id.map(|id| (LangItem::from_u32(i as u32).unwrap(), id)))
+ }
}
// The actual lang items defined come at the end of this file in one handy table.
// So you probably just want to nip down to the end.
macro_rules! language_item_table {
(
- $( $(#[$attr:meta])* $variant:ident $($group:expr)?, $module:ident :: $name:ident, $method:ident, $target:expr, $generics:expr; )*
+ $( $(#[$attr:meta])* $variant:ident, $module:ident :: $name:ident, $method:ident, $target:expr, $generics:expr; )*
) => {
enum_from_u32! {
@@ -66,66 +87,36 @@ macro_rules! language_item_table {
}
}
- /// The [group](LangItemGroup) that this lang item belongs to,
- /// or `None` if it doesn't belong to a group.
- pub fn group(self) -> Option<LangItemGroup> {
- use LangItemGroup::*;
- match self {
- $( LangItem::$variant => expand_group!($($group)*), )*
+ /// Opposite of [`LangItem::name`]
+ pub fn from_name(name: Symbol) -> Option<Self> {
+ match name {
+ $( $module::$name => Some(LangItem::$variant), )*
+ _ => None,
}
}
- pub fn required_generics(&self) -> GenericRequirement {
+ /// Returns the name of the `LangItem` enum variant.
+ // This method is used by Clippy for internal lints.
+ pub fn variant_name(self) -> &'static str {
match self {
- $( LangItem::$variant => $generics, )*
+ $( LangItem::$variant => stringify!($variant), )*
}
}
- }
-
- /// All of the language items, defined or not.
- /// Defined lang items can come from the current crate or its dependencies.
- #[derive(HashStable_Generic, Debug)]
- pub struct LanguageItems {
- /// Mappings from lang items to their possibly found [`DefId`]s.
- /// The index corresponds to the order in [`LangItem`].
- pub items: Vec<Option<DefId>>,
- /// Lang items that were not found during collection.
- pub missing: Vec<LangItem>,
- /// Mapping from [`LangItemGroup`] discriminants to all
- /// [`DefId`]s of lang items in that group.
- pub groups: [Vec<DefId>; NUM_GROUPS],
- }
- impl LanguageItems {
- /// Construct an empty collection of lang items and no missing ones.
- pub fn new() -> Self {
- fn init_none(_: LangItem) -> Option<DefId> { None }
- const EMPTY: Vec<DefId> = Vec::new();
-
- Self {
- items: vec![$(init_none(LangItem::$variant)),*],
- missing: Vec::new(),
- groups: [EMPTY; NUM_GROUPS],
+ pub fn target(self) -> Target {
+ match self {
+ $( LangItem::$variant => $target, )*
}
}
- /// Returns the mappings to the possibly found `DefId`s for each lang item.
- pub fn items(&self) -> &[Option<DefId>] {
- &*self.items
- }
-
- /// Requires that a given `LangItem` was bound and returns the corresponding `DefId`.
- /// If it wasn't bound, e.g. due to a missing `#[lang = "<it.name()>"]`,
- /// returns an error encapsulating the `LangItem`.
- pub fn require(&self, it: LangItem) -> Result<DefId, LangItemError> {
- self.items[it as usize].ok_or_else(|| LangItemError(it))
- }
-
- /// Returns the [`DefId`]s of all lang items in a group.
- pub fn group(&self, group: LangItemGroup) -> &[DefId] {
- self.groups[group as usize].as_ref()
+ pub fn required_generics(&self) -> GenericRequirement {
+ match self {
+ $( LangItem::$variant => $generics, )*
+ }
}
+ }
+ impl LanguageItems {
$(
#[doc = concat!("Returns the [`DefId`] of the `", stringify!($name), "` lang item if it is defined.")]
pub fn $method(&self) -> Option<DefId> {
@@ -133,15 +124,6 @@ macro_rules! language_item_table {
}
)*
}
-
- /// A mapping from the name of the lang item to its order and the form it must be of.
- pub static ITEM_REFS: LazyLock<FxIndexMap<Symbol, (usize, Target)>> = LazyLock::new(|| {
- let mut item_refs = FxIndexMap::default();
- $( item_refs.insert($module::$name, (LangItem::$variant as usize, $target)); )*
- item_refs
- });
-
-// End of the macro
}
}
@@ -152,14 +134,12 @@ impl<CTX> HashStable<CTX> for LangItem {
}
/// Extracts the first `lang = "$name"` out of a list of attributes.
-/// The attributes `#[panic_handler]` and `#[alloc_error_handler]`
-/// are also extracted out when found.
+/// The `#[panic_handler]` attribute is also extracted out when found.
pub fn extract(attrs: &[ast::Attribute]) -> Option<(Symbol, Span)> {
attrs.iter().find_map(|attr| {
Some(match attr {
_ if attr.has_name(sym::lang) => (attr.value_str()?, attr.span),
_ if attr.has_name(sym::panic_handler) => (sym::panic_impl, attr.span),
- _ if attr.has_name(sym::alloc_error_handler) => (sym::oom, attr.span),
_ => return None,
})
})
@@ -196,30 +176,30 @@ language_item_table! {
TransmuteOpts, sym::transmute_opts, transmute_opts, Target::Struct, GenericRequirement::Exact(0);
TransmuteTrait, sym::transmute_trait, transmute_trait, Target::Trait, GenericRequirement::Exact(3);
- Add(Op), sym::add, add_trait, Target::Trait, GenericRequirement::Exact(1);
- Sub(Op), sym::sub, sub_trait, Target::Trait, GenericRequirement::Exact(1);
- Mul(Op), sym::mul, mul_trait, Target::Trait, GenericRequirement::Exact(1);
- Div(Op), sym::div, div_trait, Target::Trait, GenericRequirement::Exact(1);
- Rem(Op), sym::rem, rem_trait, Target::Trait, GenericRequirement::Exact(1);
- Neg(Op), sym::neg, neg_trait, Target::Trait, GenericRequirement::Exact(0);
- Not(Op), sym::not, not_trait, Target::Trait, GenericRequirement::Exact(0);
- BitXor(Op), sym::bitxor, bitxor_trait, Target::Trait, GenericRequirement::Exact(1);
- BitAnd(Op), sym::bitand, bitand_trait, Target::Trait, GenericRequirement::Exact(1);
- BitOr(Op), sym::bitor, bitor_trait, Target::Trait, GenericRequirement::Exact(1);
- Shl(Op), sym::shl, shl_trait, Target::Trait, GenericRequirement::Exact(1);
- Shr(Op), sym::shr, shr_trait, Target::Trait, GenericRequirement::Exact(1);
- AddAssign(Op), sym::add_assign, add_assign_trait, Target::Trait, GenericRequirement::Exact(1);
- SubAssign(Op), sym::sub_assign, sub_assign_trait, Target::Trait, GenericRequirement::Exact(1);
- MulAssign(Op), sym::mul_assign, mul_assign_trait, Target::Trait, GenericRequirement::Exact(1);
- DivAssign(Op), sym::div_assign, div_assign_trait, Target::Trait, GenericRequirement::Exact(1);
- RemAssign(Op), sym::rem_assign, rem_assign_trait, Target::Trait, GenericRequirement::Exact(1);
- BitXorAssign(Op), sym::bitxor_assign, bitxor_assign_trait, Target::Trait, GenericRequirement::Exact(1);
- BitAndAssign(Op), sym::bitand_assign, bitand_assign_trait, Target::Trait, GenericRequirement::Exact(1);
- BitOrAssign(Op), sym::bitor_assign, bitor_assign_trait, Target::Trait, GenericRequirement::Exact(1);
- ShlAssign(Op), sym::shl_assign, shl_assign_trait, Target::Trait, GenericRequirement::Exact(1);
- ShrAssign(Op), sym::shr_assign, shr_assign_trait, Target::Trait, GenericRequirement::Exact(1);
- Index(Op), sym::index, index_trait, Target::Trait, GenericRequirement::Exact(1);
- IndexMut(Op), sym::index_mut, index_mut_trait, Target::Trait, GenericRequirement::Exact(1);
+ Add, sym::add, add_trait, Target::Trait, GenericRequirement::Exact(1);
+ Sub, sym::sub, sub_trait, Target::Trait, GenericRequirement::Exact(1);
+ Mul, sym::mul, mul_trait, Target::Trait, GenericRequirement::Exact(1);
+ Div, sym::div, div_trait, Target::Trait, GenericRequirement::Exact(1);
+ Rem, sym::rem, rem_trait, Target::Trait, GenericRequirement::Exact(1);
+ Neg, sym::neg, neg_trait, Target::Trait, GenericRequirement::Exact(0);
+ Not, sym::not, not_trait, Target::Trait, GenericRequirement::Exact(0);
+ BitXor, sym::bitxor, bitxor_trait, Target::Trait, GenericRequirement::Exact(1);
+ BitAnd, sym::bitand, bitand_trait, Target::Trait, GenericRequirement::Exact(1);
+ BitOr, sym::bitor, bitor_trait, Target::Trait, GenericRequirement::Exact(1);
+ Shl, sym::shl, shl_trait, Target::Trait, GenericRequirement::Exact(1);
+ Shr, sym::shr, shr_trait, Target::Trait, GenericRequirement::Exact(1);
+ AddAssign, sym::add_assign, add_assign_trait, Target::Trait, GenericRequirement::Exact(1);
+ SubAssign, sym::sub_assign, sub_assign_trait, Target::Trait, GenericRequirement::Exact(1);
+ MulAssign, sym::mul_assign, mul_assign_trait, Target::Trait, GenericRequirement::Exact(1);
+ DivAssign, sym::div_assign, div_assign_trait, Target::Trait, GenericRequirement::Exact(1);
+ RemAssign, sym::rem_assign, rem_assign_trait, Target::Trait, GenericRequirement::Exact(1);
+ BitXorAssign, sym::bitxor_assign, bitxor_assign_trait, Target::Trait, GenericRequirement::Exact(1);
+ BitAndAssign, sym::bitand_assign, bitand_assign_trait, Target::Trait, GenericRequirement::Exact(1);
+ BitOrAssign, sym::bitor_assign, bitor_assign_trait, Target::Trait, GenericRequirement::Exact(1);
+ ShlAssign, sym::shl_assign, shl_assign_trait, Target::Trait, GenericRequirement::Exact(1);
+ ShrAssign, sym::shr_assign, shr_assign_trait, Target::Trait, GenericRequirement::Exact(1);
+ Index, sym::index, index_trait, Target::Trait, GenericRequirement::Exact(1);
+ IndexMut, sym::index_mut, index_mut_trait, Target::Trait, GenericRequirement::Exact(1);
UnsafeCell, sym::unsafe_cell, unsafe_cell_type, Target::Struct, GenericRequirement::None;
VaList, sym::va_list, va_list, Target::Struct, GenericRequirement::None;
@@ -229,9 +209,9 @@ language_item_table! {
DerefTarget, sym::deref_target, deref_target, Target::AssocTy, GenericRequirement::None;
Receiver, sym::receiver, receiver_trait, Target::Trait, GenericRequirement::None;
- Fn(Fn), kw::Fn, fn_trait, Target::Trait, GenericRequirement::Exact(1);
- FnMut(Fn), sym::fn_mut, fn_mut_trait, Target::Trait, GenericRequirement::Exact(1);
- FnOnce(Fn), sym::fn_once, fn_once_trait, Target::Trait, GenericRequirement::Exact(1);
+ Fn, kw::Fn, fn_trait, Target::Trait, GenericRequirement::Exact(1);
+ FnMut, sym::fn_mut, fn_mut_trait, Target::Trait, GenericRequirement::Exact(1);
+ FnOnce, sym::fn_once, fn_once_trait, Target::Trait, GenericRequirement::Exact(1);
FnOnceOutput, sym::fn_once_output, fn_once_output, Target::AssocTy, GenericRequirement::None;
@@ -241,8 +221,8 @@ language_item_table! {
Unpin, sym::unpin, unpin_trait, Target::Trait, GenericRequirement::None;
Pin, sym::pin, pin_type, Target::Struct, GenericRequirement::None;
- PartialEq(Op), sym::eq, eq_trait, Target::Trait, GenericRequirement::Exact(1);
- PartialOrd(Op), sym::partial_ord, partial_ord_trait, Target::Trait, GenericRequirement::Exact(1);
+ PartialEq, sym::eq, eq_trait, Target::Trait, GenericRequirement::Exact(1);
+ PartialOrd, sym::partial_ord, partial_ord_trait, Target::Trait, GenericRequirement::Exact(1);
// A number of panic-related lang items. The `panic` item corresponds to divide-by-zero and
// various panic cases with `match`. The `panic_bounds_check` item is for indexing arrays.
@@ -266,7 +246,6 @@ language_item_table! {
ExchangeMalloc, sym::exchange_malloc, exchange_malloc_fn, Target::Fn, GenericRequirement::None;
BoxFree, sym::box_free, box_free_fn, Target::Fn, GenericRequirement::Minimum(1);
DropInPlace, sym::drop_in_place, drop_in_place_fn, Target::Fn, GenericRequirement::Minimum(1);
- Oom, sym::oom, oom, Target::Fn, GenericRequirement::None;
AllocLayout, sym::alloc_layout, alloc_layout, Target::Struct, GenericRequirement::None;
Start, sym::start, start_fn, Target::Fn, GenericRequirement::Exact(1);
@@ -299,10 +278,16 @@ language_item_table! {
TryTraitBranch, sym::branch, branch_fn, Target::Method(MethodKind::Trait { body: false }), GenericRequirement::None;
TryTraitFromYeet, sym::from_yeet, from_yeet_fn, Target::Fn, GenericRequirement::None;
+ PointerSized, sym::pointer_sized, pointer_sized, Target::Trait, GenericRequirement::Exact(0);
+
+ Poll, sym::Poll, poll, Target::Enum, GenericRequirement::None;
PollReady, sym::Ready, poll_ready_variant, Target::Variant, GenericRequirement::None;
PollPending, sym::Pending, poll_pending_variant, Target::Variant, GenericRequirement::None;
- FromGenerator, sym::from_generator, from_generator_fn, Target::Fn, GenericRequirement::None;
+ // FIXME(swatinem): the following lang items are used for async lowering and
+ // should become obsolete eventually.
+ ResumeTy, sym::ResumeTy, resume_ty, Target::Struct, GenericRequirement::None;
+ IdentityFuture, sym::identity_future, identity_future_fn, Target::Fn, GenericRequirement::None;
GetContext, sym::get_context, get_context_fn, Target::Fn, GenericRequirement::None;
FuturePoll, sym::poll, future_poll_fn, Target::Method(MethodKind::Trait { body: false }), GenericRequirement::None;
@@ -331,6 +316,8 @@ language_item_table! {
Range, sym::Range, range_struct, Target::Struct, GenericRequirement::None;
RangeToInclusive, sym::RangeToInclusive, range_to_inclusive_struct, Target::Struct, GenericRequirement::None;
RangeTo, sym::RangeTo, range_to_struct, Target::Struct, GenericRequirement::None;
+
+ String, sym::String, string, Target::Struct, GenericRequirement::None;
}
pub enum GenericRequirement {
@@ -338,3 +325,34 @@ pub enum GenericRequirement {
Minimum(usize),
Exact(usize),
}
+
+pub static FN_TRAITS: &'static [LangItem] = &[LangItem::Fn, LangItem::FnMut, LangItem::FnOnce];
+
+pub static OPERATORS: &'static [LangItem] = &[
+ LangItem::Add,
+ LangItem::Sub,
+ LangItem::Mul,
+ LangItem::Div,
+ LangItem::Rem,
+ LangItem::Neg,
+ LangItem::Not,
+ LangItem::BitXor,
+ LangItem::BitAnd,
+ LangItem::BitOr,
+ LangItem::Shl,
+ LangItem::Shr,
+ LangItem::AddAssign,
+ LangItem::SubAssign,
+ LangItem::MulAssign,
+ LangItem::DivAssign,
+ LangItem::RemAssign,
+ LangItem::BitXorAssign,
+ LangItem::BitAndAssign,
+ LangItem::BitOrAssign,
+ LangItem::ShlAssign,
+ LangItem::ShrAssign,
+ LangItem::Index,
+ LangItem::IndexMut,
+ LangItem::PartialEq,
+ LangItem::PartialOrd,
+];
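
The reworked `LanguageItems` above replaces the `Vec<Option<DefId>>` plus group vectors with a fixed array indexed by the `LangItem` discriminant, and the former operator/fn groups become the plain `OPERATORS` and `FN_TRAITS` slices. A rough standalone sketch of that storage pattern follows; the hypothetical `Item` enum and `u32` ids stand in for `LangItem` and `DefId`, and a hand-written count replaces the nightly `std::mem::variant_count` used in the real code.

// Hypothetical stand-ins: `Item` for `LangItem`, `u32` for `DefId`.
#[derive(Clone, Copy, PartialEq, Eq, Debug)]
enum Item {
    Add,
    Sub,
    Index,
}

// The real code sizes the array with std::mem::variant_count::<LangItem>() (nightly).
const ITEM_COUNT: usize = 3;

struct Items {
    // One slot per enum variant, indexed by the variant's discriminant.
    items: [Option<u32>; ITEM_COUNT],
    missing: Vec<Item>,
}

impl Items {
    fn new() -> Self {
        Self { items: [None; ITEM_COUNT], missing: Vec::new() }
    }

    fn get(&self, item: Item) -> Option<u32> {
        self.items[item as usize]
    }

    fn set(&mut self, item: Item, id: u32) {
        self.items[item as usize] = Some(id);
    }

    // Like `require`: a missing binding becomes an error value instead of a panic.
    fn require(&self, item: Item) -> Result<u32, Item> {
        self.get(item).ok_or(item)
    }
}

fn main() {
    let mut items = Items::new();
    items.set(Item::Add, 7);
    assert_eq!(items.get(Item::Add), Some(7));
    assert_eq!(items.get(Item::Index), None);
    assert_eq!(items.require(Item::Sub), Err(Item::Sub));
    assert!(items.missing.is_empty());
}
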
diff --git a/compiler/rustc_hir/src/lib.rs b/compiler/rustc_hir/src/lib.rs
index 1c4aa420c..98d967cc0 100644
--- a/compiler/rustc_hir/src/lib.rs
+++ b/compiler/rustc_hir/src/lib.rs
@@ -5,10 +5,11 @@
#![feature(associated_type_defaults)]
#![feature(closure_track_caller)]
#![feature(const_btree_len)]
-#![feature(once_cell)]
+#![feature(let_chains)]
#![feature(min_specialization)]
#![feature(never_type)]
#![feature(rustc_attrs)]
+#![feature(variant_count)]
#![recursion_limit = "256"]
#![deny(rustc::untranslatable_diagnostic)]
#![deny(rustc::diagnostic_outside_of_impl)]
diff --git a/compiler/rustc_hir/src/pat_util.rs b/compiler/rustc_hir/src/pat_util.rs
index 0c1819bb0..6e2fbf96c 100644
--- a/compiler/rustc_hir/src/pat_util.rs
+++ b/compiler/rustc_hir/src/pat_util.rs
@@ -130,10 +130,7 @@ impl hir::Pat<'_> {
pub fn contains_explicit_ref_binding(&self) -> Option<hir::Mutability> {
let mut result = None;
self.each_binding(|annotation, _, _, _| match annotation {
- hir::BindingAnnotation::REF => match result {
- None | Some(hir::Mutability::Not) => result = Some(hir::Mutability::Not),
- _ => {}
- },
+ hir::BindingAnnotation::REF if result.is_none() => result = Some(hir::Mutability::Not),
hir::BindingAnnotation::REF_MUT => result = Some(hir::Mutability::Mut),
_ => {}
});
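
The single guarded arm above keeps the old precedence of `contains_explicit_ref_binding`: a `ref mut` binding always upgrades the result, while a plain `ref` binding only records `Mutability::Not` if nothing has been seen yet. A small standalone illustration with stand-in types:

#[derive(Clone, Copy, PartialEq, Debug)]
enum Mutability {
    Not,
    Mut,
}

enum Binding {
    Ref,
    RefMut,
    Other,
}

fn contains_explicit_ref_binding(bindings: &[Binding]) -> Option<Mutability> {
    let mut result = None;
    for binding in bindings {
        match binding {
            // Only set `Not` if nothing stronger was recorded yet.
            Binding::Ref if result.is_none() => result = Some(Mutability::Not),
            // A mutable by-ref binding always wins.
            Binding::RefMut => result = Some(Mutability::Mut),
            _ => {}
        }
    }
    result
}

fn main() {
    assert_eq!(contains_explicit_ref_binding(&[Binding::Ref]), Some(Mutability::Not));
    assert_eq!(
        contains_explicit_ref_binding(&[Binding::Ref, Binding::RefMut]),
        Some(Mutability::Mut)
    );
    assert_eq!(contains_explicit_ref_binding(&[Binding::Other]), None);
}
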
diff --git a/compiler/rustc_hir/src/tests.rs b/compiler/rustc_hir/src/tests.rs
index 4636d5152..d47911509 100644
--- a/compiler/rustc_hir/src/tests.rs
+++ b/compiler/rustc_hir/src/tests.rs
@@ -1,5 +1,7 @@
use crate::definitions::{DefKey, DefPathData, DisambiguatedDefPathData};
use rustc_span::def_id::{DefPathHash, StableCrateId};
+use rustc_span::edition::Edition;
+use rustc_span::{create_session_if_not_set_then, Symbol};
#[test]
fn def_path_hash_depends_on_crate_id() {
@@ -11,26 +13,28 @@ fn def_path_hash_depends_on_crate_id() {
// the crate by changing the crate disambiguator (e.g. via bumping the
// crate's version number).
- let id0 = StableCrateId::new("foo", false, vec!["1".to_string()]);
- let id1 = StableCrateId::new("foo", false, vec!["2".to_string()]);
+ create_session_if_not_set_then(Edition::Edition2024, |_| {
+ let id0 = StableCrateId::new(Symbol::intern("foo"), false, vec!["1".to_string()]);
+ let id1 = StableCrateId::new(Symbol::intern("foo"), false, vec!["2".to_string()]);
- let h0 = mk_test_hash(id0);
- let h1 = mk_test_hash(id1);
+ let h0 = mk_test_hash(id0);
+ let h1 = mk_test_hash(id1);
- assert_ne!(h0.stable_crate_id(), h1.stable_crate_id());
- assert_ne!(h0.local_hash(), h1.local_hash());
+ assert_ne!(h0.stable_crate_id(), h1.stable_crate_id());
+ assert_ne!(h0.local_hash(), h1.local_hash());
- fn mk_test_hash(stable_crate_id: StableCrateId) -> DefPathHash {
- let parent_hash = DefPathHash::new(stable_crate_id, 0);
+ fn mk_test_hash(stable_crate_id: StableCrateId) -> DefPathHash {
+ let parent_hash = DefPathHash::new(stable_crate_id, 0);
- let key = DefKey {
- parent: None,
- disambiguated_data: DisambiguatedDefPathData {
- data: DefPathData::CrateRoot,
- disambiguator: 0,
- },
- };
+ let key = DefKey {
+ parent: None,
+ disambiguated_data: DisambiguatedDefPathData {
+ data: DefPathData::CrateRoot,
+ disambiguator: 0,
+ },
+ };
- key.compute_stable_hash(parent_hash)
- }
+ key.compute_stable_hash(parent_hash)
+ }
+ })
}
diff --git a/compiler/rustc_hir/src/weak_lang_items.rs b/compiler/rustc_hir/src/weak_lang_items.rs
index da9c9c121..0cc50c6dd 100644
--- a/compiler/rustc_hir/src/weak_lang_items.rs
+++ b/compiler/rustc_hir/src/weak_lang_items.rs
@@ -1,53 +1,30 @@
//! Validity checking for weak lang items
-use crate::def_id::DefId;
-use crate::{lang_items, LangItem, LanguageItems};
+use crate::LangItem;
-use rustc_ast as ast;
-use rustc_data_structures::fx::FxIndexMap;
use rustc_span::symbol::{sym, Symbol};
-use std::sync::LazyLock;
-
macro_rules! weak_lang_items {
- ($($name:ident, $item:ident, $sym:ident;)*) => (
-
-pub static WEAK_ITEMS_REFS: LazyLock<FxIndexMap<Symbol, LangItem>> = LazyLock::new(|| {
- let mut map = FxIndexMap::default();
- $(map.insert(sym::$name, LangItem::$item);)*
- map
-});
-
-pub static WEAK_ITEMS_SYMBOLS: LazyLock<FxIndexMap<LangItem, Symbol>> = LazyLock::new(|| {
- let mut map = FxIndexMap::default();
- $(map.insert(LangItem::$item, sym::$sym);)*
- map
-});
-
-pub fn link_name(attrs: &[ast::Attribute]) -> Option<Symbol>
-{
- lang_items::extract(attrs).and_then(|(name, _)| {
- $(if name == sym::$name {
- Some(sym::$sym)
- } else)* {
- None
+ ($($item:ident, $sym:ident;)*) => {
+ pub static WEAK_LANG_ITEMS: &[LangItem] = &[$(LangItem::$item,)*];
+
+ impl LangItem {
+ pub fn is_weak(self) -> bool {
+ matches!(self, $(LangItem::$item)|*)
+ }
+
+ pub fn link_name(self) -> Option<Symbol> {
+ match self {
+ $( LangItem::$item => Some(sym::$sym),)*
+ _ => None,
+ }
+ }
}
- })
-}
-
-impl LanguageItems {
- pub fn is_weak_lang_item(&self, item_def_id: DefId) -> bool {
- let did = Some(item_def_id);
-
- $(self.$name() == did)||*
}
}
-) }
-
weak_lang_items! {
- panic_impl, PanicImpl, rust_begin_unwind;
- eh_personality, EhPersonality, rust_eh_personality;
- eh_catch_typeinfo, EhCatchTypeinfo, rust_eh_catch_typeinfo;
- oom, Oom, rust_oom;
+ PanicImpl, rust_begin_unwind;
+ EhPersonality, rust_eh_personality;
+ EhCatchTypeinfo, rust_eh_catch_typeinfo;
}
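
For the three weak items listed above, the reworked `weak_lang_items!` macro amounts to a static slice plus `is_weak` and `link_name` on `LangItem`. A hand-expanded sketch with simplified types (string literals stand in for interned `Symbol`s, and the enum is a stand-in):

// Stand-in enum; string literals replace interned `Symbol`s.
#[derive(Clone, Copy, PartialEq, Debug)]
enum LangItem {
    PanicImpl,
    EhPersonality,
    EhCatchTypeinfo,
    Add,
}

static WEAK_LANG_ITEMS: &[LangItem] =
    &[LangItem::PanicImpl, LangItem::EhPersonality, LangItem::EhCatchTypeinfo];

impl LangItem {
    fn is_weak(self) -> bool {
        matches!(
            self,
            LangItem::PanicImpl | LangItem::EhPersonality | LangItem::EhCatchTypeinfo
        )
    }

    fn link_name(self) -> Option<&'static str> {
        match self {
            LangItem::PanicImpl => Some("rust_begin_unwind"),
            LangItem::EhPersonality => Some("rust_eh_personality"),
            LangItem::EhCatchTypeinfo => Some("rust_eh_catch_typeinfo"),
            _ => None,
        }
    }
}

fn main() {
    assert_eq!(WEAK_LANG_ITEMS.len(), 3);
    assert!(LangItem::PanicImpl.is_weak());
    assert_eq!(LangItem::Add.link_name(), None);
}
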
diff --git a/compiler/rustc_hir_analysis/src/astconv/errors.rs b/compiler/rustc_hir_analysis/src/astconv/errors.rs
index a9152bdc5..e6465d641 100644
--- a/compiler/rustc_hir_analysis/src/astconv/errors.rs
+++ b/compiler/rustc_hir_analysis/src/astconv/errors.rs
@@ -177,11 +177,8 @@ impl<'o, 'tcx> dyn AstConv<'tcx> + 'o {
.all_traits()
.filter(|trait_def_id| {
let viz = self.tcx().visibility(*trait_def_id);
- if let Some(def_id) = self.item_def_id() {
- viz.is_accessible_from(def_id, self.tcx())
- } else {
- viz.is_visible_locally()
- }
+ let def_id = self.item_def_id();
+ viz.is_accessible_from(def_id, self.tcx())
})
.collect();
diff --git a/compiler/rustc_hir_analysis/src/astconv/generics.rs b/compiler/rustc_hir_analysis/src/astconv/generics.rs
index 47915b4bd..f64d65cc6 100644
--- a/compiler/rustc_hir_analysis/src/astconv/generics.rs
+++ b/compiler/rustc_hir_analysis/src/astconv/generics.rs
@@ -11,7 +11,6 @@ use rustc_hir as hir;
use rustc_hir::def::{DefKind, Res};
use rustc_hir::def_id::DefId;
use rustc_hir::GenericArg;
-use rustc_infer::infer::TyCtxtInferExt;
use rustc_middle::ty::{
self, subst, subst::SubstsRef, GenericParamDef, GenericParamDefKind, IsSuggestable, Ty, TyCtxt,
};
@@ -83,9 +82,7 @@ impl<'o, 'tcx> dyn AstConv<'tcx> + 'o {
Res::Def(DefKind::TyParam, src_def_id) => {
if let Some(param_local_id) = param.def_id.as_local() {
let param_name = tcx.hir().ty_param_name(param_local_id);
- let infcx = tcx.infer_ctxt().build();
- let param_type =
- infcx.resolve_numeric_literals_with_default(tcx.type_of(param.def_id));
+ let param_type = tcx.type_of(param.def_id);
if param_type.is_suggestable(tcx, false) {
err.span_suggestion(
tcx.def_span(src_def_id),
diff --git a/compiler/rustc_hir_analysis/src/astconv/mod.rs b/compiler/rustc_hir_analysis/src/astconv/mod.rs
index 38f195dab..78d204d47 100644
--- a/compiler/rustc_hir_analysis/src/astconv/mod.rs
+++ b/compiler/rustc_hir_analysis/src/astconv/mod.rs
@@ -23,7 +23,6 @@ use rustc_hir as hir;
use rustc_hir::def::{CtorOf, DefKind, Namespace, Res};
use rustc_hir::def_id::{DefId, LocalDefId};
use rustc_hir::intravisit::{walk_generics, Visitor as _};
-use rustc_hir::lang_items::LangItem;
use rustc_hir::{GenericArg, GenericArgs, OpaqueTyOrigin};
use rustc_middle::middle::stability::AllowUnstable;
use rustc_middle::ty::subst::{self, GenericArgKind, InternalSubsts, SubstsRef};
@@ -55,7 +54,7 @@ pub struct PathSeg(pub DefId, pub usize);
pub trait AstConv<'tcx> {
fn tcx<'a>(&'a self) -> TyCtxt<'tcx>;
- fn item_def_id(&self) -> Option<DefId>;
+ fn item_def_id(&self) -> DefId;
/// Returns predicates in scope of the form `X: Foo<T>`, where `X`
/// is a type parameter `X` with the given id `def_id` and T
@@ -110,13 +109,16 @@ pub trait AstConv<'tcx> {
) -> Ty<'tcx>;
/// Normalize an associated type coming from the user.
+ ///
+ /// This should only be used by astconv. Use `FnCtxt::normalize`
+ /// or `ObligationCtxt::normalize` in downstream crates.
fn normalize_ty(&self, span: Span, ty: Ty<'tcx>) -> Ty<'tcx>;
/// Invoked when we encounter an error from some prior pass
/// (e.g., resolve) that is translated into a ty-error. This is
/// used to help suppress derived errors typeck might otherwise
/// report.
- fn set_tainted_by_errors(&self);
+ fn set_tainted_by_errors(&self, e: ErrorGuaranteed);
fn record_ty(&self, hir_id: hir::HirId, ty: Ty<'tcx>, span: Span);
}
@@ -242,14 +244,14 @@ impl<'o, 'tcx> dyn AstConv<'tcx> + 'o {
}
None => {
- self.re_infer(def, lifetime.span).unwrap_or_else(|| {
+ self.re_infer(def, lifetime.ident.span).unwrap_or_else(|| {
debug!(?lifetime, "unelided lifetime in signature");
// This indicates an illegal lifetime
// elision. `resolve_lifetime` should have
// reported an error in this case -- but if
// not, let's error out.
- tcx.sess.delay_span_bug(lifetime.span, "unelided lifetime in signature");
+ tcx.sess.delay_span_bug(lifetime.ident.span, "unelided lifetime in signature");
// Supply some dummy value. We don't have an
// `re_error`, annoyingly, so use `'static`.
@@ -275,7 +277,7 @@ impl<'o, 'tcx> dyn AstConv<'tcx> + 'o {
item_segment.args(),
item_segment.infer_args,
None,
- None,
+ ty::BoundConstness::NotConst,
);
if let Some(b) = item_segment.args().bindings.first() {
Self::prohibit_assoc_ty_binding(self.tcx(), b.span);
@@ -325,7 +327,7 @@ impl<'o, 'tcx> dyn AstConv<'tcx> + 'o {
generic_args: &'a hir::GenericArgs<'_>,
infer_args: bool,
self_ty: Option<Ty<'tcx>>,
- constness: Option<ty::BoundConstness>,
+ constness: ty::BoundConstness,
) -> (SubstsRef<'tcx>, GenericArgCountResult) {
// If the type is parameterized by this region, then replace this
// region with the current anon region binding (in other words,
@@ -345,7 +347,7 @@ impl<'o, 'tcx> dyn AstConv<'tcx> + 'o {
assert!(self_ty.is_some());
}
} else {
- assert!(self_ty.is_none() && parent_substs.is_empty());
+ assert!(self_ty.is_none());
}
let arg_count = Self::check_generic_arg_count(
@@ -433,7 +435,7 @@ impl<'o, 'tcx> dyn AstConv<'tcx> + 'o {
ty::Const::from_opt_const_arg_anon_const(
tcx,
ty::WithOptConstParam {
- did: tcx.hir().local_def_id(ct.value.hir_id),
+ did: ct.value.def_id,
const_param_did: Some(param.def_id),
},
)
@@ -501,6 +503,9 @@ impl<'o, 'tcx> dyn AstConv<'tcx> + 'o {
}
GenericParamDefKind::Const { has_default } => {
let ty = tcx.at(self.span).type_of(param.def_id);
+ if ty.references_error() {
+ return tcx.const_error(ty).into();
+ }
if !infer_args && has_default {
tcx.bound_const_param_default(param.def_id)
.subst(tcx, substs.unwrap())
@@ -536,7 +541,7 @@ impl<'o, 'tcx> dyn AstConv<'tcx> + 'o {
&mut substs_ctx,
);
- if let Some(ty::BoundConstness::ConstIfConst) = constness
+ if let ty::BoundConstness::ConstIfConst = constness
&& generics.has_self && !tcx.has_attr(def_id, sym::const_trait)
{
             tcx.sess.emit_err(crate::errors::ConstBoundForNonConstTrait { span });
@@ -568,8 +573,7 @@ impl<'o, 'tcx> dyn AstConv<'tcx> + 'o {
ConvertedBindingKind::Equality(self.ast_ty_to_ty(ty).into())
}
hir::Term::Const(ref c) => {
- let local_did = self.tcx().hir().local_def_id(c.hir_id);
- let c = Const::from_anon_const(self.tcx(), local_did);
+ let c = Const::from_anon_const(self.tcx(), c.def_id);
ConvertedBindingKind::Equality(c.into())
}
},
@@ -609,7 +613,7 @@ impl<'o, 'tcx> dyn AstConv<'tcx> + 'o {
item_segment.args(),
item_segment.infer_args,
None,
- None,
+ ty::BoundConstness::NotConst,
);
if let Some(b) = item_segment.args().bindings.first() {
@@ -639,7 +643,7 @@ impl<'o, 'tcx> dyn AstConv<'tcx> + 'o {
self_ty,
trait_ref.path.segments.last().unwrap(),
true,
- Some(constness),
+ constness,
)
}
@@ -666,7 +670,7 @@ impl<'o, 'tcx> dyn AstConv<'tcx> + 'o {
args,
infer_args,
Some(self_ty),
- Some(constness),
+ constness,
);
let tcx = self.tcx();
@@ -796,7 +800,7 @@ impl<'o, 'tcx> dyn AstConv<'tcx> + 'o {
self_ty: Ty<'tcx>,
trait_segment: &hir::PathSegment<'_>,
is_impl: bool,
- constness: Option<ty::BoundConstness>,
+ constness: ty::BoundConstness,
) -> ty::TraitRef<'tcx> {
let (substs, _) = self.create_substs_for_ast_trait_ref(
span,
@@ -820,7 +824,7 @@ impl<'o, 'tcx> dyn AstConv<'tcx> + 'o {
self_ty: Ty<'tcx>,
trait_segment: &'a hir::PathSegment<'a>,
is_impl: bool,
- constness: Option<ty::BoundConstness>,
+ constness: ty::BoundConstness,
) -> (SubstsRef<'tcx>, GenericArgCountResult) {
self.complain_about_internal_fn_trait(span, trait_def_id, trait_segment, is_impl);
@@ -849,12 +853,12 @@ impl<'o, 'tcx> dyn AstConv<'tcx> + 'o {
.is_some()
}
- // Sets `implicitly_sized` to true on `Bounds` if necessary
+ /// Sets `implicitly_sized` to true on `Bounds` if necessary
pub(crate) fn add_implicitly_sized<'hir>(
&self,
bounds: &mut Bounds<'hir>,
ast_bounds: &'hir [hir::GenericBound<'hir>],
- self_ty_where_predicates: Option<(hir::HirId, &'hir [hir::WherePredicate<'hir>])>,
+ self_ty_where_predicates: Option<(LocalDefId, &'hir [hir::WherePredicate<'hir>])>,
span: Span,
) {
let tcx = self.tcx();
@@ -874,19 +878,18 @@ impl<'o, 'tcx> dyn AstConv<'tcx> + 'o {
};
search_bounds(ast_bounds);
if let Some((self_ty, where_clause)) = self_ty_where_predicates {
- let self_ty_def_id = tcx.hir().local_def_id(self_ty).to_def_id();
for clause in where_clause {
if let hir::WherePredicate::BoundPredicate(pred) = clause {
- if pred.is_param_bound(self_ty_def_id) {
+ if pred.is_param_bound(self_ty.to_def_id()) {
search_bounds(pred.bounds);
}
}
}
}
- let sized_def_id = tcx.lang_items().require(LangItem::Sized);
+ let sized_def_id = tcx.lang_items().sized_trait();
match (&sized_def_id, unbound) {
- (Ok(sized_def_id), Some(tpb))
+ (Some(sized_def_id), Some(tpb))
if tpb.path.res == Res::Def(DefKind::Trait, *sized_def_id) =>
{
// There was in fact a `?Sized` bound, return without doing anything
@@ -906,7 +909,7 @@ impl<'o, 'tcx> dyn AstConv<'tcx> + 'o {
// There was no `?Sized` bound; add implicitly sized if `Sized` is available.
}
}
- if sized_def_id.is_err() {
+ if sized_def_id.is_none() {
// No lang item for `Sized`, so we can't add it as a bound.
return;
}
@@ -961,9 +964,10 @@ impl<'o, 'tcx> dyn AstConv<'tcx> + 'o {
}
hir::GenericBound::Outlives(lifetime) => {
let region = self.ast_region_to_region(lifetime, None);
- bounds
- .region_bounds
- .push((ty::Binder::bind_with_vars(region, bound_vars), lifetime.span));
+ bounds.region_bounds.push((
+ ty::Binder::bind_with_vars(region, bound_vars),
+ lifetime.ident.span,
+ ));
}
}
}
@@ -1199,7 +1203,8 @@ impl<'o, 'tcx> dyn AstConv<'tcx> + 'o {
(_, _) => {
let got = if let Some(_) = term.ty() { "type" } else { "constant" };
let expected = def_kind.descr(assoc_item_def_id);
- tcx.sess
+ let reported = tcx
+ .sess
.struct_span_err(
binding.span,
&format!("expected {expected} bound, found {got}"),
@@ -1210,11 +1215,14 @@ impl<'o, 'tcx> dyn AstConv<'tcx> + 'o {
)
.emit();
term = match def_kind {
- hir::def::DefKind::AssocTy => tcx.ty_error().into(),
+ hir::def::DefKind::AssocTy => {
+ tcx.ty_error_with_guaranteed(reported).into()
+ }
hir::def::DefKind::AssocConst => tcx
- .const_error(
+ .const_error_with_guaranteed(
tcx.bound_type_of(assoc_item_def_id)
.subst(tcx, projection_ty.skip_binder().substs),
+ reported,
)
.into(),
_ => unreachable!(),
@@ -1332,8 +1340,9 @@ impl<'o, 'tcx> dyn AstConv<'tcx> + 'o {
.map(|&(trait_ref, _, _)| trait_ref.def_id())
.find(|&trait_ref| tcx.is_trait_alias(trait_ref))
.map(|trait_ref| tcx.def_span(trait_ref));
- tcx.sess.emit_err(TraitObjectDeclaredWithNoTraits { span, trait_alias_span });
- return tcx.ty_error();
+ let reported =
+ tcx.sess.emit_err(TraitObjectDeclaredWithNoTraits { span, trait_alias_span });
+ return tcx.ty_error_with_guaranteed(reported);
}
// Check that there are no gross object safety violations;
@@ -1343,14 +1352,14 @@ impl<'o, 'tcx> dyn AstConv<'tcx> + 'o {
let object_safety_violations =
astconv_object_safety_violations(tcx, item.trait_ref().def_id());
if !object_safety_violations.is_empty() {
- report_object_safety_error(
+ let reported = report_object_safety_error(
tcx,
span,
item.trait_ref().def_id(),
&object_safety_violations,
)
.emit();
- return tcx.ty_error();
+ return tcx.ty_error_with_guaranteed(reported);
}
}
@@ -1373,7 +1382,7 @@ impl<'o, 'tcx> dyn AstConv<'tcx> + 'o {
let bound_predicate = obligation.predicate.kind();
match bound_predicate.skip_binder() {
- ty::PredicateKind::Trait(pred) => {
+ ty::PredicateKind::Clause(ty::Clause::Trait(pred)) => {
let pred = bound_predicate.rebind(pred);
associated_types.entry(span).or_default().extend(
tcx.associated_items(pred.def_id())
@@ -1382,7 +1391,7 @@ impl<'o, 'tcx> dyn AstConv<'tcx> + 'o {
.map(|item| item.def_id),
);
}
- ty::PredicateKind::Projection(pred) => {
+ ty::PredicateKind::Clause(ty::Clause::Projection(pred)) => {
let pred = bound_predicate.rebind(pred);
// A `Self` within the original bound will be substituted with a
// `trait_object_dummy_self`, so check for that.
@@ -1812,7 +1821,7 @@ impl<'o, 'tcx> dyn AstConv<'tcx> + 'o {
// Check if we have an enum variant.
let mut variant_resolution = None;
- if let ty::Adt(adt_def, _) = qself_ty.kind() {
+ if let ty::Adt(adt_def, adt_substs) = qself_ty.kind() {
if adt_def.is_enum() {
let variant_def = adt_def
.variants()
@@ -1908,6 +1917,22 @@ impl<'o, 'tcx> dyn AstConv<'tcx> + 'o {
}
}
}
+
+ // see if we can satisfy using an inherent associated type
+ for &impl_ in tcx.inherent_impls(adt_def.did()) {
+ let Some(assoc_ty_did) = self.lookup_assoc_ty(assoc_ident, hir_ref_id, span, impl_) else {
+ continue;
+ };
+ let item_substs = self.create_substs_for_associated_item(
+ span,
+ assoc_ty_did,
+ assoc_segment,
+ adt_substs,
+ );
+ let ty = tcx.bound_type_of(assoc_ty_did).subst(tcx, item_substs);
+ let ty = self.normalize_ty(span, ty);
+ return Ok((ty, DefKind::AssocTy, assoc_ty_did));
+ }
}
// Find the type of the associated item, and the trait where the associated
@@ -1977,7 +2002,7 @@ impl<'o, 'tcx> dyn AstConv<'tcx> + 'o {
}
err.emit()
- } else if let Some(reported) = qself_ty.error_reported() {
+ } else if let Err(reported) = qself_ty.error_reported() {
reported
} else {
// Don't print `TyErr` to the user.
@@ -1993,37 +2018,17 @@ impl<'o, 'tcx> dyn AstConv<'tcx> + 'o {
};
let trait_did = bound.def_id();
- let (assoc_ident, def_scope) =
- tcx.adjust_ident_and_get_scope(assoc_ident, trait_did, hir_ref_id);
-
- // We have already adjusted the item name above, so compare with `ident.normalize_to_macros_2_0()` instead
- // of calling `filter_by_name_and_kind`.
- let item = tcx.associated_items(trait_did).in_definition_order().find(|i| {
- i.kind.namespace() == Namespace::TypeNS
- && i.ident(tcx).normalize_to_macros_2_0() == assoc_ident
- });
- // Assume that if it's not matched, there must be a const defined with the same name
- // but it was used in a type position.
- let Some(item) = item else {
+ let Some(assoc_ty_did) = self.lookup_assoc_ty(assoc_ident, hir_ref_id, span, trait_did) else {
+ // Assume that if it's not matched, there must be a const defined with the same name
+ // but it was used in a type position.
let msg = format!("found associated const `{assoc_ident}` when type was expected");
let guar = tcx.sess.struct_span_err(span, &msg).emit();
return Err(guar);
};
- let ty = self.projected_ty_from_poly_trait_ref(span, item.def_id, assoc_segment, bound);
+ let ty = self.projected_ty_from_poly_trait_ref(span, assoc_ty_did, assoc_segment, bound);
let ty = self.normalize_ty(span, ty);
- let kind = DefKind::AssocTy;
- if !item.visibility(tcx).is_accessible_from(def_scope, tcx) {
- let kind = kind.descr(item.def_id);
- let msg = format!("{} `{}` is private", kind, assoc_ident);
- tcx.sess
- .struct_span_err(span, &msg)
- .span_label(span, &format!("private {}", kind))
- .emit();
- }
- tcx.check_stability(item.def_id, Some(hir_ref_id), span, None);
-
if let Some(variant_def_id) = variant_resolution {
tcx.struct_span_lint_hir(
AMBIGUOUS_ASSOCIATED_ITEMS,
@@ -2042,7 +2047,7 @@ impl<'o, 'tcx> dyn AstConv<'tcx> + 'o {
};
could_refer_to(DefKind::Variant, variant_def_id, "");
- could_refer_to(kind, item.def_id, " also");
+ could_refer_to(DefKind::AssocTy, assoc_ty_did, " also");
lint.span_suggestion(
span,
@@ -2055,7 +2060,40 @@ impl<'o, 'tcx> dyn AstConv<'tcx> + 'o {
},
);
}
- Ok((ty, kind, item.def_id))
+ Ok((ty, DefKind::AssocTy, assoc_ty_did))
+ }
+
+ fn lookup_assoc_ty(
+ &self,
+ ident: Ident,
+ block: hir::HirId,
+ span: Span,
+ scope: DefId,
+ ) -> Option<DefId> {
+ let tcx = self.tcx();
+ let (ident, def_scope) = tcx.adjust_ident_and_get_scope(ident, scope, block);
+
+ // We have already adjusted the item name above, so compare with `ident.normalize_to_macros_2_0()` instead
+ // of calling `find_by_name_and_kind`.
+ let item = tcx.associated_items(scope).in_definition_order().find(|i| {
+ i.kind.namespace() == Namespace::TypeNS
+ && i.ident(tcx).normalize_to_macros_2_0() == ident
+ })?;
+
+ let kind = DefKind::AssocTy;
+ if !item.visibility(tcx).is_accessible_from(def_scope, tcx) {
+ let kind = kind.descr(item.def_id);
+ let msg = format!("{kind} `{ident}` is private");
+ let def_span = self.tcx().def_span(item.def_id);
+ tcx.sess
+ .struct_span_err_with_code(span, &msg, rustc_errors::error_code!(E0624))
+ .span_label(span, &format!("private {kind}"))
+ .span_label(def_span, &format!("{kind} defined here"))
+ .emit();
+ }
+ tcx.check_stability(item.def_id, Some(block), span, None);
+
+ Some(item.def_id)
}
fn qpath_to_ty(
@@ -2080,17 +2118,14 @@ impl<'o, 'tcx> dyn AstConv<'tcx> + 'o {
debug!("qpath_to_ty: self.item_def_id()={:?}", def_id);
- let parent_def_id = def_id
- .and_then(|def_id| {
- def_id.as_local().map(|def_id| tcx.hir().local_def_id_to_hir_id(def_id))
- })
+ let parent_def_id = def_id.as_local().map(|def_id| tcx.hir().local_def_id_to_hir_id(def_id))
.map(|hir_id| tcx.hir().get_parent_item(hir_id).to_def_id());
debug!("qpath_to_ty: parent_def_id={:?}", parent_def_id);
// If the trait in segment is the same as the trait defining the item,
// use the `<Self as ..>` syntax in the error.
- let is_part_of_self_trait_constraints = def_id == Some(trait_def_id);
+ let is_part_of_self_trait_constraints = def_id == trait_def_id;
let is_part_of_fn_in_self_trait = parent_def_id == Some(trait_def_id);
let type_name = if is_part_of_self_trait_constraints || is_part_of_fn_in_self_trait {
@@ -2099,13 +2134,13 @@ impl<'o, 'tcx> dyn AstConv<'tcx> + 'o {
"Type"
};
- self.report_ambiguous_associated_type(
+ let reported = self.report_ambiguous_associated_type(
span,
type_name,
&path_str,
item_segment.ident.name,
);
- return tcx.ty_error();
+ return tcx.ty_error_with_guaranteed(reported)
};
debug!("qpath_to_ty: self_type={:?}", self_ty);
@@ -2116,7 +2151,7 @@ impl<'o, 'tcx> dyn AstConv<'tcx> + 'o {
self_ty,
trait_segment,
false,
- Some(constness),
+ constness,
);
let item_substs = self.create_substs_for_associated_item(
@@ -2365,7 +2400,7 @@ impl<'o, 'tcx> dyn AstConv<'tcx> + 'o {
path_segs
}
- // Check a type `Path` and convert it to a `Ty`.
+ /// Check a type `Path` and convert it to a `Ty`.
pub fn res_to_ty(
&self,
opt_self_ty: Option<Ty<'tcx>>,
@@ -2383,7 +2418,7 @@ impl<'o, 'tcx> dyn AstConv<'tcx> + 'o {
match path.res {
Res::Def(DefKind::OpaqueTy | DefKind::ImplTraitPlaceholder, did) => {
// Check for desugared `impl Trait`.
- assert!(ty::is_impl_trait_defn(tcx, did).is_none());
+ assert!(tcx.is_type_alias_impl_trait(did));
let item_segment = path.segments.split_last().unwrap();
self.prohibit_generics(item_segment.1.iter(), |err| {
err.note("`impl Trait` types can't have type parameters");
@@ -2547,8 +2582,7 @@ impl<'o, 'tcx> dyn AstConv<'tcx> + 'o {
{
err.span_note(impl_.self_ty.span, "not a concrete type");
}
- err.emit();
- tcx.ty_error()
+ tcx.ty_error_with_guaranteed(err.emit())
} else {
self.normalize_ty(span, ty)
}
@@ -2596,8 +2630,12 @@ impl<'o, 'tcx> dyn AstConv<'tcx> + 'o {
}
}
Res::Err => {
- self.set_tainted_by_errors();
- self.tcx().ty_error()
+ let e = self
+ .tcx()
+ .sess
+                .delay_span_bug(path.span, "path with `Res::Err` but no error emitted");
+ self.set_tainted_by_errors(e);
+ self.tcx().ty_error_with_guaranteed(e)
}
_ => span_bug!(span, "unexpected resolution: {:?}", path.res),
}
@@ -2687,7 +2725,7 @@ impl<'o, 'tcx> dyn AstConv<'tcx> + 'o {
&GenericArgs::none(),
true,
None,
- None,
+ ty::BoundConstness::NotConst,
);
EarlyBinder(self.normalize_ty(span, tcx.at(span).type_of(def_id)))
.subst(tcx, substs)
@@ -2696,8 +2734,7 @@ impl<'o, 'tcx> dyn AstConv<'tcx> + 'o {
let length = match length {
&hir::ArrayLen::Infer(_, span) => self.ct_infer(tcx.types.usize, None, span),
hir::ArrayLen::Body(constant) => {
- let length_def_id = tcx.hir().local_def_id(constant.hir_id);
- ty::Const::from_anon_const(tcx, length_def_id)
+ ty::Const::from_anon_const(tcx, constant.def_id)
}
};
@@ -2705,7 +2742,7 @@ impl<'o, 'tcx> dyn AstConv<'tcx> + 'o {
self.normalize_ty(ast_ty.span, array_ty)
}
hir::TyKind::Typeof(ref e) => {
- let ty_erased = tcx.type_of(tcx.hir().local_def_id(e.hir_id));
+ let ty_erased = tcx.type_of(e.def_id);
let ty = tcx.fold_regions(ty_erased, |r, _| {
if r.is_erased() { tcx.lifetimes.re_static } else { r }
});
@@ -2750,35 +2787,11 @@ impl<'o, 'tcx> dyn AstConv<'tcx> + 'o {
let substs = InternalSubsts::for_item(tcx, def_id, |param, _| {
if let Some(i) = (param.index as usize).checked_sub(generics.parent_count) {
// Our own parameters are the resolved lifetimes.
- if let GenericParamDefKind::Lifetime = param.kind {
- if let hir::GenericArg::Lifetime(lifetime) = &lifetimes[i] {
- self.ast_region_to_region(lifetime, None).into()
- } else {
- bug!()
- }
- } else {
- bug!()
- }
+ let GenericParamDefKind::Lifetime { .. } = param.kind else { bug!() };
+ let hir::GenericArg::Lifetime(lifetime) = &lifetimes[i] else { bug!() };
+ self.ast_region_to_region(lifetime, None).into()
} else {
- match param.kind {
- // For RPIT (return position impl trait), only lifetimes
- // mentioned in the impl Trait predicate are captured by
- // the opaque type, so the lifetime parameters from the
- // parent item need to be replaced with `'static`.
- //
- // For `impl Trait` in the types of statics, constants,
- // locals and type aliases. These capture all parent
- // lifetimes, so they can use their identity subst.
- GenericParamDefKind::Lifetime
- if matches!(
- origin,
- hir::OpaqueTyOrigin::FnReturn(..) | hir::OpaqueTyOrigin::AsyncFn(..)
- ) =>
- {
- tcx.lifetimes.re_static.into()
- }
- _ => tcx.mk_param_from_def(param),
- }
+ tcx.mk_param_from_def(param)
}
});
debug!("impl_trait_ty_to_ty: substs={:?}", substs);
@@ -2955,6 +2968,7 @@ impl<'o, 'tcx> dyn AstConv<'tcx> + 'o {
Some(tcx.liberate_late_bound_regions(fn_hir_id.expect_owner().to_def_id(), ty))
}
+ #[instrument(level = "trace", skip(self, generate_err))]
fn validate_late_bound_regions(
&self,
constrained_regions: FxHashSet<ty::BoundRegionKind>,
@@ -2963,7 +2977,7 @@ impl<'o, 'tcx> dyn AstConv<'tcx> + 'o {
) {
for br in referenced_regions.difference(&constrained_regions) {
let br_name = match *br {
- ty::BrNamed(_, kw::UnderscoreLifetime) | ty::BrAnon(_) | ty::BrEnv => {
+ ty::BrNamed(_, kw::UnderscoreLifetime) | ty::BrAnon(..) | ty::BrEnv => {
"an anonymous lifetime".to_string()
}
ty::BrNamed(_, name) => format!("lifetime `{}`", name),
@@ -2971,7 +2985,7 @@ impl<'o, 'tcx> dyn AstConv<'tcx> + 'o {
let mut err = generate_err(&br_name);
- if let ty::BrNamed(_, kw::UnderscoreLifetime) | ty::BrAnon(_) = *br {
+ if let ty::BrNamed(_, kw::UnderscoreLifetime) | ty::BrAnon(..) = *br {
// The only way for an anonymous lifetime to wind up
// in the return type but **also** be unconstrained is
// if it only appears in "associated types" in the
@@ -2996,7 +3010,7 @@ impl<'o, 'tcx> dyn AstConv<'tcx> + 'o {
fn compute_object_lifetime_bound(
&self,
span: Span,
- existential_predicates: &'tcx ty::List<ty::Binder<'tcx, ty::ExistentialPredicate<'tcx>>>,
+ existential_predicates: &'tcx ty::List<ty::PolyExistentialPredicate<'tcx>>,
) -> Option<ty::Region<'tcx>> // if None, use the default
{
let tcx = self.tcx();
diff --git a/compiler/rustc_hir_analysis/src/bounds.rs b/compiler/rustc_hir_analysis/src/bounds.rs
index 6a28bb16a..3e3544ce6 100644
--- a/compiler/rustc_hir_analysis/src/bounds.rs
+++ b/compiler/rustc_hir_analysis/src/bounds.rs
@@ -60,13 +60,10 @@ impl<'tcx> Bounds<'tcx> {
{
// If it could be sized, and is, add the `Sized` predicate.
let sized_predicate = self.implicitly_sized.and_then(|span| {
- tcx.lang_items().sized_trait().map(move |sized| {
- let trait_ref = ty::Binder::dummy(ty::TraitRef {
- def_id: sized,
- substs: tcx.mk_substs_trait(param_ty, &[]),
- });
- (trait_ref.without_const().to_predicate(tcx), span)
- })
+ // FIXME: use tcx.at(span).mk_trait_ref(LangItem::Sized) here? This may make no-core code harder to write.
+ let sized = tcx.lang_items().sized_trait()?;
+ let trait_ref = ty::Binder::dummy(tcx.mk_trait_ref(sized, [param_ty]));
+ Some((trait_ref.without_const().to_predicate(tcx), span))
});
let region_preds = self.region_bounds.iter().map(move |&(region_bound, span)| {
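
The rewritten `sized_predicate` computation above flattens the nested `map` by using `?` inside the `and_then` closure to bail out when no `Sized` lang item is available. A tiny, self-contained illustration of that control flow with hypothetical plain types:

// Hypothetical plain types: `u32` for the span, a string for the `Sized` trait.
fn sized_predicate(
    implicitly_sized: Option<u32>,
    sized_trait: Option<&'static str>,
) -> Option<(&'static str, u32)> {
    implicitly_sized.and_then(|span| {
        let sized = sized_trait?; // no lang item: no predicate, no panic
        Some((sized, span))
    })
}

fn main() {
    assert_eq!(sized_predicate(Some(1), Some("Sized")), Some(("Sized", 1)));
    assert_eq!(sized_predicate(Some(1), None), None);
    assert_eq!(sized_predicate(None, Some("Sized")), None);
}
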
diff --git a/compiler/rustc_hir_analysis/src/check/check.rs b/compiler/rustc_hir_analysis/src/check/check.rs
index b70ac0205..fc0ca6209 100644
--- a/compiler/rustc_hir_analysis/src/check/check.rs
+++ b/compiler/rustc_hir_analysis/src/check/check.rs
@@ -1,4 +1,5 @@
use crate::check::intrinsicck::InlineAsmCtxt;
+use crate::errors::LinkageType;
use super::compare_method::check_type_bounds;
use super::compare_method::{compare_impl_method, compare_ty_impl};
@@ -6,10 +7,11 @@ use super::*;
use rustc_attr as attr;
use rustc_errors::{Applicability, ErrorGuaranteed, MultiSpan};
use rustc_hir as hir;
-use rustc_hir::def::{DefKind, Res};
+use rustc_hir::def::{CtorKind, DefKind, Res};
use rustc_hir::def_id::{DefId, LocalDefId};
use rustc_hir::intravisit::Visitor;
use rustc_hir::{ItemKind, Node, PathSegment};
+use rustc_infer::infer::opaque_types::ConstrainOpaqueTypeRegionVisitor;
use rustc_infer::infer::outlives::env::OutlivesEnvironment;
use rustc_infer::infer::{DefiningAnchor, RegionVariableOrigin, TyCtxtInferExt};
use rustc_infer::traits::Obligation;
@@ -19,13 +21,12 @@ use rustc_middle::middle::stability::EvalResult;
use rustc_middle::ty::layout::{LayoutError, MAX_SIMD_LANES};
use rustc_middle::ty::subst::GenericArgKind;
use rustc_middle::ty::util::{Discr, IntTypeExt};
-use rustc_middle::ty::{
- self, ParamEnv, ToPredicate, Ty, TyCtxt, TypeSuperVisitable, TypeVisitable,
-};
+use rustc_middle::ty::{self, AdtDef, ParamEnv, Ty, TyCtxt, TypeSuperVisitable, TypeVisitable};
use rustc_session::lint::builtin::{UNINHABITED_STATIC, UNSUPPORTED_CALLING_CONVENTIONS};
use rustc_span::symbol::sym;
use rustc_span::{self, Span};
use rustc_target::spec::abi::Abi;
+use rustc_trait_selection::traits::error_reporting::on_unimplemented::OnUnimplementedDirective;
use rustc_trait_selection::traits::error_reporting::TypeErrCtxtExt as _;
use rustc_trait_selection::traits::{self, ObligationCtxt};
@@ -75,7 +76,7 @@ fn check_struct(tcx: TyCtxt<'_>, def_id: LocalDefId) {
check_simd(tcx, span, def_id);
}
- check_transparent(tcx, span, def);
+ check_transparent(tcx, def);
check_packed(tcx, span, def);
}
@@ -83,7 +84,7 @@ fn check_union(tcx: TyCtxt<'_>, def_id: LocalDefId) {
let def = tcx.adt_def(def_id);
let span = tcx.def_span(def_id);
def.destructor(tcx); // force the destructor to be evaluated
- check_transparent(tcx, span, def);
+ check_transparent(tcx, def);
check_union_fields(tcx, span, def_id);
check_packed(tcx, span, def);
}
@@ -230,7 +231,9 @@ fn check_opaque<'tcx>(tcx: TyCtxt<'tcx>, id: hir::ItemId) {
let substs = InternalSubsts::identity_for_item(tcx, item.owner_id.to_def_id());
let span = tcx.def_span(item.owner_id.def_id);
- check_opaque_for_inheriting_lifetimes(tcx, item.owner_id.def_id, span);
+ if !tcx.features().impl_trait_projections {
+ check_opaque_for_inheriting_lifetimes(tcx, item.owner_id.def_id, span);
+ }
if tcx.type_of(item.owner_id.def_id).references_error() {
return;
}
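Gating `check_opaque_for_inheriting_lifetimes` on `impl_trait_projections` turns the classic E0760 shape into a feature suggestion rather than a hard error. A minimal sketch of that shape (expected to be rejected without the gate):

    struct MyStruct<'a>(&'a str);

    impl<'a> MyStruct<'a> {
        // `Self` is `MyStruct<'a>`, so the opaque return type of the `async fn`
        // references the parent lifetime `'a`.
        async fn new(s: &'a str) -> Self {
            MyStruct(s)
        }
    }

With `#![feature(impl_trait_projections)]` the whole check is skipped.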
@@ -239,6 +242,7 @@ fn check_opaque<'tcx>(tcx: TyCtxt<'tcx>, id: hir::ItemId) {
}
check_opaque_meets_bounds(tcx, item.owner_id.def_id, substs, span, &origin);
}
+
/// Checks that an opaque type does not use `Self` or `T::Foo` projections that would result
/// in "inheriting lifetimes".
#[instrument(level = "debug", skip(tcx, span))]
@@ -250,39 +254,11 @@ pub(super) fn check_opaque_for_inheriting_lifetimes<'tcx>(
let item = tcx.hir().expect_item(def_id);
debug!(?item, ?span);
- struct FoundParentLifetime;
- struct FindParentLifetimeVisitor<'tcx>(&'tcx ty::Generics);
- impl<'tcx> ty::visit::TypeVisitor<'tcx> for FindParentLifetimeVisitor<'tcx> {
- type BreakTy = FoundParentLifetime;
-
- fn visit_region(&mut self, r: ty::Region<'tcx>) -> ControlFlow<Self::BreakTy> {
- debug!("FindParentLifetimeVisitor: r={:?}", r);
- if let ty::ReEarlyBound(ty::EarlyBoundRegion { index, .. }) = *r {
- if index < self.0.parent_count as u32 {
- return ControlFlow::Break(FoundParentLifetime);
- } else {
- return ControlFlow::CONTINUE;
- }
- }
-
- r.super_visit_with(self)
- }
-
- fn visit_const(&mut self, c: ty::Const<'tcx>) -> ControlFlow<Self::BreakTy> {
- if let ty::ConstKind::Unevaluated(..) = c.kind() {
- // FIXME(#72219) We currently don't detect lifetimes within substs
- // which would violate this check. Even though the particular substitution is not used
- // within the const, this should still be fixed.
- return ControlFlow::CONTINUE;
- }
- c.super_visit_with(self)
- }
- }
-
struct ProhibitOpaqueVisitor<'tcx> {
tcx: TyCtxt<'tcx>,
opaque_identity_ty: Ty<'tcx>,
- generics: &'tcx ty::Generics,
+ parent_count: u32,
+ references_parent_regions: bool,
selftys: Vec<(Span, Option<String>)>,
}
@@ -290,12 +266,25 @@ pub(super) fn check_opaque_for_inheriting_lifetimes<'tcx>(
type BreakTy = Ty<'tcx>;
fn visit_ty(&mut self, t: Ty<'tcx>) -> ControlFlow<Self::BreakTy> {
- debug!("check_opaque_for_inheriting_lifetimes: (visit_ty) t={:?}", t);
+ debug!(?t, "root_visit_ty");
if t == self.opaque_identity_ty {
ControlFlow::CONTINUE
} else {
- t.super_visit_with(&mut FindParentLifetimeVisitor(self.generics))
- .map_break(|FoundParentLifetime| t)
+ t.visit_with(&mut ConstrainOpaqueTypeRegionVisitor {
+ tcx: self.tcx,
+ op: |region| {
+ if let ty::ReEarlyBound(ty::EarlyBoundRegion { index, .. }) = *region
+ && index < self.parent_count
+ {
+ self.references_parent_regions = true;
+ }
+ },
+ });
+ if self.references_parent_regions {
+ ControlFlow::Break(t)
+ } else {
+ ControlFlow::CONTINUE
+ }
}
}
}
@@ -328,15 +317,20 @@ pub(super) fn check_opaque_for_inheriting_lifetimes<'tcx>(
if let ItemKind::OpaqueTy(hir::OpaqueTy {
origin: hir::OpaqueTyOrigin::AsyncFn(..) | hir::OpaqueTyOrigin::FnReturn(..),
+ in_trait,
..
}) = item.kind
{
+ let substs = InternalSubsts::identity_for_item(tcx, def_id.to_def_id());
+ let opaque_identity_ty = if in_trait {
+ tcx.mk_projection(def_id.to_def_id(), substs)
+ } else {
+ tcx.mk_opaque(def_id.to_def_id(), substs)
+ };
let mut visitor = ProhibitOpaqueVisitor {
- opaque_identity_ty: tcx.mk_opaque(
- def_id.to_def_id(),
- InternalSubsts::identity_for_item(tcx, def_id.to_def_id()),
- ),
- generics: tcx.generics_of(def_id),
+ opaque_identity_ty,
+ parent_count: tcx.generics_of(def_id).parent_count as u32,
+ references_parent_regions: false,
tcx,
selftys: vec![],
};
@@ -344,10 +338,6 @@ pub(super) fn check_opaque_for_inheriting_lifetimes<'tcx>(
.explicit_item_bounds(def_id)
.iter()
.try_for_each(|(predicate, _)| predicate.visit_with(&mut visitor));
- debug!(
- "check_opaque_for_inheriting_lifetimes: prohibit_opaque={:?}, visitor.opaque_identity_ty={:?}, visitor.generics={:?}",
- prohibit_opaque, visitor.opaque_identity_ty, visitor.generics
- );
if let Some(ty) = prohibit_opaque.break_value() {
visitor.visit_item(&item);
@@ -358,15 +348,16 @@ pub(super) fn check_opaque_for_inheriting_lifetimes<'tcx>(
_ => unreachable!(),
};
- let mut err = struct_span_err!(
- tcx.sess,
+ let mut err = feature_err(
+ &tcx.sess.parse_sess,
+ sym::impl_trait_projections,
span,
- E0760,
- "`{}` return type cannot contain a projection or `Self` that references lifetimes from \
- a parent scope",
- if is_async { "async fn" } else { "impl Trait" },
+ &format!(
+ "`{}` return type cannot contain a projection or `Self` that references \
+ lifetimes from a parent scope",
+ if is_async { "async fn" } else { "impl Trait" },
+ ),
);
-
for (span, name) in visitor.selftys {
err.span_suggestion(
span,
@@ -450,8 +441,8 @@ fn check_opaque_meets_bounds<'tcx>(
let misc_cause = traits::ObligationCause::misc(span, hir_id);
- match infcx.at(&misc_cause, param_env).eq(opaque_ty, hidden_ty) {
- Ok(infer_ok) => ocx.register_infer_ok_obligations(infer_ok),
+ match ocx.eq(&misc_cause, param_env, opaque_ty, hidden_ty) {
+ Ok(()) => {}
Err(ty_err) => {
tcx.sess.delay_span_bug(
span,
@@ -463,15 +454,14 @@ fn check_opaque_meets_bounds<'tcx>(
// Additionally require the hidden type to be well-formed with only the generics of the opaque type.
// Defining use functions may have more bounds than the opaque type, which is ok, as long as the
// hidden type is well formed even without those bounds.
- let predicate =
- ty::Binder::dummy(ty::PredicateKind::WellFormed(hidden_ty.into())).to_predicate(tcx);
- ocx.register_obligation(Obligation::new(misc_cause, param_env, predicate));
+ let predicate = ty::Binder::dummy(ty::PredicateKind::WellFormed(hidden_ty.into()));
+ ocx.register_obligation(Obligation::new(tcx, misc_cause, param_env, predicate));
// Check that all obligations are satisfied by the implementation's
// version.
let errors = ocx.select_all_or_error();
if !errors.is_empty() {
- infcx.err_ctxt().report_fulfillment_errors(&errors, None, false);
+ infcx.err_ctxt().report_fulfillment_errors(&errors, None);
}
match origin {
// Checked when type checking the function containing them.
@@ -489,6 +479,36 @@ fn check_opaque_meets_bounds<'tcx>(
let _ = infcx.inner.borrow_mut().opaque_type_storage.take_opaque_types();
}
+fn is_enum_of_nonnullable_ptr<'tcx>(
+ tcx: TyCtxt<'tcx>,
+ adt_def: AdtDef<'tcx>,
+ substs: SubstsRef<'tcx>,
+) -> bool {
+ if adt_def.repr().inhibit_enum_layout_opt() {
+ return false;
+ }
+
+ let [var_one, var_two] = &adt_def.variants().raw[..] else {
+ return false;
+ };
+ let (([], [field]) | ([field], [])) = (&var_one.fields[..], &var_two.fields[..]) else {
+ return false;
+ };
+ matches!(field.ty(tcx, substs).kind(), ty::FnPtr(..) | ty::Ref(..))
+}
+
+fn check_static_linkage<'tcx>(tcx: TyCtxt<'tcx>, def_id: LocalDefId) {
+ if tcx.codegen_fn_attrs(def_id).import_linkage.is_some() {
+ if match tcx.type_of(def_id).kind() {
+ ty::RawPtr(_) => false,
+ ty::Adt(adt_def, substs) => !is_enum_of_nonnullable_ptr(tcx, *adt_def, *substs),
+ _ => true,
+ } {
+ tcx.sess.emit_err(LinkageType { span: tcx.def_span(def_id) });
+ }
+ }
+}
+
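`check_static_linkage` rejects imported statics whose type cannot encode an absent symbol. A rough surface-level sketch, assuming the usual unstable `linkage` attribute spelling (the attribute itself is not part of this diff):

    #![feature(linkage)]

    extern "C" {
        #[linkage = "extern_weak"]
        static MAYBE_SYM: *const u8;                // accepted: raw pointer
        #[linkage = "extern_weak"]
        static MAYBE_HOOK: Option<extern "C" fn()>; // accepted via is_enum_of_nonnullable_ptr
        // #[linkage = "extern_weak"]
        // static BAD: i32;                         // would be reported with the LinkageType error
    }

`is_enum_of_nonnullable_ptr` only accepts a two-variant enum where one variant is fieldless and the other has a single field of function-pointer or reference type, i.e. the `Option`-of-non-nullable-pointer layout.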
fn check_item_type<'tcx>(tcx: TyCtxt<'tcx>, id: hir::ItemId) {
debug!(
"check_item_type(it.def_id={:?}, it.name={})",
@@ -501,16 +521,13 @@ fn check_item_type<'tcx>(tcx: TyCtxt<'tcx>, id: hir::ItemId) {
tcx.ensure().typeck(id.owner_id.def_id);
maybe_check_static_with_link_section(tcx, id.owner_id.def_id);
check_static_inhabited(tcx, id.owner_id.def_id);
+ check_static_linkage(tcx, id.owner_id.def_id);
}
DefKind::Const => {
tcx.ensure().typeck(id.owner_id.def_id);
}
DefKind::Enum => {
- let item = tcx.hir().item(id);
- let hir::ItemKind::Enum(ref enum_definition, _) = item.kind else {
- return;
- };
- check_enum(tcx, &enum_definition.variants, item.owner_id.def_id);
+ check_enum(tcx, id.owner_id.def_id);
}
DefKind::Fn => {} // entirely within check_item_body
DefKind::Impl => {
@@ -642,6 +659,7 @@ fn check_item_type<'tcx>(tcx: TyCtxt<'tcx>, id: hir::ItemId) {
}
hir::ForeignItemKind::Static(..) => {
check_static_inhabited(tcx, def_id);
+ check_static_linkage(tcx, def_id);
}
_ => {}
}
@@ -659,7 +677,7 @@ fn check_item_type<'tcx>(tcx: TyCtxt<'tcx>, id: hir::ItemId) {
pub(super) fn check_on_unimplemented(tcx: TyCtxt<'_>, item: &hir::Item<'_>) {
// an error would be reported if this fails.
- let _ = traits::OnUnimplementedDirective::of_item(tcx, item.owner_id.to_def_id());
+ let _ = OnUnimplementedDirective::of_item(tcx, item.owner_id.to_def_id());
}
pub(super) fn check_specialization_validity<'tcx>(
@@ -1026,7 +1044,7 @@ pub(super) fn check_packed_inner(
None
}
-pub(super) fn check_transparent<'tcx>(tcx: TyCtxt<'tcx>, sp: Span, adt: ty::AdtDef<'tcx>) {
+pub(super) fn check_transparent<'tcx>(tcx: TyCtxt<'tcx>, adt: ty::AdtDef<'tcx>) {
if !adt.repr().transparent() {
return;
}
@@ -1035,14 +1053,14 @@ pub(super) fn check_transparent<'tcx>(tcx: TyCtxt<'tcx>, sp: Span, adt: ty::AdtD
feature_err(
&tcx.sess.parse_sess,
sym::transparent_unions,
- sp,
+ tcx.def_span(adt.did()),
"transparent unions are unstable",
)
.emit();
}
if adt.variants().len() != 1 {
- bad_variant_count(tcx, adt, sp, adt.did());
+ bad_variant_count(tcx, adt, tcx.def_span(adt.did()), adt.did());
if adt.variants().is_empty() {
// Don't bother checking the fields. No variants (and thus no fields) exist.
return;
@@ -1103,7 +1121,7 @@ pub(super) fn check_transparent<'tcx>(tcx: TyCtxt<'tcx>, sp: Span, adt: ty::AdtD
.filter_map(|(span, zst, _align1, _non_exhaustive)| if !zst { Some(span) } else { None });
let non_zst_count = non_zst_fields.clone().count();
if non_zst_count >= 2 {
- bad_non_zero_sized_fields(tcx, adt, non_zst_count, non_zst_fields, sp);
+ bad_non_zero_sized_fields(tcx, adt, non_zst_count, non_zst_fields, tcx.def_span(adt.did()));
}
let incompatible_zst_fields =
field_infos.clone().filter(|(_, _, _, opt)| opt.is_some()).count();
@@ -1143,12 +1161,11 @@ pub(super) fn check_transparent<'tcx>(tcx: TyCtxt<'tcx>, sp: Span, adt: ty::AdtD
}
#[allow(trivial_numeric_casts)]
-fn check_enum<'tcx>(tcx: TyCtxt<'tcx>, vs: &'tcx [hir::Variant<'tcx>], def_id: LocalDefId) {
+fn check_enum<'tcx>(tcx: TyCtxt<'tcx>, def_id: LocalDefId) {
let def = tcx.adt_def(def_id);
- let sp = tcx.def_span(def_id);
def.destructor(tcx); // force the destructor to be evaluated
- if vs.is_empty() {
+ if def.variants().is_empty() {
if let Some(attr) = tcx.get_attrs(def_id.to_def_id(), sym::repr).next() {
struct_span_err!(
tcx.sess,
@@ -1156,7 +1173,7 @@ fn check_enum<'tcx>(tcx: TyCtxt<'tcx>, vs: &'tcx [hir::Variant<'tcx>], def_id: L
E0084,
"unsupported representation for zero-variant enum"
)
- .span_label(sp, "zero-variant enum")
+ .span_label(tcx.def_span(def_id), "zero-variant enum")
.emit();
}
}
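E0084 covers a `repr` attached to an enum with nothing to represent, e.g.:

    #[repr(u8)]
    enum Empty {} // E0084: unsupported representation for zero-variant enum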
@@ -1167,88 +1184,96 @@ fn check_enum<'tcx>(tcx: TyCtxt<'tcx>, vs: &'tcx [hir::Variant<'tcx>], def_id: L
feature_err(
&tcx.sess.parse_sess,
sym::repr128,
- sp,
+ tcx.def_span(def_id),
"repr with 128-bit type is unstable",
)
.emit();
}
}
- for v in vs {
- if let Some(ref e) = v.disr_expr {
- tcx.ensure().typeck(tcx.hir().local_def_id(e.hir_id));
+ for v in def.variants() {
+ if let ty::VariantDiscr::Explicit(discr_def_id) = v.discr {
+ tcx.ensure().typeck(discr_def_id.expect_local());
}
}
- if tcx.adt_def(def_id).repr().int.is_none() {
- let is_unit = |var: &hir::Variant<'_>| matches!(var.data, hir::VariantData::Unit(..));
+ if def.repr().int.is_none() {
+ let is_unit = |var: &ty::VariantDef| matches!(var.ctor_kind(), Some(CtorKind::Const));
+ let has_disr = |var: &ty::VariantDef| matches!(var.discr, ty::VariantDiscr::Explicit(_));
- let has_disr = |var: &hir::Variant<'_>| var.disr_expr.is_some();
- let has_non_units = vs.iter().any(|var| !is_unit(var));
- let disr_units = vs.iter().any(|var| is_unit(&var) && has_disr(&var));
- let disr_non_unit = vs.iter().any(|var| !is_unit(&var) && has_disr(&var));
+ let has_non_units = def.variants().iter().any(|var| !is_unit(var));
+ let disr_units = def.variants().iter().any(|var| is_unit(&var) && has_disr(&var));
+ let disr_non_unit = def.variants().iter().any(|var| !is_unit(&var) && has_disr(&var));
if disr_non_unit || (disr_units && has_non_units) {
- let mut err =
- struct_span_err!(tcx.sess, sp, E0732, "`#[repr(inttype)]` must be specified");
+ let mut err = struct_span_err!(
+ tcx.sess,
+ tcx.def_span(def_id),
+ E0732,
+ "`#[repr(inttype)]` must be specified"
+ );
err.emit();
}
}
- detect_discriminant_duplicate(tcx, def.discriminants(tcx).collect(), vs, sp);
-
- check_transparent(tcx, sp, def);
+ detect_discriminant_duplicate(tcx, def);
+ check_transparent(tcx, def);
}
/// Part of enum check. Given the discriminants of an enum, errors if two or more discriminants are equal
-fn detect_discriminant_duplicate<'tcx>(
- tcx: TyCtxt<'tcx>,
- mut discrs: Vec<(VariantIdx, Discr<'tcx>)>,
- vs: &'tcx [hir::Variant<'tcx>],
- self_span: Span,
-) {
+fn detect_discriminant_duplicate<'tcx>(tcx: TyCtxt<'tcx>, adt: ty::AdtDef<'tcx>) {
// Helper closure to reduce duplicate code. This gets called every time we detect a duplicate.
// Here `idx` refers to the order in which the discriminant appears, and its index in `vs`
- let report = |dis: Discr<'tcx>, idx: usize, err: &mut Diagnostic| {
- let var = &vs[idx]; // HIR for the duplicate discriminant
- let (span, display_discr) = match var.disr_expr {
- Some(ref expr) => {
+ let report = |dis: Discr<'tcx>, idx, err: &mut Diagnostic| {
+ let var = adt.variant(idx); // the variant whose discriminant is duplicated
+ let (span, display_discr) = match var.discr {
+ ty::VariantDiscr::Explicit(discr_def_id) => {
// In the case the discriminant is both a duplicate and overflowed, let the user know
- if let hir::ExprKind::Lit(lit) = &tcx.hir().body(expr.body).value.kind
+ if let hir::Node::AnonConst(expr) = tcx.hir().get_by_def_id(discr_def_id.expect_local())
+ && let hir::ExprKind::Lit(lit) = &tcx.hir().body(expr.body).value.kind
&& let rustc_ast::LitKind::Int(lit_value, _int_kind) = &lit.node
&& *lit_value != dis.val
{
- (tcx.hir().span(expr.hir_id), format!("`{dis}` (overflowed from `{lit_value}`)"))
- // Otherwise, format the value as-is
+ (tcx.def_span(discr_def_id), format!("`{dis}` (overflowed from `{lit_value}`)"))
} else {
- (tcx.hir().span(expr.hir_id), format!("`{dis}`"))
+ // Otherwise, format the value as-is
+ (tcx.def_span(discr_def_id), format!("`{dis}`"))
}
}
- None => {
+ // This should not happen.
+ ty::VariantDiscr::Relative(0) => (tcx.def_span(var.def_id), format!("`{dis}`")),
+ ty::VariantDiscr::Relative(distance_to_explicit) => {
// At this point we know this discriminant is a duplicate, and was not explicitly
// assigned by the user. Here we iterate backwards to fetch the HIR for the last
// explicitly assigned discriminant, and letting the user know that this was the
// increment startpoint, and how many steps from there leading to the duplicate
- if let Some((n, hir::Variant { span, ident, .. })) =
- vs[..idx].iter().rev().enumerate().find(|v| v.1.disr_expr.is_some())
+ if let Some(explicit_idx) =
+ idx.as_u32().checked_sub(distance_to_explicit).map(VariantIdx::from_u32)
{
- let ve_ident = var.ident;
- let n = n + 1;
- let sp = if n > 1 { "variants" } else { "variant" };
+ let explicit_variant = adt.variant(explicit_idx);
+ let ve_ident = var.name;
+ let ex_ident = explicit_variant.name;
+ let sp = if distance_to_explicit > 1 { "variants" } else { "variant" };
err.span_label(
- *span,
- format!("discriminant for `{ve_ident}` incremented from this startpoint (`{ident}` + {n} {sp} later => `{ve_ident}` = {dis})"),
+ tcx.def_span(explicit_variant.def_id),
+ format!(
+ "discriminant for `{ve_ident}` incremented from this startpoint \
+ (`{ex_ident}` + {distance_to_explicit} {sp} later \
+ => `{ve_ident}` = {dis})"
+ ),
);
}
- (vs[idx].span, format!("`{dis}`"))
+ (tcx.def_span(var.def_id), format!("`{dis}`"))
}
};
err.span_label(span, format!("{display_discr} assigned here"));
};
+ let mut discrs = adt.discriminants(tcx).collect::<Vec<_>>();
+
// Here we loop through the discriminants, comparing each discriminant to another.
// When a duplicate is detected, we instantiate an error and point to both
// initial and duplicate value. The duplicate discriminant is then discarded by swapping
@@ -1257,29 +1282,29 @@ fn detect_discriminant_duplicate<'tcx>(
// style as we are mutating `discrs` on the fly).
let mut i = 0;
while i < discrs.len() {
- let hir_var_i_idx = discrs[i].0.index();
+ let var_i_idx = discrs[i].0;
let mut error: Option<DiagnosticBuilder<'_, _>> = None;
let mut o = i + 1;
while o < discrs.len() {
- let hir_var_o_idx = discrs[o].0.index();
+ let var_o_idx = discrs[o].0;
if discrs[i].1.val == discrs[o].1.val {
let err = error.get_or_insert_with(|| {
let mut ret = struct_span_err!(
tcx.sess,
- self_span,
+ tcx.def_span(adt.did()),
E0081,
"discriminant value `{}` assigned more than once",
discrs[i].1,
);
- report(discrs[i].1, hir_var_i_idx, &mut ret);
+ report(discrs[i].1, var_i_idx, &mut ret);
ret
});
- report(discrs[o].1, hir_var_o_idx, err);
+ report(discrs[o].1, var_o_idx, err);
// Safe to unwrap here, as we wouldn't reach this point if `discrs` was empty
discrs[o] = *discrs.last().unwrap();
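`detect_discriminant_duplicate` now works from the `ty::AdtDef` discriminants instead of HIR variants, but the diagnosed input is unchanged; for reference:

    enum E {
        A = 1,
        B,     // implicitly 2, incremented from the `A = 1` startpoint
        C = 2, // E0081: discriminant value `2` assigned more than once
    }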
diff --git a/compiler/rustc_hir_analysis/src/check/compare_method.rs b/compiler/rustc_hir_analysis/src/check/compare_method.rs
index 32f66b06f..db150ebf0 100644
--- a/compiler/rustc_hir_analysis/src/check/compare_method.rs
+++ b/compiler/rustc_hir_analysis/src/check/compare_method.rs
@@ -1,7 +1,7 @@
use super::potentially_plural_count;
use crate::errors::LifetimesOrBoundsMismatchOnTrait;
use hir::def_id::{DefId, LocalDefId};
-use rustc_data_structures::fx::{FxHashMap, FxHashSet};
+use rustc_data_structures::fx::{FxHashMap, FxIndexSet};
use rustc_errors::{pluralize, struct_span_err, Applicability, DiagnosticId, ErrorGuaranteed};
use rustc_hir as hir;
use rustc_hir::def::{DefKind, Res};
@@ -9,13 +9,12 @@ use rustc_hir::intravisit;
use rustc_hir::{GenericParamKind, ImplItemKind, TraitItemKind};
use rustc_infer::infer::outlives::env::OutlivesEnvironment;
use rustc_infer::infer::type_variable::{TypeVariableOrigin, TypeVariableOriginKind};
-use rustc_infer::infer::{self, TyCtxtInferExt};
+use rustc_infer::infer::{self, InferCtxt, TyCtxtInferExt};
use rustc_infer::traits::util;
use rustc_middle::ty::error::{ExpectedFound, TypeError};
use rustc_middle::ty::util::ExplicitSelf;
-use rustc_middle::ty::InternalSubsts;
use rustc_middle::ty::{
- self, AssocItem, DefIdTree, Ty, TypeFoldable, TypeFolder, TypeSuperFoldable, TypeVisitable,
+ self, DefIdTree, InternalSubsts, Ty, TypeFoldable, TypeFolder, TypeSuperFoldable, TypeVisitable,
};
use rustc_middle::ty::{GenericParamDefKind, ToPredicate, TyCtxt};
use rustc_span::Span;
@@ -50,11 +49,11 @@ pub(crate) fn compare_impl_method<'tcx>(
return;
}
- if let Err(_) = compare_number_of_generics(tcx, impl_m, impl_m_span, trait_m, trait_item_span) {
+ if let Err(_) = compare_number_of_generics(tcx, impl_m, trait_m, trait_item_span, false) {
return;
}
- if let Err(_) = compare_generic_param_kinds(tcx, impl_m, trait_m) {
+ if let Err(_) = compare_generic_param_kinds(tcx, impl_m, trait_m, false) {
return;
}
@@ -68,8 +67,14 @@ pub(crate) fn compare_impl_method<'tcx>(
return;
}
- if let Err(_) = compare_predicate_entailment(tcx, impl_m, impl_m_span, trait_m, impl_trait_ref)
- {
+ if let Err(_) = compare_predicate_entailment(
+ tcx,
+ impl_m,
+ impl_m_span,
+ trait_m,
+ impl_trait_ref,
+ CheckImpliedWfMode::Check,
+ ) {
return;
}
}
@@ -143,10 +148,11 @@ pub(crate) fn compare_impl_method<'tcx>(
#[instrument(level = "debug", skip(tcx, impl_m_span, impl_trait_ref))]
fn compare_predicate_entailment<'tcx>(
tcx: TyCtxt<'tcx>,
- impl_m: &AssocItem,
+ impl_m: &ty::AssocItem,
impl_m_span: Span,
- trait_m: &AssocItem,
+ trait_m: &ty::AssocItem,
impl_trait_ref: ty::TraitRef<'tcx>,
+ check_implied_wf: CheckImpliedWfMode,
) -> Result<(), ErrorGuaranteed> {
let trait_to_impl_substs = impl_trait_ref.substs;
@@ -156,8 +162,7 @@ fn compare_predicate_entailment<'tcx>(
// FIXME(@lcnr): remove that after removing `cause.body_id` from
// obligations.
let impl_m_hir_id = tcx.hir().local_def_id_to_hir_id(impl_m.def_id.expect_local());
- // We sometimes modify the span further down.
- let mut cause = ObligationCause::new(
+ let cause = ObligationCause::new(
impl_m_span,
impl_m_hir_id,
ObligationCauseCode::CompareImplItemObligation {
@@ -175,13 +180,11 @@ fn compare_predicate_entailment<'tcx>(
impl_to_placeholder_substs.rebase_onto(tcx, impl_m.container_id(tcx), trait_to_impl_substs);
debug!("compare_impl_method: trait_to_placeholder_substs={:?}", trait_to_placeholder_substs);
- let impl_m_generics = tcx.generics_of(impl_m.def_id);
- let trait_m_generics = tcx.generics_of(trait_m.def_id);
let impl_m_predicates = tcx.predicates_of(impl_m.def_id);
let trait_m_predicates = tcx.predicates_of(trait_m.def_id);
// Check region bounds.
- check_region_bounds_on_impl_item(tcx, impl_m, trait_m, &trait_m_generics, &impl_m_generics)?;
+ check_region_bounds_on_impl_item(tcx, impl_m, trait_m, false)?;
// Create obligations for each predicate declared by the impl
// definition in the context of the trait's parameter
@@ -220,14 +223,11 @@ fn compare_predicate_entailment<'tcx>(
debug!("compare_impl_method: caller_bounds={:?}", param_env.caller_bounds());
- let mut selcx = traits::SelectionContext::new(&infcx);
let impl_m_own_bounds = impl_m_predicates.instantiate_own(tcx, impl_to_placeholder_substs);
for (predicate, span) in iter::zip(impl_m_own_bounds.predicates, impl_m_own_bounds.spans) {
let normalize_cause = traits::ObligationCause::misc(span, impl_m_hir_id);
- let traits::Normalized { value: predicate, obligations } =
- traits::normalize(&mut selcx, param_env, normalize_cause, predicate);
+ let predicate = ocx.normalize(&normalize_cause, param_env, predicate);
- ocx.register_obligations(obligations);
let cause = ObligationCause::new(
span,
impl_m_hir_id,
@@ -237,7 +237,7 @@ fn compare_predicate_entailment<'tcx>(
kind: impl_m.kind,
},
);
- ocx.register_obligation(traits::Obligation::new(cause, param_env, predicate));
+ ocx.register_obligation(traits::Obligation::new(tcx, cause, param_env, predicate));
}
// We now need to check that the signature of the impl method is
@@ -256,17 +256,17 @@ fn compare_predicate_entailment<'tcx>(
// Compute placeholder form of impl and trait method tys.
let tcx = infcx.tcx;
- let mut wf_tys = FxHashSet::default();
+ let mut wf_tys = FxIndexSet::default();
- let impl_sig = infcx.replace_bound_vars_with_fresh_vars(
+ let unnormalized_impl_sig = infcx.replace_bound_vars_with_fresh_vars(
impl_m_span,
infer::HigherRankedType,
tcx.fn_sig(impl_m.def_id),
);
+ let unnormalized_impl_fty = tcx.mk_fn_ptr(ty::Binder::dummy(unnormalized_impl_sig));
let norm_cause = ObligationCause::misc(impl_m_span, impl_m_hir_id);
- let impl_sig = ocx.normalize(norm_cause.clone(), param_env, impl_sig);
- let impl_fty = tcx.mk_fn_ptr(ty::Binder::dummy(impl_sig));
+ let impl_fty = ocx.normalize(&norm_cause, param_env, unnormalized_impl_fty);
debug!("compare_impl_method: impl_fty={:?}", impl_fty);
let trait_sig = tcx.bound_fn_sig(trait_m.def_id).subst(tcx, trait_to_placeholder_substs);
@@ -276,7 +276,7 @@ fn compare_predicate_entailment<'tcx>(
// we have to do this before normalization, since the normalized ty may
// not contain the input parameters. See issue #87748.
wf_tys.extend(trait_sig.inputs_and_output.iter());
- let trait_sig = ocx.normalize(norm_cause, param_env, trait_sig);
+ let trait_sig = ocx.normalize(&norm_cause, param_env, trait_sig);
// We also have to add the normalized trait signature
// as we don't normalize during implied bounds computation.
wf_tys.extend(trait_sig.inputs_and_output.iter());
@@ -290,147 +290,126 @@ fn compare_predicate_entailment<'tcx>(
// type would be more appropriate. In other places we have a `Vec<Span>`
// corresponding to their `Vec<Predicate>`, but we don't have that here.
// Fixing this would improve the output of test `issue-83765.rs`.
- let mut result = infcx
- .at(&cause, param_env)
- .sup(trait_fty, impl_fty)
- .map(|infer_ok| ocx.register_infer_ok_obligations(infer_ok));
-
- // HACK(RPITIT): #101614. When we are trying to infer the hidden types for
- // RPITITs, we need to equate the output tys instead of just subtyping. If
- // we just use `sup` above, we'll end up `&'static str <: _#1t`, which causes
- // us to infer `_#1t = #'_#2r str`, where `'_#2r` is unconstrained, which gets
- // fixed up to `ReEmpty`, and which is certainly not what we want.
- if trait_fty.has_infer_types() {
- result = result.and_then(|()| {
- infcx
- .at(&cause, param_env)
- .eq(trait_sig.output(), impl_sig.output())
- .map(|infer_ok| ocx.register_infer_ok_obligations(infer_ok))
- });
- }
+ let result = ocx.sup(&cause, param_env, trait_fty, impl_fty);
if let Err(terr) = result {
- debug!("sub_types failed: impl ty {:?}, trait ty {:?}", impl_fty, trait_fty);
-
- let (impl_err_span, trait_err_span) =
- extract_spans_for_error_reporting(&infcx, terr, &cause, impl_m, trait_m);
-
- cause.span = impl_err_span;
-
- let mut diag = struct_span_err!(
- tcx.sess,
- cause.span(),
- E0053,
- "method `{}` has an incompatible type for trait",
- trait_m.name
- );
- match &terr {
- TypeError::ArgumentMutability(0) | TypeError::ArgumentSorts(_, 0)
- if trait_m.fn_has_self_parameter =>
- {
- let ty = trait_sig.inputs()[0];
- let sugg = match ExplicitSelf::determine(ty, |_| ty == impl_trait_ref.self_ty()) {
- ExplicitSelf::ByValue => "self".to_owned(),
- ExplicitSelf::ByReference(_, hir::Mutability::Not) => "&self".to_owned(),
- ExplicitSelf::ByReference(_, hir::Mutability::Mut) => "&mut self".to_owned(),
- _ => format!("self: {ty}"),
- };
-
- // When the `impl` receiver is an arbitrary self type, like `self: Box<Self>`, the
- // span points only at the type `Box<Self`>, but we want to cover the whole
- // argument pattern and type.
- let span = match tcx.hir().expect_impl_item(impl_m.def_id.expect_local()).kind {
- ImplItemKind::Fn(ref sig, body) => tcx
- .hir()
- .body_param_names(body)
- .zip(sig.decl.inputs.iter())
- .map(|(param, ty)| param.span.to(ty.span))
- .next()
- .unwrap_or(impl_err_span),
- _ => bug!("{:?} is not a method", impl_m),
- };
-
- diag.span_suggestion(
- span,
- "change the self-receiver type to match the trait",
- sugg,
- Applicability::MachineApplicable,
- );
- }
- TypeError::ArgumentMutability(i) | TypeError::ArgumentSorts(_, i) => {
- if trait_sig.inputs().len() == *i {
- // Suggestion to change output type. We do not suggest in `async` functions
- // to avoid complex logic or incorrect output.
- match tcx.hir().expect_impl_item(impl_m.def_id.expect_local()).kind {
- ImplItemKind::Fn(ref sig, _)
- if sig.header.asyncness == hir::IsAsync::NotAsync =>
- {
- let msg = "change the output type to match the trait";
- let ap = Applicability::MachineApplicable;
- match sig.decl.output {
- hir::FnRetTy::DefaultReturn(sp) => {
- let sugg = format!("-> {} ", trait_sig.output());
- diag.span_suggestion_verbose(sp, msg, sugg, ap);
- }
- hir::FnRetTy::Return(hir_ty) => {
- let sugg = trait_sig.output();
- diag.span_suggestion(hir_ty.span, msg, sugg, ap);
- }
- };
- }
- _ => {}
- };
- } else if let Some(trait_ty) = trait_sig.inputs().get(*i) {
- diag.span_suggestion(
- impl_err_span,
- "change the parameter type to match the trait",
- trait_ty,
- Applicability::MachineApplicable,
- );
- }
- }
- _ => {}
- }
+ debug!(?terr, "sub_types failed: impl ty {:?}, trait ty {:?}", impl_fty, trait_fty);
- infcx.err_ctxt().note_type_err(
- &mut diag,
- &cause,
- trait_err_span.map(|sp| (sp, "type in trait".to_owned())),
- Some(infer::ValuePairs::Terms(ExpectedFound {
- expected: trait_fty.into(),
- found: impl_fty.into(),
- })),
+ let emitted = report_trait_method_mismatch(
+ &infcx,
+ cause,
terr,
- false,
- false,
+ (trait_m, trait_fty),
+ (impl_m, impl_fty),
+ trait_sig,
+ impl_trait_ref,
);
+ return Err(emitted);
+ }
- return Err(diag.emit());
+ if check_implied_wf == CheckImpliedWfMode::Check {
+ // We need to check that the impl's args are well-formed given
+ // the hybrid param-env (impl + trait method where-clauses).
+ ocx.register_obligation(traits::Obligation::new(
+ infcx.tcx,
+ ObligationCause::dummy(),
+ param_env,
+ ty::Binder::dummy(ty::PredicateKind::WellFormed(unnormalized_impl_fty.into())),
+ ));
}
+ let emit_implied_wf_lint = || {
+ infcx.tcx.struct_span_lint_hir(
+ rustc_session::lint::builtin::IMPLIED_BOUNDS_ENTAILMENT,
+ impl_m_hir_id,
+ infcx.tcx.def_span(impl_m.def_id),
+ "impl method assumes more implied bounds than the corresponding trait method",
+ |lint| lint,
+ );
+ };
// Check that all obligations are satisfied by the implementation's
// version.
let errors = ocx.select_all_or_error();
if !errors.is_empty() {
- let reported = infcx.err_ctxt().report_fulfillment_errors(&errors, None, false);
- return Err(reported);
+ match check_implied_wf {
+ CheckImpliedWfMode::Check => {
+ return compare_predicate_entailment(
+ tcx,
+ impl_m,
+ impl_m_span,
+ trait_m,
+ impl_trait_ref,
+ CheckImpliedWfMode::Skip,
+ )
+ .map(|()| {
+ // If the skip-mode was successful, emit a lint.
+ emit_implied_wf_lint();
+ });
+ }
+ CheckImpliedWfMode::Skip => {
+ let reported = infcx.err_ctxt().report_fulfillment_errors(&errors, None);
+ return Err(reported);
+ }
+ }
}
// Finally, resolve all regions. This catches wily misuses of
// lifetime parameters.
- let outlives_environment = OutlivesEnvironment::with_bounds(
+ let outlives_env = OutlivesEnvironment::with_bounds(
param_env,
Some(infcx),
- infcx.implied_bounds_tys(param_env, impl_m_hir_id, wf_tys),
+ infcx.implied_bounds_tys(param_env, impl_m_hir_id, wf_tys.clone()),
);
- infcx.check_region_obligations_and_report_errors(
- impl_m.def_id.expect_local(),
- &outlives_environment,
+ infcx.process_registered_region_obligations(
+ outlives_env.region_bound_pairs(),
+ outlives_env.param_env,
);
+ let errors = infcx.resolve_regions(&outlives_env);
+ if !errors.is_empty() {
+ // FIXME(compiler-errors): This can be simplified when IMPLIED_BOUNDS_ENTAILMENT
+ // becomes a hard error (i.e. ideally we'd just call `resolve_regions_and_report_errors`).
+ match check_implied_wf {
+ CheckImpliedWfMode::Check => {
+ return compare_predicate_entailment(
+ tcx,
+ impl_m,
+ impl_m_span,
+ trait_m,
+ impl_trait_ref,
+ CheckImpliedWfMode::Skip,
+ )
+ .map(|()| {
+ // If the skip-mode was successful, emit a lint.
+ emit_implied_wf_lint();
+ });
+ }
+ CheckImpliedWfMode::Skip => {
+ if infcx.tainted_by_errors().is_none() {
+ infcx.err_ctxt().report_region_errors(impl_m.def_id.expect_local(), &errors);
+ }
+ return Err(tcx
+ .sess
+ .delay_span_bug(rustc_span::DUMMY_SP, "error should have been emitted"));
+ }
+ }
+ }
Ok(())
}
+#[derive(Debug, PartialEq, Eq)]
+enum CheckImpliedWfMode {
+ /// Checks implied well-formedness of the impl method. If it fails, we will
+ /// re-check with `Skip`, and emit a lint if it succeeds.
+ Check,
+ /// Skips checking implied well-formedness of the impl method, but will emit
+ /// a lint if the `compare_predicate_entailment` succeeded. This means that
+ /// the reason that we had failed earlier during `Check` was due to the impl
+ /// having stronger requirements than the trait.
+ Skip,
+}
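The `Check`/`Skip` two-pass scheme turns a newly detected class of impls into the `IMPLIED_BOUNDS_ENTAILMENT` lint rather than an immediate error. A sketch of the kind of impl it is aimed at (the signature's own implied bounds are stronger than what the trait promises):

    trait Trait {
        fn get<'s>(s: &'s str, _: &'static &'static ()) -> &'static str;
    }

    impl Trait for () {
        // `&'static &'s ()` implies `'s: 'static`, which the trait method
        // never guaranteed; this impl triggers the lint.
        fn get<'s>(s: &'s str, _: &'static &'s ()) -> &'static str {
            s
        }
    }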
+
+#[instrument(skip(tcx), level = "debug", ret)]
pub fn collect_trait_impl_trait_tys<'tcx>(
tcx: TyCtxt<'tcx>,
def_id: DefId,
@@ -440,6 +419,11 @@ pub fn collect_trait_impl_trait_tys<'tcx>(
let impl_trait_ref = tcx.impl_trait_ref(impl_m.impl_container(tcx).unwrap()).unwrap();
let param_env = tcx.param_env(def_id);
+ // First, check a few of the same thing as `compare_impl_method`, just so we don't ICE during substitutions later.
+ compare_number_of_generics(tcx, impl_m, trait_m, tcx.hir().span_if_local(impl_m.def_id), true)?;
+ compare_generic_param_kinds(tcx, impl_m, trait_m, true)?;
+ check_region_bounds_on_impl_item(tcx, impl_m, trait_m, true)?;
+
let trait_to_impl_substs = impl_trait_ref.substs;
let impl_m_hir_id = tcx.hir().local_def_id_to_hir_id(impl_m.def_id.expect_local());
@@ -464,9 +448,10 @@ pub fn collect_trait_impl_trait_tys<'tcx>(
let infcx = &tcx.infer_ctxt().build();
let ocx = ObligationCtxt::new(infcx);
+ // Normalize the impl signature with fresh variables for lifetime inference.
let norm_cause = ObligationCause::misc(return_span, impl_m_hir_id);
let impl_sig = ocx.normalize(
- norm_cause.clone(),
+ &norm_cause,
param_env,
infcx.replace_bound_vars_with_fresh_vars(
return_span,
@@ -476,6 +461,10 @@ pub fn collect_trait_impl_trait_tys<'tcx>(
);
let impl_return_ty = impl_sig.output();
+ // Normalize the trait signature with liberated bound vars, passing it through
+ // the ImplTraitInTraitCollector, which gathers all of the RPITITs and replaces
+ // them with inference variables.
+ // We will use these inference variables to collect the hidden types of RPITITs.
let mut collector = ImplTraitInTraitCollector::new(&ocx, return_span, param_env, impl_m_hir_id);
let unnormalized_trait_sig = tcx
.liberate_late_bound_regions(
@@ -483,17 +472,15 @@ pub fn collect_trait_impl_trait_tys<'tcx>(
tcx.bound_fn_sig(trait_m.def_id).subst(tcx, trait_to_placeholder_substs),
)
.fold_with(&mut collector);
- let trait_sig = ocx.normalize(norm_cause.clone(), param_env, unnormalized_trait_sig);
+ let trait_sig = ocx.normalize(&norm_cause, param_env, unnormalized_trait_sig);
let trait_return_ty = trait_sig.output();
- let wf_tys = FxHashSet::from_iter(
+ let wf_tys = FxIndexSet::from_iter(
unnormalized_trait_sig.inputs_and_output.iter().chain(trait_sig.inputs_and_output.iter()),
);
- match infcx.at(&cause, param_env).eq(trait_return_ty, impl_return_ty) {
- Ok(infer::InferOk { value: (), obligations }) => {
- ocx.register_obligations(obligations);
- }
+ match ocx.eq(&cause, param_env, trait_return_ty, impl_return_ty) {
+ Ok(()) => {}
Err(terr) => {
let mut diag = struct_span_err!(
tcx.sess,
@@ -521,23 +508,32 @@ pub fn collect_trait_impl_trait_tys<'tcx>(
}
}
+ debug!(?trait_sig, ?impl_sig, "equating function signatures");
+
+ let trait_fty = tcx.mk_fn_ptr(ty::Binder::dummy(trait_sig));
+ let impl_fty = tcx.mk_fn_ptr(ty::Binder::dummy(impl_sig));
+
// Unify the whole function signature. We need to do this to fully infer
// the lifetimes of the return type, but do this after unifying just the
// return types, since we want to avoid duplicating errors from
// `compare_predicate_entailment`.
- match infcx
- .at(&cause, param_env)
- .eq(tcx.mk_fn_ptr(ty::Binder::dummy(trait_sig)), tcx.mk_fn_ptr(ty::Binder::dummy(impl_sig)))
- {
- Ok(infer::InferOk { value: (), obligations }) => {
- ocx.register_obligations(obligations);
- }
+ match ocx.eq(&cause, param_env, trait_fty, impl_fty) {
+ Ok(()) => {}
Err(terr) => {
- let guar = tcx.sess.delay_span_bug(
- return_span,
- format!("could not unify `{trait_sig}` and `{impl_sig}`: {terr:?}"),
+ // This function gets called during `compare_predicate_entailment` when normalizing a
+ // signature that contains RPITIT. When the method signatures don't match, we have to
+ // emit an error now because `compare_predicate_entailment` will not report the error
+ // when normalization fails.
+ let emitted = report_trait_method_mismatch(
+ infcx,
+ cause,
+ terr,
+ (trait_m, trait_fty),
+ (impl_m, impl_fty),
+ trait_sig,
+ impl_trait_ref,
);
- return Err(guar);
+ return Err(emitted);
}
}
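`collect_trait_impl_trait_tys` infers the hidden types for return-position `impl Trait` in traits. A minimal sketch, assuming the feature gate of that era was named `return_position_impl_trait_in_trait`:

    #![feature(return_position_impl_trait_in_trait)]

    trait Container {
        fn items(&self) -> impl Iterator<Item = u32>;
    }

    impl Container for Vec<u32> {
        // The hidden type collected for the trait's opaque return type here
        // is `std::vec::IntoIter<u32>`.
        fn items(&self) -> impl Iterator<Item = u32> {
            self.clone().into_iter()
        }
    }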
@@ -545,7 +541,7 @@ pub fn collect_trait_impl_trait_tys<'tcx>(
// RPITs.
let errors = ocx.select_all_or_error();
if !errors.is_empty() {
- let reported = infcx.err_ctxt().report_fulfillment_errors(&errors, None, false);
+ let reported = infcx.err_ctxt().report_fulfillment_errors(&errors, None);
return Err(reported);
}
@@ -597,7 +593,13 @@ pub fn collect_trait_impl_trait_tys<'tcx>(
let num_trait_substs = trait_to_impl_substs.len();
let num_impl_substs = tcx.generics_of(impl_m.container_id(tcx)).params.len();
let ty = tcx.fold_regions(ty, |region, _| {
- let (ty::ReFree(_) | ty::ReEarlyBound(_)) = region.kind() else { return region; };
+ match region.kind() {
+ // Remap all free regions, which correspond to late-bound regions in the function.
+ ty::ReFree(_) => {}
+ // Remap early-bound regions as long as they don't come from the `impl` itself.
+ ty::ReEarlyBound(ebr) if tcx.parent(ebr.def_id) != impl_m.container_id(tcx) => {}
+ _ => return region,
+ }
let Some(ty::ReEarlyBound(e)) = map.get(&region.into()).map(|r| r.expect_region().kind())
else {
tcx
@@ -618,11 +620,11 @@ pub fn collect_trait_impl_trait_tys<'tcx>(
collected_tys.insert(def_id, ty);
}
Err(err) => {
- tcx.sess.delay_span_bug(
+ let reported = tcx.sess.delay_span_bug(
return_span,
format!("could not fully resolve: {ty} => {err:?}"),
);
- collected_tys.insert(def_id, tcx.ty_error());
+ collected_tys.insert(def_id, tcx.ty_error_with_guaranteed(reported));
}
}
}
@@ -675,12 +677,13 @@ impl<'tcx> TypeFolder<'tcx> for ImplTraitInTraitCollector<'_, 'tcx> {
for (pred, pred_span) in self.tcx().bound_explicit_item_bounds(proj.item_def_id).subst_iter_copied(self.tcx(), proj.substs) {
let pred = pred.fold_with(self);
let pred = self.ocx.normalize(
- ObligationCause::misc(self.span, self.body_id),
+ &ObligationCause::misc(self.span, self.body_id),
self.param_env,
pred,
);
self.ocx.register_obligation(traits::Obligation::new(
+ self.tcx(),
ObligationCause::new(
self.span,
self.body_id,
@@ -697,16 +700,121 @@ impl<'tcx> TypeFolder<'tcx> for ImplTraitInTraitCollector<'_, 'tcx> {
}
}
+fn report_trait_method_mismatch<'tcx>(
+ infcx: &InferCtxt<'tcx>,
+ mut cause: ObligationCause<'tcx>,
+ terr: TypeError<'tcx>,
+ (trait_m, trait_fty): (&ty::AssocItem, Ty<'tcx>),
+ (impl_m, impl_fty): (&ty::AssocItem, Ty<'tcx>),
+ trait_sig: ty::FnSig<'tcx>,
+ impl_trait_ref: ty::TraitRef<'tcx>,
+) -> ErrorGuaranteed {
+ let tcx = infcx.tcx;
+ let (impl_err_span, trait_err_span) =
+ extract_spans_for_error_reporting(&infcx, terr, &cause, impl_m, trait_m);
+
+ let mut diag = struct_span_err!(
+ tcx.sess,
+ impl_err_span,
+ E0053,
+ "method `{}` has an incompatible type for trait",
+ trait_m.name
+ );
+ match &terr {
+ TypeError::ArgumentMutability(0) | TypeError::ArgumentSorts(_, 0)
+ if trait_m.fn_has_self_parameter =>
+ {
+ let ty = trait_sig.inputs()[0];
+ let sugg = match ExplicitSelf::determine(ty, |_| ty == impl_trait_ref.self_ty()) {
+ ExplicitSelf::ByValue => "self".to_owned(),
+ ExplicitSelf::ByReference(_, hir::Mutability::Not) => "&self".to_owned(),
+ ExplicitSelf::ByReference(_, hir::Mutability::Mut) => "&mut self".to_owned(),
+ _ => format!("self: {ty}"),
+ };
+
+ // When the `impl` receiver is an arbitrary self type, like `self: Box<Self>`, the
+ // span points only at the type `Box<Self`>, but we want to cover the whole
+ // argument pattern and type.
+ let span = match tcx.hir().expect_impl_item(impl_m.def_id.expect_local()).kind {
+ ImplItemKind::Fn(ref sig, body) => tcx
+ .hir()
+ .body_param_names(body)
+ .zip(sig.decl.inputs.iter())
+ .map(|(param, ty)| param.span.to(ty.span))
+ .next()
+ .unwrap_or(impl_err_span),
+ _ => bug!("{:?} is not a method", impl_m),
+ };
+
+ diag.span_suggestion(
+ span,
+ "change the self-receiver type to match the trait",
+ sugg,
+ Applicability::MachineApplicable,
+ );
+ }
+ TypeError::ArgumentMutability(i) | TypeError::ArgumentSorts(_, i) => {
+ if trait_sig.inputs().len() == *i {
+ // Suggestion to change output type. We do not suggest in `async` functions
+ // to avoid complex logic or incorrect output.
+ match tcx.hir().expect_impl_item(impl_m.def_id.expect_local()).kind {
+ ImplItemKind::Fn(ref sig, _) if !sig.header.asyncness.is_async() => {
+ let msg = "change the output type to match the trait";
+ let ap = Applicability::MachineApplicable;
+ match sig.decl.output {
+ hir::FnRetTy::DefaultReturn(sp) => {
+ let sugg = format!("-> {} ", trait_sig.output());
+ diag.span_suggestion_verbose(sp, msg, sugg, ap);
+ }
+ hir::FnRetTy::Return(hir_ty) => {
+ let sugg = trait_sig.output();
+ diag.span_suggestion(hir_ty.span, msg, sugg, ap);
+ }
+ };
+ }
+ _ => {}
+ };
+ } else if let Some(trait_ty) = trait_sig.inputs().get(*i) {
+ diag.span_suggestion(
+ impl_err_span,
+ "change the parameter type to match the trait",
+ trait_ty,
+ Applicability::MachineApplicable,
+ );
+ }
+ }
+ _ => {}
+ }
+
+ cause.span = impl_err_span;
+ infcx.err_ctxt().note_type_err(
+ &mut diag,
+ &cause,
+ trait_err_span.map(|sp| (sp, "type in trait".to_owned())),
+ Some(infer::ValuePairs::Terms(ExpectedFound {
+ expected: trait_fty.into(),
+ found: impl_fty.into(),
+ })),
+ terr,
+ false,
+ false,
+ );
+
+ return diag.emit();
+}
+
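`report_trait_method_mismatch` is now shared by `compare_predicate_entailment` and the RPITIT path above; the diagnostic it emits is the familiar E0053, e.g.:

    trait Greet {
        fn greet(&self) -> String;
    }

    struct S;

    impl Greet for S {
        fn greet(&self) -> &str { // E0053: method `greet` has an incompatible type for trait
            "hi"
        }
    }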
fn check_region_bounds_on_impl_item<'tcx>(
tcx: TyCtxt<'tcx>,
impl_m: &ty::AssocItem,
trait_m: &ty::AssocItem,
- trait_generics: &ty::Generics,
- impl_generics: &ty::Generics,
+ delay: bool,
) -> Result<(), ErrorGuaranteed> {
- let trait_params = trait_generics.own_counts().lifetimes;
+ let impl_generics = tcx.generics_of(impl_m.def_id);
let impl_params = impl_generics.own_counts().lifetimes;
+ let trait_generics = tcx.generics_of(trait_m.def_id);
+ let trait_params = trait_generics.own_counts().lifetimes;
+
debug!(
"check_region_bounds_on_impl_item: \
trait_generics={:?} \
@@ -729,23 +837,56 @@ fn check_region_bounds_on_impl_item<'tcx>(
.get_generics(impl_m.def_id.expect_local())
.expect("expected impl item to have generics or else we can't compare them")
.span;
- let generics_span = if let Some(local_def_id) = trait_m.def_id.as_local() {
- Some(
- tcx.hir()
- .get_generics(local_def_id)
- .expect("expected trait item to have generics or else we can't compare them")
- .span,
- )
- } else {
- None
- };
- let reported = tcx.sess.emit_err(LifetimesOrBoundsMismatchOnTrait {
- span,
- item_kind: assoc_item_kind_str(impl_m),
- ident: impl_m.ident(tcx),
- generics_span,
- });
+ let mut generics_span = None;
+ let mut bounds_span = vec![];
+ let mut where_span = None;
+ if let Some(trait_node) = tcx.hir().get_if_local(trait_m.def_id)
+ && let Some(trait_generics) = trait_node.generics()
+ {
+ generics_span = Some(trait_generics.span);
+ // FIXME: we could potentially look at the impl's bounds to not point at bounds that
+ // *are* present in the impl.
+ for p in trait_generics.predicates {
+ if let hir::WherePredicate::BoundPredicate(pred) = p {
+ for b in pred.bounds {
+ if let hir::GenericBound::Outlives(lt) = b {
+ bounds_span.push(lt.ident.span);
+ }
+ }
+ }
+ }
+ if let Some(impl_node) = tcx.hir().get_if_local(impl_m.def_id)
+ && let Some(impl_generics) = impl_node.generics()
+ {
+ let mut impl_bounds = 0;
+ for p in impl_generics.predicates {
+ if let hir::WherePredicate::BoundPredicate(pred) = p {
+ for b in pred.bounds {
+ if let hir::GenericBound::Outlives(_) = b {
+ impl_bounds += 1;
+ }
+ }
+ }
+ }
+ if impl_bounds == bounds_span.len() {
+ bounds_span = vec![];
+ } else if impl_generics.has_where_clause_predicates {
+ where_span = Some(impl_generics.where_clause_span);
+ }
+ }
+ }
+ let reported = tcx
+ .sess
+ .create_err(LifetimesOrBoundsMismatchOnTrait {
+ span,
+ item_kind: assoc_item_kind_str(impl_m),
+ ident: impl_m.ident(tcx),
+ generics_span,
+ bounds_span,
+ where_span,
+ })
+ .emit_unless(delay);
return Err(reported);
}
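The extra `bounds_span`/`where_span` data only enriches the existing `LifetimesOrBoundsMismatchOnTrait` diagnostic ("lifetime parameters or bounds ... do not match the trait declaration"). A small example that hits it:

    trait Trait {
        fn f<'a, 'b>(x: &'a str, y: &'b str);
    }

    struct S;

    impl Trait for S {
        // the impl adds `'b: 'a`, which the trait declaration does not have
        fn f<'a, 'b: 'a>(x: &'a str, y: &'b str) {}
    }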
@@ -891,9 +1032,9 @@ fn compare_self_type<'tcx>(
fn compare_number_of_generics<'tcx>(
tcx: TyCtxt<'tcx>,
impl_: &ty::AssocItem,
- _impl_span: Span,
trait_: &ty::AssocItem,
trait_span: Option<Span>,
+ delay: bool,
) -> Result<(), ErrorGuaranteed> {
let trait_own_counts = tcx.generics_of(trait_.def_id).own_counts();
let impl_own_counts = tcx.generics_of(impl_.def_id).own_counts();
@@ -1023,7 +1164,7 @@ fn compare_number_of_generics<'tcx>(
err.span_label(*span, "`impl Trait` introduces an implicit type parameter");
}
- let reported = err.emit();
+ let reported = err.emit_unless(delay);
err_occurred = Some(reported);
}
}
@@ -1275,6 +1416,7 @@ fn compare_generic_param_kinds<'tcx>(
tcx: TyCtxt<'tcx>,
impl_item: &ty::AssocItem,
trait_item: &ty::AssocItem,
+ delay: bool,
) -> Result<(), ErrorGuaranteed> {
assert_eq!(impl_item.kind, trait_item.kind);
@@ -1332,7 +1474,7 @@ fn compare_generic_param_kinds<'tcx>(
err.span_label(impl_header_span, "");
err.span_label(param_impl_span, make_param_message("found", param_impl));
- let reported = err.emit();
+ let reported = err.emit_unless(delay);
return Err(reported);
}
}
@@ -1381,18 +1523,15 @@ pub(crate) fn raw_compare_const_impl<'tcx>(
);
// There is no "body" here, so just pass dummy id.
- let impl_ty = ocx.normalize(cause.clone(), param_env, impl_ty);
+ let impl_ty = ocx.normalize(&cause, param_env, impl_ty);
debug!("compare_const_impl: impl_ty={:?}", impl_ty);
- let trait_ty = ocx.normalize(cause.clone(), param_env, trait_ty);
+ let trait_ty = ocx.normalize(&cause, param_env, trait_ty);
debug!("compare_const_impl: trait_ty={:?}", trait_ty);
- let err = infcx
- .at(&cause, param_env)
- .sup(trait_ty, impl_ty)
- .map(|ok| ocx.register_infer_ok_obligations(ok));
+ let err = ocx.sup(&cause, param_env, trait_ty, impl_ty);
if let Err(terr) = err {
debug!(
@@ -1441,7 +1580,7 @@ pub(crate) fn raw_compare_const_impl<'tcx>(
// version.
let errors = ocx.select_all_or_error();
if !errors.is_empty() {
- return Err(infcx.err_ctxt().report_fulfillment_errors(&errors, None, false));
+ return Err(infcx.err_ctxt().report_fulfillment_errors(&errors, None));
}
// FIXME return `ErrorReported` if region obligations error?
@@ -1461,9 +1600,9 @@ pub(crate) fn compare_ty_impl<'tcx>(
debug!("compare_impl_type(impl_trait_ref={:?})", impl_trait_ref);
let _: Result<(), ErrorGuaranteed> = (|| {
- compare_number_of_generics(tcx, impl_ty, impl_ty_span, trait_ty, trait_item_span)?;
+ compare_number_of_generics(tcx, impl_ty, trait_ty, trait_item_span, false)?;
- compare_generic_param_kinds(tcx, impl_ty, trait_ty)?;
+ compare_generic_param_kinds(tcx, impl_ty, trait_ty, false)?;
let sp = tcx.def_span(impl_ty.def_id);
compare_type_predicate_entailment(tcx, impl_ty, sp, trait_ty, impl_trait_ref)?;
@@ -1485,18 +1624,10 @@ fn compare_type_predicate_entailment<'tcx>(
let trait_to_impl_substs =
impl_substs.rebase_onto(tcx, impl_ty.container_id(tcx), impl_trait_ref.substs);
- let impl_ty_generics = tcx.generics_of(impl_ty.def_id);
- let trait_ty_generics = tcx.generics_of(trait_ty.def_id);
let impl_ty_predicates = tcx.predicates_of(impl_ty.def_id);
let trait_ty_predicates = tcx.predicates_of(trait_ty.def_id);
- check_region_bounds_on_impl_item(
- tcx,
- impl_ty,
- trait_ty,
- &trait_ty_generics,
- &impl_ty_generics,
- )?;
+ check_region_bounds_on_impl_item(tcx, impl_ty, trait_ty, false)?;
let impl_ty_own_bounds = impl_ty_predicates.instantiate_own(tcx, impl_substs);
@@ -1533,14 +1664,11 @@ fn compare_type_predicate_entailment<'tcx>(
debug!("compare_type_predicate_entailment: caller_bounds={:?}", param_env.caller_bounds());
- let mut selcx = traits::SelectionContext::new(&infcx);
-
assert_eq!(impl_ty_own_bounds.predicates.len(), impl_ty_own_bounds.spans.len());
for (span, predicate) in std::iter::zip(impl_ty_own_bounds.spans, impl_ty_own_bounds.predicates)
{
let cause = ObligationCause::misc(span, impl_ty_hir_id);
- let traits::Normalized { value: predicate, obligations } =
- traits::normalize(&mut selcx, param_env, cause, predicate);
+ let predicate = ocx.normalize(&cause, param_env, predicate);
let cause = ObligationCause::new(
span,
@@ -1551,15 +1679,14 @@ fn compare_type_predicate_entailment<'tcx>(
kind: impl_ty.kind,
},
);
- ocx.register_obligations(obligations);
- ocx.register_obligation(traits::Obligation::new(cause, param_env, predicate));
+ ocx.register_obligation(traits::Obligation::new(tcx, cause, param_env, predicate));
}
// Check that all obligations are satisfied by the implementation's
// version.
let errors = ocx.select_all_or_error();
if !errors.is_empty() {
- let reported = infcx.err_ctxt().report_fulfillment_errors(&errors, None, false);
+ let reported = infcx.err_ctxt().report_fulfillment_errors(&errors, None);
return Err(reported);
}
@@ -1665,13 +1792,10 @@ pub fn check_type_bounds<'tcx>(
GenericParamDefKind::Const { .. } => {
let bound_var = ty::BoundVariableKind::Const;
bound_vars.push(bound_var);
- tcx.mk_const(ty::ConstS {
- ty: tcx.type_of(param.def_id),
- kind: ty::ConstKind::Bound(
- ty::INNERMOST,
- ty::BoundVar::from_usize(bound_vars.len() - 1),
- ),
- })
+ tcx.mk_const(
+ ty::ConstKind::Bound(ty::INNERMOST, ty::BoundVar::from_usize(bound_vars.len() - 1)),
+ tcx.type_of(param.def_id),
+ )
.into()
}
});
@@ -1737,7 +1861,6 @@ pub fn check_type_bounds<'tcx>(
let assumed_wf_types =
ocx.assumed_wf_types(param_env, impl_ty_span, impl_ty.def_id.expect_local());
- let mut selcx = traits::SelectionContext::new(&infcx);
let normalize_cause = ObligationCause::new(
impl_ty_span,
impl_ty_hir_id,
@@ -1760,29 +1883,24 @@ pub fn check_type_bounds<'tcx>(
.subst_iter_copied(tcx, rebased_substs)
.map(|(concrete_ty_bound, span)| {
debug!("check_type_bounds: concrete_ty_bound = {:?}", concrete_ty_bound);
- traits::Obligation::new(mk_cause(span), param_env, concrete_ty_bound)
+ traits::Obligation::new(tcx, mk_cause(span), param_env, concrete_ty_bound)
})
.collect();
debug!("check_type_bounds: item_bounds={:?}", obligations);
for mut obligation in util::elaborate_obligations(tcx, obligations) {
- let traits::Normalized { value: normalized_predicate, obligations } = traits::normalize(
- &mut selcx,
- normalize_param_env,
- normalize_cause.clone(),
- obligation.predicate,
- );
+ let normalized_predicate =
+ ocx.normalize(&normalize_cause, normalize_param_env, obligation.predicate);
debug!("compare_projection_bounds: normalized predicate = {:?}", normalized_predicate);
obligation.predicate = normalized_predicate;
- ocx.register_obligations(obligations);
ocx.register_obligation(obligation);
}
// Check that all obligations are satisfied by the implementation's
// version.
let errors = ocx.select_all_or_error();
if !errors.is_empty() {
- let reported = infcx.err_ctxt().report_fulfillment_errors(&errors, None, false);
+ let reported = infcx.err_ctxt().report_fulfillment_errors(&errors, None);
return Err(reported);
}
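For context, `check_type_bounds` is what makes an impl's associated type satisfy the bounds declared on the trait:

    trait Trait {
        type Assoc: Clone + Send;
    }

    struct Wrapper;

    impl Trait for Wrapper {
        type Assoc = String; // checked here: `String: Clone + Send` must hold
    }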
diff --git a/compiler/rustc_hir_analysis/src/check/dropck.rs b/compiler/rustc_hir_analysis/src/check/dropck.rs
index a74016e22..d6e3ddb0a 100644
--- a/compiler/rustc_hir_analysis/src/check/dropck.rs
+++ b/compiler/rustc_hir_analysis/src/check/dropck.rs
@@ -183,19 +183,27 @@ fn ensure_drop_predicates_are_implied_by_item_defn<'tcx>(
let predicate = predicate.kind();
let p = p.kind();
match (predicate.skip_binder(), p.skip_binder()) {
- (ty::PredicateKind::Trait(a), ty::PredicateKind::Trait(b)) => {
- relator.relate(predicate.rebind(a), p.rebind(b)).is_ok()
- }
- (ty::PredicateKind::Projection(a), ty::PredicateKind::Projection(b)) => {
- relator.relate(predicate.rebind(a), p.rebind(b)).is_ok()
- }
+ (
+ ty::PredicateKind::Clause(ty::Clause::Trait(a)),
+ ty::PredicateKind::Clause(ty::Clause::Trait(b)),
+ ) => relator.relate(predicate.rebind(a), p.rebind(b)).is_ok(),
+ (
+ ty::PredicateKind::Clause(ty::Clause::Projection(a)),
+ ty::PredicateKind::Clause(ty::Clause::Projection(b)),
+ ) => relator.relate(predicate.rebind(a), p.rebind(b)).is_ok(),
(
ty::PredicateKind::ConstEvaluatable(a),
ty::PredicateKind::ConstEvaluatable(b),
) => relator.relate(predicate.rebind(a), predicate.rebind(b)).is_ok(),
(
- ty::PredicateKind::TypeOutlives(ty::OutlivesPredicate(ty_a, lt_a)),
- ty::PredicateKind::TypeOutlives(ty::OutlivesPredicate(ty_b, lt_b)),
+ ty::PredicateKind::Clause(ty::Clause::TypeOutlives(ty::OutlivesPredicate(
+ ty_a,
+ lt_a,
+ ))),
+ ty::PredicateKind::Clause(ty::Clause::TypeOutlives(ty::OutlivesPredicate(
+ ty_b,
+ lt_b,
+ ))),
) => {
relator.relate(predicate.rebind(ty_a), p.rebind(ty_b)).is_ok()
&& relator.relate(predicate.rebind(lt_a), p.rebind(lt_b)).is_ok()
@@ -225,9 +233,10 @@ fn ensure_drop_predicates_are_implied_by_item_defn<'tcx>(
result
}
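This routine enforces that a `Drop` impl may not demand more than the type definition itself. A sketch of the kind of impl it rejects (the long-standing "`Drop` impl requires ... but the struct it is implemented for does not" error):

    struct Wrapper<T>(T);

    // rejected: the `Drop` impl adds `T: Clone`, which `Wrapper<T>` does not require
    impl<T: Clone> Drop for Wrapper<T> {
        fn drop(&mut self) {}
    }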
-// This is an implementation of the TypeRelation trait with the
-// aim of simply comparing for equality (without side-effects).
-// It is not intended to be used anywhere else other than here.
+/// This is an implementation of the [`TypeRelation`] trait with the
+/// aim of simply comparing for equality (without side-effects).
+///
+/// It is not intended to be used anywhere else other than here.
pub(crate) struct SimpleEqRelation<'tcx> {
tcx: TyCtxt<'tcx>,
param_env: ty::ParamEnv<'tcx>,
@@ -244,6 +253,10 @@ impl<'tcx> TypeRelation<'tcx> for SimpleEqRelation<'tcx> {
self.tcx
}
+ fn intercrate(&self) -> bool {
+ false
+ }
+
fn param_env(&self) -> ty::ParamEnv<'tcx> {
self.param_env
}
@@ -256,6 +269,10 @@ impl<'tcx> TypeRelation<'tcx> for SimpleEqRelation<'tcx> {
true
}
+ fn mark_ambiguous(&mut self) {
+ bug!()
+ }
+
fn relate_with_variance<T: Relate<'tcx>>(
&mut self,
_: ty::Variance,
diff --git a/compiler/rustc_hir_analysis/src/check/intrinsic.rs b/compiler/rustc_hir_analysis/src/check/intrinsic.rs
index 609095c9c..69e54b41d 100644
--- a/compiler/rustc_hir_analysis/src/check/intrinsic.rs
+++ b/compiler/rustc_hir_analysis/src/check/intrinsic.rs
@@ -134,15 +134,18 @@ pub fn check_intrinsic_type(tcx: TyCtxt<'_>, it: &hir::ForeignItem<'_>) {
let name_str = intrinsic_name.as_str();
let bound_vars = tcx.mk_bound_variable_kinds(
- [ty::BoundVariableKind::Region(ty::BrAnon(0)), ty::BoundVariableKind::Region(ty::BrEnv)]
- .iter()
- .copied(),
+ [
+ ty::BoundVariableKind::Region(ty::BrAnon(0, None)),
+ ty::BoundVariableKind::Region(ty::BrEnv),
+ ]
+ .iter()
+ .copied(),
);
let mk_va_list_ty = |mutbl| {
tcx.lang_items().va_list().map(|did| {
let region = tcx.mk_region(ty::ReLateBound(
ty::INNERMOST,
- ty::BoundRegion { var: ty::BoundVar::from_u32(0), kind: ty::BrAnon(0) },
+ ty::BoundRegion { var: ty::BoundVar::from_u32(0), kind: ty::BrAnon(0, None) },
));
let env_region = tcx.mk_region(ty::ReLateBound(
ty::INNERMOST,
@@ -364,7 +367,8 @@ pub fn check_intrinsic_type(tcx: TyCtxt<'_>, it: &hir::ForeignItem<'_>) {
);
let discriminant_def_id = assoc_items[0];
- let br = ty::BoundRegion { var: ty::BoundVar::from_u32(0), kind: ty::BrAnon(0) };
+ let br =
+ ty::BoundRegion { var: ty::BoundVar::from_u32(0), kind: ty::BrAnon(0, None) };
(
1,
vec![
@@ -418,7 +422,8 @@ pub fn check_intrinsic_type(tcx: TyCtxt<'_>, it: &hir::ForeignItem<'_>) {
sym::nontemporal_store => (1, vec![tcx.mk_mut_ptr(param(0)), param(0)], tcx.mk_unit()),
sym::raw_eq => {
- let br = ty::BoundRegion { var: ty::BoundVar::from_u32(0), kind: ty::BrAnon(0) };
+ let br =
+ ty::BoundRegion { var: ty::BoundVar::from_u32(0), kind: ty::BrAnon(0, None) };
let param_ty =
tcx.mk_imm_ref(tcx.mk_region(ty::ReLateBound(ty::INNERMOST, br)), param(0));
(1, vec![param_ty; 2], tcx.types.bool)
diff --git a/compiler/rustc_hir_analysis/src/check/mod.rs b/compiler/rustc_hir_analysis/src/check/mod.rs
index 2e7b10257..29255472a 100644
--- a/compiler/rustc_hir_analysis/src/check/mod.rs
+++ b/compiler/rustc_hir_analysis/src/check/mod.rs
@@ -159,7 +159,7 @@ fn maybe_check_static_with_link_section(tcx: TyCtxt<'_>, id: LocalDefId) {
// the consumer's responsibility to ensure all bytes that have been read
// have defined values.
if let Ok(alloc) = tcx.eval_static_initializer(id.to_def_id())
- && alloc.inner().provenance().len() != 0
+ && alloc.inner().provenance().ptrs().len() != 0
{
let msg = "statics with a custom `#[link_section]` must be a \
simple list of bytes on the wasm target with no \
@@ -309,7 +309,7 @@ fn bounds_from_generic_predicates<'tcx>(
debug!("predicate {:?}", predicate);
let bound_predicate = predicate.kind();
match bound_predicate.skip_binder() {
- ty::PredicateKind::Trait(trait_predicate) => {
+ ty::PredicateKind::Clause(ty::Clause::Trait(trait_predicate)) => {
let entry = types.entry(trait_predicate.self_ty()).or_default();
let def_id = trait_predicate.def_id();
if Some(def_id) != tcx.lang_items().sized_trait() {
@@ -318,7 +318,7 @@ fn bounds_from_generic_predicates<'tcx>(
entry.push(trait_predicate.def_id());
}
}
- ty::PredicateKind::Projection(projection_pred) => {
+ ty::PredicateKind::Clause(ty::Clause::Projection(projection_pred)) => {
projections.push(bound_predicate.rebind(projection_pred));
}
_ => {}
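The `provenance().ptrs()` call above asks whether the initializer contains any relocations. In surface terms, a custom-section static on wasm must encode to plain bytes:

    #[link_section = ".custom_section"]
    static DATA: [u8; 4] = [1, 2, 3, 4]; // fine: just bytes, no relocations

    // A pointer in the initializer needs a relocation, so e.g.
    // `#[link_section = ".custom_section"] static PTR: &u8 = &42;`
    // is rejected on the wasm target with the message quoted above.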
diff --git a/compiler/rustc_hir_analysis/src/check/region.rs b/compiler/rustc_hir_analysis/src/check/region.rs
index ff32329e4..b315ebad4 100644
--- a/compiler/rustc_hir_analysis/src/check/region.rs
+++ b/compiler/rustc_hir_analysis/src/check/region.rs
@@ -241,17 +241,46 @@ fn resolve_expr<'tcx>(visitor: &mut RegionResolutionVisitor<'tcx>, expr: &'tcx h
// scopes, meaning that temporaries cannot outlive them.
// This ensures fixed size stacks.
hir::ExprKind::Binary(
- source_map::Spanned { node: hir::BinOpKind::And, .. },
- _,
- ref r,
- )
- | hir::ExprKind::Binary(
- source_map::Spanned { node: hir::BinOpKind::Or, .. },
- _,
+ source_map::Spanned { node: hir::BinOpKind::And | hir::BinOpKind::Or, .. },
+ ref l,
ref r,
) => {
- // For shortcircuiting operators, mark the RHS as a terminating
- // scope since it only executes conditionally.
+ // expr is a short-circuiting operator (|| or &&). As its
+ // functionality can't be overridden by traits, it always
+ // processes bool sub-expressions. Since bool is Copy, we
+ // can drop any temporaries in evaluation (read) order
+ // (with the exception of potentially failing let expressions).
+ // We achieve this by enclosing the operands in a terminating
+ // scope, both the LHS and the RHS.
+
+ // We optimize this a little in the presence of chains.
+ // Chains like a && b && c get lowered to AND(AND(a, b), c).
+ // Here, b and c are RHS operands, while a is the only LHS operand in
+ // that chain. This holds true for longer chains as well: the
+ // leading operand is always the only LHS operand that is not a
+ // binop itself. Putting a binop like AND(a, b) into a
+ // terminating scope is not useful, thus we only put the LHS
+ // into a terminating scope if it is not a binop.
+
+ let terminate_lhs = match l.kind {
+ // let expressions can create temporaries that live on
+ hir::ExprKind::Let(_) => false,
+ // binops already drop their temporaries, so there is no
+ // need to put them into a terminating scope.
+ // This is purely an optimization to reduce the number of
+ // terminating scopes.
+ hir::ExprKind::Binary(
+ source_map::Spanned {
+ node: hir::BinOpKind::And | hir::BinOpKind::Or, ..
+ },
+ ..,
+ ) => false,
+ // otherwise: mark it as terminating
+ _ => true,
+ };
+ if terminate_lhs {
+ terminating(l.hir_id.local_id);
+ }
// `Let` expressions (in a let-chain) shouldn't be terminating, as their temporaries
// should live beyond the immediate expression
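A minimal standalone sketch (not part of the patch) of the drop-order behaviour the new comment describes; with both operands of `&&` in terminating scopes, each temporary can be dropped as soon as its operand has been evaluated:

    struct Noisy(&'static str);

    impl Drop for Noisy {
        fn drop(&mut self) {
            println!("drop {}", self.0);
        }
    }

    fn truthy(n: &Noisy) -> bool {
        println!("eval {}", n.0);
        true
    }

    fn main() {
        // With the LHS and RHS each in a terminating scope, the temporary
        // for "a" is expected to be dropped before "b" is evaluated.
        let ok = truthy(&Noisy("a")) && truthy(&Noisy("b")) && truthy(&Noisy("c"));
        println!("ok = {ok}");
    }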
diff --git a/compiler/rustc_hir_analysis/src/check/wfcheck.rs b/compiler/rustc_hir_analysis/src/check/wfcheck.rs
index a23575004..b065ace6b 100644
--- a/compiler/rustc_hir_analysis/src/check/wfcheck.rs
+++ b/compiler/rustc_hir_analysis/src/check/wfcheck.rs
@@ -1,7 +1,7 @@
use crate::constrained_generic_params::{identify_constrained_generic_params, Parameter};
use hir::def::DefKind;
use rustc_ast as ast;
-use rustc_data_structures::fx::{FxHashMap, FxHashSet};
+use rustc_data_structures::fx::{FxHashMap, FxHashSet, FxIndexSet};
use rustc_errors::{pluralize, struct_span_err, Applicability, DiagnosticBuilder, ErrorGuaranteed};
use rustc_hir as hir;
use rustc_hir::def_id::{DefId, LocalDefId};
@@ -14,13 +14,14 @@ use rustc_middle::mir::ConstraintCategory;
use rustc_middle::ty::query::Providers;
use rustc_middle::ty::trait_def::TraitSpecializationKind;
use rustc_middle::ty::{
- self, AdtKind, DefIdTree, GenericParamDefKind, ToPredicate, Ty, TyCtxt, TypeFoldable,
- TypeSuperVisitable, TypeVisitable, TypeVisitor,
+ self, AdtKind, DefIdTree, GenericParamDefKind, Ty, TyCtxt, TypeFoldable, TypeSuperVisitable,
+ TypeVisitable, TypeVisitor,
};
use rustc_middle::ty::{GenericArgKind, InternalSubsts};
use rustc_session::parse::feature_err;
use rustc_span::symbol::{sym, Ident, Symbol};
use rustc_span::{Span, DUMMY_SP};
+use rustc_target::spec::abi::Abi;
use rustc_trait_selection::autoderef::Autoderef;
use rustc_trait_selection::traits::error_reporting::TypeErrCtxtExt;
use rustc_trait_selection::traits::outlives_bounds::InferCtxtExt as _;
@@ -52,12 +53,14 @@ impl<'tcx> WfCheckingCtxt<'_, 'tcx> {
self.ocx.infcx.tcx
}
+ // Convenience function to normalize during wfcheck. This performs
+ // `ObligationCtxt::normalize`, but provides a nice `ObligationCauseCode`.
fn normalize<T>(&self, span: Span, loc: Option<WellFormedLoc>, value: T) -> T
where
T: TypeFoldable<'tcx>,
{
self.ocx.normalize(
- ObligationCause::new(span, self.body_id, ObligationCauseCode::WellFormed(loc)),
+ &ObligationCause::new(span, self.body_id, ObligationCauseCode::WellFormed(loc)),
self.param_env,
value,
)
@@ -74,9 +77,10 @@ impl<'tcx> WfCheckingCtxt<'_, 'tcx> {
// for a type to be WF, we do not need to check if const trait predicates satisfy.
let param_env = self.param_env.without_const();
self.ocx.register_obligation(traits::Obligation::new(
+ self.tcx(),
cause,
param_env,
- ty::Binder::dummy(ty::PredicateKind::WellFormed(arg)).to_predicate(self.tcx()),
+ ty::Binder::dummy(ty::PredicateKind::WellFormed(arg)),
));
}
}
@@ -104,7 +108,7 @@ pub(super) fn enter_wf_checking_ctxt<'tcx, F>(
f(&mut wfcx);
let errors = wfcx.select_all_or_error();
if !errors.is_empty() {
- infcx.err_ctxt().report_fulfillment_errors(&errors, None, false);
+ infcx.err_ctxt().report_fulfillment_errors(&errors, None);
return;
}
@@ -116,7 +120,7 @@ pub(super) fn enter_wf_checking_ctxt<'tcx, F>(
}
fn check_well_formed(tcx: TyCtxt<'_>, def_id: hir::OwnerId) {
- let node = tcx.hir().expect_owner(def_id);
+ let node = tcx.hir().owner(def_id);
match node {
hir::OwnerNode::Crate(_) => {}
hir::OwnerNode::Item(item) => check_item(tcx, item),
@@ -218,19 +222,16 @@ fn check_item<'tcx>(tcx: TyCtxt<'tcx>, item: &'tcx hir::Item<'tcx>) {
hir::ItemKind::Const(ty, ..) => {
check_item_type(tcx, def_id, ty.span, false);
}
- hir::ItemKind::Struct(ref struct_def, ref ast_generics) => {
- check_type_defn(tcx, item, false, |wfcx| vec![wfcx.non_enum_variant(struct_def)]);
-
+ hir::ItemKind::Struct(_, ref ast_generics) => {
+ check_type_defn(tcx, item, false);
check_variances_for_type_defn(tcx, item, ast_generics);
}
- hir::ItemKind::Union(ref struct_def, ref ast_generics) => {
- check_type_defn(tcx, item, true, |wfcx| vec![wfcx.non_enum_variant(struct_def)]);
-
+ hir::ItemKind::Union(_, ref ast_generics) => {
+ check_type_defn(tcx, item, true);
check_variances_for_type_defn(tcx, item, ast_generics);
}
- hir::ItemKind::Enum(ref enum_def, ref ast_generics) => {
- check_type_defn(tcx, item, true, |wfcx| wfcx.enum_variants(enum_def));
-
+ hir::ItemKind::Enum(_, ref ast_generics) => {
+ check_type_defn(tcx, item, true);
check_variances_for_type_defn(tcx, item, ast_generics);
}
hir::ItemKind::Trait(..) => {
@@ -414,7 +415,7 @@ fn check_gat_where_clauses(tcx: TyCtxt<'_>, associated_items: &[hir::TraitItemRe
.iter()
.copied()
.collect::<Vec<_>>(),
- &FxHashSet::default(),
+ &FxIndexSet::default(),
gat_def_id.def_id,
gat_generics,
)
@@ -463,12 +464,16 @@ fn check_gat_where_clauses(tcx: TyCtxt<'_>, associated_items: &[hir::TraitItemRe
let mut unsatisfied_bounds: Vec<_> = required_bounds
.into_iter()
.filter(|clause| match clause.kind().skip_binder() {
- ty::PredicateKind::RegionOutlives(ty::OutlivesPredicate(a, b)) => {
- !region_known_to_outlive(tcx, gat_hir, param_env, &FxHashSet::default(), a, b)
- }
- ty::PredicateKind::TypeOutlives(ty::OutlivesPredicate(a, b)) => {
- !ty_known_to_outlive(tcx, gat_hir, param_env, &FxHashSet::default(), a, b)
+ ty::PredicateKind::Clause(ty::Clause::RegionOutlives(ty::OutlivesPredicate(
+ a,
+ b,
+ ))) => {
+ !region_known_to_outlive(tcx, gat_hir, param_env, &FxIndexSet::default(), a, b)
}
+ ty::PredicateKind::Clause(ty::Clause::TypeOutlives(ty::OutlivesPredicate(
+ a,
+ b,
+ ))) => !ty_known_to_outlive(tcx, gat_hir, param_env, &FxIndexSet::default(), a, b),
_ => bug!("Unexpected PredicateKind"),
})
.map(|clause| clause.to_string())
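For context, a minimal example (not part of the patch) of the missing-bound situation these outlives clauses encode; the check reports the commented-out definition and accepts the one with the bound written out:

    // Reported: the GAT is missing the required `where Self: 'a` bound,
    // i.e. exactly the kind of `TypeOutlives` clause collected above.
    // trait LendingIterator {
    //     type Item<'a>;
    //     fn next<'a>(&'a mut self) -> Option<Self::Item<'a>>;
    // }

    // Accepted.
    trait LendingIterator {
        type Item<'a>
        where
            Self: 'a;

        fn next<'a>(&'a mut self) -> Option<Self::Item<'a>>;
    }

    fn main() {}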
@@ -549,7 +554,7 @@ fn gather_gat_bounds<'tcx, T: TypeFoldable<'tcx>>(
param_env: ty::ParamEnv<'tcx>,
item_hir: hir::HirId,
to_check: T,
- wf_tys: &FxHashSet<Ty<'tcx>>,
+ wf_tys: &FxIndexSet<Ty<'tcx>>,
gat_def_id: LocalDefId,
gat_generics: &'tcx ty::Generics,
) -> Option<FxHashSet<ty::Predicate<'tcx>>> {
@@ -600,8 +605,9 @@ fn gather_gat_bounds<'tcx, T: TypeFoldable<'tcx>>(
}));
// The predicate we expect to see. (In our example,
// `Self: 'me`.)
- let clause =
- ty::PredicateKind::TypeOutlives(ty::OutlivesPredicate(ty_param, region_param));
+ let clause = ty::PredicateKind::Clause(ty::Clause::TypeOutlives(
+ ty::OutlivesPredicate(ty_param, region_param),
+ ));
let clause = tcx.mk_predicate(ty::Binder::dummy(clause));
bounds.insert(clause);
}
@@ -637,9 +643,8 @@ fn gather_gat_bounds<'tcx, T: TypeFoldable<'tcx>>(
name: region_b_param.name,
}));
// The predicate we expect to see.
- let clause = ty::PredicateKind::RegionOutlives(ty::OutlivesPredicate(
- region_a_param,
- region_b_param,
+ let clause = ty::PredicateKind::Clause(ty::Clause::RegionOutlives(
+ ty::OutlivesPredicate(region_a_param, region_b_param),
));
let clause = tcx.mk_predicate(ty::Binder::dummy(clause));
bounds.insert(clause);
@@ -656,7 +661,7 @@ fn ty_known_to_outlive<'tcx>(
tcx: TyCtxt<'tcx>,
id: hir::HirId,
param_env: ty::ParamEnv<'tcx>,
- wf_tys: &FxHashSet<Ty<'tcx>>,
+ wf_tys: &FxIndexSet<Ty<'tcx>>,
ty: Ty<'tcx>,
region: ty::Region<'tcx>,
) -> bool {
@@ -673,7 +678,7 @@ fn region_known_to_outlive<'tcx>(
tcx: TyCtxt<'tcx>,
id: hir::HirId,
param_env: ty::ParamEnv<'tcx>,
- wf_tys: &FxHashSet<Ty<'tcx>>,
+ wf_tys: &FxIndexSet<Ty<'tcx>>,
region_a: ty::Region<'tcx>,
region_b: ty::Region<'tcx>,
) -> bool {
@@ -697,7 +702,7 @@ fn resolve_regions_with_wf_tys<'tcx>(
tcx: TyCtxt<'tcx>,
id: hir::HirId,
param_env: ty::ParamEnv<'tcx>,
- wf_tys: &FxHashSet<Ty<'tcx>>,
+ wf_tys: &FxIndexSet<Ty<'tcx>>,
add_constraints: impl for<'a> FnOnce(&'a InferCtxt<'tcx>, &'a RegionBoundPairs<'tcx>),
) -> bool {
// Unfortunately, we have to use a new `InferCtxt` each call, because
@@ -855,7 +860,7 @@ fn check_param_wf(tcx: TyCtxt<'_>, param: &hir::GenericParam<'_>) {
// Const parameters are well formed if their type is structural match.
hir::GenericParamKind::Const { ty: hir_ty, default: _ } => {
- let ty = tcx.type_of(tcx.hir().local_def_id(param.hir_id));
+ let ty = tcx.type_of(param.def_id);
if tcx.features().adt_const_params {
if let Some(non_structural_match_ty) =
@@ -1037,27 +1042,25 @@ fn item_adt_kind(kind: &ItemKind<'_>) -> Option<AdtKind> {
}
/// In a type definition, we check that the types of the fields are well-formed.
-fn check_type_defn<'tcx, F>(
- tcx: TyCtxt<'tcx>,
- item: &hir::Item<'tcx>,
- all_sized: bool,
- mut lookup_fields: F,
-) where
- F: FnMut(&WfCheckingCtxt<'_, 'tcx>) -> Vec<AdtVariant<'tcx>>,
-{
+fn check_type_defn<'tcx>(tcx: TyCtxt<'tcx>, item: &hir::Item<'tcx>, all_sized: bool) {
let _ = tcx.representability(item.owner_id.def_id);
+ let adt_def = tcx.adt_def(item.owner_id);
enter_wf_checking_ctxt(tcx, item.span, item.owner_id.def_id, |wfcx| {
- let variants = lookup_fields(wfcx);
- let packed = tcx.adt_def(item.owner_id).repr().packed();
+ let variants = adt_def.variants();
+ let packed = adt_def.repr().packed();
- for variant in &variants {
+ for variant in variants.iter() {
// All field types must be well-formed.
for field in &variant.fields {
+ let field_id = field.did.expect_local();
+ let hir::Node::Field(hir::FieldDef { ty: hir_ty, .. }) = tcx.hir().get_by_def_id(field_id)
+ else { bug!() };
+ let ty = wfcx.normalize(hir_ty.span, None, tcx.type_of(field.did));
wfcx.register_wf_obligation(
- field.span,
- Some(WellFormedLoc::Ty(field.def_id)),
- field.ty.into(),
+ hir_ty.span,
+ Some(WellFormedLoc::Ty(field_id)),
+ ty.into(),
)
}
@@ -1065,7 +1068,7 @@ fn check_type_defn<'tcx, F>(
// intermediate types must be sized.
let needs_drop_copy = || {
packed && {
- let ty = variant.fields.last().unwrap().ty;
+ let ty = tcx.type_of(variant.fields.last().unwrap().did);
let ty = tcx.erase_regions(ty);
if ty.needs_infer() {
tcx.sess
@@ -1084,39 +1087,43 @@ fn check_type_defn<'tcx, F>(
variant.fields[..variant.fields.len() - unsized_len].iter().enumerate()
{
let last = idx == variant.fields.len() - 1;
+ let field_id = field.did.expect_local();
+ let hir::Node::Field(hir::FieldDef { ty: hir_ty, .. }) = tcx.hir().get_by_def_id(field_id)
+ else { bug!() };
+ let ty = wfcx.normalize(hir_ty.span, None, tcx.type_of(field.did));
wfcx.register_bound(
traits::ObligationCause::new(
- field.span,
+ hir_ty.span,
wfcx.body_id,
traits::FieldSized {
adt_kind: match item_adt_kind(&item.kind) {
Some(i) => i,
None => bug!(),
},
- span: field.span,
+ span: hir_ty.span,
last,
},
),
wfcx.param_env,
- field.ty,
+ ty,
tcx.require_lang_item(LangItem::Sized, None),
);
}
// Explicit `enum` discriminant values must const-evaluate successfully.
- if let Some(discr_def_id) = variant.explicit_discr {
+ if let ty::VariantDiscr::Explicit(discr_def_id) = variant.discr {
let cause = traits::ObligationCause::new(
tcx.def_span(discr_def_id),
wfcx.body_id,
traits::MiscObligation,
);
wfcx.register_obligation(traits::Obligation::new(
+ tcx,
cause,
wfcx.param_env,
ty::Binder::dummy(ty::PredicateKind::ConstEvaluatable(
- ty::Const::from_anon_const(tcx, discr_def_id),
- ))
- .to_predicate(tcx),
+ ty::Const::from_anon_const(tcx, discr_def_id.expect_local()),
+ )),
));
}
}
@@ -1453,7 +1460,7 @@ fn check_where_clauses<'tcx>(wfcx: &WfCheckingCtxt<'_, 'tcx>, span: Span, def_id
wfcx.body_id,
traits::ItemObligation(def_id.to_def_id()),
);
- traits::Obligation::new(cause, wfcx.param_env, pred)
+ traits::Obligation::new(tcx, cause, wfcx.param_env, pred)
});
let predicates = predicates.0.instantiate_identity(tcx);
@@ -1537,23 +1544,50 @@ fn check_fn_or_method<'tcx>(
check_where_clauses(wfcx, span, def_id);
check_return_position_impl_trait_in_trait_bounds(
- tcx,
wfcx,
def_id,
sig.output(),
hir_decl.output.span(),
);
+
+ if sig.abi == Abi::RustCall {
+ let span = tcx.def_span(def_id);
+ let has_implicit_self = hir_decl.implicit_self != hir::ImplicitSelfKind::None;
+ let mut inputs = sig.inputs().iter().skip(if has_implicit_self { 1 } else { 0 });
+ // Check that the argument is a tuple
+ if let Some(ty) = inputs.next() {
+ wfcx.register_bound(
+ ObligationCause::new(span, wfcx.body_id, ObligationCauseCode::RustCall),
+ wfcx.param_env,
+ *ty,
+ tcx.require_lang_item(hir::LangItem::Tuple, Some(span)),
+ );
+ } else {
+ tcx.sess.span_err(
+ hir_decl.inputs.last().map_or(span, |input| input.span),
+ "functions with the \"rust-call\" ABI must take a single non-self tuple argument",
+ );
+ }
+ // No more inputs other than the `self` type and the tuple type
+ if inputs.next().is_some() {
+ tcx.sess.span_err(
+ hir_decl.inputs.last().map_or(span, |input| input.span),
+ "functions with the \"rust-call\" ABI must take a single non-self tuple argument",
+ );
+ }
+ }
}
/// Basically `check_associated_type_bounds`, but separated for now and should be
/// deduplicated when RPITITs get lowered into real associated items.
+#[tracing::instrument(level = "trace", skip(wfcx))]
fn check_return_position_impl_trait_in_trait_bounds<'tcx>(
- tcx: TyCtxt<'tcx>,
wfcx: &WfCheckingCtxt<'_, 'tcx>,
fn_def_id: LocalDefId,
fn_output: Ty<'tcx>,
span: Span,
) {
+ let tcx = wfcx.tcx();
if let Some(assoc_item) = tcx.opt_associated_item(fn_def_id.to_def_id())
&& assoc_item.container == ty::AssocItemContainer::TraitContainer
{
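A nightly-only sketch (not part of the patch; `unboxed_closures` gates writing the ABI by hand) of what the new `Abi::RustCall` check in the hunk above accepts and rejects:

    #![feature(unboxed_closures)]

    // Accepted: exactly one non-self argument, and it is a tuple.
    extern "rust-call" fn sum(args: (i32, i32)) -> i32 {
        args.0 + args.1
    }

    // Rejected with the "single non-self tuple argument" error added above:
    // extern "rust-call" fn not_a_tuple(x: i32) -> i32 { x }

    fn main() {
        println!("{}", sum((1, 2)));
    }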
@@ -1563,8 +1597,10 @@ fn check_return_position_impl_trait_in_trait_bounds<'tcx>(
&& tcx.def_kind(proj.item_def_id) == DefKind::ImplTraitPlaceholder
&& tcx.impl_trait_in_trait_parent(proj.item_def_id) == fn_def_id.to_def_id()
{
+ let span = tcx.def_span(proj.item_def_id);
let bounds = wfcx.tcx().explicit_item_bounds(proj.item_def_id);
let wf_obligations = bounds.iter().flat_map(|&(bound, bound_span)| {
+ let bound = ty::EarlyBinder(bound).subst(tcx, proj.substs);
let normalized_bound = wfcx.normalize(span, None, bound);
traits::wf::predicate_obligations(
wfcx.infcx,
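The new `EarlyBinder(bound).subst(...)` call matters when the hidden type's bounds mention the trait's own generics; a rough nightly sketch of such a signature (not part of the patch; feature name as of this era):

    #![feature(return_position_impl_trait_in_trait)]

    trait Store<T> {
        // The `Into<T>` bound on the hidden return type refers to the trait
        // parameter `T`, so it must be instantiated with the projection's
        // substs before the bound is checked for well-formedness.
        fn get(&self) -> impl Into<T>;
    }

    fn main() {}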
@@ -1675,14 +1711,13 @@ fn receiver_is_valid<'tcx>(
// `self: Self` is always valid.
if can_eq_self(receiver_ty) {
- if let Err(err) = wfcx.equate_types(&cause, wfcx.param_env, self_ty, receiver_ty) {
+ if let Err(err) = wfcx.eq(&cause, wfcx.param_env, self_ty, receiver_ty) {
infcx.err_ctxt().report_mismatched_types(&cause, self_ty, receiver_ty, err).emit();
}
return true;
}
- let mut autoderef =
- Autoderef::new(infcx, wfcx.param_env, wfcx.body_id, span, receiver_ty, span);
+ let mut autoderef = Autoderef::new(infcx, wfcx.param_env, wfcx.body_id, span, receiver_ty);
// The `arbitrary_self_types` feature allows raw pointer receivers like `self: *const Self`.
if arbitrary_self_types_enabled {
@@ -1692,7 +1727,7 @@ fn receiver_is_valid<'tcx>(
// The first type is `receiver_ty`, which we know its not equal to `self_ty`; skip it.
autoderef.next();
- let receiver_trait_def_id = tcx.require_lang_item(LangItem::Receiver, None);
+ let receiver_trait_def_id = tcx.require_lang_item(LangItem::Receiver, Some(span));
// Keep dereferencing `receiver_ty` until we get to `self_ty`.
loop {
@@ -1705,9 +1740,7 @@ fn receiver_is_valid<'tcx>(
if can_eq_self(potential_self_ty) {
wfcx.register_obligations(autoderef.into_obligations());
- if let Err(err) =
- wfcx.equate_types(&cause, wfcx.param_env, self_ty, potential_self_ty)
- {
+ if let Err(err) = wfcx.eq(&cause, wfcx.param_env, self_ty, potential_self_ty) {
infcx
.err_ctxt()
.report_mismatched_types(&cause, self_ty, potential_self_ty, err)
@@ -1754,13 +1787,9 @@ fn receiver_is_implemented<'tcx>(
receiver_ty: Ty<'tcx>,
) -> bool {
let tcx = wfcx.tcx();
- let trait_ref = ty::Binder::dummy(ty::TraitRef {
- def_id: receiver_trait_def_id,
- substs: tcx.mk_substs_trait(receiver_ty, &[]),
- });
+ let trait_ref = ty::Binder::dummy(tcx.mk_trait_ref(receiver_trait_def_id, [receiver_ty]));
- let obligation =
- traits::Obligation::new(cause, wfcx.param_env, trait_ref.without_const().to_predicate(tcx));
+ let obligation = traits::Obligation::new(tcx, cause, wfcx.param_env, trait_ref);
if wfcx.infcx.predicate_must_hold_modulo_regions(&obligation) {
true
@@ -1907,6 +1936,7 @@ impl<'tcx> WfCheckingCtxt<'_, 'tcx> {
}
let obligation = traits::Obligation::new(
+ tcx,
traits::ObligationCause::new(span, self.body_id, traits::TrivialBound),
empty_env,
pred,
@@ -1925,56 +1955,6 @@ fn check_mod_type_wf(tcx: TyCtxt<'_>, module: LocalDefId) {
items.par_foreign_items(|item| tcx.ensure().check_well_formed(item.owner_id));
}
-///////////////////////////////////////////////////////////////////////////
-// ADT
-
-// FIXME(eddyb) replace this with getting fields/discriminants through `ty::AdtDef`.
-struct AdtVariant<'tcx> {
- /// Types of fields in the variant, that must be well-formed.
- fields: Vec<AdtField<'tcx>>,
-
- /// Explicit discriminant of this variant (e.g. `A = 123`),
- /// that must evaluate to a constant value.
- explicit_discr: Option<LocalDefId>,
-}
-
-struct AdtField<'tcx> {
- ty: Ty<'tcx>,
- def_id: LocalDefId,
- span: Span,
-}
-
-impl<'a, 'tcx> WfCheckingCtxt<'a, 'tcx> {
- // FIXME(eddyb) replace this with getting fields through `ty::AdtDef`.
- fn non_enum_variant(&self, struct_def: &hir::VariantData<'_>) -> AdtVariant<'tcx> {
- let fields = struct_def
- .fields()
- .iter()
- .map(|field| {
- let def_id = self.tcx().hir().local_def_id(field.hir_id);
- let field_ty = self.tcx().type_of(def_id);
- let field_ty = self.normalize(field.ty.span, None, field_ty);
- debug!("non_enum_variant: type of field {:?} is {:?}", field, field_ty);
- AdtField { ty: field_ty, span: field.ty.span, def_id }
- })
- .collect();
- AdtVariant { fields, explicit_discr: None }
- }
-
- fn enum_variants(&self, enum_def: &hir::EnumDef<'_>) -> Vec<AdtVariant<'tcx>> {
- enum_def
- .variants
- .iter()
- .map(|variant| AdtVariant {
- fields: self.non_enum_variant(&variant.data).fields,
- explicit_discr: variant
- .disr_expr
- .map(|explicit_discr| self.tcx().hir().local_def_id(explicit_discr.hir_id)),
- })
- .collect()
- }
-}
-
fn error_392(
tcx: TyCtxt<'_>,
span: Span,
diff --git a/compiler/rustc_hir_analysis/src/check_unused.rs b/compiler/rustc_hir_analysis/src/check_unused.rs
index d0c317334..5749b0478 100644
--- a/compiler/rustc_hir_analysis/src/check_unused.rs
+++ b/compiler/rustc_hir_analysis/src/check_unused.rs
@@ -57,25 +57,6 @@ fn unused_crates_lint(tcx: TyCtxt<'_>) {
.maybe_unused_extern_crates(())
.iter()
.filter(|&&(def_id, _)| {
- // The `def_id` here actually was calculated during resolution (at least
- // at the time of this writing) and is being shipped to us via a side
- // channel of the tcx. There may have been extra expansion phases,
- // however, which ended up removing the `def_id` *after* expansion.
- //
- // As a result we need to verify that `def_id` is indeed still valid for
- // our AST and actually present in the HIR map. If it's not there then
- // there's safely nothing to warn about, and otherwise we carry on with
- // our execution.
- //
- // Note that if we carry through to the `extern_mod_stmt_cnum` query
- // below it'll cause a panic because `def_id` is actually bogus at this
- // point in time otherwise.
- if tcx.hir().find(tcx.hir().local_def_id_to_hir_id(def_id)).is_none() {
- return false;
- }
- true
- })
- .filter(|&&(def_id, _)| {
tcx.extern_mod_stmt_cnum(def_id).map_or(true, |cnum| {
!tcx.is_compiler_builtins(cnum)
&& !tcx.is_panic_runtime(cnum)
diff --git a/compiler/rustc_hir_analysis/src/coherence/builtin.rs b/compiler/rustc_hir_analysis/src/coherence/builtin.rs
index b6c91d425..193ecdb16 100644
--- a/compiler/rustc_hir_analysis/src/coherence/builtin.rs
+++ b/compiler/rustc_hir_analysis/src/coherence/builtin.rs
@@ -114,7 +114,7 @@ fn visit_implementation_of_copy(tcx: TyCtxt<'_>, impl_did: LocalDefId) {
traits::ObligationCause::dummy_with_span(field_ty_span),
param_env,
ty,
- tcx.lang_items().copy_trait().unwrap(),
+ tcx.require_lang_item(LangItem::Copy, Some(span)),
) {
let error_predicate = error.obligation.predicate;
// Only note if it's not the root obligation, otherwise it's trivial and
@@ -128,11 +128,11 @@ fn visit_implementation_of_copy(tcx: TyCtxt<'_>, impl_did: LocalDefId) {
.or_default()
.push(error.obligation.cause.span);
}
- if let ty::PredicateKind::Trait(ty::TraitPredicate {
+ if let ty::PredicateKind::Clause(ty::Clause::Trait(ty::TraitPredicate {
trait_ref,
polarity: ty::ImplPolarity::Positive,
..
- }) = error_predicate.kind().skip_binder()
+ })) = error_predicate.kind().skip_binder()
{
let ty = trait_ref.self_ty();
if let ty::Param(_) = ty.kind() {
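For reference, the user-facing case this path diagnoses (not part of the patch); matching on `Clause::Trait` is what lets it single out a generic parameter like `T` below:

    // Rejected: the impl requires every field to be `Copy`, and `T` is not
    // known to be; the diagnostic points at `T` as the type needing a bound.
    // struct Pair<T>(T, T);
    // impl<T> Copy for Pair<T> {}

    // Accepted once the bound is added.
    #[derive(Clone)]
    struct BoundedPair<T: Copy>(T, T);

    impl<T: Copy> Copy for BoundedPair<T> {}

    fn main() {
        let p = BoundedPair(1u8, 2u8);
        let q = p; // `p` stays usable because the pair is `Copy`.
        println!("{} {}", p.0, q.1);
    }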
@@ -315,13 +315,12 @@ fn visit_implementation_of_dispatch_from_dyn<'tcx>(tcx: TyCtxt<'tcx>, impl_did:
cause.clone(),
dispatch_from_dyn_trait,
0,
- field.ty(tcx, substs_a),
- &[field.ty(tcx, substs_b).into()],
+ [field.ty(tcx, substs_a), field.ty(tcx, substs_b)],
)
}),
);
if !errors.is_empty() {
- infcx.err_ctxt().report_fulfillment_errors(&errors, None, false);
+ infcx.err_ctxt().report_fulfillment_errors(&errors, None);
}
// Finally, resolve all regions.
@@ -371,7 +370,7 @@ pub fn coerce_unsized_info<'tcx>(tcx: TyCtxt<'tcx>, impl_did: DefId) -> CoerceUn
let check_mutbl = |mt_a: ty::TypeAndMut<'tcx>,
mt_b: ty::TypeAndMut<'tcx>,
mk_ptr: &dyn Fn(Ty<'tcx>) -> Ty<'tcx>| {
- if (mt_a.mutbl, mt_b.mutbl) == (hir::Mutability::Not, hir::Mutability::Mut) {
+ if mt_a.mutbl < mt_b.mutbl {
infcx
.err_ctxt()
.report_mismatched_types(
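The `mt_a.mutbl < mt_b.mutbl` comparison leans on the derived ordering of the mutability enum; a small sketch mirroring that assumption (not the actual rustc definition):

    // `Not` is declared before `Mut`, so the derived ordering makes
    // `Not < Mut` the single illegal direction: coercing mutability away is
    // fine, gaining mutability is not.
    #[derive(PartialEq, Eq, PartialOrd, Ord)]
    enum Mutability {
        Not,
        Mut,
    }

    fn main() {
        assert!(Mutability::Not < Mutability::Mut);
    }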
@@ -558,10 +557,10 @@ pub fn coerce_unsized_info<'tcx>(tcx: TyCtxt<'tcx>, impl_did: DefId) -> CoerceUn
// Register an obligation for `A: Trait<B>`.
let cause = traits::ObligationCause::misc(span, impl_hir_id);
let predicate =
- predicate_for_trait_def(tcx, param_env, cause, trait_def_id, 0, source, &[target.into()]);
+ predicate_for_trait_def(tcx, param_env, cause, trait_def_id, 0, [source, target]);
let errors = traits::fully_solve_obligation(&infcx, predicate);
if !errors.is_empty() {
- infcx.err_ctxt().report_fulfillment_errors(&errors, None, false);
+ infcx.err_ctxt().report_fulfillment_errors(&errors, None);
}
// Finally, resolve all regions.
diff --git a/compiler/rustc_hir_analysis/src/coherence/mod.rs b/compiler/rustc_hir_analysis/src/coherence/mod.rs
index ae9ebe590..1bf3768fe 100644
--- a/compiler/rustc_hir_analysis/src/coherence/mod.rs
+++ b/compiler/rustc_hir_analysis/src/coherence/mod.rs
@@ -5,10 +5,11 @@
// done by the orphan and overlap modules. Then we build up various
// mappings. That mapping code resides here.
-use rustc_errors::struct_span_err;
+use rustc_errors::{error_code, struct_span_err};
use rustc_hir::def_id::{DefId, LocalDefId};
use rustc_middle::ty::query::Providers;
use rustc_middle::ty::{self, TyCtxt, TypeVisitable};
+use rustc_span::sym;
use rustc_trait_selection::traits;
mod builtin;
@@ -39,61 +40,26 @@ fn enforce_trait_manually_implementable(
impl_def_id: LocalDefId,
trait_def_id: DefId,
) {
- let did = Some(trait_def_id);
- let li = tcx.lang_items();
let impl_header_span = tcx.def_span(impl_def_id);
- // Disallow *all* explicit impls of `Pointee`, `DiscriminantKind`, `Sized` and `Unsize` for now.
- if did == li.pointee_trait() {
- struct_span_err!(
+ // Disallow *all* explicit impls of traits marked `#[rustc_deny_explicit_impl]`
+ if tcx.has_attr(trait_def_id, sym::rustc_deny_explicit_impl) {
+ let trait_name = tcx.item_name(trait_def_id);
+ let mut err = struct_span_err!(
tcx.sess,
impl_header_span,
E0322,
- "explicit impls for the `Pointee` trait are not permitted"
- )
- .span_label(impl_header_span, "impl of `Pointee` not allowed")
- .emit();
- return;
- }
-
- if did == li.discriminant_kind_trait() {
- struct_span_err!(
- tcx.sess,
- impl_header_span,
- E0322,
- "explicit impls for the `DiscriminantKind` trait are not permitted"
- )
- .span_label(impl_header_span, "impl of `DiscriminantKind` not allowed")
- .emit();
- return;
- }
-
- if did == li.sized_trait() {
- struct_span_err!(
- tcx.sess,
- impl_header_span,
- E0322,
- "explicit impls for the `Sized` trait are not permitted"
- )
- .span_label(impl_header_span, "impl of `Sized` not allowed")
- .emit();
- return;
- }
-
- if did == li.unsize_trait() {
- struct_span_err!(
- tcx.sess,
- impl_header_span,
- E0328,
- "explicit impls for the `Unsize` trait are not permitted"
- )
- .span_label(impl_header_span, "impl of `Unsize` not allowed")
- .emit();
- return;
- }
+ "explicit impls for the `{trait_name}` trait are not permitted"
+ );
+ err.span_label(impl_header_span, format!("impl of `{trait_name}` not allowed"));
+
+ // Maintain explicit error code for `Unsize`, since it has a useful
+ // explanation about using `CoerceUnsized` instead.
+ if Some(trait_def_id) == tcx.lang_items().unsize_trait() {
+ err.code(error_code!(E0328));
+ }
- if tcx.features().unboxed_closures {
- // the feature gate allows all Fn traits
+ err.emit();
return;
}
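The user-visible behaviour is meant to be unchanged; for example (not part of the patch), explicit impls of these traits keep being rejected, with `Unsize` keeping its dedicated error code:

    struct Foo;

    // Rejected with E0322: explicit impls for the `Sized` trait are not permitted.
    // impl Sized for Foo {}

    // Rejected as well, but still reported under E0328 thanks to the special
    // case above, which points users at `CoerceUnsized` instead.
    // impl std::marker::Unsize<Foo> for Foo {}

    fn main() {
        let _ = Foo;
    }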
diff --git a/compiler/rustc_hir_analysis/src/coherence/orphan.rs b/compiler/rustc_hir_analysis/src/coherence/orphan.rs
index bb45c3823..cc5114dba 100644
--- a/compiler/rustc_hir_analysis/src/coherence/orphan.rs
+++ b/compiler/rustc_hir_analysis/src/coherence/orphan.rs
@@ -5,7 +5,6 @@ use rustc_data_structures::fx::FxHashSet;
use rustc_errors::{struct_span_err, DelayDm};
use rustc_errors::{Diagnostic, ErrorGuaranteed};
use rustc_hir as hir;
-use rustc_middle::ty::subst::GenericArgKind;
use rustc_middle::ty::subst::InternalSubsts;
use rustc_middle::ty::util::IgnoreRegions;
use rustc_middle::ty::{
@@ -23,9 +22,7 @@ pub(crate) fn orphan_check_impl(
impl_def_id: LocalDefId,
) -> Result<(), ErrorGuaranteed> {
let trait_ref = tcx.impl_trait_ref(impl_def_id).unwrap();
- if let Some(err) = trait_ref.error_reported() {
- return Err(err);
- }
+ trait_ref.error_reported()?;
let ret = do_orphan_check_impl(tcx, trait_ref, impl_def_id);
if tcx.trait_is_auto(trait_ref.def_id) {
@@ -49,58 +46,6 @@ fn do_orphan_check_impl<'tcx>(
let sp = tcx.def_span(def_id);
let tr = impl_.of_trait.as_ref().unwrap();
- // Ensure no opaque types are present in this impl header. See issues #76202 and #86411 for examples,
- // and #84660 where it would otherwise allow unsoundness.
- if trait_ref.has_opaque_types() {
- trace!("{:#?}", item);
- // First we find the opaque type in question.
- for ty in trait_ref.substs {
- for ty in ty.walk() {
- let ty::subst::GenericArgKind::Type(ty) = ty.unpack() else { continue };
- let ty::Opaque(def_id, _) = *ty.kind() else { continue };
- trace!(?def_id);
-
- // Then we search for mentions of the opaque type's type alias in the HIR
- struct SpanFinder<'tcx> {
- sp: Span,
- def_id: DefId,
- tcx: TyCtxt<'tcx>,
- }
- impl<'v, 'tcx> hir::intravisit::Visitor<'v> for SpanFinder<'tcx> {
- #[instrument(level = "trace", skip(self, _id))]
- fn visit_path(&mut self, path: &'v hir::Path<'v>, _id: hir::HirId) {
- // You can't mention an opaque type directly, so we look for type aliases
- if let hir::def::Res::Def(hir::def::DefKind::TyAlias, def_id) = path.res {
- // And check if that type alias's type contains the opaque type we're looking for
- for arg in self.tcx.type_of(def_id).walk() {
- if let GenericArgKind::Type(ty) = arg.unpack() {
- if let ty::Opaque(def_id, _) = *ty.kind() {
- if def_id == self.def_id {
- // Finally we update the span to the mention of the type alias
- self.sp = path.span;
- return;
- }
- }
- }
- }
- }
- hir::intravisit::walk_path(self, path)
- }
- }
-
- let mut visitor = SpanFinder { sp, def_id, tcx };
- hir::intravisit::walk_item(&mut visitor, item);
- let reported = tcx
- .sess
- .struct_span_err(visitor.sp, "cannot implement trait on type alias impl trait")
- .span_note(tcx.def_span(def_id), "type alias impl trait defined here")
- .emit();
- return Err(reported);
- }
- }
- span_bug!(sp, "opaque type not found, but `has_opaque_types` is set")
- }
-
match traits::orphan_check(tcx, item.owner_id.to_def_id()) {
Ok(()) => {}
Err(err) => emit_orphan_check_error(
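For context, the pattern the deleted block used to reject (a nightly sketch, not part of the patch; the diff alone does not show where this case is handled after the removal):

    #![feature(type_alias_impl_trait)]

    trait Marker {}

    type Alias = impl Sized;

    // This is what used to trigger "cannot implement trait on type alias
    // impl trait" from the removed visitor.
    // impl Marker for Alias {}

    // The opaque type still needs a defining use somewhere in scope.
    fn define() -> Alias {
        0u8
    }

    fn main() {
        let _ = define();
    }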
@@ -347,7 +292,7 @@ fn emit_newtype_suggestion_for_raw_ptr(
diag: &mut Diagnostic,
) {
if !self_ty.needs_subst() {
- let mut_key = if ptr_ty.mutbl == rustc_middle::mir::Mutability::Mut { "mut " } else { "" };
+ let mut_key = ptr_ty.mutbl.prefix_str();
let msg_sugg = "consider introducing a new wrapper type".to_owned();
let sugg = vec![
(
diff --git a/compiler/rustc_hir_analysis/src/collect.rs b/compiler/rustc_hir_analysis/src/collect.rs
index 346d2e2fc..1183a26d5 100644
--- a/compiler/rustc_hir_analysis/src/collect.rs
+++ b/compiler/rustc_hir_analysis/src/collect.rs
@@ -24,18 +24,16 @@ use rustc_data_structures::captures::Captures;
use rustc_data_structures::fx::{FxHashMap, FxHashSet};
use rustc_errors::{struct_span_err, Applicability, DiagnosticBuilder, ErrorGuaranteed, StashKey};
use rustc_hir as hir;
-use rustc_hir::def::CtorKind;
use rustc_hir::def_id::{DefId, LocalDefId, LOCAL_CRATE};
use rustc_hir::intravisit::{self, Visitor};
-use rustc_hir::weak_lang_items;
-use rustc_hir::{GenericParamKind, Node};
+use rustc_hir::weak_lang_items::WEAK_LANG_ITEMS;
+use rustc_hir::{lang_items, GenericParamKind, LangItem, Node};
use rustc_middle::hir::nested_filter;
use rustc_middle::middle::codegen_fn_attrs::{CodegenFnAttrFlags, CodegenFnAttrs};
use rustc_middle::mir::mono::Linkage;
use rustc_middle::ty::query::Providers;
use rustc_middle::ty::util::{Discr, IntTypeExt};
-use rustc_middle::ty::ReprOptions;
-use rustc_middle::ty::{self, AdtKind, Const, DefIdTree, IsSuggestable, Ty, TyCtxt};
+use rustc_middle::ty::{self, AdtKind, Const, DefIdTree, IsSuggestable, ToPredicate, Ty, TyCtxt};
use rustc_session::lint;
use rustc_session::parse::feature_err;
use rustc_span::symbol::{kw, sym, Ident, Symbol};
@@ -84,6 +82,7 @@ pub fn provide(providers: &mut Providers) {
asm_target_features,
collect_mod_item_types,
should_inherit_track_caller,
+ is_type_alias_impl_trait,
..*providers
};
}
@@ -291,18 +290,15 @@ impl<'tcx> Visitor<'tcx> for CollectItemTypesVisitor<'tcx> {
match param.kind {
hir::GenericParamKind::Lifetime { .. } => {}
hir::GenericParamKind::Type { default: Some(_), .. } => {
- let def_id = self.tcx.hir().local_def_id(param.hir_id);
- self.tcx.ensure().type_of(def_id);
+ self.tcx.ensure().type_of(param.def_id);
}
hir::GenericParamKind::Type { .. } => {}
hir::GenericParamKind::Const { default, .. } => {
- let def_id = self.tcx.hir().local_def_id(param.hir_id);
- self.tcx.ensure().type_of(def_id);
+ self.tcx.ensure().type_of(param.def_id);
if let Some(default) = default {
- let default_def_id = self.tcx.hir().local_def_id(default.hir_id);
// need to store default and type of default
- self.tcx.ensure().type_of(default_def_id);
- self.tcx.ensure().const_param_default(def_id);
+ self.tcx.ensure().type_of(default.def_id);
+ self.tcx.ensure().const_param_default(param.def_id);
}
}
}
@@ -311,9 +307,9 @@ impl<'tcx> Visitor<'tcx> for CollectItemTypesVisitor<'tcx> {
}
fn visit_expr(&mut self, expr: &'tcx hir::Expr<'tcx>) {
- if let hir::ExprKind::Closure { .. } = expr.kind {
- let def_id = self.tcx.hir().local_def_id(expr.hir_id);
- self.tcx.ensure().generics_of(def_id);
+ if let hir::ExprKind::Closure(closure) = expr.kind {
+ self.tcx.ensure().generics_of(closure.def_id);
+ self.tcx.ensure().codegen_fn_attrs(closure.def_id);
// We do not call `type_of` for closures here as that
// depends on typecheck and would therefore hide
// any further errors in case one typeck fails.
@@ -379,8 +375,8 @@ impl<'tcx> AstConv<'tcx> for ItemCtxt<'tcx> {
self.tcx
}
- fn item_def_id(&self) -> Option<DefId> {
- Some(self.item_def_id)
+ fn item_def_id(&self) -> DefId {
+ self.item_def_id
}
fn get_type_parameter_bounds(
@@ -512,8 +508,7 @@ impl<'tcx> AstConv<'tcx> for ItemCtxt<'tcx> {
}
_ => {}
}
- err.emit();
- self.tcx().ty_error()
+ self.tcx().ty_error_with_guaranteed(err.emit())
}
}
@@ -522,7 +517,7 @@ impl<'tcx> AstConv<'tcx> for ItemCtxt<'tcx> {
ty
}
- fn set_tainted_by_errors(&self) {
+ fn set_tainted_by_errors(&self, _: ErrorGuaranteed) {
// There's no obvious place to track this, so just let it go.
}
@@ -587,8 +582,12 @@ fn convert_item(tcx: TyCtxt<'_>, item_id: hir::ItemId) {
tcx.ensure().type_of(item.owner_id);
tcx.ensure().predicates_of(item.owner_id);
match item.kind {
- hir::ForeignItemKind::Fn(..) => tcx.ensure().fn_sig(item.owner_id),
+ hir::ForeignItemKind::Fn(..) => {
+ tcx.ensure().codegen_fn_attrs(item.owner_id);
+ tcx.ensure().fn_sig(item.owner_id)
+ }
hir::ForeignItemKind::Static(..) => {
+ tcx.ensure().codegen_fn_attrs(item.owner_id);
let mut visitor = HirPlaceholderCollector::default();
visitor.visit_foreign_item(item);
placeholder_type_error(
@@ -604,11 +603,11 @@ fn convert_item(tcx: TyCtxt<'_>, item_id: hir::ItemId) {
}
}
}
- hir::ItemKind::Enum(ref enum_definition, _) => {
+ hir::ItemKind::Enum(..) => {
tcx.ensure().generics_of(def_id);
tcx.ensure().type_of(def_id);
tcx.ensure().predicates_of(def_id);
- convert_enum_variant_types(tcx, def_id.to_def_id(), enum_definition.variants);
+ convert_enum_variant_types(tcx, def_id.to_def_id());
}
hir::ItemKind::Impl { .. } => {
tcx.ensure().generics_of(def_id);
@@ -633,23 +632,16 @@ fn convert_item(tcx: TyCtxt<'_>, item_id: hir::ItemId) {
tcx.ensure().predicates_of(def_id);
for f in struct_def.fields() {
- let def_id = tcx.hir().local_def_id(f.hir_id);
- tcx.ensure().generics_of(def_id);
- tcx.ensure().type_of(def_id);
- tcx.ensure().predicates_of(def_id);
+ tcx.ensure().generics_of(f.def_id);
+ tcx.ensure().type_of(f.def_id);
+ tcx.ensure().predicates_of(f.def_id);
}
- if let Some(ctor_hir_id) = struct_def.ctor_hir_id() {
- convert_variant_ctor(tcx, ctor_hir_id);
+ if let Some(ctor_def_id) = struct_def.ctor_def_id() {
+ convert_variant_ctor(tcx, ctor_def_id);
}
}
- // Desugared from `impl Trait`, so visited by the function's return type.
- hir::ItemKind::OpaqueTy(hir::OpaqueTy {
- origin: hir::OpaqueTyOrigin::FnReturn(..) | hir::OpaqueTyOrigin::AsyncFn(..),
- ..
- }) => {}
-
// Don't call `type_of` on opaque types, since that depends on type
// checking function bodies. `check_item_type` ensures that it's called
// instead.
@@ -657,27 +649,33 @@ fn convert_item(tcx: TyCtxt<'_>, item_id: hir::ItemId) {
tcx.ensure().generics_of(def_id);
tcx.ensure().predicates_of(def_id);
tcx.ensure().explicit_item_bounds(def_id);
+ tcx.ensure().item_bounds(def_id);
}
- hir::ItemKind::TyAlias(..)
- | hir::ItemKind::Static(..)
- | hir::ItemKind::Const(..)
- | hir::ItemKind::Fn(..) => {
+
+ hir::ItemKind::TyAlias(..) => {
tcx.ensure().generics_of(def_id);
tcx.ensure().type_of(def_id);
tcx.ensure().predicates_of(def_id);
- match it.kind {
- hir::ItemKind::Fn(..) => tcx.ensure().fn_sig(def_id),
- hir::ItemKind::OpaqueTy(..) => tcx.ensure().item_bounds(def_id),
- hir::ItemKind::Const(ty, ..) | hir::ItemKind::Static(ty, ..) => {
- if !is_suggestable_infer_ty(ty) {
- let mut visitor = HirPlaceholderCollector::default();
- visitor.visit_item(it);
- placeholder_type_error(tcx, None, visitor.0, false, None, it.kind.descr());
- }
- }
- _ => (),
+ }
+
+ hir::ItemKind::Static(ty, ..) | hir::ItemKind::Const(ty, ..) => {
+ tcx.ensure().generics_of(def_id);
+ tcx.ensure().type_of(def_id);
+ tcx.ensure().predicates_of(def_id);
+ if !is_suggestable_infer_ty(ty) {
+ let mut visitor = HirPlaceholderCollector::default();
+ visitor.visit_item(it);
+ placeholder_type_error(tcx, None, visitor.0, false, None, it.kind.descr());
}
}
+
+ hir::ItemKind::Fn(..) => {
+ tcx.ensure().generics_of(def_id);
+ tcx.ensure().type_of(def_id);
+ tcx.ensure().predicates_of(def_id);
+ tcx.ensure().fn_sig(def_id);
+ tcx.ensure().codegen_fn_attrs(def_id);
+ }
}
}
@@ -688,6 +686,7 @@ fn convert_trait_item(tcx: TyCtxt<'_>, trait_item_id: hir::TraitItemId) {
match trait_item.kind {
hir::TraitItemKind::Fn(..) => {
+ tcx.ensure().codegen_fn_attrs(def_id);
tcx.ensure().type_of(def_id);
tcx.ensure().fn_sig(def_id);
}
@@ -737,6 +736,7 @@ fn convert_impl_item(tcx: TyCtxt<'_>, impl_item_id: hir::ImplItemId) {
let impl_item = tcx.hir().impl_item(impl_item_id);
match impl_item.kind {
hir::ImplItemKind::Fn(..) => {
+ tcx.ensure().codegen_fn_attrs(def_id);
tcx.ensure().fn_sig(def_id);
}
hir::ImplItemKind::Type(_) => {
@@ -750,37 +750,34 @@ fn convert_impl_item(tcx: TyCtxt<'_>, impl_item_id: hir::ImplItemId) {
}
}
-fn convert_variant_ctor(tcx: TyCtxt<'_>, ctor_id: hir::HirId) {
- let def_id = tcx.hir().local_def_id(ctor_id);
+fn convert_variant_ctor(tcx: TyCtxt<'_>, def_id: LocalDefId) {
tcx.ensure().generics_of(def_id);
tcx.ensure().type_of(def_id);
tcx.ensure().predicates_of(def_id);
}
-fn convert_enum_variant_types(tcx: TyCtxt<'_>, def_id: DefId, variants: &[hir::Variant<'_>]) {
+fn convert_enum_variant_types(tcx: TyCtxt<'_>, def_id: DefId) {
let def = tcx.adt_def(def_id);
let repr_type = def.repr().discr_type();
let initial = repr_type.initial_discriminant(tcx);
let mut prev_discr = None::<Discr<'_>>;
// fill the discriminant values and field types
- for variant in variants {
+ for variant in def.variants() {
let wrapped_discr = prev_discr.map_or(initial, |d| d.wrap_incr(tcx));
prev_discr = Some(
- if let Some(ref e) = variant.disr_expr {
- let expr_did = tcx.hir().local_def_id(e.hir_id);
- def.eval_explicit_discr(tcx, expr_did.to_def_id())
+ if let ty::VariantDiscr::Explicit(const_def_id) = variant.discr {
+ def.eval_explicit_discr(tcx, const_def_id)
} else if let Some(discr) = repr_type.disr_incr(tcx, prev_discr) {
Some(discr)
} else {
- struct_span_err!(tcx.sess, variant.span, E0370, "enum discriminant overflowed")
- .span_label(
- variant.span,
- format!("overflowed on value after {}", prev_discr.unwrap()),
- )
+ let span = tcx.def_span(variant.def_id);
+ struct_span_err!(tcx.sess, span, E0370, "enum discriminant overflowed")
+ .span_label(span, format!("overflowed on value after {}", prev_discr.unwrap()))
.note(&format!(
"explicitly set `{} = {}` if that is desired outcome",
- variant.ident, wrapped_discr
+ tcx.item_name(variant.def_id),
+ wrapped_discr
))
.emit();
None
@@ -788,17 +785,16 @@ fn convert_enum_variant_types(tcx: TyCtxt<'_>, def_id: DefId, variants: &[hir::V
.unwrap_or(wrapped_discr),
);
- for f in variant.data.fields() {
- let def_id = tcx.hir().local_def_id(f.hir_id);
- tcx.ensure().generics_of(def_id);
- tcx.ensure().type_of(def_id);
- tcx.ensure().predicates_of(def_id);
+ for f in &variant.fields {
+ tcx.ensure().generics_of(f.did);
+ tcx.ensure().type_of(f.did);
+ tcx.ensure().predicates_of(f.did);
}
// Convert the ctor, if any. This also registers the variant as
// an item.
- if let Some(ctor_hir_id) = variant.data.ctor_hir_id() {
- convert_variant_ctor(tcx, ctor_hir_id);
+ if let Some(ctor_def_id) = variant.ctor_def_id() {
+ convert_variant_ctor(tcx, ctor_def_id.expect_local());
}
}
}
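The E0370 path above now reads everything from the `AdtDef` instead of the HIR, but the user-facing case is the same, e.g. (not part of the patch):

    // Rejected with E0370: the discriminant after `A = 255` overflows `u8`,
    // and the note suggests setting `B` explicitly if wrap-around is intended.
    // #[repr(u8)]
    // enum Overflowing {
    //     A = 255,
    //     B,
    // }

    // Accepted: the wrap-around is spelled out.
    #[repr(u8)]
    enum Wrapping {
        A = 255,
        B = 0,
    }

    fn main() {
        let _ = (Wrapping::A, Wrapping::B);
    }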
@@ -806,7 +802,6 @@ fn convert_enum_variant_types(tcx: TyCtxt<'_>, def_id: DefId, variants: &[hir::V
fn convert_variant(
tcx: TyCtxt<'_>,
variant_did: Option<LocalDefId>,
- ctor_did: Option<LocalDefId>,
ident: Ident,
discr: ty::VariantDiscr,
def: &hir::VariantData<'_>,
@@ -818,7 +813,6 @@ fn convert_variant(
.fields()
.iter()
.map(|f| {
- let fid = tcx.hir().local_def_id(f.hir_id);
let dup_span = seen_fields.get(&f.ident.normalize_to_macros_2_0()).cloned();
if let Some(prev_span) = dup_span {
tcx.sess.emit_err(errors::FieldAlreadyDeclared {
@@ -830,7 +824,11 @@ fn convert_variant(
seen_fields.insert(f.ident.normalize_to_macros_2_0(), f.span);
}
- ty::FieldDef { did: fid.to_def_id(), name: f.ident.name, vis: tcx.visibility(fid) }
+ ty::FieldDef {
+ did: f.def_id.to_def_id(),
+ name: f.ident.name,
+ vis: tcx.visibility(f.def_id),
+ }
})
.collect();
let recovered = match def {
@@ -840,10 +838,9 @@ fn convert_variant(
ty::VariantDef::new(
ident.name,
variant_did.map(LocalDefId::to_def_id),
- ctor_did.map(LocalDefId::to_def_id),
+ def.ctor().map(|(kind, _, def_id)| (kind, def_id.to_def_id())),
discr,
fields,
- CtorKind::from_hir(def),
adt_kind,
parent_did.to_def_id(),
recovered,
@@ -863,7 +860,7 @@ fn adt_def<'tcx>(tcx: TyCtxt<'tcx>, def_id: DefId) -> ty::AdtDef<'tcx> {
bug!();
};
- let repr = ReprOptions::new(tcx, def_id.to_def_id());
+ let repr = tcx.repr_options_of_def(def_id.to_def_id());
let (kind, variants) = match item.kind {
ItemKind::Enum(ref def, _) => {
let mut distance_from_explicit = 0;
@@ -871,13 +868,9 @@ fn adt_def<'tcx>(tcx: TyCtxt<'tcx>, def_id: DefId) -> ty::AdtDef<'tcx> {
.variants
.iter()
.map(|v| {
- let variant_did = Some(tcx.hir().local_def_id(v.id));
- let ctor_did =
- v.data.ctor_hir_id().map(|hir_id| tcx.hir().local_def_id(hir_id));
-
let discr = if let Some(ref e) = v.disr_expr {
distance_from_explicit = 0;
- ty::VariantDiscr::Explicit(tcx.hir().local_def_id(e.hir_id).to_def_id())
+ ty::VariantDiscr::Explicit(e.def_id.to_def_id())
} else {
ty::VariantDiscr::Relative(distance_from_explicit)
};
@@ -885,8 +878,7 @@ fn adt_def<'tcx>(tcx: TyCtxt<'tcx>, def_id: DefId) -> ty::AdtDef<'tcx> {
convert_variant(
tcx,
- variant_did,
- ctor_did,
+ Some(v.def_id),
v.ident,
discr,
&v.data,
@@ -898,41 +890,23 @@ fn adt_def<'tcx>(tcx: TyCtxt<'tcx>, def_id: DefId) -> ty::AdtDef<'tcx> {
(AdtKind::Enum, variants)
}
- ItemKind::Struct(ref def, _) => {
- let variant_did = None::<LocalDefId>;
- let ctor_did = def.ctor_hir_id().map(|hir_id| tcx.hir().local_def_id(hir_id));
-
- let variants = std::iter::once(convert_variant(
- tcx,
- variant_did,
- ctor_did,
- item.ident,
- ty::VariantDiscr::Relative(0),
- def,
- AdtKind::Struct,
- def_id,
- ))
- .collect();
-
- (AdtKind::Struct, variants)
- }
- ItemKind::Union(ref def, _) => {
- let variant_did = None;
- let ctor_did = def.ctor_hir_id().map(|hir_id| tcx.hir().local_def_id(hir_id));
-
+ ItemKind::Struct(ref def, _) | ItemKind::Union(ref def, _) => {
+ let adt_kind = match item.kind {
+ ItemKind::Struct(..) => AdtKind::Struct,
+ _ => AdtKind::Union,
+ };
let variants = std::iter::once(convert_variant(
tcx,
- variant_did,
- ctor_did,
+ None,
item.ident,
ty::VariantDiscr::Relative(0),
def,
- AdtKind::Union,
+ adt_kind,
def_id,
))
.collect();
- (AdtKind::Union, variants)
+ (adt_kind, variants)
}
_ => bug!(),
};
@@ -1181,10 +1155,9 @@ fn fn_sig(tcx: TyCtxt<'_>, def_id: DefId) -> ty::PolyFnSig<'_> {
compute_sig_of_foreign_fn_decl(tcx, def_id.to_def_id(), fn_decl, abi)
}
- Ctor(data) | Variant(hir::Variant { data, .. }) if data.ctor_hir_id().is_some() => {
+ Ctor(data) | Variant(hir::Variant { data, .. }) if data.ctor().is_some() => {
let ty = tcx.type_of(tcx.hir().get_parent_item(hir_id));
- let inputs =
- data.fields().iter().map(|f| tcx.type_of(tcx.hir().local_def_id(f.hir_id)));
+ let inputs = data.fields().iter().map(|f| tcx.type_of(f.def_id));
ty::Binder::dummy(tcx.mk_fn_sig(
inputs,
ty,
@@ -1394,12 +1367,14 @@ fn predicates_defined_on(tcx: TyCtxt<'_>, def_id: DefId) -> ty::GenericPredicate
"predicates_defined_on: inferred_outlives_of({:?}) = {:?}",
def_id, inferred_outlives,
);
+ let inferred_outlives_iter =
+ inferred_outlives.iter().map(|(clause, span)| ((*clause).to_predicate(tcx), *span));
if result.predicates.is_empty() {
- result.predicates = inferred_outlives;
+ result.predicates = tcx.arena.alloc_from_iter(inferred_outlives_iter);
} else {
- result.predicates = tcx
- .arena
- .alloc_from_iter(result.predicates.iter().chain(inferred_outlives).copied());
+ result.predicates = tcx.arena.alloc_from_iter(
+ result.predicates.into_iter().copied().chain(inferred_outlives_iter),
+ );
}
}
@@ -1840,7 +1815,12 @@ fn codegen_fn_attrs(tcx: TyCtxt<'_>, did: DefId) -> CodegenFnAttrs {
);
} else if attr.has_name(sym::linkage) {
if let Some(val) = attr.value_str() {
- codegen_fn_attrs.linkage = Some(linkage_by_name(tcx, did, val.as_str()));
+ let linkage = Some(linkage_by_name(tcx, did, val.as_str()));
+ if tcx.is_foreign_item(did) {
+ codegen_fn_attrs.import_linkage = linkage;
+ } else {
+ codegen_fn_attrs.linkage = linkage;
+ }
}
} else if attr.has_name(sym::link_section) {
if let Some(val) = attr.value_str() {
@@ -2099,17 +2079,25 @@ fn codegen_fn_attrs(tcx: TyCtxt<'_>, did: DefId) -> CodegenFnAttrs {
}
}
+ if codegen_fn_attrs.flags.contains(CodegenFnAttrFlags::NAKED) {
+ codegen_fn_attrs.flags |= CodegenFnAttrFlags::NO_COVERAGE;
+ codegen_fn_attrs.inline = InlineAttr::Never;
+ }
+
// Weak lang items have the same semantics as "std internal" symbols in the
// sense that they're preserved through all our LTO passes and only
// strippable by the linker.
//
// Additionally weak lang items have predetermined symbol names.
- if tcx.is_weak_lang_item(did.to_def_id()) {
+ if WEAK_LANG_ITEMS.iter().any(|&l| tcx.lang_items().get(l) == Some(did.to_def_id())) {
codegen_fn_attrs.flags |= CodegenFnAttrFlags::RUSTC_STD_INTERNAL_SYMBOL;
}
- if let Some(name) = weak_lang_items::link_name(attrs) {
- codegen_fn_attrs.export_name = Some(name);
- codegen_fn_attrs.link_name = Some(name);
+ if let Some((name, _)) = lang_items::extract(attrs)
+ && let Some(lang_item) = LangItem::from_name(name)
+ && let Some(link_name) = lang_item.link_name()
+ {
+ codegen_fn_attrs.export_name = Some(link_name);
+ codegen_fn_attrs.link_name = Some(link_name);
}
check_link_name_xor_ordinal(tcx, &codegen_fn_attrs, link_ordinal_span);
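A nightly, x86_64-only sketch (not part of the patch) of the kind of function the new `NAKED` handling applies to; the attribute now also implies `inline(never)` and disables coverage instrumentation:

    #![feature(naked_functions)]

    use std::arch::asm;

    #[naked]
    extern "C" fn return_42() -> u64 {
        // A naked function body must be a single `asm!` block.
        unsafe { asm!("mov rax, 42", "ret", options(noreturn)) }
    }

    fn main() {
        println!("{}", return_42());
    }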
@@ -2170,7 +2158,7 @@ fn should_inherit_track_caller(tcx: TyCtxt<'_>, def_id: DefId) -> bool {
}
fn check_link_ordinal(tcx: TyCtxt<'_>, attr: &ast::Attribute) -> Option<u16> {
- use rustc_ast::{Lit, LitIntType, LitKind};
+ use rustc_ast::{LitIntType, LitKind, MetaItemLit};
if !tcx.features().raw_dylib && tcx.sess.target.arch == "x86" {
feature_err(
&tcx.sess.parse_sess,
@@ -2181,9 +2169,9 @@ fn check_link_ordinal(tcx: TyCtxt<'_>, attr: &ast::Attribute) -> Option<u16> {
.emit();
}
let meta_item_list = attr.meta_item_list();
- let meta_item_list: Option<&[ast::NestedMetaItem]> = meta_item_list.as_ref().map(Vec::as_ref);
+ let meta_item_list = meta_item_list.as_deref();
let sole_meta_list = match meta_item_list {
- Some([item]) => item.literal(),
+ Some([item]) => item.lit(),
Some(_) => {
tcx.sess
.struct_span_err(attr.span, "incorrect number of arguments to `#[link_ordinal]`")
@@ -2193,7 +2181,9 @@ fn check_link_ordinal(tcx: TyCtxt<'_>, attr: &ast::Attribute) -> Option<u16> {
}
_ => None,
};
- if let Some(Lit { kind: LitKind::Int(ordinal, LitIntType::Unsuffixed), .. }) = sole_meta_list {
+ if let Some(MetaItemLit { kind: LitKind::Int(ordinal, LitIntType::Unsuffixed), .. }) =
+ sole_meta_list
+ {
// According to the table at https://docs.microsoft.com/en-us/windows/win32/debug/pe-format#import-header,
// the ordinal must fit into 16 bits. Similarly, the Ordinal field in COFFShortExport (defined
// in llvm/include/llvm/Object/COFFImportFile.h), which we use to communicate import information
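For reference (not part of the patch), the attribute this parses, on a Windows `raw-dylib` import; per the check above, 32-bit x86 still needs the feature gate, and the ordinal must fit in 16 bits:

    #[link(name = "some_dll", kind = "raw-dylib")] // hypothetical DLL name
    extern "system" {
        // Imported by ordinal rather than by name; 42 is a made-up value.
        #[link_ordinal(42)]
        fn imported_by_ordinal();
    }

    fn main() {
        // Actually calling this requires the DLL to exist; shown for syntax only.
        // unsafe { imported_by_ordinal() };
    }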
@@ -2261,3 +2251,13 @@ fn check_target_feature_trait_unsafe(tcx: TyCtxt<'_>, id: LocalDefId, attr_span:
}
}
}
+
+fn is_type_alias_impl_trait<'tcx>(tcx: TyCtxt<'tcx>, def_id: DefId) -> bool {
+ match tcx.hir().get_if_local(def_id) {
+ Some(Node::Item(hir::Item { kind: hir::ItemKind::OpaqueTy(opaque), .. })) => {
+ matches!(opaque.origin, hir::OpaqueTyOrigin::TyAlias)
+ }
+ Some(_) => bug!("tried getting opaque_ty_origin for non-opaque: {:?}", def_id),
+ _ => bug!("tried getting opaque_ty_origin for non-local def-id {:?}", def_id),
+ }
+}
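A nightly sketch (not part of the patch) of the two opaque-type origins the new query distinguishes:

    #![feature(type_alias_impl_trait)]

    use std::fmt::Debug;

    // `OpaqueTyOrigin::TyAlias`: `is_type_alias_impl_trait` returns true
    // for the opaque type behind this alias.
    type Widget = impl Debug;

    fn make_widget() -> Widget {
        42
    }

    // `OpaqueTyOrigin::FnReturn`: an ordinary return-position impl trait,
    // for which the query returns false.
    fn plain() -> impl Debug {
        0
    }

    fn main() {
        println!("{:?} {:?}", make_widget(), plain());
    }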
diff --git a/compiler/rustc_hir_analysis/src/collect/generics_of.rs b/compiler/rustc_hir_analysis/src/collect/generics_of.rs
index c7777a946..639f81f20 100644
--- a/compiler/rustc_hir_analysis/src/collect/generics_of.rs
+++ b/compiler/rustc_hir_analysis/src/collect/generics_of.rs
@@ -51,7 +51,7 @@ pub(super) fn generics_of(tcx: TyCtxt<'_>, def_id: DefId) -> ty::Generics {
// of a const parameter type, e.g. `struct Foo<const N: usize, const M: [u8; N]>` is not allowed.
None
} else if tcx.lazy_normalization() {
- if let Some(param_id) = tcx.hir().opt_const_param_default_param_hir_id(hir_id) {
+ if let Some(param_id) = tcx.hir().opt_const_param_default_param_def_id(hir_id) {
// If the def_id we are calling generics_of on is an anon ct default i.e:
//
// struct Foo<const N: usize = { .. }>;
@@ -77,8 +77,7 @@ pub(super) fn generics_of(tcx: TyCtxt<'_>, def_id: DefId) -> ty::Generics {
// This has some implications for how we get the predicates available to the anon const
// see `explicit_predicates_of` for more information on this
let generics = tcx.generics_of(parent_def_id.to_def_id());
- let param_def = tcx.hir().local_def_id(param_id).to_def_id();
- let param_def_idx = generics.param_def_id_to_index[&param_def];
+ let param_def_idx = generics.param_def_id_to_index[&param_id.to_def_id()];
// In the above example this would be .params[..N#0]
let params = generics.params[..param_def_idx as usize].to_owned();
let param_def_id_to_index =
@@ -241,7 +240,7 @@ pub(super) fn generics_of(tcx: TyCtxt<'_>, def_id: DefId) -> ty::Generics {
params.extend(early_lifetimes.enumerate().map(|(i, param)| ty::GenericParamDef {
name: param.name.ident().name,
index: own_start + i as u32,
- def_id: tcx.hir().local_def_id(param.hir_id).to_def_id(),
+ def_id: param.def_id.to_def_id(),
pure_wrt_drop: param.pure_wrt_drop,
kind: ty::GenericParamDefKind::Lifetime,
}));
@@ -286,7 +285,7 @@ pub(super) fn generics_of(tcx: TyCtxt<'_>, def_id: DefId) -> ty::Generics {
Some(ty::GenericParamDef {
index: next_index(),
name: param.name.ident().name,
- def_id: tcx.hir().local_def_id(param.hir_id).to_def_id(),
+ def_id: param.def_id.to_def_id(),
pure_wrt_drop: param.pure_wrt_drop,
kind,
})
@@ -303,7 +302,7 @@ pub(super) fn generics_of(tcx: TyCtxt<'_>, def_id: DefId) -> ty::Generics {
Some(ty::GenericParamDef {
index: next_index(),
name: param.name.ident().name,
- def_id: tcx.hir().local_def_id(param.hir_id).to_def_id(),
+ def_id: param.def_id.to_def_id(),
pure_wrt_drop: param.pure_wrt_drop,
kind: ty::GenericParamDefKind::Const { has_default: default.is_some() },
})
@@ -399,7 +398,7 @@ fn has_late_bound_regions<'tcx>(tcx: TyCtxt<'tcx>, node: Node<'tcx>) -> Option<S
Some(rl::Region::Static | rl::Region::EarlyBound(..)) => {}
Some(rl::Region::LateBound(debruijn, _, _)) if debruijn < self.outer_index => {}
Some(rl::Region::LateBound(..) | rl::Region::Free(..)) | None => {
- self.has_late_bound_regions = Some(lt.span);
+ self.has_late_bound_regions = Some(lt.ident.span);
}
}
}
diff --git a/compiler/rustc_hir_analysis/src/collect/item_bounds.rs b/compiler/rustc_hir_analysis/src/collect/item_bounds.rs
index 0d34a8bfe..0542e2c8f 100644
--- a/compiler/rustc_hir_analysis/src/collect/item_bounds.rs
+++ b/compiler/rustc_hir_analysis/src/collect/item_bounds.rs
@@ -35,9 +35,11 @@ fn associated_type_bounds<'tcx>(
let bounds_from_parent = trait_predicates.predicates.iter().copied().filter(|(pred, _)| {
match pred.kind().skip_binder() {
- ty::PredicateKind::Trait(tr) => tr.self_ty() == item_ty,
- ty::PredicateKind::Projection(proj) => proj.projection_ty.self_ty() == item_ty,
- ty::PredicateKind::TypeOutlives(outlives) => outlives.0 == item_ty,
+ ty::PredicateKind::Clause(ty::Clause::Trait(tr)) => tr.self_ty() == item_ty,
+ ty::PredicateKind::Clause(ty::Clause::Projection(proj)) => {
+ proj.projection_ty.self_ty() == item_ty
+ }
+ ty::PredicateKind::Clause(ty::Clause::TypeOutlives(outlives)) => outlives.0 == item_ty,
_ => false,
}
});
diff --git a/compiler/rustc_hir_analysis/src/collect/lifetimes.rs b/compiler/rustc_hir_analysis/src/collect/lifetimes.rs
index 3f263a6de..e4fe3e90e 100644
--- a/compiler/rustc_hir_analysis/src/collect/lifetimes.rs
+++ b/compiler/rustc_hir_analysis/src/collect/lifetimes.rs
@@ -15,19 +15,18 @@ use rustc_hir::def_id::LocalDefId;
use rustc_hir::intravisit::{self, Visitor};
use rustc_hir::{GenericArg, GenericParam, GenericParamKind, HirIdMap, LifetimeName, Node};
use rustc_middle::bug;
-use rustc_middle::hir::map::Map;
use rustc_middle::hir::nested_filter;
use rustc_middle::middle::resolve_lifetime::*;
-use rustc_middle::ty::{self, DefIdTree, TyCtxt};
+use rustc_middle::ty::{self, DefIdTree, TyCtxt, TypeSuperVisitable, TypeVisitor};
use rustc_span::def_id::DefId;
use rustc_span::symbol::{sym, Ident};
use rustc_span::Span;
use std::fmt;
trait RegionExt {
- fn early(hir_map: Map<'_>, param: &GenericParam<'_>) -> (LocalDefId, Region);
+ fn early(param: &GenericParam<'_>) -> (LocalDefId, Region);
- fn late(index: u32, hir_map: Map<'_>, param: &GenericParam<'_>) -> (LocalDefId, Region);
+ fn late(index: u32, param: &GenericParam<'_>) -> (LocalDefId, Region);
fn id(&self) -> Option<DefId>;
@@ -35,20 +34,18 @@ trait RegionExt {
}
impl RegionExt for Region {
- fn early(hir_map: Map<'_>, param: &GenericParam<'_>) -> (LocalDefId, Region) {
- let def_id = hir_map.local_def_id(param.hir_id);
- debug!("Region::early: def_id={:?}", def_id);
- (def_id, Region::EarlyBound(def_id.to_def_id()))
+ fn early(param: &GenericParam<'_>) -> (LocalDefId, Region) {
+ debug!("Region::early: def_id={:?}", param.def_id);
+ (param.def_id, Region::EarlyBound(param.def_id.to_def_id()))
}
- fn late(idx: u32, hir_map: Map<'_>, param: &GenericParam<'_>) -> (LocalDefId, Region) {
+ fn late(idx: u32, param: &GenericParam<'_>) -> (LocalDefId, Region) {
let depth = ty::INNERMOST;
- let def_id = hir_map.local_def_id(param.hir_id);
debug!(
"Region::late: idx={:?}, param={:?} depth={:?} def_id={:?}",
- idx, param, depth, def_id,
+ idx, param, depth, param.def_id,
);
- (def_id, Region::LateBound(depth, idx, def_id.to_def_id()))
+ (param.def_id, Region::LateBound(depth, idx, param.def_id.to_def_id()))
}
fn id(&self) -> Option<DefId> {
@@ -94,11 +91,6 @@ struct LifetimeContext<'a, 'tcx> {
tcx: TyCtxt<'tcx>,
map: &'a mut NamedRegionMap,
scope: ScopeRef<'a>,
-
- /// Indicates that we only care about the definition of a trait. This should
- /// be false if the `Item` we are resolving lifetimes for is not a trait or
- /// we eventually need lifetimes resolve for trait items.
- trait_definition_only: bool,
}
#[derive(Debug)]
@@ -166,7 +158,9 @@ enum Scope<'a> {
s: ScopeRef<'a>,
},
- Root,
+ Root {
+ opt_parent_item: Option<LocalDefId>,
+ },
}
#[derive(Copy, Clone, Debug)]
@@ -214,95 +208,58 @@ impl<'a> fmt::Debug for TruncatedScopeDebug<'a> {
.field("s", &"..")
.finish(),
Scope::TraitRefBoundary { s: _ } => f.debug_struct("TraitRefBoundary").finish(),
- Scope::Root => f.debug_struct("Root").finish(),
+ Scope::Root { opt_parent_item } => {
+ f.debug_struct("Root").field("opt_parent_item", &opt_parent_item).finish()
+ }
}
}
}
type ScopeRef<'a> = &'a Scope<'a>;
-const ROOT_SCOPE: ScopeRef<'static> = &Scope::Root;
-
pub(crate) fn provide(providers: &mut ty::query::Providers) {
*providers = ty::query::Providers {
- resolve_lifetimes_trait_definition,
resolve_lifetimes,
- named_region_map: |tcx, id| resolve_lifetimes_for(tcx, id).defs.get(&id),
+ named_region_map: |tcx, id| tcx.resolve_lifetimes(id).defs.get(&id),
is_late_bound_map,
object_lifetime_default,
- late_bound_vars_map: |tcx, id| resolve_lifetimes_for(tcx, id).late_bound_vars.get(&id),
+ late_bound_vars_map: |tcx, id| tcx.resolve_lifetimes(id).late_bound_vars.get(&id),
..*providers
};
}
-/// Like `resolve_lifetimes`, but does not resolve lifetimes for trait items.
-/// Also does not generate any diagnostics.
-///
-/// This is ultimately a subset of the `resolve_lifetimes` work. It effectively
-/// resolves lifetimes only within the trait "header" -- that is, the trait
-/// and supertrait list. In contrast, `resolve_lifetimes` resolves all the
-/// lifetimes within the trait and its items. There is room to refactor this,
-/// for example to resolve lifetimes for each trait item in separate queries,
-/// but it's convenient to do the entire trait at once because the lifetimes
-/// from the trait definition are in scope within the trait items as well.
-///
-/// The reason for this separate call is to resolve what would otherwise
-/// be a cycle. Consider this example:
-///
-/// ```ignore UNSOLVED (maybe @jackh726 knows what lifetime parameter to give Sub)
-/// trait Base<'a> {
-/// type BaseItem;
-/// }
-/// trait Sub<'b>: for<'a> Base<'a> {
-/// type SubItem: Sub<BaseItem = &'b u32>;
-/// }
-/// ```
-///
-/// When we resolve `Sub` and all its items, we also have to resolve `Sub<BaseItem = &'b u32>`.
-/// To figure out the index of `'b`, we have to know about the supertraits
-/// of `Sub` so that we can determine that the `for<'a>` will be in scope.
-/// (This is because we -- currently at least -- flatten all the late-bound
-/// lifetimes into a single binder.) This requires us to resolve the
-/// *trait definition* of `Sub`; basically just enough lifetime information
-/// to look at the supertraits.
-#[instrument(level = "debug", skip(tcx))]
-fn resolve_lifetimes_trait_definition(
- tcx: TyCtxt<'_>,
- local_def_id: LocalDefId,
-) -> ResolveLifetimes {
- convert_named_region_map(do_resolve(tcx, local_def_id, true))
-}
-
/// Computes the `ResolveLifetimes` map that contains data for an entire `Item`.
/// You should not read the result of this query directly, but rather use
/// `named_region_map`, `is_late_bound_map`, etc.
#[instrument(level = "debug", skip(tcx))]
-fn resolve_lifetimes(tcx: TyCtxt<'_>, local_def_id: LocalDefId) -> ResolveLifetimes {
- convert_named_region_map(do_resolve(tcx, local_def_id, false))
-}
-
-fn do_resolve(
- tcx: TyCtxt<'_>,
- local_def_id: LocalDefId,
- trait_definition_only: bool,
-) -> NamedRegionMap {
- let item = tcx.hir().expect_item(local_def_id);
+fn resolve_lifetimes(tcx: TyCtxt<'_>, local_def_id: hir::OwnerId) -> ResolveLifetimes {
let mut named_region_map =
NamedRegionMap { defs: Default::default(), late_bound_vars: Default::default() };
let mut visitor = LifetimeContext {
tcx,
map: &mut named_region_map,
- scope: ROOT_SCOPE,
- trait_definition_only,
+ scope: &Scope::Root { opt_parent_item: None },
};
- visitor.visit_item(item);
-
- named_region_map
-}
+ match tcx.hir().owner(local_def_id) {
+ hir::OwnerNode::Item(item) => visitor.visit_item(item),
+ hir::OwnerNode::ForeignItem(item) => visitor.visit_foreign_item(item),
+ hir::OwnerNode::TraitItem(item) => {
+ let scope =
+ Scope::Root { opt_parent_item: Some(tcx.local_parent(item.owner_id.def_id)) };
+ visitor.scope = &scope;
+ visitor.visit_trait_item(item)
+ }
+ hir::OwnerNode::ImplItem(item) => {
+ let scope =
+ Scope::Root { opt_parent_item: Some(tcx.local_parent(item.owner_id.def_id)) };
+ visitor.scope = &scope;
+ visitor.visit_impl_item(item)
+ }
+ hir::OwnerNode::Crate(_) => {}
+ }
-fn convert_named_region_map(named_region_map: NamedRegionMap) -> ResolveLifetimes {
let mut rl = ResolveLifetimes::default();
for (hir_id, v) in named_region_map.defs {
@@ -319,53 +276,6 @@ fn convert_named_region_map(named_region_map: NamedRegionMap) -> ResolveLifetime
rl
}
-/// Given `any` owner (structs, traits, trait methods, etc.), does lifetime resolution.
-/// There are two important things this does.
-/// First, we have to resolve lifetimes for
-/// the entire *`Item`* that contains this owner, because that's the largest "scope"
-/// where we can have relevant lifetimes.
-/// Second, if we are asking for lifetimes in a trait *definition*, we use `resolve_lifetimes_trait_definition`
-/// instead of `resolve_lifetimes`, which does not descend into the trait items and does not emit diagnostics.
-/// This allows us to avoid cycles. Importantly, if we ask for lifetimes for lifetimes that have an owner
-/// other than the trait itself (like the trait methods or associated types), then we just use the regular
-/// `resolve_lifetimes`.
-fn resolve_lifetimes_for<'tcx>(tcx: TyCtxt<'tcx>, def_id: hir::OwnerId) -> &'tcx ResolveLifetimes {
- let item_id = item_for(tcx, def_id.def_id);
- let local_def_id = item_id.owner_id.def_id;
- if item_id.owner_id == def_id {
- let item = tcx.hir().item(item_id);
- match item.kind {
- hir::ItemKind::Trait(..) => tcx.resolve_lifetimes_trait_definition(local_def_id),
- _ => tcx.resolve_lifetimes(local_def_id),
- }
- } else {
- tcx.resolve_lifetimes(local_def_id)
- }
-}
-
-/// Finds the `Item` that contains the given `LocalDefId`
-fn item_for(tcx: TyCtxt<'_>, local_def_id: LocalDefId) -> hir::ItemId {
- match tcx.hir().find_by_def_id(local_def_id) {
- Some(Node::Item(item)) => {
- return item.item_id();
- }
- _ => {}
- }
- let item = {
- let hir_id = tcx.hir().local_def_id_to_hir_id(local_def_id);
- let mut parent_iter = tcx.hir().parent_iter(hir_id);
- loop {
- let node = parent_iter.next().map(|n| n.1);
- match node {
- Some(hir::Node::Item(item)) => break item.item_id(),
- Some(hir::Node::Crate(_)) | None => bug!("Called `item_for` on an Item."),
- _ => {}
- }
- }
- };
- item
-}
-
fn late_region_as_bound_region<'tcx>(tcx: TyCtxt<'tcx>, region: &Region) -> ty::BoundVariableKind {
match region {
Region::LateBound(_, _, def_id) => {
@@ -383,7 +293,7 @@ impl<'a, 'tcx> LifetimeContext<'a, 'tcx> {
let mut supertrait_lifetimes = vec![];
loop {
match scope {
- Scope::Body { .. } | Scope::Root => {
+ Scope::Body { .. } | Scope::Root { .. } => {
break (vec![], BinderScopeType::Normal);
}
@@ -414,21 +324,12 @@ impl<'a, 'tcx> LifetimeContext<'a, 'tcx> {
}
}
impl<'a, 'tcx> Visitor<'tcx> for LifetimeContext<'a, 'tcx> {
- type NestedFilter = nested_filter::All;
+ type NestedFilter = nested_filter::OnlyBodies;
fn nested_visit_map(&mut self) -> Self::Map {
self.tcx.hir()
}
- // We want to nest trait/impl items in their parent, but nothing else.
- fn visit_nested_item(&mut self, _: hir::ItemId) {}
-
- fn visit_trait_item_ref(&mut self, ii: &'tcx hir::TraitItemRef) {
- if !self.trait_definition_only {
- intravisit::walk_trait_item_ref(self, ii)
- }
- }
-
fn visit_nested_body(&mut self, body: hir::BodyId) {
let body = self.tcx.hir().body(body);
self.with(Scope::Body { id: body.id(), s: self.scope }, |this| {
@@ -491,7 +392,7 @@ impl<'a, 'tcx> Visitor<'tcx> for LifetimeContext<'a, 'tcx> {
.filter(|param| matches!(param.kind, GenericParamKind::Lifetime { .. }))
.enumerate()
.map(|(late_bound_idx, param)| {
- let pair = Region::late(late_bound_idx as u32, self.tcx.hir(), param);
+ let pair = Region::late(late_bound_idx as u32, param);
let r = late_region_as_bound_region(self.tcx, &pair.1);
(pair, r)
})
@@ -548,7 +449,9 @@ impl<'a, 'tcx> Visitor<'tcx> for LifetimeContext<'a, 'tcx> {
intravisit::walk_item(this, item)
});
}
- hir::ItemKind::OpaqueTy(hir::OpaqueTy { .. }) => {
+ hir::ItemKind::OpaqueTy(hir::OpaqueTy {
+ origin: hir::OpaqueTyOrigin::TyAlias, ..
+ }) => {
// Opaque types are visited when we visit the
// `TyKind::OpaqueDef`, so that they have the lifetimes from
// their parent opaque_ty in scope.
@@ -557,34 +460,53 @@ impl<'a, 'tcx> Visitor<'tcx> for LifetimeContext<'a, 'tcx> {
// their owner, we can keep going until we find the Item that owns that. We then
// conservatively add all resolved lifetimes. Otherwise we run into problems in
// cases like `type Foo<'a> = impl Bar<As = impl Baz + 'a>`.
- for (_hir_id, node) in self.tcx.hir().parent_iter(item.owner_id.into()) {
- match node {
- hir::Node::Item(parent_item) => {
- let resolved_lifetimes: &ResolveLifetimes = self.tcx.resolve_lifetimes(
- item_for(self.tcx, parent_item.owner_id.def_id).owner_id.def_id,
- );
- // We need to add *all* deps, since opaque tys may want them from *us*
- for (&owner, defs) in resolved_lifetimes.defs.iter() {
- defs.iter().for_each(|(&local_id, region)| {
- self.map.defs.insert(hir::HirId { owner, local_id }, *region);
- });
- }
- for (&owner, late_bound_vars) in
- resolved_lifetimes.late_bound_vars.iter()
- {
- late_bound_vars.iter().for_each(|(&local_id, late_bound_vars)| {
- self.record_late_bound_vars(
- hir::HirId { owner, local_id },
- late_bound_vars.clone(),
- );
- });
- }
- break;
+ let parent_item = self.tcx.hir().get_parent_item(item.hir_id());
+ let resolved_lifetimes: &ResolveLifetimes = self.tcx.resolve_lifetimes(parent_item);
+ // We need to add *all* deps, since opaque tys may want them from *us*
+ for (&owner, defs) in resolved_lifetimes.defs.iter() {
+ defs.iter().for_each(|(&local_id, region)| {
+ self.map.defs.insert(hir::HirId { owner, local_id }, *region);
+ });
+ }
+ for (&owner, late_bound_vars) in resolved_lifetimes.late_bound_vars.iter() {
+ late_bound_vars.iter().for_each(|(&local_id, late_bound_vars)| {
+ self.record_late_bound_vars(
+ hir::HirId { owner, local_id },
+ late_bound_vars.clone(),
+ );
+ });
+ }
+ }
+ hir::ItemKind::OpaqueTy(hir::OpaqueTy {
+ origin: hir::OpaqueTyOrigin::FnReturn(_) | hir::OpaqueTyOrigin::AsyncFn(_),
+ generics,
+ ..
+ }) => {
+ // We want to start our early-bound indices at the end of the parent scope,
+ // not including any parent `impl Trait`s.
+ let mut lifetimes = FxIndexMap::default();
+ debug!(?generics.params);
+ for param in generics.params {
+ match param.kind {
+ GenericParamKind::Lifetime { .. } => {
+ let (def_id, reg) = Region::early(&param);
+ lifetimes.insert(def_id, reg);
}
- hir::Node::Crate(_) => bug!("No Item about an OpaqueTy"),
- _ => {}
+ GenericParamKind::Type { .. } | GenericParamKind::Const { .. } => {}
}
}
+
+ let scope = Scope::Binder {
+ hir_id: item.hir_id(),
+ lifetimes,
+ s: self.scope,
+ scope_type: BinderScopeType::Normal,
+ where_bound_origin: None,
+ };
+ self.with(scope, |this| {
+ let scope = Scope::TraitRefBoundary { s: this.scope };
+ this.with(scope, |this| intravisit::walk_item(this, item))
+ });
}
hir::ItemKind::TyAlias(_, ref generics)
| hir::ItemKind::Enum(_, ref generics)
@@ -598,9 +520,7 @@ impl<'a, 'tcx> Visitor<'tcx> for LifetimeContext<'a, 'tcx> {
.params
.iter()
.filter_map(|param| match param.kind {
- GenericParamKind::Lifetime { .. } => {
- Some(Region::early(self.tcx.hir(), param))
- }
+ GenericParamKind::Lifetime { .. } => Some(Region::early(param)),
GenericParamKind::Type { .. } | GenericParamKind::Const { .. } => None,
})
.collect();
@@ -609,7 +529,7 @@ impl<'a, 'tcx> Visitor<'tcx> for LifetimeContext<'a, 'tcx> {
hir_id: item.hir_id(),
lifetimes,
scope_type: BinderScopeType::Normal,
- s: ROOT_SCOPE,
+ s: self.scope,
where_bound_origin: None,
};
self.with(scope, |this| {
@@ -648,7 +568,7 @@ impl<'a, 'tcx> Visitor<'tcx> for LifetimeContext<'a, 'tcx> {
.filter(|param| matches!(param.kind, GenericParamKind::Lifetime { .. }))
.enumerate()
.map(|(late_bound_idx, param)| {
- let pair = Region::late(late_bound_idx as u32, self.tcx.hir(), param);
+ let pair = Region::late(late_bound_idx as u32, param);
let r = late_region_as_bound_region(self.tcx, &pair.1);
(pair, r)
})
@@ -675,7 +595,7 @@ impl<'a, 'tcx> Visitor<'tcx> for LifetimeContext<'a, 'tcx> {
this.visit_poly_trait_ref(bound);
}
});
- match lifetime.name {
+ match lifetime.res {
LifetimeName::ImplicitObjectLifetimeDefault => {
// If the user does not write *anything*, we
// use the object lifetime defaulting
@@ -712,7 +632,7 @@ impl<'a, 'tcx> Visitor<'tcx> for LifetimeContext<'a, 'tcx> {
// ^ ^ this gets resolved in the scope of
// the opaque_ty generics
let opaque_ty = self.tcx.hir().item(item_id);
- let (generics, bounds) = match opaque_ty.kind {
+ match opaque_ty.kind {
hir::ItemKind::OpaqueTy(hir::OpaqueTy {
origin: hir::OpaqueTyOrigin::TyAlias,
..
@@ -733,10 +653,8 @@ impl<'a, 'tcx> Visitor<'tcx> for LifetimeContext<'a, 'tcx> {
}
hir::ItemKind::OpaqueTy(hir::OpaqueTy {
origin: hir::OpaqueTyOrigin::FnReturn(..) | hir::OpaqueTyOrigin::AsyncFn(..),
- ref generics,
- bounds,
..
- }) => (generics, bounds),
+ }) => {}
ref i => bug!("`impl Trait` pointed to non-opaque type?? {:#?}", i),
};
@@ -766,65 +684,28 @@ impl<'a, 'tcx> Visitor<'tcx> for LifetimeContext<'a, 'tcx> {
// Ensure that the parent of the def is an item, not HRTB
let parent_id = self.tcx.hir().get_parent_node(hir_id);
if !parent_id.is_owner() {
- if !self.trait_definition_only {
- struct_span_err!(
- self.tcx.sess,
- lifetime.span,
- E0657,
- "`impl Trait` can only capture lifetimes \
- bound at the fn or impl level"
- )
- .emit();
- }
+ struct_span_err!(
+ self.tcx.sess,
+ lifetime.ident.span,
+ E0657,
+ "`impl Trait` can only capture lifetimes bound at the fn or impl level"
+ )
+ .emit();
self.uninsert_lifetime_on_error(lifetime, def.unwrap());
}
if let hir::Node::Item(hir::Item {
kind: hir::ItemKind::OpaqueTy { .. }, ..
}) = self.tcx.hir().get(parent_id)
{
- if !self.trait_definition_only {
- let mut err = self.tcx.sess.struct_span_err(
- lifetime.span,
- "higher kinded lifetime bounds on nested opaque types are not supported yet",
- );
- err.span_note(self.tcx.def_span(def_id), "lifetime declared here");
- err.emit();
- }
+ let mut err = self.tcx.sess.struct_span_err(
+ lifetime.ident.span,
+ "higher kinded lifetime bounds on nested opaque types are not supported yet",
+ );
+ err.span_note(self.tcx.def_span(def_id), "lifetime declared here");
+ err.emit();
self.uninsert_lifetime_on_error(lifetime, def.unwrap());
}
}
-
- // We want to start our early-bound indices at the end of the parent scope,
- // not including any parent `impl Trait`s.
- let mut lifetimes = FxIndexMap::default();
- debug!(?generics.params);
- for param in generics.params {
- match param.kind {
- GenericParamKind::Lifetime { .. } => {
- let (def_id, reg) = Region::early(self.tcx.hir(), &param);
- lifetimes.insert(def_id, reg);
- }
- GenericParamKind::Type { .. } | GenericParamKind::Const { .. } => {}
- }
- }
- self.record_late_bound_vars(ty.hir_id, vec![]);
-
- let scope = Scope::Binder {
- hir_id: ty.hir_id,
- lifetimes,
- s: self.scope,
- scope_type: BinderScopeType::Normal,
- where_bound_origin: None,
- };
- self.with(scope, |this| {
- let scope = Scope::TraitRefBoundary { s: this.scope };
- this.with(scope, |this| {
- this.visit_generics(generics);
- for bound in bounds {
- this.visit_param_bound(bound);
- }
- })
- });
}
_ => intravisit::walk_ty(self, ty),
}
@@ -845,9 +726,7 @@ impl<'a, 'tcx> Visitor<'tcx> for LifetimeContext<'a, 'tcx> {
.params
.iter()
.filter_map(|param| match param.kind {
- GenericParamKind::Lifetime { .. } => {
- Some(Region::early(self.tcx.hir(), param))
- }
+ GenericParamKind::Lifetime { .. } => Some(Region::early(param)),
GenericParamKind::Type { .. } | GenericParamKind::Const { .. } => None,
})
.collect();
@@ -893,9 +772,7 @@ impl<'a, 'tcx> Visitor<'tcx> for LifetimeContext<'a, 'tcx> {
.params
.iter()
.filter_map(|param| match param.kind {
- GenericParamKind::Lifetime { .. } => {
- Some(Region::early(self.tcx.hir(), param))
- }
+ GenericParamKind::Lifetime { .. } => Some(Region::early(param)),
GenericParamKind::Const { .. } | GenericParamKind::Type { .. } => None,
})
.collect();
@@ -925,9 +802,9 @@ impl<'a, 'tcx> Visitor<'tcx> for LifetimeContext<'a, 'tcx> {
#[instrument(level = "debug", skip(self))]
fn visit_lifetime(&mut self, lifetime_ref: &'tcx hir::Lifetime) {
- match lifetime_ref.name {
+ match lifetime_ref.res {
hir::LifetimeName::Static => self.insert_lifetime(lifetime_ref, Region::Static),
- hir::LifetimeName::Param(param_def_id, _) => {
+ hir::LifetimeName::Param(param_def_id) => {
self.resolve_lifetime_ref(param_def_id, lifetime_ref)
}
// If we've already reported an error, just ignore `lifetime_ref`.
@@ -937,7 +814,7 @@ impl<'a, 'tcx> Visitor<'tcx> for LifetimeContext<'a, 'tcx> {
}
}
- fn visit_path(&mut self, path: &'tcx hir::Path<'tcx>, _: hir::HirId) {
+ fn visit_path(&mut self, path: &hir::Path<'tcx>, _: hir::HirId) {
for (i, segment) in path.segments.iter().enumerate() {
let depth = path.segments.len() - i - 1;
if let Some(ref args) = segment.args {
@@ -1000,7 +877,7 @@ impl<'a, 'tcx> Visitor<'tcx> for LifetimeContext<'a, 'tcx> {
})
.enumerate()
.map(|(late_bound_idx, param)| {
- Region::late(late_bound_idx as u32, this.tcx.hir(), param)
+ Region::late(late_bound_idx as u32, param)
})
.collect();
let binders: Vec<_> =
@@ -1035,27 +912,27 @@ impl<'a, 'tcx> Visitor<'tcx> for LifetimeContext<'a, 'tcx> {
this.visit_lifetime(lifetime);
walk_list!(this, visit_param_bound, bounds);
- if lifetime.name != hir::LifetimeName::Static {
+ if lifetime.res != hir::LifetimeName::Static {
for bound in bounds {
let hir::GenericBound::Outlives(ref lt) = bound else {
continue;
};
- if lt.name != hir::LifetimeName::Static {
+ if lt.res != hir::LifetimeName::Static {
continue;
}
this.insert_lifetime(lt, Region::Static);
this.tcx
.sess
.struct_span_warn(
- lifetime.span,
+ lifetime.ident.span,
&format!(
"unnecessary lifetime parameter `{}`",
- lifetime.name.ident(),
+ lifetime.ident,
),
)
.help(&format!(
"you can use the `'static` lifetime directly, in place of `{}`",
- lifetime.name.ident(),
+ lifetime.ident,
))
.emit();
}
@@ -1113,8 +990,7 @@ impl<'a, 'tcx> Visitor<'tcx> for LifetimeContext<'a, 'tcx> {
.filter(|param| matches!(param.kind, GenericParamKind::Lifetime { .. }))
.enumerate()
.map(|(late_bound_idx, param)| {
- let pair =
- Region::late(initial_bound_vars + late_bound_idx as u32, self.tcx.hir(), param);
+ let pair = Region::late(initial_bound_vars + late_bound_idx as u32, param);
let r = late_region_as_bound_region(self.tcx, &pair.1);
lifetimes.insert(pair.0, pair.1);
r
@@ -1167,7 +1043,7 @@ fn object_lifetime_default<'tcx>(tcx: TyCtxt<'tcx>, param_def_id: DefId) -> Obje
for bound in bound.bounds {
if let hir::GenericBound::Outlives(ref lifetime) = *bound {
- set.insert(lifetime.name.normalize_to_macros_2_0());
+ set.insert(lifetime.res);
}
}
}
@@ -1175,7 +1051,7 @@ fn object_lifetime_default<'tcx>(tcx: TyCtxt<'tcx>, param_def_id: DefId) -> Obje
match set {
Set1::Empty => ObjectLifetimeDefault::Empty,
Set1::One(hir::LifetimeName::Static) => ObjectLifetimeDefault::Static,
- Set1::One(hir::LifetimeName::Param(param_def_id, _)) => {
+ Set1::One(hir::LifetimeName::Param(param_def_id)) => {
ObjectLifetimeDefault::Param(param_def_id.to_def_id())
}
_ => ObjectLifetimeDefault::Ambiguous,
@@ -1193,12 +1069,7 @@ impl<'a, 'tcx> LifetimeContext<'a, 'tcx> {
F: for<'b> FnOnce(&mut LifetimeContext<'b, 'tcx>),
{
let LifetimeContext { tcx, map, .. } = self;
- let mut this = LifetimeContext {
- tcx: *tcx,
- map,
- scope: &wrap_scope,
- trait_definition_only: self.trait_definition_only,
- };
+ let mut this = LifetimeContext { tcx: *tcx, map, scope: &wrap_scope };
let span = debug_span!("scope", scope = ?TruncatedScopeDebug(&this.scope));
{
let _enter = span.enter();
@@ -1250,9 +1121,9 @@ impl<'a, 'tcx> LifetimeContext<'a, 'tcx> {
if self.tcx.is_late_bound(param.hir_id) {
let late_bound_idx = named_late_bound_vars;
named_late_bound_vars += 1;
- Some(Region::late(late_bound_idx, self.tcx.hir(), param))
+ Some(Region::late(late_bound_idx, param))
} else {
- Some(Region::early(self.tcx.hir(), param))
+ Some(Region::early(param))
}
}
GenericParamKind::Type { .. } | GenericParamKind::Const { .. } => None,
@@ -1268,7 +1139,7 @@ impl<'a, 'tcx> LifetimeContext<'a, 'tcx> {
})
.enumerate()
.map(|(late_bound_idx, param)| {
- let pair = Region::late(late_bound_idx as u32, self.tcx.hir(), param);
+ let pair = Region::late(late_bound_idx as u32, param);
late_region_as_bound_region(self.tcx, &pair.1)
})
.collect();
@@ -1303,7 +1174,13 @@ impl<'a, 'tcx> LifetimeContext<'a, 'tcx> {
scope = s;
}
- Scope::Root => {
+ Scope::Root { opt_parent_item } => {
+ if let Some(parent_item) = opt_parent_item
+ && let parent_generics = self.tcx.generics_of(parent_item)
+ && parent_generics.param_def_id_to_index.contains_key(&region_def_id.to_def_id())
+ {
+ break Some(Region::EarlyBound(region_def_id.to_def_id()));
+ }
break None;
}
@@ -1318,42 +1195,52 @@ impl<'a, 'tcx> LifetimeContext<'a, 'tcx> {
// Fresh lifetimes in APIT used to be allowed in async fns and forbidden in
// regular fns.
if let Some(hir::PredicateOrigin::ImplTrait) = where_bound_origin
- && let hir::LifetimeName::Param(_, hir::ParamName::Fresh) = lifetime_ref.name
+ && let hir::LifetimeName::Param(param_id) = lifetime_ref.res
+ && let Some(generics) = self.tcx.hir().get_generics(self.tcx.local_parent(param_id))
+ && let Some(param) = generics.params.iter().find(|p| p.def_id == param_id)
+ && param.is_elided_lifetime()
&& let hir::IsAsync::NotAsync = self.tcx.asyncness(lifetime_ref.hir_id.owner.def_id)
&& !self.tcx.features().anonymous_lifetime_in_impl_trait
{
let mut diag = rustc_session::parse::feature_err(
&self.tcx.sess.parse_sess,
sym::anonymous_lifetime_in_impl_trait,
- lifetime_ref.span,
+ lifetime_ref.ident.span,
"anonymous lifetimes in `impl Trait` are unstable",
);
- match self.tcx.hir().get_generics(lifetime_ref.hir_id.owner.def_id) {
- Some(generics) => {
-
- let new_param_sugg_tuple;
-
- new_param_sugg_tuple = match generics.span_for_param_suggestion() {
- Some(_) => {
- Some((self.tcx.sess.source_map().span_through_char(generics.span, '<').shrink_to_hi(), "'a, ".to_owned()))
- },
- None => Some((generics.span, "<'a>".to_owned()))
- };
-
- let mut multi_sugg_vec = vec![(lifetime_ref.span.shrink_to_hi(), "'a ".to_owned())];
-
- if let Some(new_tuple) = new_param_sugg_tuple{
- multi_sugg_vec.push(new_tuple);
- }
-
- diag.span_label(lifetime_ref.span, "expected named lifetime parameter");
- diag.multipart_suggestion("consider introducing a named lifetime parameter",
- multi_sugg_vec,
- rustc_errors::Applicability::MaybeIncorrect);
-
- },
- None => { }
+ if let Some(generics) =
+ self.tcx.hir().get_generics(lifetime_ref.hir_id.owner.def_id)
+ {
+ let new_param_sugg = if let Some(span) =
+ generics.span_for_lifetime_suggestion()
+ {
+ (span, "'a, ".to_owned())
+ } else {
+ (generics.span, "<'a>".to_owned())
+ };
+
+ let lifetime_sugg = match lifetime_ref.suggestion_position() {
+ (hir::LifetimeSuggestionPosition::Normal, span) => (span, "'a".to_owned()),
+ (hir::LifetimeSuggestionPosition::Ampersand, span) => (span, "'a ".to_owned()),
+ (hir::LifetimeSuggestionPosition::ElidedPath, span) => (span, "<'a>".to_owned()),
+ (hir::LifetimeSuggestionPosition::ElidedPathArgument, span) => (span, "'a, ".to_owned()),
+ (hir::LifetimeSuggestionPosition::ObjectDefault, span) => (span, "+ 'a".to_owned()),
+ };
+ let suggestions = vec![
+ lifetime_sugg,
+ new_param_sugg,
+ ];
+
+ diag.span_label(
+ lifetime_ref.ident.span,
+ "expected named lifetime parameter",
+ );
+ diag.multipart_suggestion(
+ "consider introducing a named lifetime parameter",
+ suggestions,
+ rustc_errors::Applicability::MaybeIncorrect,
+ );
}
diag.emit();
@@ -1377,11 +1264,12 @@ impl<'a, 'tcx> LifetimeContext<'a, 'tcx> {
} else if let Some(body_id) = outermost_body {
let fn_id = self.tcx.hir().body_owner(body_id);
match self.tcx.hir().get(fn_id) {
- Node::Item(&hir::Item { kind: hir::ItemKind::Fn(..), .. })
- | Node::TraitItem(&hir::TraitItem {
+ Node::Item(hir::Item { kind: hir::ItemKind::Fn(..), .. })
+ | Node::TraitItem(hir::TraitItem {
kind: hir::TraitItemKind::Fn(..), ..
})
- | Node::ImplItem(&hir::ImplItem { kind: hir::ImplItemKind::Fn(..), .. }) => {
+ | Node::ImplItem(hir::ImplItem { kind: hir::ImplItemKind::Fn(..), .. })
+ | Node::Expr(hir::Expr { kind: hir::ExprKind::Closure(..), .. }) => {
let scope = self.tcx.hir().local_def_id(fn_id);
def = Region::Free(scope.to_def_id(), def.id().unwrap());
}
@@ -1409,14 +1297,14 @@ impl<'a, 'tcx> LifetimeContext<'a, 'tcx> {
where_bound_origin: Some(hir::PredicateOrigin::ImplTrait), ..
} => {
let mut err = self.tcx.sess.struct_span_err(
- lifetime_ref.span,
+ lifetime_ref.ident.span,
"`impl Trait` can only mention lifetimes bound at the fn or impl level",
);
err.span_note(self.tcx.def_span(region_def_id), "lifetime declared here");
err.emit();
return;
}
- Scope::Root => break,
+ Scope::Root { .. } => break,
Scope::Binder { s, .. }
| Scope::Body { s, .. }
| Scope::Elision { s, .. }
@@ -1429,7 +1317,7 @@ impl<'a, 'tcx> LifetimeContext<'a, 'tcx> {
}
self.tcx.sess.delay_span_bug(
- lifetime_ref.span,
+ lifetime_ref.ident.span,
&format!("Could not resolve {:?} in scope {:#?}", lifetime_ref, self.scope,),
);
}
@@ -1494,7 +1382,7 @@ impl<'a, 'tcx> LifetimeContext<'a, 'tcx> {
let mut scope = self.scope;
loop {
match *scope {
- Scope::Root => break false,
+ Scope::Root { .. } => break false,
Scope::Body { .. } => break true,
@@ -1680,7 +1568,7 @@ impl<'a, 'tcx> LifetimeContext<'a, 'tcx> {
let obligations = predicates.predicates.iter().filter_map(|&(pred, _)| {
let bound_predicate = pred.kind();
match bound_predicate.skip_binder() {
- ty::PredicateKind::Trait(data) => {
+ ty::PredicateKind::Clause(ty::Clause::Trait(data)) => {
// The order here needs to match what we would get from `subst_supertrait`
let pred_bound_vars = bound_predicate.bound_vars();
let mut all_bound_vars = bound_vars.clone();
@@ -1731,7 +1619,7 @@ impl<'a, 'tcx> LifetimeContext<'a, 'tcx> {
scope = s;
}
- Scope::Root | Scope::Elision { .. } => break Region::Static,
+ Scope::Root { .. } | Scope::Elision { .. } => break Region::Static,
Scope::Body { .. } | Scope::ObjectLifetimeDefault { lifetime: None, .. } => return,
@@ -1747,10 +1635,7 @@ impl<'a, 'tcx> LifetimeContext<'a, 'tcx> {
#[instrument(level = "debug", skip(self))]
fn insert_lifetime(&mut self, lifetime_ref: &'tcx hir::Lifetime, def: Region) {
- debug!(
- node = ?self.tcx.hir().node_to_string(lifetime_ref.hir_id),
- span = ?self.tcx.sess.source_map().span_to_diagnostic_string(lifetime_ref.span)
- );
+ debug!(span = ?lifetime_ref.ident.span);
self.map.defs.insert(lifetime_ref.hir_id, def);
}
@@ -1780,7 +1665,7 @@ fn is_late_bound_map(tcx: TyCtxt<'_>, def_id: LocalDefId) -> Option<&FxIndexSet<
let mut late_bound = FxIndexSet::default();
- let mut constrained_by_input = ConstrainedCollector::default();
+ let mut constrained_by_input = ConstrainedCollector { regions: Default::default(), tcx };
for arg_ty in decl.inputs {
constrained_by_input.visit_ty(arg_ty);
}
@@ -1833,12 +1718,65 @@ fn is_late_bound_map(tcx: TyCtxt<'_>, def_id: LocalDefId) -> Option<&FxIndexSet<
debug!(?late_bound);
return Some(tcx.arena.alloc(late_bound));
- #[derive(Default)]
- struct ConstrainedCollector {
+ /// Visits a `ty::Ty` collecting information about what generic parameters are constrained.
+ ///
+ /// The visitor does not operate on `hir::Ty`, so that it can be called on the rhs of a `type Alias<...> = ...;`
+ /// that may live in a separate crate, where no HIR would be available. Instead we use the `type_of`
+ /// query to obtain a `ty::Ty`, which is present even in cross-crate scenarios. It also naturally
+ /// handles cycle detection as we go through the query system.
+ ///
+ /// This is necessary in the first place for the following case:
+ /// ```
+ /// type Alias<'a, T> = <T as Trait<'a>>::Assoc;
+ /// fn foo<'a>(_: Alias<'a, ()>) -> Alias<'a, ()> { ... }
+ /// ```
+ ///
+ /// If we conservatively considered `'a` unconstrained then we could break users who had written code before
+ /// we started correctly handling aliases. If we considered `'a` constrained then it would become late bound
+ /// causing an error during astconv as the `'a` is not constrained by the input type `<() as Trait<'a>>::Assoc`
+ /// but appears in the output type `<() as Trait<'a>>::Assoc`.
+ ///
+ /// We must therefore "look into" the `Alias` to see whether we should consider `'a` constrained or not.
+ ///
+ /// See #100508, #85533, and #47511 for additional context.
+ struct ConstrainedCollectorPostAstConv {
+ arg_is_constrained: Box<[bool]>,
+ }
+
+ use std::ops::ControlFlow;
+ use ty::Ty;
+ impl<'tcx> TypeVisitor<'tcx> for ConstrainedCollectorPostAstConv {
+ fn visit_ty(&mut self, t: Ty<'tcx>) -> ControlFlow<!> {
+ match t.kind() {
+ ty::Param(param_ty) => {
+ self.arg_is_constrained[param_ty.index as usize] = true;
+ }
+ ty::Projection(_) => return ControlFlow::Continue(()),
+ _ => (),
+ }
+ t.super_visit_with(self)
+ }
+
+ fn visit_const(&mut self, _: ty::Const<'tcx>) -> ControlFlow<!> {
+ ControlFlow::Continue(())
+ }
+
+ fn visit_region(&mut self, r: ty::Region<'tcx>) -> ControlFlow<!> {
+ debug!("r={:?}", r.kind());
+ if let ty::RegionKind::ReEarlyBound(region) = r.kind() {
+ self.arg_is_constrained[region.index as usize] = true;
+ }
+
+ ControlFlow::Continue(())
+ }
+ }
+
+ struct ConstrainedCollector<'tcx> {
+ tcx: TyCtxt<'tcx>,
regions: FxHashSet<LocalDefId>,
}
- impl<'v> Visitor<'v> for ConstrainedCollector {
+ impl<'v> Visitor<'v> for ConstrainedCollector<'_> {
fn visit_ty(&mut self, ty: &'v hir::Ty<'v>) {
match ty.kind {
hir::TyKind::Path(
@@ -1849,6 +1787,47 @@ fn is_late_bound_map(tcx: TyCtxt<'_>, def_id: LocalDefId) -> Option<&FxIndexSet<
// (defined above)
}
+ hir::TyKind::Path(hir::QPath::Resolved(
+ None,
+ hir::Path { res: Res::Def(DefKind::TyAlias, alias_def), segments, span },
+ )) => {
+ // See comments on `ConstrainedCollectorPostAstConv` for why this arm does not just consider
+ // substs to be unconstrained.
+ let generics = self.tcx.generics_of(alias_def);
+ let mut walker = ConstrainedCollectorPostAstConv {
+ arg_is_constrained: vec![false; generics.params.len()].into_boxed_slice(),
+ };
+ walker.visit_ty(self.tcx.type_of(alias_def));
+
+ match segments.last() {
+ Some(hir::PathSegment { args: Some(args), .. }) => {
+ let tcx = self.tcx;
+ for constrained_arg in
+ args.args.iter().enumerate().flat_map(|(n, arg)| {
+ match walker.arg_is_constrained.get(n) {
+ Some(true) => Some(arg),
+ Some(false) => None,
+ None => {
+ tcx.sess.delay_span_bug(
+ *span,
+ format!(
+ "Incorrect generic arg count for alias {:?}",
+ alias_def
+ ),
+ );
+ None
+ }
+ }
+ })
+ {
+ self.visit_generic_arg(constrained_arg);
+ }
+ }
+ Some(_) => (),
+ None => bug!("Path with no segments or self type"),
+ }
+ }
+
hir::TyKind::Path(hir::QPath::Resolved(None, ref path)) => {
// consider only the lifetimes on the final
// segment; I am not sure it's even currently
@@ -1867,7 +1846,7 @@ fn is_late_bound_map(tcx: TyCtxt<'_>, def_id: LocalDefId) -> Option<&FxIndexSet<
}
fn visit_lifetime(&mut self, lifetime_ref: &'v hir::Lifetime) {
- if let hir::LifetimeName::Param(def_id, _) = lifetime_ref.name {
+ if let hir::LifetimeName::Param(def_id) = lifetime_ref.res {
self.regions.insert(def_id);
}
}
@@ -1880,7 +1859,7 @@ fn is_late_bound_map(tcx: TyCtxt<'_>, def_id: LocalDefId) -> Option<&FxIndexSet<
impl<'v> Visitor<'v> for AllCollector {
fn visit_lifetime(&mut self, lifetime_ref: &'v hir::Lifetime) {
- if let hir::LifetimeName::Param(def_id, _) = lifetime_ref.name {
+ if let hir::LifetimeName::Param(def_id) = lifetime_ref.res {
self.regions.insert(def_id);
}
}
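// Illustrative sketch, not part of the patch above: the surface pattern that reaches the
// reworked `anonymous_lifetime_in_impl_trait` diagnostic in this file. The function below is
// an assumption made up for illustration; if its parameter were written with an elided
// lifetime, `impl Iterator<Item = &u32>`, that anonymous lifetime is rejected without the
// feature gate, and the new multipart suggestion proposes the named form shown here.
fn max_by_ref<'a>(items: impl Iterator<Item = &'a u32>) -> Option<u32> {
    // `copied()` turns the `&u32` items into `u32`, so `max()` yields an owned value.
    items.copied().max()
}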
diff --git a/compiler/rustc_hir_analysis/src/collect/predicates_of.rs b/compiler/rustc_hir_analysis/src/collect/predicates_of.rs
index 2e84e1d01..45e241f4e 100644
--- a/compiler/rustc_hir_analysis/src/collect/predicates_of.rs
+++ b/compiler/rustc_hir_analysis/src/collect/predicates_of.rs
@@ -84,60 +84,30 @@ fn gather_explicit_predicates_of(tcx: TyCtxt<'_>, def_id: DefId) -> ty::GenericP
Node::ImplItem(item) => item.generics,
- Node::Item(item) => {
- match item.kind {
- ItemKind::Impl(ref impl_) => {
- if impl_.defaultness.is_default() {
- is_default_impl_trait = tcx.impl_trait_ref(def_id).map(ty::Binder::dummy);
- }
- &impl_.generics
- }
- ItemKind::Fn(.., ref generics, _)
- | ItemKind::TyAlias(_, ref generics)
- | ItemKind::Enum(_, ref generics)
- | ItemKind::Struct(_, ref generics)
- | ItemKind::Union(_, ref generics) => *generics,
-
- ItemKind::Trait(_, _, ref generics, ..) => {
- is_trait = Some(ty::TraitRef::identity(tcx, def_id));
- *generics
+ Node::Item(item) => match item.kind {
+ ItemKind::Impl(ref impl_) => {
+ if impl_.defaultness.is_default() {
+ is_default_impl_trait = tcx.impl_trait_ref(def_id).map(ty::Binder::dummy);
}
- ItemKind::TraitAlias(ref generics, _) => {
- is_trait = Some(ty::TraitRef::identity(tcx, def_id));
- *generics
- }
- ItemKind::OpaqueTy(OpaqueTy {
- origin: hir::OpaqueTyOrigin::AsyncFn(..) | hir::OpaqueTyOrigin::FnReturn(..),
- ..
- }) => {
- // return-position impl trait
- //
- // We don't inherit predicates from the parent here:
- // If we have, say `fn f<'a, T: 'a>() -> impl Sized {}`
- // then the return type is `f::<'static, T>::{{opaque}}`.
- //
- // If we inherited the predicates of `f` then we would
- // require that `T: 'static` to show that the return
- // type is well-formed.
- //
- // The only way to have something with this opaque type
- // is from the return type of the containing function,
- // which will ensure that the function's predicates
- // hold.
- return ty::GenericPredicates { parent: None, predicates: &[] };
- }
- ItemKind::OpaqueTy(OpaqueTy {
- ref generics,
- origin: hir::OpaqueTyOrigin::TyAlias,
- ..
- }) => {
- // type-alias impl trait
- generics
- }
-
- _ => NO_GENERICS,
+ &impl_.generics
}
- }
+ ItemKind::Fn(.., ref generics, _)
+ | ItemKind::TyAlias(_, ref generics)
+ | ItemKind::Enum(_, ref generics)
+ | ItemKind::Struct(_, ref generics)
+ | ItemKind::Union(_, ref generics) => *generics,
+
+ ItemKind::Trait(_, _, ref generics, ..) => {
+ is_trait = Some(ty::TraitRef::identity(tcx, def_id));
+ *generics
+ }
+ ItemKind::TraitAlias(ref generics, _) => {
+ is_trait = Some(ty::TraitRef::identity(tcx, def_id));
+ *generics
+ }
+ ItemKind::OpaqueTy(OpaqueTy { ref generics, .. }) => generics,
+ _ => NO_GENERICS,
+ },
Node::ForeignItem(item) => match item.kind {
ForeignItemKind::Static(..) => NO_GENERICS,
@@ -181,6 +151,7 @@ fn gather_explicit_predicates_of(tcx: TyCtxt<'_>, def_id: DefId) -> ty::GenericP
trace!(?predicates);
trace!(?ast_generics);
+ trace!(?generics);
// Collect the predicates that were written inline by the user on each
// type parameter (e.g., `<T: Foo>`).
@@ -199,7 +170,7 @@ fn gather_explicit_predicates_of(tcx: TyCtxt<'_>, def_id: DefId) -> ty::GenericP
&icx,
&mut bounds,
&[],
- Some((param.hir_id, ast_generics.predicates)),
+ Some((param.def_id, ast_generics.predicates)),
param.span,
);
trace!(?bounds);
@@ -258,12 +229,12 @@ fn gather_explicit_predicates_of(tcx: TyCtxt<'_>, def_id: DefId) -> ty::GenericP
predicates.extend(region_pred.bounds.iter().map(|bound| {
let (r2, span) = match bound {
hir::GenericBound::Outlives(lt) => {
- (<dyn AstConv<'_>>::ast_region_to_region(&icx, lt, None), lt.span)
+ (<dyn AstConv<'_>>::ast_region_to_region(&icx, lt, None), lt.ident.span)
}
_ => bug!(),
};
- let pred = ty::Binder::dummy(ty::PredicateKind::RegionOutlives(
- ty::OutlivesPredicate(r1, r2),
+ let pred = ty::Binder::dummy(ty::PredicateKind::Clause(
+ ty::Clause::RegionOutlives(ty::OutlivesPredicate(r1, r2)),
))
.to_predicate(icx.tcx);
@@ -299,6 +270,52 @@ fn gather_explicit_predicates_of(tcx: TyCtxt<'_>, def_id: DefId) -> ty::GenericP
);
}
+ // Opaque types duplicate some of their generic parameters.
+ // We create bi-directional Outlives predicates between the original
+ // and the duplicated parameter, to ensure that they do not get out of sync.
+ if let Node::Item(&Item { kind: ItemKind::OpaqueTy(..), .. }) = node {
+ let opaque_ty_id = tcx.hir().get_parent_node(hir_id);
+ let opaque_ty_node = tcx.hir().get(opaque_ty_id);
+ let Node::Ty(&Ty { kind: TyKind::OpaqueDef(_, lifetimes, _), .. }) = opaque_ty_node else {
+ bug!("unexpected {opaque_ty_node:?}")
+ };
+ debug!(?lifetimes);
+ for (arg, duplicate) in std::iter::zip(lifetimes, ast_generics.params) {
+ let hir::GenericArg::Lifetime(arg) = arg else { bug!() };
+ let orig_region = <dyn AstConv<'_>>::ast_region_to_region(&icx, &arg, None);
+ if !matches!(orig_region.kind(), ty::ReEarlyBound(..)) {
+ // Only early-bound regions can point to the original generic parameter.
+ continue;
+ }
+
+ let hir::GenericParamKind::Lifetime { .. } = duplicate.kind else { continue };
+ let dup_def = tcx.hir().local_def_id(duplicate.hir_id).to_def_id();
+
+ let Some(dup_index) = generics.param_def_id_to_index(tcx, dup_def) else { bug!() };
+
+ let dup_region = tcx.mk_region(ty::ReEarlyBound(ty::EarlyBoundRegion {
+ def_id: dup_def,
+ index: dup_index,
+ name: duplicate.name.ident().name,
+ }));
+ predicates.push((
+ ty::Binder::dummy(ty::PredicateKind::Clause(ty::Clause::RegionOutlives(
+ ty::OutlivesPredicate(orig_region, dup_region),
+ )))
+ .to_predicate(icx.tcx),
+ duplicate.span,
+ ));
+ predicates.push((
+ ty::Binder::dummy(ty::PredicateKind::Clause(ty::Clause::RegionOutlives(
+ ty::OutlivesPredicate(dup_region, orig_region),
+ )))
+ .to_predicate(icx.tcx),
+ duplicate.span,
+ ));
+ }
+ debug!(?predicates);
+ }
+
ty::GenericPredicates {
parent: generics.parent,
predicates: tcx.arena.alloc_from_iter(predicates),
@@ -316,10 +333,9 @@ fn const_evaluatable_predicates_of<'tcx>(
impl<'tcx> intravisit::Visitor<'tcx> for ConstCollector<'tcx> {
fn visit_anon_const(&mut self, c: &'tcx hir::AnonConst) {
- let def_id = self.tcx.hir().local_def_id(c.hir_id);
- let ct = ty::Const::from_anon_const(self.tcx, def_id);
+ let ct = ty::Const::from_anon_const(self.tcx, c.def_id);
if let ty::ConstKind::Unevaluated(_) = ct.kind() {
- let span = self.tcx.hir().span(c.hir_id);
+ let span = self.tcx.def_span(c.def_id);
self.preds.insert((
ty::Binder::dummy(ty::PredicateKind::ConstEvaluatable(ct))
.to_predicate(self.tcx),
@@ -408,11 +424,13 @@ pub(super) fn explicit_predicates_of<'tcx>(
.iter()
.copied()
.filter(|(pred, _)| match pred.kind().skip_binder() {
- ty::PredicateKind::Trait(tr) => !is_assoc_item_ty(tr.self_ty()),
- ty::PredicateKind::Projection(proj) => {
+ ty::PredicateKind::Clause(ty::Clause::Trait(tr)) => !is_assoc_item_ty(tr.self_ty()),
+ ty::PredicateKind::Clause(ty::Clause::Projection(proj)) => {
!is_assoc_item_ty(proj.projection_ty.self_ty())
}
- ty::PredicateKind::TypeOutlives(outlives) => !is_assoc_item_ty(outlives.0),
+ ty::PredicateKind::Clause(ty::Clause::TypeOutlives(outlives)) => {
+ !is_assoc_item_ty(outlives.0)
+ }
_ => true,
})
.collect();
@@ -427,7 +445,9 @@ pub(super) fn explicit_predicates_of<'tcx>(
} else {
if matches!(def_kind, DefKind::AnonConst) && tcx.lazy_normalization() {
let hir_id = tcx.hir().local_def_id_to_hir_id(def_id.expect_local());
- if tcx.hir().opt_const_param_default_param_hir_id(hir_id).is_some() {
+ let parent_def_id = tcx.hir().get_parent_item(hir_id);
+
+ if tcx.hir().opt_const_param_default_param_def_id(hir_id).is_some() {
// In `generics_of` we set the generics' parent to be our parent's parent which means that
// we lose out on the predicates of our actual parent if we don't return those predicates here.
// (See comment in `generics_of` for more information on why the parent shenanigans is necessary)
@@ -439,8 +459,33 @@ pub(super) fn explicit_predicates_of<'tcx>(
// parent of generics returned by `generics_of`
//
// In the above code we want the anon const to have predicates in its param env for `T: Trait`
- let item_def_id = tcx.hir().get_parent_item(hir_id);
- // In the above code example we would be calling `explicit_predicates_of(Foo)` here
+ // and we would be calling `explicit_predicates_of(Foo)` here
+ return tcx.explicit_predicates_of(parent_def_id);
+ }
+
+ let parent_def_kind = tcx.def_kind(parent_def_id);
+ if matches!(parent_def_kind, DefKind::OpaqueTy) {
+ // In `instantiate_identity` we inherit the predicates of our parent.
+ // However, opaque types do not have a parent (see `gather_explicit_predicates_of`), which means
+ // that we lose out on the predicates of our actual parent if we don't return those predicates here.
+ //
+ // fn foo<T: Trait>() -> impl Iterator<Item = Another<{ <T as Trait>::ASSOC }>> { todo!() }
+ // ^^^^^^^^^^^^^^^^^^^ the def id we are calling
+ // explicit_predicates_of on
+ //
+ // In the above code we want the anon const to have predicates in its param env for `T: Trait`.
+ // However, the anon const cannot inherit predicates from its parent since it's opaque.
+ //
+ // To fix this, we call `explicit_predicates_of` directly on `foo`, the parent's parent.
+
+ // In the above example this is `foo::{opaque#0}` or `impl Iterator`
+ let parent_hir_id = tcx.hir().local_def_id_to_hir_id(parent_def_id.def_id);
+
+ // In the above example this is the function `foo`
+ let item_def_id = tcx.hir().get_parent_item(parent_hir_id);
+
+ // In the above code example we would be calling `explicit_predicates_of(foo)` here
return tcx.explicit_predicates_of(item_def_id);
}
}
@@ -504,7 +549,7 @@ pub(super) fn super_predicates_that_define_assoc_type(
let is_trait_alias = tcx.is_trait_alias(trait_def_id);
let superbounds2 = icx.type_parameter_bounds_in_generics(
generics,
- item.hir_id(),
+ item.owner_id.def_id,
self_param_ty,
OnlySelfBounds(!is_trait_alias),
assoc_name,
@@ -521,7 +566,9 @@ pub(super) fn super_predicates_that_define_assoc_type(
// which will, in turn, reach indirect supertraits.
for &(pred, span) in superbounds {
debug!("superbound: {:?}", pred);
- if let ty::PredicateKind::Trait(bound) = pred.kind().skip_binder() {
+ if let ty::PredicateKind::Clause(ty::Clause::Trait(bound)) =
+ pred.kind().skip_binder()
+ {
tcx.at(span).super_predicates_of(bound.def_id());
}
}
@@ -614,14 +661,14 @@ pub(super) fn type_param_predicates(
let extra_predicates = extend.into_iter().chain(
icx.type_parameter_bounds_in_generics(
ast_generics,
- param_id,
+ def_id,
ty,
OnlySelfBounds(true),
Some(assoc_name),
)
.into_iter()
.filter(|(predicate, _)| match predicate.kind().skip_binder() {
- ty::PredicateKind::Trait(data) => data.self_ty().is_param(index),
+ ty::PredicateKind::Clause(ty::Clause::Trait(data)) => data.self_ty().is_param(index),
_ => false,
}),
);
@@ -639,13 +686,11 @@ impl<'tcx> ItemCtxt<'tcx> {
fn type_parameter_bounds_in_generics(
&self,
ast_generics: &'tcx hir::Generics<'tcx>,
- param_id: hir::HirId,
+ param_def_id: LocalDefId,
ty: Ty<'tcx>,
only_self_bounds: OnlySelfBounds,
assoc_name: Option<Ident>,
) -> Vec<(ty::Predicate<'tcx>, Span)> {
- let param_def_id = self.tcx.hir().local_def_id(param_id).to_def_id();
- trace!(?param_def_id);
ast_generics
.predicates
.iter()
@@ -654,7 +699,7 @@ impl<'tcx> ItemCtxt<'tcx> {
_ => None,
})
.flat_map(|bp| {
- let bt = if bp.is_param_bound(param_def_id) {
+ let bt = if bp.is_param_bound(param_def_id.to_def_id()) {
Some(ty)
} else if !only_self_bounds.0 {
Some(self.to_ty(bp.bounded_ty))
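// Illustrative sketch, not part of the patch above: the surface code behind the bi-directional
// `RegionOutlives` predicates added in `gather_explicit_predicates_of`. For a return-position
// `impl Trait` that captures `'a`, the lowered opaque type carries its own duplicate of `'a`;
// the two outlives predicates generated above keep the original lifetime and its duplicate
// equal in both directions. The function name is an assumption for illustration only.
fn captures<'a>(x: &'a u32) -> impl Sized + 'a {
    // The opaque return type must be allowed to hold `x`, so it captures `'a`.
    x
}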
diff --git a/compiler/rustc_hir_analysis/src/collect/type_of.rs b/compiler/rustc_hir_analysis/src/collect/type_of.rs
index c29a645eb..9bd1715ce 100644
--- a/compiler/rustc_hir_analysis/src/collect/type_of.rs
+++ b/compiler/rustc_hir_analysis/src/collect/type_of.rs
@@ -514,10 +514,10 @@ pub(super) fn type_of(tcx: TyCtxt<'_>, def_id: DefId) -> Ty<'_> {
}
Node::GenericParam(&GenericParam {
- hir_id: param_hir_id,
+ def_id: param_def_id,
kind: GenericParamKind::Const { default: Some(ct), .. },
..
- }) if ct.hir_id == hir_id => tcx.type_of(tcx.hir().local_def_id(param_hir_id)),
+ }) if ct.hir_id == hir_id => tcx.type_of(param_def_id),
x => tcx.ty_error_with_message(
DUMMY_SP,
@@ -636,9 +636,8 @@ fn find_opaque_ty_constraints_for_tait(tcx: TyCtxt<'_>, def_id: LocalDefId) -> T
self.tcx.hir()
}
fn visit_expr(&mut self, ex: &'tcx Expr<'tcx>) {
- if let hir::ExprKind::Closure { .. } = ex.kind {
- let def_id = self.tcx.hir().local_def_id(ex.hir_id);
- self.check(def_id);
+ if let hir::ExprKind::Closure(closure) = ex.kind {
+ self.check(closure.def_id);
}
intravisit::walk_expr(self, ex);
}
@@ -698,7 +697,7 @@ fn find_opaque_ty_constraints_for_tait(tcx: TyCtxt<'_>, def_id: LocalDefId) -> T
}
let Some(hidden) = locator.found else {
- tcx.sess.emit_err(UnconstrainedOpaqueType {
+ let reported = tcx.sess.emit_err(UnconstrainedOpaqueType {
span: tcx.def_span(def_id),
name: tcx.item_name(tcx.local_parent(def_id).to_def_id()),
what: match tcx.hir().get(scope) {
@@ -708,7 +707,7 @@ fn find_opaque_ty_constraints_for_tait(tcx: TyCtxt<'_>, def_id: LocalDefId) -> T
_ => "item",
},
});
- return tcx.ty_error();
+ return tcx.ty_error_with_guaranteed(reported);
};
// Only check against typeck if we didn't already error
@@ -771,9 +770,8 @@ fn find_opaque_ty_constraints_for_rpit(
self.tcx.hir()
}
fn visit_expr(&mut self, ex: &'tcx Expr<'tcx>) {
- if let hir::ExprKind::Closure { .. } = ex.kind {
- let def_id = self.tcx.hir().local_def_id(ex.hir_id);
- self.check(def_id);
+ if let hir::ExprKind::Closure(closure) = ex.kind {
+ self.check(closure.def_id);
}
intravisit::walk_expr(self, ex);
}
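// Illustrative sketch, not part of the patch above: the shape of code that
// `find_opaque_ty_constraints_for_tait` deals with. The names and bodies are assumptions for
// illustration; a type-alias `impl Trait` with no defining use triggers the
// `UnconstrainedOpaqueType` error (whose guarantee token is now threaded into
// `ty_error_with_guaranteed`), while a defining use constrains it, with closure bodies now
// located via `closure.def_id`.
//
// #![feature(type_alias_impl_trait)]
//
// type Unconstrained = impl Sized; // error: unconstrained opaque type
//
// type Answer = impl Sized;
// fn define() -> Answer {
//     let make = || 42_u32; // closures are walked too when locating defining uses
//     make()
// }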
diff --git a/compiler/rustc_hir_analysis/src/constrained_generic_params.rs b/compiler/rustc_hir_analysis/src/constrained_generic_params.rs
index 213b89fc7..b4057df78 100644
--- a/compiler/rustc_hir_analysis/src/constrained_generic_params.rs
+++ b/compiler/rustc_hir_analysis/src/constrained_generic_params.rs
@@ -187,7 +187,8 @@ pub fn setup_constraining_predicates<'tcx>(
for j in i..predicates.len() {
// Note that we don't have to care about binders here,
// as the impl trait ref never contains any late-bound regions.
- if let ty::PredicateKind::Projection(projection) = predicates[j].0.kind().skip_binder()
+ if let ty::PredicateKind::Clause(ty::Clause::Projection(projection)) =
+ predicates[j].0.kind().skip_binder()
{
// Special case: watch out for some kind of sneaky attempt
// to project out an associated type defined by this very
diff --git a/compiler/rustc_hir_analysis/src/errors.rs b/compiler/rustc_hir_analysis/src/errors.rs
index d5b1a7ce1..c92ab749b 100644
--- a/compiler/rustc_hir_analysis/src/errors.rs
+++ b/compiler/rustc_hir_analysis/src/errors.rs
@@ -43,6 +43,10 @@ pub struct LifetimesOrBoundsMismatchOnTrait {
pub span: Span,
#[label(generics_label)]
pub generics_span: Option<Span>,
+ #[label(where_label)]
+ pub where_span: Option<Span>,
+ #[label(bounds_label)]
+ pub bounds_span: Vec<Span>,
pub item_kind: &'static str,
pub ident: Ident,
}
@@ -120,7 +124,7 @@ pub struct TypeofReservedKeywordUsed<'tcx> {
#[primary_span]
#[label]
pub span: Span,
- #[suggestion_verbose(code = "{ty}")]
+ #[suggestion(style = "verbose", code = "{ty}")]
pub opt_sugg: Option<(Span, Applicability)>,
}
@@ -156,6 +160,7 @@ pub struct MissingTypeParams {
// Manual implementation of `IntoDiagnostic` to be able to call `span_to_snippet`.
impl<'a> IntoDiagnostic<'a> for MissingTypeParams {
+ #[track_caller]
fn into_diagnostic(self, handler: &'a Handler) -> DiagnosticBuilder<'a, ErrorGuaranteed> {
let mut err = handler.struct_span_err_with_code(
self.span,
@@ -238,7 +243,11 @@ pub struct UnusedExternCrate {
#[derive(LintDiagnostic)]
#[diag(hir_analysis_extern_crate_not_idiomatic)]
pub struct ExternCrateNotIdiomatic {
- #[suggestion_short(applicability = "machine-applicable", code = "{suggestion_code}")]
+ #[suggestion(
+ style = "short",
+ applicability = "machine-applicable",
+ code = "{suggestion_code}"
+ )]
pub span: Span,
pub msg_code: String,
pub suggestion_code: String,
@@ -280,3 +289,10 @@ pub struct SelfInImplSelf {
#[note]
pub note: (),
}
+
+#[derive(Diagnostic)]
+#[diag(hir_analysis_linkage_type, code = "E0791")]
+pub(crate) struct LinkageType {
+ #[primary_span]
+ pub span: Span,
+}
diff --git a/compiler/rustc_hir_analysis/src/hir_wf_check.rs b/compiler/rustc_hir_analysis/src/hir_wf_check.rs
index b0fdfcf38..4f9d58265 100644
--- a/compiler/rustc_hir_analysis/src/hir_wf_check.rs
+++ b/compiler/rustc_hir_analysis/src/hir_wf_check.rs
@@ -5,7 +5,7 @@ use rustc_hir::{ForeignItem, ForeignItemKind, HirId};
use rustc_infer::infer::TyCtxtInferExt;
use rustc_infer::traits::{ObligationCause, WellFormedLoc};
use rustc_middle::ty::query::Providers;
-use rustc_middle::ty::{self, Region, ToPredicate, TyCtxt, TypeFoldable, TypeFolder};
+use rustc_middle::ty::{self, Region, TyCtxt, TypeFoldable, TypeFolder};
use rustc_trait_selection::traits;
pub fn provide(providers: &mut Providers) {
@@ -74,10 +74,10 @@ fn diagnostic_hir_wf_check<'tcx>(
let errors = traits::fully_solve_obligation(
&infcx,
traits::Obligation::new(
+ self.tcx,
cause,
self.param_env,
- ty::Binder::dummy(ty::PredicateKind::WellFormed(tcx_ty.into()))
- .to_predicate(self.tcx),
+ ty::Binder::dummy(ty::PredicateKind::WellFormed(tcx_ty.into())),
),
);
if !errors.is_empty() {
diff --git a/compiler/rustc_hir_analysis/src/impl_wf_check/min_specialization.rs b/compiler/rustc_hir_analysis/src/impl_wf_check/min_specialization.rs
index e806e9487..fd8e8ed7b 100644
--- a/compiler/rustc_hir_analysis/src/impl_wf_check/min_specialization.rs
+++ b/compiler/rustc_hir_analysis/src/impl_wf_check/min_specialization.rs
@@ -69,6 +69,7 @@ use crate::constrained_generic_params as cgp;
use crate::errors::SubstsOnOverriddenImpl;
use rustc_data_structures::fx::FxHashSet;
+use rustc_hir as hir;
use rustc_hir::def_id::{DefId, LocalDefId};
use rustc_infer::infer::outlives::env::OutlivesEnvironment;
use rustc_infer::infer::TyCtxtInferExt;
@@ -80,6 +81,7 @@ use rustc_span::Span;
use rustc_trait_selection::traits::error_reporting::TypeErrCtxtExt;
use rustc_trait_selection::traits::outlives_bounds::InferCtxtExt as _;
use rustc_trait_selection::traits::{self, translate_substs, wf, ObligationCtxt};
+use tracing::instrument;
pub(super) fn check_min_specialization(tcx: TyCtxt<'_>, impl_def_id: LocalDefId) {
if let Some(node) = parent_specialization_node(tcx, impl_def_id) {
@@ -103,13 +105,11 @@ fn parent_specialization_node(tcx: TyCtxt<'_>, impl1_def_id: LocalDefId) -> Opti
}
/// Check that `impl1` is a sound specialization
+#[instrument(level = "debug", skip(tcx))]
fn check_always_applicable(tcx: TyCtxt<'_>, impl1_def_id: LocalDefId, impl2_node: Node) {
if let Some((impl1_substs, impl2_substs)) = get_impl_substs(tcx, impl1_def_id, impl2_node) {
let impl2_def_id = impl2_node.def_id();
- debug!(
- "check_always_applicable(\nimpl1_def_id={:?},\nimpl2_def_id={:?},\nimpl2_substs={:?}\n)",
- impl1_def_id, impl2_def_id, impl2_substs
- );
+ debug!(?impl2_def_id, ?impl2_substs);
let parent_substs = if impl2_node.is_from_trait() {
impl2_substs.to_vec()
@@ -118,12 +118,33 @@ fn check_always_applicable(tcx: TyCtxt<'_>, impl1_def_id: LocalDefId, impl2_node
};
let span = tcx.def_span(impl1_def_id);
+ check_constness(tcx, impl1_def_id, impl2_node, span);
check_static_lifetimes(tcx, &parent_substs, span);
check_duplicate_params(tcx, impl1_substs, &parent_substs, span);
check_predicates(tcx, impl1_def_id, impl1_substs, impl2_node, impl2_substs, span);
}
}
+/// Check that the specializing impl `impl1` is at least as const as the base
+/// impl `impl2`
+fn check_constness(tcx: TyCtxt<'_>, impl1_def_id: LocalDefId, impl2_node: Node, span: Span) {
+ if impl2_node.is_from_trait() {
+ // This isn't a specialization
+ return;
+ }
+
+ let impl1_constness = tcx.constness(impl1_def_id.to_def_id());
+ let impl2_constness = tcx.constness(impl2_node.def_id());
+
+ if let hir::Constness::Const = impl2_constness {
+ if let hir::Constness::NotConst = impl1_constness {
+ tcx.sess
+ .struct_span_err(span, "cannot specialize on const impl with non-const impl")
+ .emit();
+ }
+ }
+}
+
/// Given a specializing impl `impl1`, and the base impl `impl2`, returns two
/// substitutions `(S1, S2)` that equate their trait references. The returned
/// types are expressed in terms of the generics of `impl1`.
@@ -155,7 +176,7 @@ fn get_impl_substs<'tcx>(
let errors = ocx.select_all_or_error();
if !errors.is_empty() {
- ocx.infcx.err_ctxt().report_fulfillment_errors(&errors, None, false);
+ ocx.infcx.err_ctxt().report_fulfillment_errors(&errors, None);
return None;
}
@@ -193,7 +214,9 @@ fn unconstrained_parent_impl_substs<'tcx>(
// the functions in `cgp` add the constrained parameters to a list of
// unconstrained parameters.
for (predicate, _) in impl_generic_predicates.predicates.iter() {
- if let ty::PredicateKind::Projection(proj) = predicate.kind().skip_binder() {
+ if let ty::PredicateKind::Clause(ty::Clause::Projection(proj)) =
+ predicate.kind().skip_binder()
+ {
let projection_ty = proj.projection_ty;
let projected_ty = proj.term;
@@ -278,15 +301,15 @@ fn check_static_lifetimes<'tcx>(
/// Check whether predicates on the specializing impl (`impl1`) are allowed.
///
-/// Each predicate `P` must be:
+/// Each predicate `P` must be one of:
///
-/// * global (not reference any parameters)
-/// * `T: Tr` predicate where `Tr` is an always-applicable trait
-/// * on the base `impl impl2`
-/// * Currently this check is done using syntactic equality, which is
-/// conservative but generally sufficient.
-/// * a well-formed predicate of a type argument of the trait being implemented,
+/// * Global (not reference any parameters).
+/// * A `T: Tr` predicate where `Tr` is an always-applicable trait.
+/// * Present on the base impl `impl2`.
+/// * This check is done using the `trait_predicates_eq` function below.
+/// * A well-formed predicate of a type argument of the trait being implemented,
/// including the `Self`-type.
+#[instrument(level = "debug", skip(tcx))]
fn check_predicates<'tcx>(
tcx: TyCtxt<'tcx>,
impl1_def_id: LocalDefId,
@@ -322,10 +345,7 @@ fn check_predicates<'tcx>(
.map(|obligation| obligation.predicate)
.collect()
};
- debug!(
- "check_always_applicable(\nimpl1_predicates={:?},\nimpl2_predicates={:?}\n)",
- impl1_predicates, impl2_predicates,
- );
+ debug!(?impl1_predicates, ?impl2_predicates);
// Since impls of always applicable traits don't get to assume anything, we
// can also assume their supertraits apply.
@@ -373,25 +393,90 @@ fn check_predicates<'tcx>(
);
for (predicate, span) in impl1_predicates {
- if !impl2_predicates.contains(&predicate) {
+ if !impl2_predicates.iter().any(|pred2| trait_predicates_eq(tcx, predicate, *pred2, span)) {
check_specialization_on(tcx, predicate, span)
}
}
}
+/// Checks if some predicate on the specializing impl (`predicate1`) is the same
+/// as some predicate on the base impl (`predicate2`).
+///
+/// This basically just checks syntactic equivalence, but is a little more
+/// forgiving since we want to equate `T: Tr` with `T: ~const Tr` so this can work:
+///
+/// ```ignore (illustrative)
+/// #[rustc_specialization_trait]
+/// trait Specialize { }
+///
+/// impl<T: Bound> Tr for T { }
+/// impl<T: ~const Bound + Specialize> const Tr for T { }
+/// ```
+///
+/// However, we *don't* want to allow the reverse, i.e., when the bound on the
+/// specializing impl is not as const as the bound on the base impl:
+///
+/// ```ignore (illustrative)
+/// impl<T: ~const Bound> const Tr for T { }
+/// impl<T: Bound + Specialize> const Tr for T { } // should be T: ~const Bound
+/// ```
+///
+/// So we make that check in this function and try to raise a helpful error message.
+fn trait_predicates_eq<'tcx>(
+ tcx: TyCtxt<'tcx>,
+ predicate1: ty::Predicate<'tcx>,
+ predicate2: ty::Predicate<'tcx>,
+ span: Span,
+) -> bool {
+ let pred1_kind = predicate1.kind().skip_binder();
+ let pred2_kind = predicate2.kind().skip_binder();
+ let (trait_pred1, trait_pred2) = match (pred1_kind, pred2_kind) {
+ (
+ ty::PredicateKind::Clause(ty::Clause::Trait(pred1)),
+ ty::PredicateKind::Clause(ty::Clause::Trait(pred2)),
+ ) => (pred1, pred2),
+ // Just use plain syntactic equivalence if either of the predicates aren't
+ // trait predicates or have bound vars.
+ _ => return predicate1 == predicate2,
+ };
+
+ let predicates_equal_modulo_constness = {
+ let pred1_unconsted =
+ ty::TraitPredicate { constness: ty::BoundConstness::NotConst, ..trait_pred1 };
+ let pred2_unconsted =
+ ty::TraitPredicate { constness: ty::BoundConstness::NotConst, ..trait_pred2 };
+ pred1_unconsted == pred2_unconsted
+ };
+
+ if !predicates_equal_modulo_constness {
+ return false;
+ }
+
+ // Check that the predicate on the specializing impl is at least as const as
+ // the one on the base.
+ match (trait_pred2.constness, trait_pred1.constness) {
+ (ty::BoundConstness::ConstIfConst, ty::BoundConstness::NotConst) => {
+ tcx.sess.struct_span_err(span, "missing `~const` qualifier for specialization").emit();
+ }
+ _ => {}
+ }
+
+ true
+}
+
+#[instrument(level = "debug", skip(tcx))]
fn check_specialization_on<'tcx>(tcx: TyCtxt<'tcx>, predicate: ty::Predicate<'tcx>, span: Span) {
- debug!("can_specialize_on(predicate = {:?})", predicate);
match predicate.kind().skip_binder() {
// Global predicates are either always true or always false, so we
// are fine to specialize on.
_ if predicate.is_global() => (),
// We allow specializing on explicitly marked traits with no associated
// items.
- ty::PredicateKind::Trait(ty::TraitPredicate {
+ ty::PredicateKind::Clause(ty::Clause::Trait(ty::TraitPredicate {
trait_ref,
- constness: ty::BoundConstness::NotConst,
+ constness: _,
polarity: _,
- }) => {
+ })) => {
if !matches!(
trait_predicate_kind(tcx, predicate),
Some(TraitSpecializationKind::Marker)
@@ -407,7 +492,10 @@ fn check_specialization_on<'tcx>(tcx: TyCtxt<'tcx>, predicate: ty::Predicate<'tc
.emit();
}
}
- ty::PredicateKind::Projection(ty::ProjectionPredicate { projection_ty, term }) => {
+ ty::PredicateKind::Clause(ty::Clause::Projection(ty::ProjectionPredicate {
+ projection_ty,
+ term,
+ })) => {
tcx.sess
.struct_span_err(
span,
@@ -428,12 +516,14 @@ fn trait_predicate_kind<'tcx>(
predicate: ty::Predicate<'tcx>,
) -> Option<TraitSpecializationKind> {
match predicate.kind().skip_binder() {
- ty::PredicateKind::Trait(ty::TraitPredicate { trait_ref, constness: _, polarity: _ }) => {
- Some(tcx.trait_def(trait_ref.def_id).specialization_kind)
- }
- ty::PredicateKind::RegionOutlives(_)
- | ty::PredicateKind::TypeOutlives(_)
- | ty::PredicateKind::Projection(_)
+ ty::PredicateKind::Clause(ty::Clause::Trait(ty::TraitPredicate {
+ trait_ref,
+ constness: _,
+ polarity: _,
+ })) => Some(tcx.trait_def(trait_ref.def_id).specialization_kind),
+ ty::PredicateKind::Clause(ty::Clause::RegionOutlives(_))
+ | ty::PredicateKind::Clause(ty::Clause::TypeOutlives(_))
+ | ty::PredicateKind::Clause(ty::Clause::Projection(_))
| ty::PredicateKind::WellFormed(_)
| ty::PredicateKind::Subtype(_)
| ty::PredicateKind::Coerce(_)
@@ -441,6 +531,7 @@ fn trait_predicate_kind<'tcx>(
| ty::PredicateKind::ClosureKind(..)
| ty::PredicateKind::ConstEvaluatable(..)
| ty::PredicateKind::ConstEquate(..)
+ | ty::PredicateKind::Ambiguous
| ty::PredicateKind::TypeWellFormedFromEnv(..) => None,
}
}
diff --git a/compiler/rustc_hir_analysis/src/lib.rs b/compiler/rustc_hir_analysis/src/lib.rs
index 525cd2419..2058832d5 100644
--- a/compiler/rustc_hir_analysis/src/lib.rs
+++ b/compiler/rustc_hir_analysis/src/lib.rs
@@ -106,7 +106,7 @@ use rustc_middle::middle;
use rustc_middle::ty::query::Providers;
use rustc_middle::ty::{self, Ty, TyCtxt};
use rustc_middle::util;
-use rustc_session::config::EntryFnType;
+use rustc_session::{config::EntryFnType, parse::feature_err};
use rustc_span::{symbol::sym, Span, DUMMY_SP};
use rustc_target::spec::abi::Abi;
use rustc_trait_selection::traits::error_reporting::TypeErrCtxtExt as _;
@@ -118,20 +118,40 @@ use astconv::AstConv;
use bounds::Bounds;
fn require_c_abi_if_c_variadic(tcx: TyCtxt<'_>, decl: &hir::FnDecl<'_>, abi: Abi, span: Span) {
- match (decl.c_variadic, abi) {
- // The function has the correct calling convention, or isn't a "C-variadic" function.
- (false, _) | (true, Abi::C { .. }) | (true, Abi::Cdecl { .. }) => {}
- // The function is a "C-variadic" function with an incorrect calling convention.
- (true, _) => {
- let mut err = struct_span_err!(
- tcx.sess,
+ const ERROR_HEAD: &str = "C-variadic function must have a compatible calling convention";
+ const CONVENTIONS_UNSTABLE: &str = "`C`, `cdecl`, `win64`, `sysv64` or `efiapi`";
+ const CONVENTIONS_STABLE: &str = "`C` or `cdecl`";
+ const UNSTABLE_EXPLAIN: &str =
+ "using calling conventions other than `C` or `cdecl` for varargs functions is unstable";
+
+ if !decl.c_variadic || matches!(abi, Abi::C { .. } | Abi::Cdecl { .. }) {
+ return;
+ }
+
+ let extended_abi_support = tcx.features().extended_varargs_abi_support;
+ let conventions = match (extended_abi_support, abi.supports_varargs()) {
+ // The user enabled additional ABI support for varargs and the function's ABI is one of them.
+ (true, true) => return,
+
+ // Using this ABI would be ok if the feature for additional ABI support were enabled.
+ // Return CONVENTIONS_STABLE, because we want the other error to look the same.
+ (false, true) => {
+ feature_err(
+ &tcx.sess.parse_sess,
+ sym::extended_varargs_abi_support,
span,
- E0045,
- "C-variadic function must have C or cdecl calling convention"
- );
- err.span_label(span, "C-variadics require C or cdecl calling convention").emit();
+ UNSTABLE_EXPLAIN,
+ )
+ .emit();
+ CONVENTIONS_STABLE
}
- }
+
+ (false, false) => CONVENTIONS_STABLE,
+ (true, false) => CONVENTIONS_UNSTABLE,
+ };
+
+ let mut err = struct_span_err!(tcx.sess, span, E0045, "{}, like {}", ERROR_HEAD, conventions);
+ err.span_label(span, ERROR_HEAD).emit();
}
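// Illustrative sketch (not part of the patch; declarations invented): the kinds of
// C-variadic declarations the reworked check distinguishes. `C` and `cdecl` stay
// accepted, the additional ABIs need the unstable `extended_varargs_abi_support`
// feature, and an ABI that can never support varargs is rejected with E0045.
extern "C" {
    fn ok_c(fmt: *const u8, ...); // accepted: `C` supports varargs
}
extern "sysv64" {
    fn gated(fmt: *const u8, ...); // feature-gated: needs `extended_varargs_abi_support`
}
extern "stdcall" {
    fn rejected(fmt: *const u8, ...); // error[E0045]: incompatible calling convention
}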
fn require_same_types<'tcx>(
@@ -153,7 +173,7 @@ fn require_same_types<'tcx>(
match &errors[..] {
[] => true,
errors => {
- infcx.err_ctxt().report_fulfillment_errors(errors, None, false);
+ infcx.err_ctxt().report_fulfillment_errors(errors, None);
false
}
}
@@ -312,11 +332,11 @@ fn check_main_fn_ty(tcx: TyCtxt<'_>, main_def_id: DefId) {
ObligationCauseCode::MainFunctionType,
);
let ocx = traits::ObligationCtxt::new(&infcx);
- let norm_return_ty = ocx.normalize(cause.clone(), param_env, return_ty);
+ let norm_return_ty = ocx.normalize(&cause, param_env, return_ty);
ocx.register_bound(cause, param_env, norm_return_ty, term_did);
let errors = ocx.select_all_or_error();
if !errors.is_empty() {
- infcx.err_ctxt().report_fulfillment_errors(&errors, None, false);
+ infcx.err_ctxt().report_fulfillment_errors(&errors, None);
error = true;
}
// now we can take the return type of the given main function
diff --git a/compiler/rustc_hir_analysis/src/outlives/explicit.rs b/compiler/rustc_hir_analysis/src/outlives/explicit.rs
index 7534482cc..663f1c49d 100644
--- a/compiler/rustc_hir_analysis/src/outlives/explicit.rs
+++ b/compiler/rustc_hir_analysis/src/outlives/explicit.rs
@@ -30,28 +30,30 @@ impl<'tcx> ExplicitPredicatesMap<'tcx> {
// process predicates and convert to `RequiredPredicates` entry, see below
for &(predicate, span) in predicates.predicates {
match predicate.kind().skip_binder() {
- ty::PredicateKind::TypeOutlives(OutlivesPredicate(ty, reg)) => {
- insert_outlives_predicate(
- tcx,
- ty.into(),
- reg,
- span,
- &mut required_predicates,
- )
- }
+ ty::PredicateKind::Clause(ty::Clause::TypeOutlives(OutlivesPredicate(
+ ty,
+ reg,
+ ))) => insert_outlives_predicate(
+ tcx,
+ ty.into(),
+ reg,
+ span,
+ &mut required_predicates,
+ ),
- ty::PredicateKind::RegionOutlives(OutlivesPredicate(reg1, reg2)) => {
- insert_outlives_predicate(
- tcx,
- reg1.into(),
- reg2,
- span,
- &mut required_predicates,
- )
- }
+ ty::PredicateKind::Clause(ty::Clause::RegionOutlives(OutlivesPredicate(
+ reg1,
+ reg2,
+ ))) => insert_outlives_predicate(
+ tcx,
+ reg1.into(),
+ reg2,
+ span,
+ &mut required_predicates,
+ ),
- ty::PredicateKind::Trait(..)
- | ty::PredicateKind::Projection(..)
+ ty::PredicateKind::Clause(ty::Clause::Trait(..))
+ | ty::PredicateKind::Clause(ty::Clause::Projection(..))
| ty::PredicateKind::WellFormed(..)
| ty::PredicateKind::ObjectSafe(..)
| ty::PredicateKind::ClosureKind(..)
@@ -59,6 +61,7 @@ impl<'tcx> ExplicitPredicatesMap<'tcx> {
| ty::PredicateKind::Coerce(..)
| ty::PredicateKind::ConstEvaluatable(..)
| ty::PredicateKind::ConstEquate(..)
+ | ty::PredicateKind::Ambiguous
| ty::PredicateKind::TypeWellFormedFromEnv(..) => (),
}
}
diff --git a/compiler/rustc_hir_analysis/src/outlives/mod.rs b/compiler/rustc_hir_analysis/src/outlives/mod.rs
index e50c26765..81fe32000 100644
--- a/compiler/rustc_hir_analysis/src/outlives/mod.rs
+++ b/compiler/rustc_hir_analysis/src/outlives/mod.rs
@@ -3,7 +3,7 @@ use rustc_hir as hir;
use rustc_hir::def_id::DefId;
use rustc_middle::ty::query::Providers;
use rustc_middle::ty::subst::GenericArgKind;
-use rustc_middle::ty::{self, CratePredicatesMap, ToPredicate, TyCtxt};
+use rustc_middle::ty::{self, CratePredicatesMap, TyCtxt};
use rustc_span::symbol::sym;
use rustc_span::Span;
@@ -17,12 +17,12 @@ pub fn provide(providers: &mut Providers) {
*providers = Providers { inferred_outlives_of, inferred_outlives_crate, ..*providers };
}
-fn inferred_outlives_of(tcx: TyCtxt<'_>, item_def_id: DefId) -> &[(ty::Predicate<'_>, Span)] {
+fn inferred_outlives_of(tcx: TyCtxt<'_>, item_def_id: DefId) -> &[(ty::Clause<'_>, Span)] {
let id = tcx.hir().local_def_id_to_hir_id(item_def_id.expect_local());
if matches!(tcx.def_kind(item_def_id), hir::def::DefKind::AnonConst) && tcx.lazy_normalization()
{
- if tcx.hir().opt_const_param_default_param_hir_id(id).is_some() {
+ if tcx.hir().opt_const_param_default_param_def_id(id).is_some() {
// In `generics_of` we set the generics' parent to be our parent's parent which means that
// we lose out on the predicates of our actual parent if we don't return those predicates here.
// (See comment in `generics_of` for more information on why the parent shenanigans are necessary)
@@ -50,10 +50,10 @@ fn inferred_outlives_of(tcx: TyCtxt<'_>, item_def_id: DefId) -> &[(ty::Predicate
if tcx.has_attr(item_def_id, sym::rustc_outlives) {
let mut pred: Vec<String> = predicates
.iter()
- .map(|(out_pred, _)| match out_pred.kind().skip_binder() {
- ty::PredicateKind::RegionOutlives(p) => p.to_string(),
- ty::PredicateKind::TypeOutlives(p) => p.to_string(),
- err => bug!("unexpected predicate {:?}", err),
+ .map(|(out_pred, _)| match out_pred {
+ ty::Clause::RegionOutlives(p) => p.to_string(),
+ ty::Clause::TypeOutlives(p) => p.to_string(),
+ err => bug!("unexpected clause {:?}", err),
})
.collect();
pred.sort();
@@ -101,17 +101,11 @@ fn inferred_outlives_crate(tcx: TyCtxt<'_>, (): ()) -> CratePredicatesMap<'_> {
|(ty::OutlivesPredicate(kind1, region2), &span)| {
match kind1.unpack() {
GenericArgKind::Type(ty1) => Some((
- ty::Binder::dummy(ty::PredicateKind::TypeOutlives(
- ty::OutlivesPredicate(ty1, *region2),
- ))
- .to_predicate(tcx),
+ ty::Clause::TypeOutlives(ty::OutlivesPredicate(ty1, *region2)),
span,
)),
GenericArgKind::Lifetime(region1) => Some((
- ty::Binder::dummy(ty::PredicateKind::RegionOutlives(
- ty::OutlivesPredicate(region1, *region2),
- ))
- .to_predicate(tcx),
+ ty::Clause::RegionOutlives(ty::OutlivesPredicate(region1, *region2)),
span,
)),
GenericArgKind::Const(_) => {
diff --git a/compiler/rustc_hir_analysis/src/structured_errors/wrong_number_of_generic_args.rs b/compiler/rustc_hir_analysis/src/structured_errors/wrong_number_of_generic_args.rs
index 435912464..4451db19f 100644
--- a/compiler/rustc_hir_analysis/src/structured_errors/wrong_number_of_generic_args.rs
+++ b/compiler/rustc_hir_analysis/src/structured_errors/wrong_number_of_generic_args.rs
@@ -296,25 +296,35 @@ impl<'a, 'tcx> WrongNumberOfGenericArgs<'a, 'tcx> {
) -> String {
debug!(?path_hir_id);
+ // If there was already a lifetime among the arguments, just replicate that one.
+ if let Some(lt) = self.gen_args.args.iter().find_map(|arg| match arg {
+ hir::GenericArg::Lifetime(lt) => Some(lt),
+ _ => None,
+ }) {
+ return std::iter::repeat(lt.to_string())
+ .take(num_params_to_take)
+ .collect::<Vec<_>>()
+ .join(", ");
+ }
+
let mut ret = Vec::new();
+ let mut ty_id = None;
for (id, node) in self.tcx.hir().parent_iter(path_hir_id) {
debug!(?id);
- let params = if let Some(generics) = node.generics() {
- generics.params
- } else if let hir::Node::Ty(ty) = node
- && let hir::TyKind::BareFn(bare_fn) = ty.kind
- {
- bare_fn.generic_params
- } else {
- &[]
- };
- ret.extend(params.iter().filter_map(|p| {
- let hir::GenericParamKind::Lifetime { kind: hir::LifetimeParamKind::Explicit }
- = p.kind
- else { return None };
- let hir::ParamName::Plain(name) = p.name else { return None };
- Some(name.to_string())
- }));
+ if let hir::Node::Ty(_) = node {
+ ty_id = Some(id);
+ }
+
+ // Suggest `'_` when in function parameter or elided function return.
+ if let Some(fn_decl) = node.fn_decl() && let Some(ty_id) = ty_id {
+ let in_arg = fn_decl.inputs.iter().any(|t| t.hir_id == ty_id);
+ let in_ret = matches!(fn_decl.output, hir::FnRetTy::Return(ty) if ty.hir_id == ty_id);
+
+ if in_arg || (in_ret && fn_decl.lifetime_elision_allowed) {
+ return std::iter::repeat("'_".to_owned()).take(num_params_to_take).collect::<Vec<_>>().join(", ");
+ }
+ }
+
// Suggest `'static` when in const/static item-like.
if let hir::Node::Item(hir::Item {
kind: hir::ItemKind::Static { .. } | hir::ItemKind::Const { .. },
@@ -334,11 +344,29 @@ impl<'a, 'tcx> WrongNumberOfGenericArgs<'a, 'tcx> {
})
| hir::Node::AnonConst(..) = node
{
- ret.extend(
- std::iter::repeat("'static".to_owned())
- .take(num_params_to_take.saturating_sub(ret.len())),
- );
+ return std::iter::repeat("'static".to_owned())
+ .take(num_params_to_take.saturating_sub(ret.len()))
+ .collect::<Vec<_>>()
+ .join(", ");
}
+
+ let params = if let Some(generics) = node.generics() {
+ generics.params
+ } else if let hir::Node::Ty(ty) = node
+ && let hir::TyKind::BareFn(bare_fn) = ty.kind
+ {
+ bare_fn.generic_params
+ } else {
+ &[]
+ };
+ ret.extend(params.iter().filter_map(|p| {
+ let hir::GenericParamKind::Lifetime { kind: hir::LifetimeParamKind::Explicit }
+ = p.kind
+ else { return None };
+ let hir::ParamName::Plain(name) = p.name else { return None };
+ Some(name.to_string())
+ }));
+
if ret.len() >= num_params_to_take {
return ret[..num_params_to_take].join(", ");
}
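// Illustrative only (types invented): the cases the lifetime-argument suggestion
// now distinguishes. Given `struct Wrapper<'a, T>(&'a T);` with a missing lifetime
// argument, it suggests `'_` in fn parameters and elided returns, `'static` inside
// `static`/`const` items, and otherwise replicates a lifetime that was already
// written among the arguments.
struct Wrapper<'a, T>(&'a T);
fn param(w: Wrapper<'_, u32>) -> u32 { *w.0 }           // after accepting the `'_` suggestion
static NONE_YET: Option<Wrapper<'static, u32>> = None;  // after accepting `'static`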
@@ -728,7 +756,8 @@ impl<'a, 'tcx> WrongNumberOfGenericArgs<'a, 'tcx> {
&& let Some(trait_path_segment) = path.segments.get(0) {
let num_generic_args_supplied_to_trait = trait_path_segment.args().num_generic_params();
- if num_assoc_fn_excess_args == num_trait_generics_except_self - num_generic_args_supplied_to_trait {
+ if num_generic_args_supplied_to_trait + num_assoc_fn_excess_args == num_trait_generics_except_self
+ {
if let Some(span) = self.gen_args.span_ext()
&& let Ok(snippet) = self.tcx.sess.source_map().span_to_snippet(span) {
let sugg = vec![
diff --git a/compiler/rustc_hir_analysis/src/variance/constraints.rs b/compiler/rustc_hir_analysis/src/variance/constraints.rs
index eaf0310d5..6ce0c18bf 100644
--- a/compiler/rustc_hir_analysis/src/variance/constraints.rs
+++ b/compiler/rustc_hir_analysis/src/variance/constraints.rs
@@ -72,8 +72,8 @@ pub fn add_constraints_from_crate<'a, 'tcx>(
let adt = tcx.adt_def(def_id);
for variant in adt.variants() {
- if let Some(ctor) = variant.ctor_def_id {
- constraint_cx.build_constraints_for_item(ctor.expect_local());
+ if let Some(ctor_def_id) = variant.ctor_def_id() {
+ constraint_cx.build_constraints_for_item(ctor_def_id.expect_local());
}
}
}
diff --git a/compiler/rustc_hir_analysis/src/variance/mod.rs b/compiler/rustc_hir_analysis/src/variance/mod.rs
index 82103c5a0..8b2719c2f 100644
--- a/compiler/rustc_hir_analysis/src/variance/mod.rs
+++ b/compiler/rustc_hir_analysis/src/variance/mod.rs
@@ -5,9 +5,11 @@
use rustc_arena::DroplessArena;
use rustc_hir::def::DefKind;
-use rustc_hir::def_id::DefId;
+use rustc_hir::def_id::{DefId, LocalDefId};
use rustc_middle::ty::query::Providers;
-use rustc_middle::ty::{self, CrateVariancesMap, TyCtxt};
+use rustc_middle::ty::{self, CrateVariancesMap, SubstsRef, Ty, TyCtxt};
+use rustc_middle::ty::{DefIdTree, TypeSuperVisitable, TypeVisitable};
+use std::ops::ControlFlow;
/// Defines the `TermsContext` basically houses an arena where we can
/// allocate terms.
@@ -50,6 +52,9 @@ fn variances_of(tcx: TyCtxt<'_>, item_def_id: DefId) -> &[ty::Variance] {
| DefKind::Union
| DefKind::Variant
| DefKind::Ctor(..) => {}
+ DefKind::OpaqueTy | DefKind::ImplTraitPlaceholder => {
+ return variance_of_opaque(tcx, item_def_id.expect_local());
+ }
_ => {
// Variance not relevant.
span_bug!(tcx.def_span(item_def_id), "asked to compute variance for wrong kind of item")
@@ -61,3 +66,125 @@ fn variances_of(tcx: TyCtxt<'_>, item_def_id: DefId) -> &[ty::Variance] {
let crate_map = tcx.crate_variances(());
crate_map.variances.get(&item_def_id).copied().unwrap_or(&[])
}
+
+#[instrument(level = "trace", skip(tcx), ret)]
+fn variance_of_opaque(tcx: TyCtxt<'_>, item_def_id: LocalDefId) -> &[ty::Variance] {
+ let generics = tcx.generics_of(item_def_id);
+
+ // Opaque types may only use regions that are bound. So for
+ // ```rust
+ // type Foo<'a, 'b, 'c> = impl Trait<'a> + 'b;
+ // ```
+ // we may not use `'c` in the hidden type.
+ struct OpaqueTypeLifetimeCollector<'tcx> {
+ tcx: TyCtxt<'tcx>,
+ root_def_id: DefId,
+ variances: Vec<ty::Variance>,
+ }
+
+ impl<'tcx> OpaqueTypeLifetimeCollector<'tcx> {
+ #[instrument(level = "trace", skip(self), ret)]
+ fn visit_opaque(&mut self, def_id: DefId, substs: SubstsRef<'tcx>) -> ControlFlow<!> {
+ if def_id != self.root_def_id && self.tcx.is_descendant_of(def_id, self.root_def_id) {
+ let child_variances = self.tcx.variances_of(def_id);
+ for (a, v) in substs.iter().zip(child_variances) {
+ if *v != ty::Bivariant {
+ a.visit_with(self)?;
+ }
+ }
+ ControlFlow::CONTINUE
+ } else {
+ substs.visit_with(self)
+ }
+ }
+ }
+
+ impl<'tcx> ty::TypeVisitor<'tcx> for OpaqueTypeLifetimeCollector<'tcx> {
+ #[instrument(level = "trace", skip(self), ret)]
+ fn visit_region(&mut self, r: ty::Region<'tcx>) -> ControlFlow<Self::BreakTy> {
+ if let ty::RegionKind::ReEarlyBound(ebr) = r.kind() {
+ self.variances[ebr.index as usize] = ty::Invariant;
+ }
+ r.super_visit_with(self)
+ }
+
+ #[instrument(level = "trace", skip(self), ret)]
+ fn visit_ty(&mut self, t: Ty<'tcx>) -> ControlFlow<Self::BreakTy> {
+ match t.kind() {
+ ty::Opaque(def_id, substs) => self.visit_opaque(*def_id, substs),
+ ty::Projection(proj)
+ if self.tcx.def_kind(proj.item_def_id) == DefKind::ImplTraitPlaceholder =>
+ {
+ self.visit_opaque(proj.item_def_id, proj.substs)
+ }
+ _ => t.super_visit_with(self),
+ }
+ }
+ }
+
+ // By default, RPITs are invariant wrt type and const generics, but they are bivariant wrt
+ // lifetime generics.
+ let mut variances: Vec<_> = std::iter::repeat(ty::Invariant).take(generics.count()).collect();
+
+ // Mark all lifetimes from parent generics as unused (Bivariant).
+ // This will be overridden later if required.
+ {
+ let mut generics = generics;
+ while let Some(def_id) = generics.parent {
+ generics = tcx.generics_of(def_id);
+ for param in &generics.params {
+ match param.kind {
+ ty::GenericParamDefKind::Lifetime => {
+ variances[param.index as usize] = ty::Bivariant;
+ }
+ ty::GenericParamDefKind::Type { .. }
+ | ty::GenericParamDefKind::Const { .. } => {}
+ }
+ }
+ }
+ }
+
+ let mut collector =
+ OpaqueTypeLifetimeCollector { tcx, root_def_id: item_def_id.to_def_id(), variances };
+ let id_substs = ty::InternalSubsts::identity_for_item(tcx, item_def_id.to_def_id());
+ for pred in tcx.bound_explicit_item_bounds(item_def_id.to_def_id()).transpose_iter() {
+ let pred = pred.map_bound(|(pred, _)| *pred).subst(tcx, id_substs);
+ debug!(?pred);
+
+ // We only ignore opaque type substs if the opaque type is the outermost type.
+ // The opaque type may be nested within itself via recursion in e.g.
+ // type Foo<'a> = impl PartialEq<Foo<'a>>;
+ // which mentions `'a` and should thus accept hidden types that borrow `'a`
+ // instead of requiring an additional `+ 'a`.
+ match pred.kind().skip_binder() {
+ ty::PredicateKind::Clause(ty::Clause::Trait(ty::TraitPredicate {
+ trait_ref: ty::TraitRef { def_id: _, substs },
+ constness: _,
+ polarity: _,
+ })) => {
+ for subst in &substs[1..] {
+ subst.visit_with(&mut collector);
+ }
+ }
+ ty::PredicateKind::Clause(ty::Clause::Projection(ty::ProjectionPredicate {
+ projection_ty: ty::ProjectionTy { substs, item_def_id: _ },
+ term,
+ })) => {
+ for subst in &substs[1..] {
+ subst.visit_with(&mut collector);
+ }
+ term.visit_with(&mut collector);
+ }
+ ty::PredicateKind::Clause(ty::Clause::TypeOutlives(ty::OutlivesPredicate(
+ _,
+ region,
+ ))) => {
+ region.visit_with(&mut collector);
+ }
+ _ => {
+ pred.visit_with(&mut collector);
+ }
+ }
+ }
+ tcx.arena.alloc_from_iter(collector.variances.into_iter())
+}
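// Illustrative sketch (not part of the patch): what the computed variances mean.
// The opaque return type mentions `'a` (via `+ 'a`), so it is invariant in `'a`;
// `'b` is never mentioned, so it stays bivariant and the hidden type is not
// forced to outlive `'b`.
fn capture<'a, 'b>(x: &'a u32, _unused: &'b u32) -> impl Sized + 'a {
    x
}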
diff --git a/compiler/rustc_hir_analysis/src/variance/terms.rs b/compiler/rustc_hir_analysis/src/variance/terms.rs
index 1f763011e..3b286bb9c 100644
--- a/compiler/rustc_hir_analysis/src/variance/terms.rs
+++ b/compiler/rustc_hir_analysis/src/variance/terms.rs
@@ -42,22 +42,22 @@ impl<'a> fmt::Debug for VarianceTerm<'a> {
}
}
-// The first pass over the crate simply builds up the set of inferreds.
+/// The first pass over the crate simply builds up the set of inferreds.
pub struct TermsContext<'a, 'tcx> {
pub tcx: TyCtxt<'tcx>,
pub arena: &'a DroplessArena,
- // For marker types, UnsafeCell, and other lang items where
- // variance is hardcoded, records the item-id and the hardcoded
- // variance.
+ /// For marker types, `UnsafeCell`, and other lang items where
+ /// variance is hardcoded, records the item-id and the hardcoded
+ /// variance.
pub lang_items: Vec<(LocalDefId, Vec<ty::Variance>)>,
- // Maps from the node id of an item to the first inferred index
- // used for its type & region parameters.
+ /// Maps from the node id of an item to the first inferred index
+ /// used for its type & region parameters.
pub inferred_starts: LocalDefIdMap<InferredIndex>,
- // Maps from an InferredIndex to the term for that variable.
+ /// Maps from an InferredIndex to the term for that variable.
pub inferred_terms: Vec<VarianceTermPtr<'a>>,
}
@@ -91,8 +91,8 @@ pub fn determine_parameters_to_be_inferred<'a, 'tcx>(
let adt = tcx.adt_def(def_id);
for variant in adt.variants() {
- if let Some(ctor) = variant.ctor_def_id {
- terms_cx.add_inferreds_for_item(ctor.expect_local());
+ if let Some(ctor_def_id) = variant.ctor_def_id() {
+ terms_cx.add_inferreds_for_item(ctor_def_id.expect_local());
}
}
}
diff --git a/compiler/rustc_hir_pretty/src/lib.rs b/compiler/rustc_hir_pretty/src/lib.rs
index da27554a2..94bab9f33 100644
--- a/compiler/rustc_hir_pretty/src/lib.rs
+++ b/compiler/rustc_hir_pretty/src/lib.rs
@@ -398,7 +398,7 @@ impl<'a> State<'a> {
}
hir::ForeignItemKind::Static(t, m) => {
self.head("static");
- if m == hir::Mutability::Mut {
+ if m.is_mut() {
self.word_space("mut");
}
self.print_ident(item.ident);
@@ -519,7 +519,7 @@ impl<'a> State<'a> {
}
hir::ItemKind::Static(ty, m, expr) => {
self.head("static");
- if m == hir::Mutability::Mut {
+ if m.is_mut() {
self.word_space("mut");
}
self.print_ident(item.ident);
@@ -695,19 +695,8 @@ impl<'a> State<'a> {
self.head("trait");
self.print_ident(item.ident);
self.print_generic_params(generics.params);
- let mut real_bounds = Vec::with_capacity(bounds.len());
- // FIXME(durka) this seems to be some quite outdated syntax
- for b in bounds {
- if let GenericBound::Trait(ptr, hir::TraitBoundModifier::Maybe) = b {
- self.space();
- self.word_space("for ?");
- self.print_trait_ref(&ptr.trait_ref);
- } else {
- real_bounds.push(b);
- }
- }
self.nbsp();
- self.print_bounds("=", real_bounds);
+ self.print_bounds("=", bounds);
self.print_where_clause(generics);
self.word(";");
self.end(); // end inner head-block
@@ -754,7 +743,7 @@ impl<'a> State<'a> {
for v in variants {
self.space_if_not_bol();
self.maybe_print_comment(v.span.lo());
- self.print_outer_attributes(self.attrs(v.id));
+ self.print_outer_attributes(self.attrs(v.hir_id));
self.ibox(INDENT_UNIT);
self.print_variant(v);
self.word(",");
@@ -1480,7 +1469,9 @@ impl<'a> State<'a> {
fn_decl,
body,
fn_decl_span: _,
+ fn_arg_span: _,
movability: _,
+ def_id: _,
}) => {
self.print_closure_binder(binder, bound_generic_params);
self.print_capture_clause(capture_clause);
@@ -1590,7 +1581,7 @@ impl<'a> State<'a> {
self.print_ident(Ident::with_dummy_span(name))
}
- pub fn print_path(&mut self, path: &hir::Path<'_>, colons_before_params: bool) {
+ pub fn print_path<R>(&mut self, path: &hir::Path<'_, R>, colons_before_params: bool) {
self.maybe_print_comment(path.span.lo());
for (i, segment) in path.segments.iter().enumerate() {
@@ -2158,7 +2149,7 @@ impl<'a> State<'a> {
}
pub fn print_lifetime(&mut self, lifetime: &hir::Lifetime) {
- self.print_ident(lifetime.name.ident())
+ self.print_ident(lifetime.ident)
}
pub fn print_where_clause(&mut self, generics: &hir::Generics<'_>) {
diff --git a/compiler/rustc_hir_typeck/src/_match.rs b/compiler/rustc_hir_typeck/src/_match.rs
index 2b15d4dcd..e25a9e903 100644
--- a/compiler/rustc_hir_typeck/src/_match.rs
+++ b/compiler/rustc_hir_typeck/src/_match.rs
@@ -1,10 +1,10 @@
use crate::coercion::{AsCoercionSite, CoerceMany};
use crate::{Diverges, Expectation, FnCtxt, Needs};
-use rustc_errors::{Applicability, MultiSpan};
+use rustc_errors::{Applicability, Diagnostic, MultiSpan};
use rustc_hir::{self as hir, ExprKind};
use rustc_infer::infer::type_variable::{TypeVariableOrigin, TypeVariableOriginKind};
use rustc_infer::traits::Obligation;
-use rustc_middle::ty::{self, ToPredicate, Ty};
+use rustc_middle::ty::{self, Ty};
use rustc_span::Span;
use rustc_trait_selection::traits::query::evaluate_obligation::InferCtxtExt;
use rustc_trait_selection::traits::{
@@ -137,55 +137,13 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
Some(&arm.body),
arm_ty,
Some(&mut |err| {
- let Some(ret) = self
- .tcx
- .hir()
- .find_by_def_id(self.body_id.owner.def_id)
- .and_then(|owner| owner.fn_decl())
- .map(|decl| decl.output.span())
- else { return; };
- let Expectation::IsLast(stmt) = orig_expected else {
- return
- };
- let can_coerce_to_return_ty = match self.ret_coercion.as_ref() {
- Some(ret_coercion) if self.in_tail_expr => {
- let ret_ty = ret_coercion.borrow().expected_ty();
- let ret_ty = self.inh.infcx.shallow_resolve(ret_ty);
- self.can_coerce(arm_ty, ret_ty)
- && prior_arm.map_or(true, |(_, t, _)| self.can_coerce(t, ret_ty))
- // The match arms need to unify for the case of `impl Trait`.
- && !matches!(ret_ty.kind(), ty::Opaque(..))
- }
- _ => false,
- };
- if !can_coerce_to_return_ty {
- return;
- }
-
- let semi_span = expr.span.shrink_to_hi().with_hi(stmt.hi());
- let mut ret_span: MultiSpan = semi_span.into();
- ret_span.push_span_label(
- expr.span,
- "this could be implicitly returned but it is a statement, not a \
- tail expression",
- );
- ret_span
- .push_span_label(ret, "the `match` arms can conform to this return type");
- ret_span.push_span_label(
- semi_span,
- "the `match` is a statement because of this semicolon, consider \
- removing it",
- );
- err.span_note(
- ret_span,
- "you might have meant to return the `match` expression",
- );
- err.tool_only_span_suggestion(
- semi_span,
- "remove this semicolon",
- "",
- Applicability::MaybeIncorrect,
- );
+ self.suggest_removing_semicolon_for_coerce(
+ err,
+ expr,
+ orig_expected,
+ arm_ty,
+ prior_arm,
+ )
}),
false,
);
@@ -219,6 +177,71 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
coercion.complete(self)
}
+ fn suggest_removing_semicolon_for_coerce(
+ &self,
+ diag: &mut Diagnostic,
+ expr: &hir::Expr<'tcx>,
+ expectation: Expectation<'tcx>,
+ arm_ty: Ty<'tcx>,
+ prior_arm: Option<(Option<hir::HirId>, Ty<'tcx>, Span)>,
+ ) {
+ let hir = self.tcx.hir();
+
+ // First, check that we're actually in the tail of a function.
+ let hir::Node::Expr(hir::Expr { kind: hir::ExprKind::Block(block, _), .. }) =
+ hir.get(self.body_id) else { return; };
+ let Some(hir::Stmt { kind: hir::StmtKind::Semi(last_expr), .. })
+ = block.innermost_block().stmts.last() else { return; };
+ if last_expr.hir_id != expr.hir_id {
+ return;
+ }
+
+ // Next, make sure that we have no type expectation.
+ let Some(ret) = hir
+ .find_by_def_id(self.body_id.owner.def_id)
+ .and_then(|owner| owner.fn_decl())
+ .map(|decl| decl.output.span()) else { return; };
+ let Expectation::IsLast(stmt) = expectation else {
+ return;
+ };
+
+ let can_coerce_to_return_ty = match self.ret_coercion.as_ref() {
+ Some(ret_coercion) => {
+ let ret_ty = ret_coercion.borrow().expected_ty();
+ let ret_ty = self.inh.infcx.shallow_resolve(ret_ty);
+ self.can_coerce(arm_ty, ret_ty)
+ && prior_arm.map_or(true, |(_, ty, _)| self.can_coerce(ty, ret_ty))
+ // The match arms need to unify for the case of `impl Trait`.
+ && !matches!(ret_ty.kind(), ty::Opaque(..))
+ }
+ _ => false,
+ };
+ if !can_coerce_to_return_ty {
+ return;
+ }
+
+ let semi_span = expr.span.shrink_to_hi().with_hi(stmt.hi());
+ let mut ret_span: MultiSpan = semi_span.into();
+ ret_span.push_span_label(
+ expr.span,
+ "this could be implicitly returned but it is a statement, not a \
+ tail expression",
+ );
+ ret_span.push_span_label(ret, "the `match` arms can conform to this return type");
+ ret_span.push_span_label(
+ semi_span,
+ "the `match` is a statement because of this semicolon, consider \
+ removing it",
+ );
+ diag.span_note(ret_span, "you might have meant to return the `match` expression");
+ diag.tool_only_span_suggestion(
+ semi_span,
+ "remove this semicolon",
+ "",
+ Applicability::MaybeIncorrect,
+ );
+ }
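// Illustrative only: the shape of code the extracted suggestion targets. As written
// this fails to type-check (the `match` is a statement, so the body evaluates to
// `()`), but every arm could coerce to the declared return type, so the diagnostic
// points at the trailing semicolon.
fn classify(n: i32) -> &'static str {
    match n {
        0 => "zero",
        _ => "nonzero",
    }; // removing this semicolon makes the `match` the tail expression
}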
+
/// When the previously checked expression (the scrutinee) diverges,
/// warn the user about the match arms being unreachable.
fn warn_arms_when_scrutinee_diverges(&self, arms: &'tcx [hir::Arm<'tcx>]) {
@@ -491,11 +514,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
..
} = self.type_var_origin(expected)? else { return None; };
- let sig = *self
- .typeck_results
- .borrow()
- .liberated_fn_sigs()
- .get(hir::HirId::make_owner(self.body_id.owner.def_id))?;
+ let sig = self.body_fn_sig()?;
let substs = sig.output().walk().find_map(|arg| {
if let ty::GenericArgKind::Type(ty) = arg.unpack()
@@ -519,23 +538,25 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
.bound_explicit_item_bounds(rpit_def_id)
.subst_iter_copied(self.tcx, substs)
{
- let pred = match pred.kind().skip_binder() {
- ty::PredicateKind::Trait(mut trait_pred) => {
+ let pred = pred.kind().rebind(match pred.kind().skip_binder() {
+ ty::PredicateKind::Clause(ty::Clause::Trait(trait_pred)) => {
assert_eq!(trait_pred.trait_ref.self_ty(), opaque_ty);
- trait_pred.trait_ref.substs =
- self.tcx.mk_substs_trait(ty, &trait_pred.trait_ref.substs[1..]);
- pred.kind().rebind(trait_pred).to_predicate(self.tcx)
+ ty::PredicateKind::Clause(ty::Clause::Trait(
+ trait_pred.with_self_type(self.tcx, ty),
+ ))
}
- ty::PredicateKind::Projection(mut proj_pred) => {
+ ty::PredicateKind::Clause(ty::Clause::Projection(mut proj_pred)) => {
assert_eq!(proj_pred.projection_ty.self_ty(), opaque_ty);
- proj_pred.projection_ty.substs = self
- .tcx
- .mk_substs_trait(ty, &proj_pred.projection_ty.substs[1..]);
- pred.kind().rebind(proj_pred).to_predicate(self.tcx)
+ proj_pred.projection_ty.substs = self.tcx.mk_substs_trait(
+ ty,
+ proj_pred.projection_ty.substs.iter().skip(1),
+ );
+ ty::PredicateKind::Clause(ty::Clause::Projection(proj_pred))
}
_ => continue,
- };
+ });
if !self.predicate_must_hold_modulo_regions(&Obligation::new(
+ self.tcx,
ObligationCause::misc(span, self.body_id),
self.param_env,
pred,
@@ -553,8 +574,5 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
}
fn arms_contain_ref_bindings<'tcx>(arms: &'tcx [hir::Arm<'tcx>]) -> Option<hir::Mutability> {
- arms.iter().filter_map(|a| a.pat.contains_explicit_ref_binding()).max_by_key(|m| match *m {
- hir::Mutability::Mut => 1,
- hir::Mutability::Not => 0,
- })
+ arms.iter().filter_map(|a| a.pat.contains_explicit_ref_binding()).max()
}
diff --git a/compiler/rustc_hir_typeck/src/autoderef.rs b/compiler/rustc_hir_typeck/src/autoderef.rs
index 59c366ad7..41b52a4c4 100644
--- a/compiler/rustc_hir_typeck/src/autoderef.rs
+++ b/compiler/rustc_hir_typeck/src/autoderef.rs
@@ -12,18 +12,7 @@ use std::iter;
impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
pub fn autoderef(&'a self, span: Span, base_ty: Ty<'tcx>) -> Autoderef<'a, 'tcx> {
- Autoderef::new(self, self.param_env, self.body_id, span, base_ty, span)
- }
-
- /// Like `autoderef`, but provides a custom `Span` to use for calls to
- /// an overloaded `Deref` operator
- pub fn autoderef_overloaded_span(
- &'a self,
- span: Span,
- base_ty: Ty<'tcx>,
- overloaded_span: Span,
- ) -> Autoderef<'a, 'tcx> {
- Autoderef::new(self, self.param_env, self.body_id, span, base_ty, overloaded_span)
+ Autoderef::new(self, self.param_env, self.body_id, span, base_ty)
}
pub fn try_overloaded_deref(
@@ -55,11 +44,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
|InferOk { value: method, obligations: o }| {
obligations.extend(o);
if let ty::Ref(region, _, mutbl) = *method.sig.output().kind() {
- Some(OverloadedDeref {
- region,
- mutbl,
- span: autoderef.overloaded_span(),
- })
+ Some(OverloadedDeref { region, mutbl, span: autoderef.span() })
} else {
None
}
diff --git a/compiler/rustc_hir_typeck/src/callee.rs b/compiler/rustc_hir_typeck/src/callee.rs
index 1b33f2f02..b09ddf80e 100644
--- a/compiler/rustc_hir_typeck/src/callee.rs
+++ b/compiler/rustc_hir_typeck/src/callee.rs
@@ -6,7 +6,7 @@ use crate::type_error_struct;
use rustc_ast::util::parser::PREC_POSTFIX;
use rustc_errors::{struct_span_err, Applicability, Diagnostic, StashKey};
use rustc_hir as hir;
-use rustc_hir::def::{self, Namespace, Res};
+use rustc_hir::def::{self, CtorKind, Namespace, Res};
use rustc_hir::def_id::DefId;
use rustc_infer::{
infer,
@@ -30,7 +30,7 @@ use rustc_trait_selection::infer::InferCtxtExt as _;
use rustc_trait_selection::traits::error_reporting::DefIdOrName;
use rustc_trait_selection::traits::query::evaluate_obligation::InferCtxtExt as _;
-use std::iter;
+use std::{iter, slice};
/// Checks that it is legal to call methods of the trait corresponding
/// to `trait_id` (this only cares about the trait, not the specific
@@ -129,6 +129,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
output
}
+ #[instrument(level = "debug", skip(self, call_expr, callee_expr, arg_exprs, autoderef), ret)]
fn try_overloaded_call_step(
&self,
call_expr: &'tcx hir::Expr<'tcx>,
@@ -138,10 +139,6 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
) -> Option<CallStep<'tcx>> {
let adjusted_ty =
self.structurally_resolved_type(autoderef.span(), autoderef.final_ty(false));
- debug!(
- "try_overloaded_call_step(call_expr={:?}, adjusted_ty={:?})",
- call_expr, adjusted_ty
- );
// If the callee is a bare function or a closure, then we're all set.
match *adjusted_ty.kind() {
@@ -182,12 +179,16 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
// Hack: we know that there are traits implementing Fn for &F
// where F:Fn and so forth. In the particular case of types
- // like `x: &mut FnMut()`, if there is a call `x()`, we would
- // normally translate to `FnMut::call_mut(&mut x, ())`, but
- // that winds up requiring `mut x: &mut FnMut()`. A little
- // over the top. The simplest fix by far is to just ignore
- // this case and deref again, so we wind up with
- // `FnMut::call_mut(&mut *x, ())`.
+ // like `f: &mut FnMut()`, if there is a call `f()`, we would
+ // normally translate to `FnMut::call_mut(&mut f, ())`, but
+ // that winds up potentially requiring the user to mark their
+ // variable as `mut` which feels unnecessary and unexpected.
+ //
+ // fn foo(f: &mut impl FnMut()) { f() }
+ // ^ without this hack `f` would have to be declared as mutable
+ //
+ // The simplest fix by far is to just ignore this case and deref again,
+ // so we wind up with `FnMut::call_mut(&mut *f, ())`.
ty::Ref(..) if autoderef.step_count() == 0 => {
return None;
}
@@ -230,22 +231,21 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
] {
let Some(trait_def_id) = opt_trait_def_id else { continue };
- let opt_input_types = opt_arg_exprs.map(|arg_exprs| {
- [self.tcx.mk_tup(arg_exprs.iter().map(|e| {
+ let opt_input_type = opt_arg_exprs.map(|arg_exprs| {
+ self.tcx.mk_tup(arg_exprs.iter().map(|e| {
self.next_ty_var(TypeVariableOrigin {
kind: TypeVariableOriginKind::TypeInference,
span: e.span,
})
- }))]
+ }))
});
- let opt_input_types = opt_input_types.as_ref().map(AsRef::as_ref);
if let Some(ok) = self.lookup_method_in_trait(
call_expr.span,
method_name,
trait_def_id,
adjusted_ty,
- opt_input_types,
+ opt_input_type.as_ref().map(slice::from_ref),
) {
let method = self.register_infer_ok_obligations(ok);
let mut autoref = None;
@@ -261,15 +261,11 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
return None;
};
- let mutbl = match mutbl {
- hir::Mutability::Not => AutoBorrowMutability::Not,
- hir::Mutability::Mut => AutoBorrowMutability::Mut {
- // For initial two-phase borrow
- // deployment, conservatively omit
- // overloaded function call ops.
- allow_two_phase_borrow: AllowTwoPhase::No,
- },
- };
+ // For initial two-phase borrow
+ // deployment, conservatively omit
+ // overloaded function call ops.
+ let mutbl = AutoBorrowMutability::new(*mutbl, AllowTwoPhase::No);
+
autoref = Some(Adjustment {
kind: Adjust::Borrow(AutoBorrow::Ref(*region, mutbl)),
target: method.sig.inputs()[0],
@@ -383,6 +379,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
predicates.predicates.iter().zip(&predicates.spans)
{
let obligation = Obligation::new(
+ self.tcx,
ObligationCause::dummy_with_span(callee_expr.span),
self.param_env,
*predicate,
@@ -451,7 +448,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
// previously appeared within a `Binder<>` and hence would not
// have been normalized before.
let fn_sig = self.replace_bound_vars_with_fresh_vars(call_expr.span, infer::FnCall, fn_sig);
- let fn_sig = self.normalize_associated_types_in(call_expr.span, fn_sig);
+ let fn_sig = self.normalize(call_expr.span, fn_sig);
// Call the generic checker.
let expected_arg_tys = self.expected_inputs_for_expected_output(
@@ -471,6 +468,22 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
def_id,
);
+ if fn_sig.abi == abi::Abi::RustCall {
+ let sp = arg_exprs.last().map_or(call_expr.span, |expr| expr.span);
+ if let Some(ty) = fn_sig.inputs().last().copied() {
+ self.register_bound(
+ ty,
+ self.tcx.require_lang_item(hir::LangItem::Tuple, Some(sp)),
+ traits::ObligationCause::new(sp, self.body_id, traits::RustCall),
+ );
+ } else {
+ self.tcx.sess.span_err(
+ sp,
+ "functions with the \"rust-call\" ABI must take a single non-self tuple argument",
+ );
+ }
+ }
+
fn_sig.output()
}
@@ -491,7 +504,6 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
// method lookup.
let Ok(pick) = self
.probe_for_name(
- call_expr.span,
Mode::MethodCall,
segment.ident,
IsSuggestion(true),
@@ -582,7 +594,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
) {
let mut unit_variant = None;
if let hir::ExprKind::Path(qpath) = &callee_expr.kind
- && let Res::Def(def::DefKind::Ctor(kind, def::CtorKind::Const), _)
+ && let Res::Def(def::DefKind::Ctor(kind, CtorKind::Const), _)
= self.typeck_results.borrow().qpath_res(qpath, callee_expr.hir_id)
// Only suggest removing parens if there are no arguments
&& arg_exprs.is_empty()
diff --git a/compiler/rustc_hir_typeck/src/cast.rs b/compiler/rustc_hir_typeck/src/cast.rs
index d1dab0540..890a068a7 100644
--- a/compiler/rustc_hir_typeck/src/cast.rs
+++ b/compiler/rustc_hir_typeck/src/cast.rs
@@ -33,6 +33,7 @@ use super::FnCtxt;
use crate::type_error_struct;
use rustc_errors::{struct_span_err, Applicability, DelayDm, DiagnosticBuilder, ErrorGuaranteed};
use rustc_hir as hir;
+use rustc_macros::{TypeFoldable, TypeVisitable};
use rustc_middle::mir::Mutability;
use rustc_middle::ty::adjustment::AllowTwoPhase;
use rustc_middle::ty::cast::{CastKind, CastTy};
@@ -45,7 +46,6 @@ use rustc_span::def_id::{DefId, LOCAL_CRATE};
use rustc_span::symbol::sym;
use rustc_span::Span;
use rustc_trait_selection::infer::InferCtxtExt;
-use rustc_trait_selection::traits::error_reporting::report_object_safety_error;
/// Reifies a cast check to be checked once we have full type information for
/// a function context.
@@ -67,7 +67,7 @@ pub struct CastCheck<'tcx> {
/// The kind of pointer and associated metadata (thin, length or vtable) - we
/// only allow casts between fat pointers if their metadata have the same
/// kind.
-#[derive(Copy, Clone, PartialEq, Eq)]
+#[derive(Debug, Copy, Clone, PartialEq, Eq, TypeVisitable, TypeFoldable)]
enum PointerKind<'tcx> {
/// No metadata attached, ie pointer to sized type or foreign type
Thin,
@@ -76,11 +76,11 @@ enum PointerKind<'tcx> {
/// Slice
Length,
/// The unsize info of this projection
- OfProjection(&'tcx ty::ProjectionTy<'tcx>),
+ OfProjection(ty::ProjectionTy<'tcx>),
/// The unsize info of this opaque ty
OfOpaque(DefId, SubstsRef<'tcx>),
/// The unsize info of this parameter
- OfParam(&'tcx ty::ParamTy),
+ OfParam(ty::ParamTy),
}
impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
@@ -94,10 +94,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
debug!("pointer_kind({:?}, {:?})", t, span);
let t = self.resolve_vars_if_possible(t);
-
- if let Some(reported) = t.error_reported() {
- return Err(reported);
- }
+ t.error_reported()?;
if self.type_is_sized_modulo_regions(self.param_env, t, span) {
return Ok(Some(PointerKind::Thin));
@@ -121,9 +118,9 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
// Pointers to foreign types are thin, despite being unsized
ty::Foreign(..) => Some(PointerKind::Thin),
// We should really try to normalize here.
- ty::Projection(ref pi) => Some(PointerKind::OfProjection(pi)),
+ ty::Projection(pi) => Some(PointerKind::OfProjection(pi)),
ty::Opaque(def_id, substs) => Some(PointerKind::OfOpaque(def_id, substs)),
- ty::Param(ref p) => Some(PointerKind::OfParam(p)),
+ ty::Param(p) => Some(PointerKind::OfParam(p)),
// Insufficient type information.
ty::Placeholder(..) | ty::Bound(..) | ty::Infer(_) => None,
@@ -222,8 +219,7 @@ impl<'a, 'tcx> CastCheck<'tcx> {
// inference is more completely known.
match cast_ty.kind() {
ty::Dynamic(_, _, ty::Dyn) | ty::Slice(..) => {
- let reported = check.report_cast_to_unsized_type(fcx);
- Err(reported)
+ Err(check.report_cast_to_unsized_type(fcx))
}
_ => Ok(check),
}
@@ -501,10 +497,9 @@ impl<'a, 'tcx> CastCheck<'tcx> {
let ty = fcx.tcx.erase_regions(ty);
let expr_ty = fcx.resolve_vars_if_possible(self.expr_ty);
let expr_ty = fcx.tcx.erase_regions(expr_ty);
- let ty_params = fcx.tcx.mk_substs_trait(expr_ty, &[]);
if fcx
.infcx
- .type_implements_trait(from_trait, ty, ty_params, fcx.param_env)
+ .type_implements_trait(from_trait, [ty, expr_ty], fcx.param_env)
.must_apply_modulo_regions()
{
label = false;
@@ -614,10 +609,11 @@ impl<'a, 'tcx> CastCheck<'tcx> {
}
fn report_cast_to_unsized_type(&self, fcx: &FnCtxt<'a, 'tcx>) -> ErrorGuaranteed {
- if let Some(reported) =
- self.cast_ty.error_reported().or_else(|| self.expr_ty.error_reported())
- {
- return reported;
+ if let Err(err) = self.cast_ty.error_reported() {
+ return err;
+ }
+ if let Err(err) = self.expr_ty.error_reported() {
+ return err;
}
let tstr = fcx.ty_to_string(self.cast_ty);
@@ -730,9 +726,6 @@ impl<'a, 'tcx> CastCheck<'tcx> {
debug!(" -> CoercionCast");
fcx.typeck_results.borrow_mut().set_coercion_cast(self.expr.hir_id.local_id);
}
- Err(ty::error::TypeError::ObjectUnsafeCoercion(did)) => {
- self.report_object_unsafe_cast(&fcx, did);
- }
Err(_) => {
match self.do_check(fcx) {
Ok(k) => {
@@ -744,14 +737,6 @@ impl<'a, 'tcx> CastCheck<'tcx> {
};
}
}
-
- fn report_object_unsafe_cast(&self, fcx: &FnCtxt<'a, 'tcx>, did: DefId) {
- let violations = fcx.tcx.object_safety_violations(did);
- let mut err = report_object_safety_error(fcx.tcx, self.cast_span, did, violations);
- err.note(&format!("required by cast to type '{}'", fcx.ty_to_string(self.cast_ty)));
- err.emit();
- }
-
/// Checks a cast, and report an error if one exists. In some cases, this
/// can return Ok and create type errors in the fcx rather than returning
/// directly. coercion-cast is handled in check instead of here.
@@ -767,10 +752,7 @@ impl<'a, 'tcx> CastCheck<'tcx> {
match *self.expr_ty.kind() {
ty::FnDef(..) => {
// Attempt a coercion to a fn pointer type.
- let f = fcx.normalize_associated_types_in(
- self.expr_span,
- self.expr_ty.fn_sig(fcx.tcx),
- );
+ let f = fcx.normalize(self.expr_span, self.expr_ty.fn_sig(fcx.tcx));
let res = fcx.try_coerce(
self.expr,
self.expr_ty,
@@ -912,7 +894,7 @@ impl<'a, 'tcx> CastCheck<'tcx> {
}
// vtable kinds must match
- if cast_kind == expr_kind {
+ if fcx.tcx.erase_regions(cast_kind) == fcx.tcx.erase_regions(expr_kind) {
Ok(CastKind::PtrPtrCast)
} else {
Err(CastError::DifferingKinds)
@@ -954,7 +936,7 @@ impl<'a, 'tcx> CastCheck<'tcx> {
m_cast: ty::TypeAndMut<'tcx>,
) -> Result<CastKind, CastError> {
// array-ptr-cast: allow mut-to-mut, mut-to-const, const-to-const
- if m_expr.mutbl == hir::Mutability::Mut || m_cast.mutbl == hir::Mutability::Not {
+ if m_expr.mutbl >= m_cast.mutbl {
if let ty::Array(ety, _) = m_expr.ty.kind() {
// Due to the limitations of LLVM global constants,
// region pointers end up pointing at copies of
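// Illustrative only: the array-to-element-pointer casts the rewritten condition
// permits. Mutability may be kept or dropped, never added.
fn array_ptr_casts() {
    let mut a = [1u8, 2, 3];
    let _mm: *mut u8 = &mut a as *mut u8;     // mut -> mut: ok
    let _mc: *const u8 = &mut a as *const u8; // mut -> const: ok
    let b = [4u8, 5];
    let _cc: *const u8 = &b as *const u8;     // const -> const: ok
    // let _cm: *mut u8 = &b as *mut u8;      // const -> mut: rejected
}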
diff --git a/compiler/rustc_hir_typeck/src/check.rs b/compiler/rustc_hir_typeck/src/check.rs
index 7f76364e1..32f86b804 100644
--- a/compiler/rustc_hir_typeck/src/check.rs
+++ b/compiler/rustc_hir_typeck/src/check.rs
@@ -1,18 +1,16 @@
use crate::coercion::CoerceMany;
use crate::gather_locals::GatherLocalsVisitor;
-use crate::{FnCtxt, Inherited};
-use crate::{GeneratorTypes, UnsafetyState};
+use crate::FnCtxt;
+use crate::GeneratorTypes;
use rustc_hir as hir;
use rustc_hir::def::DefKind;
use rustc_hir::intravisit::Visitor;
use rustc_hir::lang_items::LangItem;
-use rustc_hir::{ImplicitSelfKind, ItemKind, Node};
use rustc_hir_analysis::check::fn_maybe_err;
use rustc_infer::infer::type_variable::{TypeVariableOrigin, TypeVariableOriginKind};
use rustc_infer::infer::RegionVariableOrigin;
use rustc_middle::ty::{self, Ty, TyCtxt};
use rustc_span::def_id::LocalDefId;
-use rustc_target::spec::abi::Abi;
use rustc_trait_selection::traits;
use std::cell::RefCell;
@@ -22,22 +20,16 @@ use std::cell::RefCell;
///
/// * ...
/// * inherited: other fields inherited from the enclosing fn (if any)
-#[instrument(skip(inherited, body), level = "debug")]
+#[instrument(skip(fcx, body), level = "debug")]
pub(super) fn check_fn<'a, 'tcx>(
- inherited: &'a Inherited<'tcx>,
- param_env: ty::ParamEnv<'tcx>,
+ fcx: &mut FnCtxt<'a, 'tcx>,
fn_sig: ty::FnSig<'tcx>,
decl: &'tcx hir::FnDecl<'tcx>,
- fn_id: hir::HirId,
+ fn_def_id: LocalDefId,
body: &'tcx hir::Body<'tcx>,
can_be_generator: Option<hir::Movability>,
- return_type_pre_known: bool,
-) -> (FnCtxt<'a, 'tcx>, Option<GeneratorTypes<'tcx>>) {
- // Create the function context. This is either derived from scratch or,
- // in the case of closures, based on the outer context.
- let mut fcx = FnCtxt::new(inherited, param_env, body.value.hir_id);
- fcx.ps.set(UnsafetyState::function(fn_sig.unsafety, fn_id));
- fcx.return_type_pre_known = return_type_pre_known;
+) -> Option<GeneratorTypes<'tcx>> {
+ let fn_id = fcx.tcx.hir().local_def_id_to_hir_id(fn_def_id);
let tcx = fcx.tcx;
let hir = tcx.hir();
@@ -49,11 +41,8 @@ pub(super) fn check_fn<'a, 'tcx>(
declared_ret_ty,
body.value.hir_id,
decl.output.span(),
- param_env,
+ fcx.param_env,
));
- // If we replaced declared_ret_ty with infer vars, then we must be inferring
- // an opaque type, so set a flag so we can improve diagnostics.
- fcx.return_type_has_opaque = ret_ty != declared_ret_ty;
fcx.ret_coercion = Some(RefCell::new(CoerceMany::new(ret_ty)));
@@ -61,45 +50,15 @@ pub(super) fn check_fn<'a, 'tcx>(
fn_maybe_err(tcx, span, fn_sig.abi);
- if fn_sig.abi == Abi::RustCall {
- let expected_args = if let ImplicitSelfKind::None = decl.implicit_self { 1 } else { 2 };
-
- let err = || {
- let item = match tcx.hir().get(fn_id) {
- Node::Item(hir::Item { kind: ItemKind::Fn(header, ..), .. }) => Some(header),
- Node::ImplItem(hir::ImplItem {
- kind: hir::ImplItemKind::Fn(header, ..), ..
- }) => Some(header),
- Node::TraitItem(hir::TraitItem {
- kind: hir::TraitItemKind::Fn(header, ..),
- ..
- }) => Some(header),
- // Closures are RustCall, but they tuple their arguments, so shouldn't be checked
- Node::Expr(hir::Expr { kind: hir::ExprKind::Closure { .. }, .. }) => None,
- node => bug!("Item being checked wasn't a function/closure: {:?}", node),
- };
-
- if let Some(header) = item {
- tcx.sess.span_err(header.span, "functions with the \"rust-call\" ABI must take a single non-self argument that is a tuple");
- }
- };
-
- if fn_sig.inputs().len() != expected_args {
- err()
+ if let Some(kind) = body.generator_kind && can_be_generator.is_some() {
+ let yield_ty = if kind == hir::GeneratorKind::Gen {
+ let yield_ty = fcx
+ .next_ty_var(TypeVariableOrigin { kind: TypeVariableOriginKind::TypeInference, span });
+ fcx.require_type_is_sized(yield_ty, span, traits::SizedYieldType);
+ yield_ty
} else {
- // FIXME(CraftSpider) Add a check on parameter expansion, so we don't just make the ICE happen later on
- // This will probably require wide-scale changes to support a TupleKind obligation
- // We can't resolve this without knowing the type of the param
- if !matches!(fn_sig.inputs()[expected_args - 1].kind(), ty::Tuple(_) | ty::Param(_)) {
- err()
- }
- }
- }
-
- if body.generator_kind.is_some() && can_be_generator.is_some() {
- let yield_ty = fcx
- .next_ty_var(TypeVariableOrigin { kind: TypeVariableOriginKind::TypeInference, span });
- fcx.require_type_is_sized(yield_ty, span, traits::SizedYieldType);
+ tcx.mk_unit()
+ };
// Resume type defaults to `()` if the generator has no argument.
let resume_ty = fn_sig.inputs().get(0).copied().unwrap_or_else(|| tcx.mk_unit());
@@ -140,10 +99,9 @@ pub(super) fn check_fn<'a, 'tcx>(
fcx.write_ty(param.hir_id, param_ty);
}
- inherited.typeck_results.borrow_mut().liberated_fn_sigs_mut().insert(fn_id, fn_sig);
+ fcx.typeck_results.borrow_mut().liberated_fn_sigs_mut().insert(fn_id, fn_sig);
- fcx.in_tail_expr = true;
- if let ty::Dynamic(..) = declared_ret_ty.kind() {
+ if let ty::Dynamic(_, _, ty::Dyn) = declared_ret_ty.kind() {
// FIXME: We need to verify that the return type is `Sized` after the return expression has
// been evaluated so that we have types available for all the nodes being returned, but that
// requires the coerced evaluated type to be stored. Moving `check_return_expr` before this
@@ -161,7 +119,6 @@ pub(super) fn check_fn<'a, 'tcx>(
fcx.require_type_is_sized(declared_ret_ty, decl.output.span(), traits::SizedReturnType);
fcx.check_return_expr(&body.value, false);
}
- fcx.in_tail_expr = false;
// We insert the deferred_generator_interiors entry after visiting the body.
// This ensures that all nested generators appear before the entry of this generator.
@@ -211,14 +168,7 @@ pub(super) fn check_fn<'a, 'tcx>(
check_panic_info_fn(tcx, panic_impl_did.expect_local(), fn_sig, decl, declared_ret_ty);
}
- // Check that a function marked as `#[alloc_error_handler]` has signature `fn(Layout) -> !`
- if let Some(alloc_error_handler_did) = tcx.lang_items().oom()
- && alloc_error_handler_did == hir.local_def_id(fn_id).to_def_id()
- {
- check_alloc_error_fn(tcx, alloc_error_handler_did.expect_local(), fn_sig, decl, declared_ret_ty);
- }
-
- (fcx, gen_ty)
+ gen_ty
}
fn check_panic_info_fn(
@@ -246,7 +196,7 @@ fn check_panic_info_fn(
let arg_is_panic_info = match *inputs[0].kind() {
ty::Ref(region, ty, mutbl) => match *ty.kind() {
ty::Adt(ref adt, _) => {
- adt.did() == panic_info_did && mutbl == hir::Mutability::Not && !region.is_static()
+ adt.did() == panic_info_did && mutbl.is_not() && !region.is_static()
}
_ => false,
},
@@ -273,52 +223,3 @@ fn check_panic_info_fn(
tcx.sess.span_err(span, "should have no const parameters");
}
}
-
-fn check_alloc_error_fn(
- tcx: TyCtxt<'_>,
- fn_id: LocalDefId,
- fn_sig: ty::FnSig<'_>,
- decl: &hir::FnDecl<'_>,
- declared_ret_ty: Ty<'_>,
-) {
- let Some(alloc_layout_did) = tcx.lang_items().alloc_layout() else {
- tcx.sess.err("language item required, but not found: `alloc_layout`");
- return;
- };
-
- if *declared_ret_ty.kind() != ty::Never {
- tcx.sess.span_err(decl.output.span(), "return type should be `!`");
- }
-
- let inputs = fn_sig.inputs();
- if inputs.len() != 1 {
- tcx.sess.span_err(tcx.def_span(fn_id), "function should have one argument");
- return;
- }
-
- let arg_is_alloc_layout = match inputs[0].kind() {
- ty::Adt(ref adt, _) => adt.did() == alloc_layout_did,
- _ => false,
- };
-
- if !arg_is_alloc_layout {
- tcx.sess.span_err(decl.inputs[0].span, "argument should be `Layout`");
- }
-
- let DefKind::Fn = tcx.def_kind(fn_id) else {
- let span = tcx.def_span(fn_id);
- tcx.sess.span_err(span, "`#[alloc_error_handler]` should be a function");
- return;
- };
-
- let generic_counts = tcx.generics_of(fn_id).own_counts();
- if generic_counts.types != 0 {
- let span = tcx.def_span(fn_id);
- tcx.sess.span_err(span, "`#[alloc_error_handler]` function should have no type parameters");
- }
- if generic_counts.consts != 0 {
- let span = tcx.def_span(fn_id);
- tcx.sess
- .span_err(span, "`#[alloc_error_handler]` function should have no const parameters");
- }
-}
diff --git a/compiler/rustc_hir_typeck/src/closure.rs b/compiler/rustc_hir_typeck/src/closure.rs
index a5a45f75e..429cb60ba 100644
--- a/compiler/rustc_hir_typeck/src/closure.rs
+++ b/compiler/rustc_hir_typeck/src/closure.rs
@@ -4,24 +4,26 @@ use super::{check_fn, Expectation, FnCtxt, GeneratorTypes};
use hir::def::DefKind;
use rustc_hir as hir;
-use rustc_hir::def_id::DefId;
+use rustc_hir::def_id::LocalDefId;
use rustc_hir::lang_items::LangItem;
use rustc_hir_analysis::astconv::AstConv;
use rustc_infer::infer::type_variable::{TypeVariableOrigin, TypeVariableOriginKind};
use rustc_infer::infer::LateBoundRegionConversionTime;
use rustc_infer::infer::{InferOk, InferResult};
+use rustc_macros::{TypeFoldable, TypeVisitable};
use rustc_middle::ty::subst::InternalSubsts;
use rustc_middle::ty::visit::TypeVisitable;
use rustc_middle::ty::{self, Ty};
use rustc_span::source_map::Span;
use rustc_target::spec::abi::Abi;
+use rustc_trait_selection::traits;
use rustc_trait_selection::traits::error_reporting::ArgKind;
use rustc_trait_selection::traits::error_reporting::InferCtxtExt as _;
use std::cmp;
use std::iter;
/// What signature do we *expect* the closure to have from context?
-#[derive(Debug)]
+#[derive(Debug, Clone, TypeFoldable, TypeVisitable)]
struct ExpectedSig<'tcx> {
/// Span that gave us this expectation, if we know that.
cause_span: Option<Span>,
@@ -33,24 +35,20 @@ struct ClosureSignatures<'tcx> {
bound_sig: ty::PolyFnSig<'tcx>,
/// The signature within the function body.
/// This mostly differs in the sense that lifetimes are now early bound and any
- /// opaque types from the signature expectation are overriden in case there are
+ /// opaque types from the signature expectation are overridden in case there are
/// explicit hidden types written by the user in the closure signature.
liberated_sig: ty::FnSig<'tcx>,
}
impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
- #[instrument(skip(self, expr, _capture, decl, body_id), level = "debug")]
+ #[instrument(skip(self, closure), level = "debug")]
pub fn check_expr_closure(
&self,
- expr: &hir::Expr<'_>,
- _capture: hir::CaptureBy,
- decl: &'tcx hir::FnDecl<'tcx>,
- body_id: hir::BodyId,
- gen: Option<hir::Movability>,
+ closure: &hir::Closure<'tcx>,
+ expr_span: Span,
expected: Expectation<'tcx>,
) -> Ty<'tcx> {
- trace!("decl = {:#?}", decl);
- trace!("expr = {:#?}", expr);
+ trace!("decl = {:#?}", closure.fn_decl);
// It's always helpful for inference if we know the kind of
// closure sooner rather than later, so first examine the expected
@@ -59,42 +57,37 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
Some(ty) => self.deduce_expectations_from_expected_type(ty),
None => (None, None),
};
- let body = self.tcx.hir().body(body_id);
- self.check_closure(expr, expected_kind, decl, body, gen, expected_sig)
+ let body = self.tcx.hir().body(closure.body);
+ self.check_closure(closure, expr_span, expected_kind, body, expected_sig)
}
- #[instrument(skip(self, expr, body, decl), level = "debug", ret)]
+ #[instrument(skip(self, closure, body), level = "debug", ret)]
fn check_closure(
&self,
- expr: &hir::Expr<'_>,
+ closure: &hir::Closure<'tcx>,
+ expr_span: Span,
opt_kind: Option<ty::ClosureKind>,
- decl: &'tcx hir::FnDecl<'tcx>,
body: &'tcx hir::Body<'tcx>,
- gen: Option<hir::Movability>,
expected_sig: Option<ExpectedSig<'tcx>>,
) -> Ty<'tcx> {
- trace!("decl = {:#?}", decl);
- let expr_def_id = self.tcx.hir().local_def_id(expr.hir_id);
+ trace!("decl = {:#?}", closure.fn_decl);
+ let expr_def_id = closure.def_id;
debug!(?expr_def_id);
let ClosureSignatures { bound_sig, liberated_sig } =
- self.sig_of_closure(expr.hir_id, expr_def_id.to_def_id(), decl, body, expected_sig);
+ self.sig_of_closure(expr_def_id, closure.fn_decl, body, expected_sig);
debug!(?bound_sig, ?liberated_sig);
- let return_type_pre_known = !liberated_sig.output().is_ty_infer();
-
+ let mut fcx = FnCtxt::new(self, self.param_env.without_const(), body.value.hir_id);
let generator_types = check_fn(
- self,
- self.param_env.without_const(),
+ &mut fcx,
liberated_sig,
- decl,
- expr.hir_id,
+ closure.fn_decl,
+ expr_def_id,
body,
- gen,
- return_type_pre_known,
- )
- .1;
+ closure.movability,
+ );
let parent_substs = InternalSubsts::identity_for_item(
self.tcx,
@@ -103,7 +96,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
let tupled_upvars_ty = self.next_ty_var(TypeVariableOrigin {
kind: TypeVariableOriginKind::ClosureSynthetic,
- span: self.tcx.hir().span(expr.hir_id),
+ span: self.tcx.def_span(expr_def_id),
});
if let Some(GeneratorTypes { resume_ty, yield_ty, interior, movability }) = generator_types
@@ -149,7 +142,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
None => self.next_ty_var(TypeVariableOrigin {
// FIXME(eddyb) distinguish closure kind inference variables from the rest.
kind: TypeVariableOriginKind::ClosureSynthetic,
- span: expr.span,
+ span: expr_span,
}),
};
@@ -174,34 +167,9 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
expected_ty: Ty<'tcx>,
) -> (Option<ExpectedSig<'tcx>>, Option<ty::ClosureKind>) {
match *expected_ty.kind() {
- ty::Opaque(def_id, substs) => {
- let bounds = self.tcx.bound_explicit_item_bounds(def_id);
- let sig =
- bounds.subst_iter_copied(self.tcx, substs).find_map(|(pred, span)| match pred
- .kind()
- .skip_binder()
- {
- ty::PredicateKind::Projection(proj_predicate) => self
- .deduce_sig_from_projection(
- Some(span),
- pred.kind().rebind(proj_predicate),
- ),
- _ => None,
- });
-
- let kind = bounds
- .0
- .iter()
- .filter_map(|(pred, _)| match pred.kind().skip_binder() {
- ty::PredicateKind::Trait(tp) => {
- self.tcx.fn_trait_kind_from_lang_item(tp.def_id())
- }
- _ => None,
- })
- .fold(None, |best, cur| Some(best.map_or(cur, |best| cmp::min(best, cur))));
- trace!(?sig, ?kind);
- (sig, kind)
- }
+ ty::Opaque(def_id, substs) => self.deduce_signature_from_predicates(
+ self.tcx.bound_explicit_item_bounds(def_id).subst_iter_copied(self.tcx, substs),
+ ),
ty::Dynamic(ref object_type, ..) => {
let sig = object_type.projection_bounds().find_map(|pb| {
let pb = pb.with_self_ty(self.tcx, self.tcx.types.trait_object_dummy_self);
@@ -209,10 +177,12 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
});
let kind = object_type
.principal_def_id()
- .and_then(|did| self.tcx.fn_trait_kind_from_lang_item(did));
+ .and_then(|did| self.tcx.fn_trait_kind_from_def_id(did));
(sig, kind)
}
- ty::Infer(ty::TyVar(vid)) => self.deduce_expectations_from_obligations(vid),
+ ty::Infer(ty::TyVar(vid)) => self.deduce_signature_from_predicates(
+ self.obligations_for_self_ty(vid).map(|obl| (obl.predicate, obl.cause.span)),
+ ),
ty::FnPtr(sig) => {
let expected_sig = ExpectedSig { cause_span: None, sig };
(Some(expected_sig), Some(ty::ClosureKind::Fn))
@@ -221,37 +191,57 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
}
}
- fn deduce_expectations_from_obligations(
+ fn deduce_signature_from_predicates(
&self,
- expected_vid: ty::TyVid,
+ predicates: impl DoubleEndedIterator<Item = (ty::Predicate<'tcx>, Span)>,
) -> (Option<ExpectedSig<'tcx>>, Option<ty::ClosureKind>) {
- let expected_sig =
- self.obligations_for_self_ty(expected_vid).find_map(|(_, obligation)| {
- debug!(?obligation.predicate);
-
- let bound_predicate = obligation.predicate.kind();
- if let ty::PredicateKind::Projection(proj_predicate) =
- obligation.predicate.kind().skip_binder()
- {
- // Given a Projection predicate, we can potentially infer
- // the complete signature.
+ let mut expected_sig = None;
+ let mut expected_kind = None;
+
+ for obligation in traits::elaborate_predicates_with_span(
+ self.tcx,
+ // Reverse the obligations here, since `elaborate_*` uses a stack,
+ // and we want to keep inference generally in the same order as
+ // the registered obligations.
+ predicates.rev(),
+ ) {
+ debug!(?obligation.predicate);
+ let bound_predicate = obligation.predicate.kind();
+
+ // Given a Projection predicate, we can potentially infer
+ // the complete signature.
+ if expected_sig.is_none()
+ && let ty::PredicateKind::Clause(ty::Clause::Projection(proj_predicate)) = bound_predicate.skip_binder()
+ {
+ expected_sig = self.normalize(
+ obligation.cause.span,
self.deduce_sig_from_projection(
- Some(obligation.cause.span),
+ Some(obligation.cause.span),
bound_predicate.rebind(proj_predicate),
- )
- } else {
- None
- }
- });
+ ),
+ );
+ }
- // Even if we can't infer the full signature, we may be able to
- // infer the kind. This can occur when we elaborate a predicate
- // like `F : Fn<A>`. Note that due to subtyping we could encounter
- // many viable options, so pick the most restrictive.
- let expected_kind = self
- .obligations_for_self_ty(expected_vid)
- .filter_map(|(tr, _)| self.tcx.fn_trait_kind_from_lang_item(tr.def_id()))
- .fold(None, |best, cur| Some(best.map_or(cur, |best| cmp::min(best, cur))));
+ // Even if we can't infer the full signature, we may be able to
+ // infer the kind. This can occur when we elaborate a predicate
+ // like `F : Fn<A>`. Note that due to subtyping we could encounter
+ // many viable options, so pick the most restrictive.
+ let trait_def_id = match bound_predicate.skip_binder() {
+ ty::PredicateKind::Clause(ty::Clause::Projection(data)) => {
+ Some(data.projection_ty.trait_def_id(self.tcx))
+ }
+ ty::PredicateKind::Clause(ty::Clause::Trait(data)) => Some(data.def_id()),
+ _ => None,
+ };
+ if let Some(closure_kind) =
+ trait_def_id.and_then(|def_id| self.tcx.fn_trait_kind_from_def_id(def_id))
+ {
+ expected_kind = Some(
+ expected_kind
+ .map_or_else(|| closure_kind, |current| cmp::min(current, closure_kind)),
+ );
+ }
+ }
(expected_sig, expected_kind)
}
@@ -272,7 +262,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
let trait_def_id = projection.trait_def_id(tcx);
- let is_fn = tcx.fn_trait_kind_from_lang_item(trait_def_id).is_some();
+ let is_fn = tcx.is_fn_trait(trait_def_id);
let gen_trait = tcx.require_lang_item(LangItem::Generator, cause_span);
let is_gen = gen_trait == trait_def_id;
if !is_fn && !is_gen {
@@ -323,30 +313,28 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
fn sig_of_closure(
&self,
- hir_id: hir::HirId,
- expr_def_id: DefId,
+ expr_def_id: LocalDefId,
decl: &hir::FnDecl<'_>,
body: &hir::Body<'_>,
expected_sig: Option<ExpectedSig<'tcx>>,
) -> ClosureSignatures<'tcx> {
if let Some(e) = expected_sig {
- self.sig_of_closure_with_expectation(hir_id, expr_def_id, decl, body, e)
+ self.sig_of_closure_with_expectation(expr_def_id, decl, body, e)
} else {
- self.sig_of_closure_no_expectation(hir_id, expr_def_id, decl, body)
+ self.sig_of_closure_no_expectation(expr_def_id, decl, body)
}
}
/// If there is no expected signature, then we will convert the
/// types that the user gave into a signature.
- #[instrument(skip(self, hir_id, expr_def_id, decl, body), level = "debug")]
+ #[instrument(skip(self, expr_def_id, decl, body), level = "debug")]
fn sig_of_closure_no_expectation(
&self,
- hir_id: hir::HirId,
- expr_def_id: DefId,
+ expr_def_id: LocalDefId,
decl: &hir::FnDecl<'_>,
body: &hir::Body<'_>,
) -> ClosureSignatures<'tcx> {
- let bound_sig = self.supplied_sig_of_closure(hir_id, expr_def_id, decl, body);
+ let bound_sig = self.supplied_sig_of_closure(expr_def_id, decl, body);
self.closure_sigs(expr_def_id, body, bound_sig)
}
@@ -392,17 +380,16 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
///
/// # Arguments
///
- /// - `expr_def_id`: the `DefId` of the closure expression
+ /// - `expr_def_id`: the `LocalDefId` of the closure expression
/// - `decl`: the HIR declaration of the closure
/// - `body`: the body of the closure
/// - `expected_sig`: the expected signature (if any). Note that
/// this is missing a binder: that is, there may be late-bound
/// regions with depth 1, which are bound then by the closure.
- #[instrument(skip(self, hir_id, expr_def_id, decl, body), level = "debug")]
+ #[instrument(skip(self, expr_def_id, decl, body), level = "debug")]
fn sig_of_closure_with_expectation(
&self,
- hir_id: hir::HirId,
- expr_def_id: DefId,
+ expr_def_id: LocalDefId,
decl: &hir::FnDecl<'_>,
body: &hir::Body<'_>,
expected_sig: ExpectedSig<'tcx>,
@@ -411,7 +398,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
// expectation if things don't see to match up with what we
// expect.
if expected_sig.sig.c_variadic() != decl.c_variadic {
- return self.sig_of_closure_no_expectation(hir_id, expr_def_id, decl, body);
+ return self.sig_of_closure_no_expectation(expr_def_id, decl, body);
} else if expected_sig.sig.skip_binder().inputs_and_output.len() != decl.inputs.len() + 1 {
return self.sig_of_closure_with_mismatched_number_of_arguments(
expr_def_id,
@@ -447,27 +434,21 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
// Along the way, it also writes out entries for types that the user
// wrote into our typeck results, which are then later used by the privacy
// check.
- match self.merge_supplied_sig_with_expectation(
- hir_id,
- expr_def_id,
- decl,
- body,
- closure_sigs,
- ) {
+ match self.merge_supplied_sig_with_expectation(expr_def_id, decl, body, closure_sigs) {
Ok(infer_ok) => self.register_infer_ok_obligations(infer_ok),
- Err(_) => self.sig_of_closure_no_expectation(hir_id, expr_def_id, decl, body),
+ Err(_) => self.sig_of_closure_no_expectation(expr_def_id, decl, body),
}
}
fn sig_of_closure_with_mismatched_number_of_arguments(
&self,
- expr_def_id: DefId,
+ expr_def_id: LocalDefId,
decl: &hir::FnDecl<'_>,
body: &hir::Body<'_>,
expected_sig: ExpectedSig<'tcx>,
) -> ClosureSignatures<'tcx> {
let hir = self.tcx.hir();
- let expr_map_node = hir.get_if_local(expr_def_id).unwrap();
+ let expr_map_node = hir.get_by_def_id(expr_def_id);
let expected_args: Vec<_> = expected_sig
.sig
.skip_binder()
@@ -475,18 +456,20 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
.iter()
.map(|ty| ArgKind::from_expected_ty(*ty, None))
.collect();
- let (closure_span, found_args) = match self.get_fn_like_arguments(expr_map_node) {
- Some((sp, args)) => (Some(sp), args),
- None => (None, Vec::new()),
- };
+ let (closure_span, closure_arg_span, found_args) =
+ match self.get_fn_like_arguments(expr_map_node) {
+ Some((sp, arg_sp, args)) => (Some(sp), arg_sp, args),
+ None => (None, None, Vec::new()),
+ };
let expected_span =
- expected_sig.cause_span.unwrap_or_else(|| hir.span_if_local(expr_def_id).unwrap());
+ expected_sig.cause_span.unwrap_or_else(|| self.tcx.def_span(expr_def_id));
self.report_arg_count_mismatch(
expected_span,
closure_span,
expected_args,
found_args,
true,
+ closure_arg_span,
)
.emit();
@@ -498,11 +481,10 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
/// Enforce the user's types against the expectation. See
/// `sig_of_closure_with_expectation` for details on the overall
/// strategy.
- #[instrument(level = "debug", skip(self, hir_id, expr_def_id, decl, body, expected_sigs))]
+ #[instrument(level = "debug", skip(self, expr_def_id, decl, body, expected_sigs))]
fn merge_supplied_sig_with_expectation(
&self,
- hir_id: hir::HirId,
- expr_def_id: DefId,
+ expr_def_id: LocalDefId,
decl: &hir::FnDecl<'_>,
body: &hir::Body<'_>,
mut expected_sigs: ClosureSignatures<'tcx>,
@@ -511,7 +493,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
//
// (See comment on `sig_of_closure_with_expectation` for the
// meaning of these letters.)
- let supplied_sig = self.supplied_sig_of_closure(hir_id, expr_def_id, decl, body);
+ let supplied_sig = self.supplied_sig_of_closure(expr_def_id, decl, body);
debug!(?supplied_sig);
@@ -591,8 +573,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
#[instrument(skip(self, decl, body), level = "debug", ret)]
fn supplied_sig_of_closure(
&self,
- hir_id: hir::HirId,
- expr_def_id: DefId,
+ expr_def_id: LocalDefId,
decl: &hir::FnDecl<'_>,
body: &hir::Body<'_>,
) -> ty::PolyFnSig<'tcx> {
@@ -601,6 +582,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
trace!("decl = {:#?}", decl);
debug!(?body.generator_kind);
+ let hir_id = self.tcx.hir().local_def_id_to_hir_id(expr_def_id);
let bound_vars = self.tcx.late_bound_vars(hir_id);
// First, convert the types that the user supplied (if any).
@@ -642,7 +624,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
);
// Astconv can't normalize inputs or outputs with escaping bound vars,
// so normalize them here, after we've wrapped them in a binder.
- let result = self.normalize_associated_types_in(self.tcx.hir().span(hir_id), result);
+ let result = self.normalize(self.tcx.hir().span(hir_id), result);
let c_result = self.inh.infcx.canonicalize_response(result);
self.typeck_results.borrow_mut().user_provided_sigs.insert(expr_def_id, c_result);
@@ -659,7 +641,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
#[instrument(skip(self), level = "debug", ret)]
fn deduce_future_output_from_obligations(
&self,
- expr_def_id: DefId,
+ expr_def_id: LocalDefId,
body_id: hir::HirId,
) -> Option<Ty<'tcx>> {
let ret_coercion = self.ret_coercion.as_ref().unwrap_or_else(|| {
@@ -677,7 +659,9 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
// where R is the return type we are expecting. This type `T`
// will be our output.
let bound_predicate = predicate.kind();
- if let ty::PredicateKind::Projection(proj_predicate) = bound_predicate.skip_binder() {
+ if let ty::PredicateKind::Clause(ty::Clause::Projection(proj_predicate)) =
+ bound_predicate.skip_binder()
+ {
self.deduce_future_output_from_projection(
span,
bound_predicate.rebind(proj_predicate),
@@ -689,7 +673,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
let output_ty = match *ret_ty.kind() {
ty::Infer(ty::TyVar(ret_vid)) => {
- self.obligations_for_self_ty(ret_vid).find_map(|(_, obligation)| {
+ self.obligations_for_self_ty(ret_vid).find_map(|obligation| {
get_future_output(obligation.predicate, obligation.cause.span)
})?
}
@@ -808,17 +792,13 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
fn closure_sigs(
&self,
- expr_def_id: DefId,
+ expr_def_id: LocalDefId,
body: &hir::Body<'_>,
bound_sig: ty::PolyFnSig<'tcx>,
) -> ClosureSignatures<'tcx> {
- let liberated_sig = self.tcx().liberate_late_bound_regions(expr_def_id, bound_sig);
- let liberated_sig = self.inh.normalize_associated_types_in(
- body.value.span,
- body.value.hir_id,
- self.param_env,
- liberated_sig,
- );
+ let liberated_sig =
+ self.tcx().liberate_late_bound_regions(expr_def_id.to_def_id(), bound_sig);
+ let liberated_sig = self.normalize(body.value.span, liberated_sig);
ClosureSignatures { bound_sig, liberated_sig }
}
}
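The closure-kind deduction above folds every elaborated `Fn`/`FnMut`/`FnOnce` bound down to the most restrictive kind with `cmp::min`. A minimal standalone sketch of that fold, using a local `ClosureKind` stand-in rather than rustc's `ty::ClosureKind`:

use std::cmp;

// Local stand-in for rustc's `ty::ClosureKind`; the derived ordering makes
// `Fn` the most restrictive kind and `FnOnce` the least restrictive.
#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord)]
enum ClosureKind {
    Fn,
    FnMut,
    FnOnce,
}

// Mirrors the fold in `deduce_signature_from_predicates`: each elaborated
// predicate may or may not name an `Fn*` trait, and we keep the most
// restrictive kind seen so far via `cmp::min`.
fn most_restrictive(kinds: impl IntoIterator<Item = Option<ClosureKind>>) -> Option<ClosureKind> {
    let mut expected_kind = None;
    for kind in kinds.into_iter().flatten() {
        expected_kind = Some(expected_kind.map_or(kind, |current| cmp::min(current, kind)));
    }
    expected_kind
}

fn main() {
    // A bound set like `F: FnOnce() + FnMut()` should settle on `FnMut`.
    let deduced = most_restrictive([Some(ClosureKind::FnOnce), None, Some(ClosureKind::FnMut)]);
    assert_eq!(deduced, Some(ClosureKind::FnMut));
}

With the ordering `Fn < FnMut < FnOnce`, `min` always keeps the strictest bound encountered, which is why the rewritten loop can process opaque-type bounds, trait-object bounds, and inference-variable obligations through one shared helper.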
diff --git a/compiler/rustc_hir_typeck/src/coercion.rs b/compiler/rustc_hir_typeck/src/coercion.rs
index 86597a703..f0b349f0c 100644
--- a/compiler/rustc_hir_typeck/src/coercion.rs
+++ b/compiler/rustc_hir_typeck/src/coercion.rs
@@ -46,7 +46,7 @@ use rustc_hir::Expr;
use rustc_hir_analysis::astconv::AstConv;
use rustc_infer::infer::type_variable::{TypeVariableOrigin, TypeVariableOriginKind};
use rustc_infer::infer::{Coercion, InferOk, InferResult};
-use rustc_infer::traits::{Obligation, TraitEngine, TraitEngineExt};
+use rustc_infer::traits::Obligation;
use rustc_middle::lint::in_external_macro;
use rustc_middle::ty::adjustment::{
Adjust, Adjustment, AllowTwoPhase, AutoBorrow, AutoBorrowMutability, PointerCast,
@@ -55,14 +55,16 @@ use rustc_middle::ty::error::TypeError;
use rustc_middle::ty::relate::RelateResult;
use rustc_middle::ty::subst::SubstsRef;
use rustc_middle::ty::visit::TypeVisitable;
-use rustc_middle::ty::{self, ToPredicate, Ty, TypeAndMut};
+use rustc_middle::ty::{self, Ty, TypeAndMut};
use rustc_session::parse::feature_err;
use rustc_span::symbol::sym;
use rustc_span::{self, BytePos, DesugaringKind, Span};
use rustc_target::spec::abi::Abi;
use rustc_trait_selection::infer::InferCtxtExt as _;
use rustc_trait_selection::traits::error_reporting::TypeErrCtxtExt as _;
-use rustc_trait_selection::traits::{self, ObligationCause, ObligationCauseCode};
+use rustc_trait_selection::traits::{
+ self, NormalizeExt, ObligationCause, ObligationCauseCode, ObligationCtxt,
+};
use smallvec::{smallvec, SmallVec};
use std::ops::Deref;
@@ -108,11 +110,7 @@ fn coerce_mutbls<'tcx>(
from_mutbl: hir::Mutability,
to_mutbl: hir::Mutability,
) -> RelateResult<'tcx, ()> {
- match (from_mutbl, to_mutbl) {
- (hir::Mutability::Mut, hir::Mutability::Mut | hir::Mutability::Not)
- | (hir::Mutability::Not, hir::Mutability::Not) => Ok(()),
- (hir::Mutability::Not, hir::Mutability::Mut) => Err(TypeError::Mutability),
- }
+ if from_mutbl >= to_mutbl { Ok(()) } else { Err(TypeError::Mutability) }
}
/// Do not require any adjustments, i.e. coerce `x -> x`.
@@ -199,10 +197,6 @@ impl<'f, 'tcx> Coerce<'f, 'tcx> {
debug!("coerce: unsize successful");
return unsize;
}
- Err(TypeError::ObjectUnsafeCoercion(did)) => {
- debug!("coerce: unsize not object safe");
- return Err(TypeError::ObjectUnsafeCoercion(did));
- }
Err(error) => {
debug!(?error, "coerce: unsize failed");
}
@@ -277,13 +271,13 @@ impl<'f, 'tcx> Coerce<'f, 'tcx> {
for &source_ty in &[a, b] {
if source_ty != target_ty {
obligations.push(Obligation::new(
+ self.tcx(),
self.cause.clone(),
self.param_env,
ty::Binder::dummy(ty::PredicateKind::Coerce(ty::CoercePredicate {
a: source_ty,
b: target_ty,
- }))
- .to_predicate(self.tcx()),
+ })),
));
}
}
@@ -456,7 +450,7 @@ impl<'f, 'tcx> Coerce<'f, 'tcx> {
return Err(err);
};
- if ty == a && mt_a.mutbl == hir::Mutability::Not && autoderef.step_count() == 1 {
+ if ty == a && mt_a.mutbl.is_not() && autoderef.step_count() == 1 {
// As a special case, if we would produce `&'a *x`, that's
// a total no-op. We end up with the type `&'a T` just as
// we started with. In that case, just skip it
@@ -468,7 +462,7 @@ impl<'f, 'tcx> Coerce<'f, 'tcx> {
// `self.x` both have `&mut `type would be a move of
// `self.x`, but we auto-coerce it to `foo(&mut *self.x)`,
// which is a borrow.
- assert_eq!(mutbl_b, hir::Mutability::Not); // can only coerce &T -> &U
+ assert!(mutbl_b.is_not()); // can only coerce &T -> &U
return success(vec![], ty, obligations);
}
@@ -482,12 +476,7 @@ impl<'f, 'tcx> Coerce<'f, 'tcx> {
let ty::Ref(r_borrow, _, _) = ty.kind() else {
span_bug!(span, "expected a ref type, got {:?}", ty);
};
- let mutbl = match mutbl_b {
- hir::Mutability::Not => AutoBorrowMutability::Not,
- hir::Mutability::Mut => {
- AutoBorrowMutability::Mut { allow_two_phase_borrow: self.allow_two_phase }
- }
- };
+ let mutbl = AutoBorrowMutability::new(mutbl_b, self.allow_two_phase);
adjustments.push(Adjustment {
kind: Adjust::Borrow(AutoBorrow::Ref(*r_borrow, mutbl)),
target: ty,
@@ -507,27 +496,9 @@ impl<'f, 'tcx> Coerce<'f, 'tcx> {
target = self.shallow_resolve(target);
debug!(?source, ?target);
- // These 'if' statements require some explanation.
- // The `CoerceUnsized` trait is special - it is only
- // possible to write `impl CoerceUnsized<B> for A` where
- // A and B have 'matching' fields. This rules out the following
- // two types of blanket impls:
- //
- // `impl<T> CoerceUnsized<T> for SomeType`
- // `impl<T> CoerceUnsized<SomeType> for T`
- //
- // Both of these trigger a special `CoerceUnsized`-related error (E0376)
- //
- // We can take advantage of this fact to avoid performing unnecessary work.
- // If either `source` or `target` is a type variable, then any applicable impl
- // would need to be generic over the self-type (`impl<T> CoerceUnsized<SomeType> for T`)
- // or generic over the `CoerceUnsized` type parameter (`impl<T> CoerceUnsized<T> for
- // SomeType`).
- //
- // However, these are exactly the kinds of impls which are forbidden by
- // the compiler! Therefore, we can be sure that coercion will always fail
- // when either the source or target type is a type variable. This allows us
- // to skip performing any trait selection, and immediately bail out.
+ // We don't apply any coercions when either the source or the target
+ // isn't sufficiently well known; instead the two types just get
+ // equated.
if source.is_ty_var() {
debug!("coerce_unsized: source is a TyVar, bailing out");
return Err(TypeError::Mismatch);
@@ -556,15 +527,12 @@ impl<'f, 'tcx> Coerce<'f, 'tcx> {
let coercion = Coercion(self.cause.span);
let r_borrow = self.next_region_var(coercion);
- let mutbl = match mutbl_b {
- hir::Mutability::Not => AutoBorrowMutability::Not,
- hir::Mutability::Mut => AutoBorrowMutability::Mut {
- // We don't allow two-phase borrows here, at least for initial
- // implementation. If it happens that this coercion is a function argument,
- // the reborrow in coerce_borrowed_ptr will pick it up.
- allow_two_phase_borrow: AllowTwoPhase::No,
- },
- };
+
+ // We don't allow two-phase borrows here, at least for initial
+ // implementation. If it happens that this coercion is a function argument,
+ // the reborrow in coerce_borrowed_ptr will pick it up.
+ let mutbl = AutoBorrowMutability::new(mutbl_b, AllowTwoPhase::No);
+
Some((
Adjustment { kind: Adjust::Deref(None), target: ty_a },
Adjustment {
@@ -588,7 +556,7 @@ impl<'f, 'tcx> Coerce<'f, 'tcx> {
}
_ => None,
};
- let coerce_source = reborrow.as_ref().map_or(source, |&(_, ref r)| r.target);
+ let coerce_source = reborrow.as_ref().map_or(source, |(_, r)| r.target);
// Setup either a subtyping or a LUB relationship between
// the `CoerceUnsized` target type and the expected type.
@@ -629,8 +597,7 @@ impl<'f, 'tcx> Coerce<'f, 'tcx> {
cause,
coerce_unsized_did,
0,
- coerce_source,
- &[coerce_target.into()]
+ [coerce_source, coerce_target]
)];
let mut has_unsized_tuple_coercion = false;
@@ -645,7 +612,9 @@ impl<'f, 'tcx> Coerce<'f, 'tcx> {
debug!("coerce_unsized resolve step: {:?}", obligation);
let bound_predicate = obligation.predicate.kind();
let trait_pred = match bound_predicate.skip_binder() {
- ty::PredicateKind::Trait(trait_pred) if traits.contains(&trait_pred.def_id()) => {
+ ty::PredicateKind::Clause(ty::Clause::Trait(trait_pred))
+ if traits.contains(&trait_pred.def_id()) =>
+ {
if unsize_did == trait_pred.def_id() {
let self_ty = trait_pred.self_ty();
let unsize_ty = trait_pred.trait_ref.substs[1].expect_ty();
@@ -668,7 +637,7 @@ impl<'f, 'tcx> Coerce<'f, 'tcx> {
continue;
}
};
- match selcx.select(&obligation.with(trait_pred)) {
+ match selcx.select(&obligation.with(selcx.tcx(), trait_pred)) {
// Uncertain or unimplemented.
Ok(None) => {
if trait_pred.def_id() == unsize_did {
@@ -705,12 +674,7 @@ impl<'f, 'tcx> Coerce<'f, 'tcx> {
// Object safety violations or miscellaneous.
Err(err) => {
- self.err_ctxt().report_selection_error(
- obligation.clone(),
- &obligation,
- &err,
- false,
- );
+ self.err_ctxt().report_selection_error(obligation.clone(), &obligation, &err);
// Treat this like an obligation and follow through
// with the unsizing - the lack of a coercion should
// be silent, as it causes a type mismatch later.
@@ -752,7 +716,7 @@ impl<'f, 'tcx> Coerce<'f, 'tcx> {
&self,
a: Ty<'tcx>,
b: Ty<'tcx>,
- predicates: &'tcx ty::List<ty::Binder<'tcx, ty::ExistentialPredicate<'tcx>>>,
+ predicates: &'tcx ty::List<ty::PolyExistentialPredicate<'tcx>>,
b_region: ty::Region<'tcx>,
) -> CoerceResult<'tcx> {
if !self.tcx.features().dyn_star {
@@ -761,25 +725,14 @@ impl<'f, 'tcx> Coerce<'f, 'tcx> {
if let ty::Dynamic(a_data, _, _) = a.kind()
&& let ty::Dynamic(b_data, _, _) = b.kind()
+ && a_data.principal_def_id() == b_data.principal_def_id()
{
- if a_data.principal_def_id() == b_data.principal_def_id() {
- return self.unify_and(a, b, |_| vec![]);
- } else if !self.tcx().features().trait_upcasting {
- let mut err = feature_err(
- &self.tcx.sess.parse_sess,
- sym::trait_upcasting,
- self.cause.span,
- &format!(
- "cannot cast `{a}` to `{b}`, trait upcasting coercion is experimental"
- ),
- );
- err.emit();
- }
+ return self.unify_and(a, b, |_| vec![]);
}
// Check the obligations of the cast -- for example, when casting
// `usize` to `dyn* Clone + 'static`:
- let obligations = predicates
+ let mut obligations: Vec<_> = predicates
.iter()
.map(|predicate| {
// For each existential predicate (e.g., `?Self: Clone`) substitute
@@ -787,18 +740,31 @@ impl<'f, 'tcx> Coerce<'f, 'tcx> {
// and then require that the resulting predicate (e.g., `usize: Clone`)
// holds (it does).
let predicate = predicate.with_self_ty(self.tcx, a);
- Obligation::new(self.cause.clone(), self.param_env, predicate)
+ Obligation::new(self.tcx, self.cause.clone(), self.param_env, predicate)
})
- // Enforce the region bound (e.g., `usize: 'static`, in our example).
- .chain([Obligation::new(
- self.cause.clone(),
- self.param_env,
- self.tcx.mk_predicate(ty::Binder::dummy(ty::PredicateKind::TypeOutlives(
- ty::OutlivesPredicate(a, b_region),
- ))),
- )])
+ .chain([
+ // Enforce the region bound (e.g., `usize: 'static`, in our example).
+ Obligation::new(
+ self.tcx,
+ self.cause.clone(),
+ self.param_env,
+ ty::Binder::dummy(ty::PredicateKind::Clause(ty::Clause::TypeOutlives(
+ ty::OutlivesPredicate(a, b_region),
+ ))),
+ ),
+ ])
.collect();
+ // Enforce that the type is `usize`/pointer-sized.
+ obligations.push(Obligation::new(
+ self.tcx,
+ self.cause.clone(),
+ self.param_env,
+ ty::Binder::dummy(
+ self.tcx.at(self.cause.span).mk_trait_ref(hir::LangItem::PointerSized, [a]),
+ ),
+ ));
+
Ok(InferOk {
value: (vec![Adjustment { kind: Adjust::DynStar, target: b }], b),
obligations,
@@ -868,7 +834,7 @@ impl<'f, 'tcx> Coerce<'f, 'tcx> {
let b = self.shallow_resolve(b);
let InferOk { value: b, mut obligations } =
- self.normalize_associated_types_in_as_infer_ok(self.cause.span, b);
+ self.at(&self.cause, self.param_env).normalize(b);
debug!("coerce_from_fn_item(a={:?}, b={:?})", a, b);
match b.kind() {
@@ -890,7 +856,7 @@ impl<'f, 'tcx> Coerce<'f, 'tcx> {
}
let InferOk { value: a_sig, obligations: o1 } =
- self.normalize_associated_types_in_as_infer_ok(self.cause.span, a_sig);
+ self.at(&self.cause, self.param_env).normalize(a_sig);
obligations.extend(o1);
let a_fn_pointer = self.tcx.mk_fn_ptr(a_sig);
@@ -1043,9 +1009,9 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
let Ok(ok) = coerce.coerce(source, target) else {
return false;
};
- let mut fcx = traits::FulfillmentContext::new_in_snapshot();
- fcx.register_predicate_obligations(self, ok.obligations);
- fcx.select_where_possible(&self).is_empty()
+ let ocx = ObligationCtxt::new_in_snapshot(self);
+ ocx.register_obligations(ok.obligations);
+ ocx.select_where_possible().is_empty()
})
}
@@ -1072,8 +1038,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
self.infcx
.type_implements_trait(
self.tcx.lang_items().deref_mut_trait()?,
- expr_ty,
- ty::List::empty(),
+ [expr_ty],
self.param_env,
)
.may_apply()
@@ -1116,15 +1081,14 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
// Special-case that coercion alone cannot handle:
// Function items or non-capturing closures of differing IDs or InternalSubsts.
let (a_sig, b_sig) = {
- #[allow(rustc::usage_of_ty_tykind)]
- let is_capturing_closure = |ty: &ty::TyKind<'tcx>| {
- if let &ty::Closure(closure_def_id, _substs) = ty {
+ let is_capturing_closure = |ty: Ty<'tcx>| {
+ if let &ty::Closure(closure_def_id, _substs) = ty.kind() {
self.tcx.upvars_mentioned(closure_def_id.expect_local()).is_some()
} else {
false
}
};
- if is_capturing_closure(prev_ty.kind()) || is_capturing_closure(new_ty.kind()) {
+ if is_capturing_closure(prev_ty) || is_capturing_closure(new_ty) {
(None, None)
} else {
match (prev_ty.kind(), new_ty.kind()) {
@@ -1179,8 +1143,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
return Err(TypeError::IntrinsicCast);
}
// The signature must match.
- let a_sig = self.normalize_associated_types_in(new.span, a_sig);
- let b_sig = self.normalize_associated_types_in(new.span, b_sig);
+ let (a_sig, b_sig) = self.normalize(new.span, (a_sig, b_sig));
let sig = self
.at(cause, self.param_env)
.trace(prev_ty, new_ty)
@@ -1547,7 +1510,9 @@ impl<'tcx, 'exprs, E: AsCoercionSite> CoerceMany<'tcx, 'exprs, E> {
// Mark that we've failed to coerce the types here to suppress
// any superfluous errors we might encounter while trying to
// emit or provide suggestions on how to fix the initial error.
- fcx.set_tainted_by_errors();
+ fcx.set_tainted_by_errors(
+ fcx.tcx.sess.delay_span_bug(cause.span, "coercion error but no error emitted"),
+ );
let (expected, found) = if label_expression_as_expected {
// In the case where this is a "forced unit", like
// `break`, we want to call the `()` "expected"
@@ -1644,9 +1609,9 @@ impl<'tcx, 'exprs, E: AsCoercionSite> CoerceMany<'tcx, 'exprs, E> {
if visitor.ret_exprs.len() > 0 && let Some(expr) = expression {
self.note_unreachable_loop_return(&mut err, &expr, &visitor.ret_exprs);
}
- err.emit_unless(unsized_return);
+ let reported = err.emit_unless(unsized_return);
- self.final_ty = Some(fcx.tcx.ty_error());
+ self.final_ty = Some(fcx.tcx.ty_error_with_guaranteed(reported));
}
}
}
@@ -1782,7 +1747,8 @@ impl<'tcx, 'exprs, E: AsCoercionSite> CoerceMany<'tcx, 'exprs, E> {
// may occur at the first return expression we see in the closure
// (if it conflicts with the declared return type). Skip adding a
// note in this case, since it would be incorrect.
- && !fcx.return_type_pre_known
+ && let Some(fn_sig) = fcx.body_fn_sig()
+ && fn_sig.output().is_ty_var()
{
err.span_note(
sp,
diff --git a/compiler/rustc_hir_typeck/src/demand.rs b/compiler/rustc_hir_typeck/src/demand.rs
index 16febfc46..24184bdbf 100644
--- a/compiler/rustc_hir_typeck/src/demand.rs
+++ b/compiler/rustc_hir_typeck/src/demand.rs
@@ -2,6 +2,7 @@ use crate::FnCtxt;
use rustc_ast::util::parser::PREC_POSTFIX;
use rustc_errors::{Applicability, Diagnostic, DiagnosticBuilder, ErrorGuaranteed};
use rustc_hir as hir;
+use rustc_hir::def::CtorKind;
use rustc_hir::lang_items::LangItem;
use rustc_hir::{is_range_literal, Node};
use rustc_infer::infer::InferOk;
@@ -18,6 +19,7 @@ use rustc_trait_selection::traits::ObligationCause;
use super::method::probe;
+use std::cmp::min;
use std::iter;
impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
@@ -30,6 +32,10 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
expected_ty_expr: Option<&'tcx hir::Expr<'tcx>>,
error: Option<TypeError<'tcx>>,
) {
+ if expr_ty == expected {
+ return;
+ }
+
self.annotate_expected_due_to_let_ty(err, expr, error);
// Use `||` to give these suggestions a precedence
@@ -42,15 +48,17 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
|| self.suggest_boxing_when_appropriate(err, expr, expected, expr_ty)
|| self.suggest_block_to_brackets_peeling_refs(err, expr, expr_ty, expected)
|| self.suggest_copied_or_cloned(err, expr, expr_ty, expected)
- || self.suggest_into(err, expr, expr_ty, expected);
+ || self.suggest_into(err, expr, expr_ty, expected)
+ || self.suggest_option_to_bool(err, expr, expr_ty, expected)
+ || self.suggest_floating_point_literal(err, expr, expected);
self.note_type_is_not_clone(err, expected, expr_ty, expr);
self.note_need_for_fn_pointer(err, expected, expr_ty);
self.note_internal_mutation_in_method(err, expr, expected, expr_ty);
}
- // Requires that the two types unify, and prints an error message if
- // they don't.
+ /// Requires that the two types unify, and prints an error message if
+ /// they don't.
pub fn demand_suptype(&self, sp: Span, expected: Ty<'tcx>, actual: Ty<'tcx>) {
if let Some(mut e) = self.demand_suptype_diag(sp, expected, actual) {
e.emit();
@@ -148,7 +156,10 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
Err(e) => e,
};
- self.set_tainted_by_errors();
+ self.set_tainted_by_errors(self.tcx.sess.delay_span_bug(
+ expr.span,
+ "`TypeError` when attempting coercion but no error emitted",
+ ));
let expr = expr.peel_drop_temps();
let cause = self.misc(expr.span);
let expr_ty = self.resolve_vars_with_obligations(checked_ty);
@@ -395,27 +406,27 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
if let Some(path) = variant_path.strip_prefix("std::prelude::")
&& let Some((_, path)) = path.split_once("::")
{
- return Some((path.to_string(), variant.ctor_kind, sole_field.name, note_about_variant_field_privacy));
+ return Some((path.to_string(), variant.ctor_kind(), sole_field.name, note_about_variant_field_privacy));
}
- Some((variant_path, variant.ctor_kind, sole_field.name, note_about_variant_field_privacy))
+ Some((variant_path, variant.ctor_kind(), sole_field.name, note_about_variant_field_privacy))
} else {
None
}
})
.collect();
- let suggestions_for = |variant: &_, ctor, field_name| {
+ let suggestions_for = |variant: &_, ctor_kind, field_name| {
let prefix = match self.maybe_get_struct_pattern_shorthand_field(expr) {
Some(ident) => format!("{ident}: "),
None => String::new(),
};
- let (open, close) = match ctor {
- hir::def::CtorKind::Fn => ("(".to_owned(), ")"),
- hir::def::CtorKind::Fictive => (format!(" {{ {field_name}: "), " }"),
+ let (open, close) = match ctor_kind {
+ Some(CtorKind::Fn) => ("(".to_owned(), ")"),
+ None => (format!(" {{ {field_name}: "), " }"),
// unit variants don't have fields
- hir::def::CtorKind::Const => unreachable!(),
+ Some(CtorKind::Const) => unreachable!(),
};
// Suggest constructor as deep into the block tree as possible.
@@ -845,31 +856,21 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
..
})) = self.tcx.hir().find(self.tcx.hir().get_parent_node(expr.hir_id))
{
- if mutability == hir::Mutability::Mut {
+ if mutability.is_mut() {
// Suppressing this diagnostic, we'll properly print it in `check_expr_assign`
return None;
}
}
let sugg_expr = if needs_parens { format!("({src})") } else { src };
- return Some(match mutability {
- hir::Mutability::Mut => (
- sp,
- "consider mutably borrowing here".to_string(),
- format!("{prefix}&mut {sugg_expr}"),
- Applicability::MachineApplicable,
- false,
- false,
- ),
- hir::Mutability::Not => (
- sp,
- "consider borrowing here".to_string(),
- format!("{prefix}&{sugg_expr}"),
- Applicability::MachineApplicable,
- false,
- false,
- ),
- });
+ return Some((
+ sp,
+ format!("consider {}borrowing here", mutability.mutably_str()),
+ format!("{prefix}{}{sugg_expr}", mutability.ref_prefix_str()),
+ Applicability::MachineApplicable,
+ false,
+ false,
+ ));
}
}
}
@@ -927,51 +928,24 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
&& let Ok(src) = sm.span_to_snippet(sp)
{
let derefs = "*".repeat(steps);
- if let Some((span, src, applicability)) = match mutbl_b {
- hir::Mutability::Mut => {
- let new_prefix = "&mut ".to_owned() + &derefs;
- match mutbl_a {
- hir::Mutability::Mut => {
- replace_prefix(&src, "&mut ", &new_prefix).map(|_| {
- let pos = sp.lo() + BytePos(5);
- let sp = sp.with_lo(pos).with_hi(pos);
- (sp, derefs, Applicability::MachineApplicable)
- })
- }
- hir::Mutability::Not => {
- replace_prefix(&src, "&", &new_prefix).map(|_| {
- let pos = sp.lo() + BytePos(1);
- let sp = sp.with_lo(pos).with_hi(pos);
- (
- sp,
- format!("mut {derefs}"),
- Applicability::Unspecified,
- )
- })
- }
- }
- }
- hir::Mutability::Not => {
- let new_prefix = "&".to_owned() + &derefs;
- match mutbl_a {
- hir::Mutability::Mut => {
- replace_prefix(&src, "&mut ", &new_prefix).map(|_| {
- let lo = sp.lo() + BytePos(1);
- let hi = sp.lo() + BytePos(5);
- let sp = sp.with_lo(lo).with_hi(hi);
- (sp, derefs, Applicability::MachineApplicable)
- })
- }
- hir::Mutability::Not => {
- replace_prefix(&src, "&", &new_prefix).map(|_| {
- let pos = sp.lo() + BytePos(1);
- let sp = sp.with_lo(pos).with_hi(pos);
- (sp, derefs, Applicability::MachineApplicable)
- })
- }
- }
- }
- } {
+ let old_prefix = mutbl_a.ref_prefix_str();
+ let new_prefix = mutbl_b.ref_prefix_str().to_owned() + &derefs;
+
+ let suggestion = replace_prefix(&src, old_prefix, &new_prefix).map(|_| {
+ // skip `&` or `&mut ` if both mutabilities are mutable
+ let lo = sp.lo() + BytePos(min(old_prefix.len(), mutbl_b.ref_prefix_str().len()) as _);
+ // skip `&` or `&mut `
+ let hi = sp.lo() + BytePos(old_prefix.len() as _);
+ let sp = sp.with_lo(lo).with_hi(hi);
+
+ (
+ sp,
+ format!("{}{derefs}", if mutbl_a != mutbl_b { mutbl_b.prefix_str() } else { "" }),
+ if mutbl_b <= mutbl_a { Applicability::MachineApplicable } else { Applicability::MaybeIncorrect }
+ )
+ });
+
+ if let Some((span, src, applicability)) = suggestion {
return Some((
span,
"consider dereferencing".to_string(),
@@ -995,10 +969,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
// If the expression has `&`, removing it would fix the error
prefix_span = prefix_span.with_hi(inner.span.lo());
expr = inner;
- remove += match mutbl {
- hir::Mutability::Not => "&",
- hir::Mutability::Mut => "&mut ",
- };
+ remove.push_str(mutbl.ref_prefix_str());
steps -= 1;
} else {
break;
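The borrow suggestion above now builds both the message and the replacement from `Mutability::mutably_str`/`ref_prefix_str` instead of matching on the mutability twice. A standalone sketch of that construction, with the two helpers reimplemented locally as stand-ins for the real `hir::Mutability` methods:

#[derive(Clone, Copy)]
enum Mutability {
    Not,
    Mut,
}

impl Mutability {
    // Local stand-ins for the helpers the diff switches to.
    fn ref_prefix_str(self) -> &'static str {
        match self {
            Mutability::Not => "&",
            Mutability::Mut => "&mut ",
        }
    }
    fn mutably_str(self) -> &'static str {
        match self {
            Mutability::Not => "",
            Mutability::Mut => "mutably ",
        }
    }
}

// Builds the same kind of suggestion text as the collapsed match arms above.
fn borrow_suggestion(mutability: Mutability, prefix: &str, expr: &str) -> (String, String) {
    (
        format!("consider {}borrowing here", mutability.mutably_str()),
        format!("{prefix}{}{expr}", mutability.ref_prefix_str()),
    )
}

fn main() {
    let (msg, sugg) = borrow_suggestion(Mutability::Mut, "", "x");
    assert_eq!(msg, "consider mutably borrowing here");
    assert_eq!(sugg, "&mut x");
}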
diff --git a/compiler/rustc_hir_typeck/src/errors.rs b/compiler/rustc_hir_typeck/src/errors.rs
index 175037f9b..507272fde 100644
--- a/compiler/rustc_hir_typeck/src/errors.rs
+++ b/compiler/rustc_hir_typeck/src/errors.rs
@@ -1,10 +1,11 @@
-//! Errors emitted by `rustc_hir_analysis`.
+//! Errors emitted by `rustc_hir_typeck`.
+use rustc_errors::{AddToDiagnostic, Applicability, Diagnostic, MultiSpan, SubdiagnosticMessage};
use rustc_macros::{Diagnostic, Subdiagnostic};
use rustc_middle::ty::Ty;
use rustc_span::{symbol::Ident, Span};
#[derive(Diagnostic)]
-#[diag(hir_analysis_field_multiply_specified_in_initializer, code = "E0062")]
+#[diag(hir_typeck_field_multiply_specified_in_initializer, code = "E0062")]
pub struct FieldMultiplySpecifiedInInitializer {
#[primary_span]
#[label]
@@ -15,7 +16,7 @@ pub struct FieldMultiplySpecifiedInInitializer {
}
#[derive(Diagnostic)]
-#[diag(hir_analysis_return_stmt_outside_of_fn_body, code = "E0572")]
+#[diag(hir_typeck_return_stmt_outside_of_fn_body, code = "E0572")]
pub struct ReturnStmtOutsideOfFnBody {
#[primary_span]
pub span: Span,
@@ -26,14 +27,14 @@ pub struct ReturnStmtOutsideOfFnBody {
}
#[derive(Diagnostic)]
-#[diag(hir_analysis_yield_expr_outside_of_generator, code = "E0627")]
+#[diag(hir_typeck_yield_expr_outside_of_generator, code = "E0627")]
pub struct YieldExprOutsideOfGenerator {
#[primary_span]
pub span: Span,
}
#[derive(Diagnostic)]
-#[diag(hir_analysis_struct_expr_non_exhaustive, code = "E0639")]
+#[diag(hir_typeck_struct_expr_non_exhaustive, code = "E0639")]
pub struct StructExprNonExhaustive {
#[primary_span]
pub span: Span,
@@ -41,21 +42,21 @@ pub struct StructExprNonExhaustive {
}
#[derive(Diagnostic)]
-#[diag(hir_analysis_method_call_on_unknown_type, code = "E0699")]
+#[diag(hir_typeck_method_call_on_unknown_type, code = "E0699")]
pub struct MethodCallOnUnknownType {
#[primary_span]
pub span: Span,
}
#[derive(Diagnostic)]
-#[diag(hir_analysis_functional_record_update_on_non_struct, code = "E0436")]
+#[diag(hir_typeck_functional_record_update_on_non_struct, code = "E0436")]
pub struct FunctionalRecordUpdateOnNonStruct {
#[primary_span]
pub span: Span,
}
#[derive(Diagnostic)]
-#[diag(hir_analysis_address_of_temporary_taken, code = "E0745")]
+#[diag(hir_typeck_address_of_temporary_taken, code = "E0745")]
pub struct AddressOfTemporaryTaken {
#[primary_span]
#[label]
@@ -65,7 +66,7 @@ pub struct AddressOfTemporaryTaken {
#[derive(Subdiagnostic)]
pub enum AddReturnTypeSuggestion {
#[suggestion(
- hir_analysis_add_return_type_add,
+ hir_typeck_add_return_type_add,
code = "-> {found} ",
applicability = "machine-applicable"
)]
@@ -75,7 +76,7 @@ pub enum AddReturnTypeSuggestion {
found: String,
},
#[suggestion(
- hir_analysis_add_return_type_missing_here,
+ hir_typeck_add_return_type_missing_here,
code = "-> _ ",
applicability = "has-placeholders"
)]
@@ -87,12 +88,12 @@ pub enum AddReturnTypeSuggestion {
#[derive(Subdiagnostic)]
pub enum ExpectedReturnTypeLabel<'tcx> {
- #[label(hir_analysis_expected_default_return_type)]
+ #[label(hir_typeck_expected_default_return_type)]
Unit {
#[primary_span]
span: Span,
},
- #[label(hir_analysis_expected_return_type)]
+ #[label(hir_typeck_expected_return_type)]
Other {
#[primary_span]
span: Span,
@@ -101,10 +102,10 @@ pub enum ExpectedReturnTypeLabel<'tcx> {
}
#[derive(Diagnostic)]
-#[diag(hir_analysis_missing_parentheses_in_range, code = "E0689")]
+#[diag(hir_typeck_missing_parentheses_in_range, code = "E0689")]
pub struct MissingParentheseInRange {
#[primary_span]
- #[label(hir_analysis_missing_parentheses_in_range)]
+ #[label(hir_typeck_missing_parentheses_in_range)]
pub span: Span,
pub ty_str: String,
pub method_name: String,
@@ -113,8 +114,9 @@ pub struct MissingParentheseInRange {
}
#[derive(Subdiagnostic)]
-#[multipart_suggestion_verbose(
- hir_analysis_add_missing_parentheses_in_range,
+#[multipart_suggestion(
+ hir_typeck_add_missing_parentheses_in_range,
+ style = "verbose",
applicability = "maybe-incorrect"
)]
pub struct AddMissingParenthesesInRange {
@@ -124,3 +126,49 @@ pub struct AddMissingParenthesesInRange {
#[suggestion_part(code = ")")]
pub right: Span,
}
+
+#[derive(Diagnostic)]
+#[diag(hir_typeck_op_trait_generic_params)]
+pub struct OpMethodGenericParams {
+ #[primary_span]
+ pub span: Span,
+ pub method_name: String,
+}
+
+pub struct TypeMismatchFruTypo {
+ /// Span of the LHS of the range
+ pub expr_span: Span,
+ /// Span of the `..RHS` part of the range
+ pub fru_span: Span,
+ /// Rendered expression of the RHS of the range
+ pub expr: Option<String>,
+}
+
+impl AddToDiagnostic for TypeMismatchFruTypo {
+ fn add_to_diagnostic_with<F>(self, diag: &mut Diagnostic, _: F)
+ where
+ F: Fn(&mut Diagnostic, SubdiagnosticMessage) -> SubdiagnosticMessage,
+ {
+ diag.set_arg("expr", self.expr.as_deref().unwrap_or("NONE"));
+
+ // Only explain that `a ..b` is a range if it's split up
+ if self.expr_span.between(self.fru_span).is_empty() {
+ diag.span_note(
+ self.expr_span.to(self.fru_span),
+ rustc_errors::fluent::hir_typeck_fru_note,
+ );
+ } else {
+ let mut multispan: MultiSpan = vec![self.expr_span, self.fru_span].into();
+ multispan.push_span_label(self.expr_span, rustc_errors::fluent::hir_typeck_fru_expr);
+ multispan.push_span_label(self.fru_span, rustc_errors::fluent::hir_typeck_fru_expr2);
+ diag.span_note(multispan, rustc_errors::fluent::hir_typeck_fru_note);
+ }
+
+ diag.span_suggestion(
+ self.expr_span.shrink_to_hi(),
+ rustc_errors::fluent::hir_typeck_fru_suggestion,
+ ", ",
+ Applicability::MaybeIncorrect,
+ );
+ }
+}
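The `TypeMismatchFruTypo` subdiagnostic above branches on whether the gap between the expression span and the `..rhs` span is empty. A standalone sketch of that branching, with spans modelled as plain byte ranges and placeholder messages rather than the real fluent slugs:

// Minimal span model; `between` mirrors the idea of `Span::between`:
// the region strictly separating two spans.
#[derive(Clone, Copy, Debug)]
struct Span {
    lo: usize,
    hi: usize,
}

impl Span {
    fn between(self, other: Span) -> Span {
        Span { lo: self.hi, hi: other.lo }
    }
    fn is_empty(self) -> bool {
        self.lo >= self.hi
    }
}

fn note_for(expr_span: Span, fru_span: Span) -> &'static str {
    if expr_span.between(fru_span).is_empty() {
        // `a ..b`: the pieces touch, so one note over the joined span suffices.
        "this looks like a range expression"
    } else {
        // The pieces are split up, so each span gets its own label.
        "these two pieces form a range expression once joined"
    }
}

fn main() {
    let touching = note_for(Span { lo: 10, hi: 11 }, Span { lo: 11, hi: 14 });
    let split = note_for(Span { lo: 10, hi: 11 }, Span { lo: 20, hi: 23 });
    println!("{touching}\n{split}");
}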
diff --git a/compiler/rustc_hir_typeck/src/expectation.rs b/compiler/rustc_hir_typeck/src/expectation.rs
index e9e810344..4f086cf59 100644
--- a/compiler/rustc_hir_typeck/src/expectation.rs
+++ b/compiler/rustc_hir_typeck/src/expectation.rs
@@ -79,9 +79,9 @@ impl<'a, 'tcx> Expectation<'tcx> {
}
}
- // Resolves `expected` by a single level if it is a variable. If
- // there is no expected type or resolution is not possible (e.g.,
- // no constraints yet present), just returns `self`.
+ /// Resolves `expected` by a single level if it is a variable. If
+ /// there is no expected type or resolution is not possible (e.g.,
+ /// no constraints yet present), just returns `self`.
fn resolve(self, fcx: &FnCtxt<'a, 'tcx>) -> Expectation<'tcx> {
match self {
NoExpectation => NoExpectation,
diff --git a/compiler/rustc_hir_typeck/src/expr.rs b/compiler/rustc_hir_typeck/src/expr.rs
index 9fde62a81..ed87b94a0 100644
--- a/compiler/rustc_hir_typeck/src/expr.rs
+++ b/compiler/rustc_hir_typeck/src/expr.rs
@@ -5,6 +5,7 @@
use crate::cast;
use crate::coercion::CoerceMany;
use crate::coercion::DynamicCoerceMany;
+use crate::errors::TypeMismatchFruTypo;
use crate::errors::{AddressOfTemporaryTaken, ReturnStmtOutsideOfFnBody, StructExprNonExhaustive};
use crate::errors::{
FieldMultiplySpecifiedInInitializer, FunctionalRecordUpdateOnNonStruct,
@@ -30,7 +31,7 @@ use rustc_hir::def::{CtorKind, DefKind, Res};
use rustc_hir::def_id::DefId;
use rustc_hir::intravisit::Visitor;
use rustc_hir::lang_items::LangItem;
-use rustc_hir::{Closure, ExprKind, HirId, QPath};
+use rustc_hir::{ExprKind, HirId, QPath};
use rustc_hir_analysis::astconv::AstConv as _;
use rustc_hir_analysis::check::ty_kind_suggestion;
use rustc_infer::infer;
@@ -80,14 +81,14 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
// coercions from ! to `expected`.
if ty.is_never() {
if let Some(adjustments) = self.typeck_results.borrow().adjustments().get(expr.hir_id) {
- self.tcx().sess.delay_span_bug(
+ let reported = self.tcx().sess.delay_span_bug(
expr.span,
"expression with never type wound up being adjusted",
);
return if let [Adjustment { kind: Adjust::NeverToAny, target }] = &adjustments[..] {
target.to_owned()
} else {
- self.tcx().ty_error()
+ self.tcx().ty_error_with_guaranteed(reported)
};
}
@@ -103,8 +104,16 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
}
if let Some(mut err) = self.demand_suptype_diag(expr.span, expected_ty, ty) {
- let expr = expr.peel_drop_temps();
- self.suggest_deref_ref_or_into(&mut err, expr, expected_ty, ty, None);
+ // FIXME(compiler-errors): We probably should fold some of the
+ // `suggest_` functions from `emit_coerce_suggestions` into here,
+ // since some of those aren't necessarily just coerce suggestions.
+ let _ = self.suggest_deref_ref_or_into(
+ &mut err,
+ expr.peel_drop_temps(),
+ expected_ty,
+ ty,
+ None,
+ ) || self.suggest_option_to_bool(&mut err, expr, ty, expected_ty);
extend_err(&mut err);
err.emit();
}
@@ -220,7 +229,6 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
// Hide the outer diverging and has_errors flags.
let old_diverges = self.diverges.replace(Diverges::Maybe);
- let old_has_errors = self.has_errors.replace(false);
let ty = ensure_sufficient_stack(|| match &expr.kind {
hir::ExprKind::Path(
@@ -259,7 +267,6 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
// Combine the diverging and has_error flags.
self.diverges.set(self.diverges.get() | old_diverges);
- self.has_errors.set(self.has_errors.get() | old_has_errors);
debug!("type of {} is...", self.tcx.hir().node_to_string(expr.hir_id));
debug!("... {:?}, expected is {:?}", ty, expected);
@@ -318,9 +325,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
ExprKind::Match(discrim, arms, match_src) => {
self.check_match(expr, &discrim, arms, expected, match_src)
}
- ExprKind::Closure(&Closure { capture_clause, fn_decl, body, movability, .. }) => {
- self.check_expr_closure(expr, capture_clause, &fn_decl, body, movability, expected)
- }
+ ExprKind::Closure(closure) => self.check_expr_closure(closure, expr.span, expected),
ExprKind::Block(body, _) => self.check_block_with_expected(&body, expected),
ExprKind::Call(callee, args) => self.check_call(expr, &callee, args, expected),
ExprKind::MethodCall(segment, receiver, args, _) => {
@@ -398,8 +403,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
{
err.subdiagnostic(ExprParenthesesNeeded::surrounding(*sp));
}
- err.emit();
- oprnd_t = tcx.ty_error();
+ oprnd_t = tcx.ty_error_with_guaranteed(err.emit());
}
}
hir::UnOp::Not => {
@@ -524,12 +528,15 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
self.resolve_ty_and_res_fully_qualified_call(qpath, expr.hir_id, expr.span);
let ty = match res {
Res::Err => {
- self.set_tainted_by_errors();
- tcx.ty_error()
+ self.suggest_assoc_method_call(segs);
+ let e =
+ self.tcx.sess.delay_span_bug(qpath.span(), "`Res::Err` but no error emitted");
+ self.set_tainted_by_errors(e);
+ tcx.ty_error_with_guaranteed(e)
}
- Res::Def(DefKind::Ctor(_, CtorKind::Fictive), _) => {
- report_unexpected_variant_res(tcx, res, qpath, expr.span);
- tcx.ty_error()
+ Res::Def(DefKind::Variant, _) => {
+ let e = report_unexpected_variant_res(tcx, res, qpath, expr.span, "E0533", "value");
+ tcx.ty_error_with_guaranteed(e)
}
_ => self.instantiate_value_path(segs, opt_ty, res, expr.span, expr.hir_id).0,
};
@@ -840,10 +847,12 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
return_expr_ty,
);
- if self.return_type_has_opaque {
+ if let Some(fn_sig) = self.body_fn_sig()
+ && fn_sig.output().has_opaque_types()
+ {
// Point any obligations that were registered due to opaque type
// inference at the return expression.
- self.select_obligations_where_possible(false, |errors| {
+ self.select_obligations_where_possible(|errors| {
self.point_at_return_for_opaque_ty_error(errors, span, return_expr_ty);
});
}
@@ -906,8 +915,8 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
}
// Check if an expression `original_expr_id` comes from the condition of a while loop,
- // as opposed from the body of a while loop, which we can naively check by iterating
- // parents until we find a loop...
+ /// as opposed to the body of a while loop, which we can naively check by iterating
+ /// parents until we find a loop...
pub(super) fn comes_from_while_condition(
&self,
original_expr_id: HirId,
@@ -1097,12 +1106,8 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
// If the assignment expression itself is ill-formed, don't
// bother emitting another error
- if lhs_ty.references_error() || rhs_ty.references_error() {
- err.delay_as_bug()
- } else {
- err.emit();
- }
- return self.tcx.ty_error();
+ let reported = err.emit_unless(lhs_ty.references_error() || rhs_ty.references_error());
+ return self.tcx.ty_error_with_guaranteed(reported);
}
let lhs_ty = self.check_expr_with_needs(&lhs, Needs::MutPlace);
@@ -1114,9 +1119,8 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
let lhs_deref_ty_is_sized = self
.infcx
.type_implements_trait(
- self.tcx.lang_items().sized_trait().unwrap(),
- lhs_deref_ty,
- ty::List::empty(),
+ self.tcx.require_lang_item(LangItem::Sized, None),
+ [lhs_deref_ty],
self.param_env,
)
.may_apply();
@@ -1613,10 +1617,16 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
self.demand_coerce_diag(&field.expr, ty, field_type, None, AllowTwoPhase::No);
if let Some(mut diag) = diag {
- if idx == ast_fields.len() - 1 && remaining_fields.is_empty() {
- self.suggest_fru_from_range(field, variant, substs, &mut diag);
+ if idx == ast_fields.len() - 1 {
+ if remaining_fields.is_empty() {
+ self.suggest_fru_from_range(field, variant, substs, &mut diag);
+ diag.emit();
+ } else {
+ diag.stash(field.span, StashKey::MaybeFruTypo);
+ }
+ } else {
+ diag.emit();
}
- diag.emit();
}
}
@@ -1637,6 +1647,9 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
// the fields with the base_expr. This could cause us to hit errors later
// when certain fields are assumed to exist that in fact do not.
if error_happened {
+ if let Some(base_expr) = base_expr {
+ self.check_expr(base_expr);
+ }
return;
}
@@ -1655,7 +1668,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
.fields
.iter()
.map(|f| {
- let fru_ty = self.normalize_associated_types_in(
+ let fru_ty = self.normalize(
expr_span,
self.field_ty(base_expr.span, f, fresh_substs),
);
@@ -1739,9 +1752,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
ty::Adt(adt, substs) if adt.is_struct() => variant
.fields
.iter()
- .map(|f| {
- self.normalize_associated_types_in(expr_span, f.ty(self.tcx, substs))
- })
+ .map(|f| self.normalize(expr_span, f.ty(self.tcx, substs)))
.collect(),
_ => {
self.tcx
@@ -1874,19 +1885,39 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
.map(|adt| adt.did())
!= range_def_id
{
- let instead = self
+ // Suppress any range expr type mismatches
+ if let Some(mut diag) = self
+ .tcx
+ .sess
+ .diagnostic()
+ .steal_diagnostic(last_expr_field.span, StashKey::MaybeFruTypo)
+ {
+ diag.delay_as_bug();
+ }
+
+ // Use a (somewhat arbitrary) filtering heuristic to avoid printing
+ // expressions that are either too long or have control characters
+ // such as newlines in them.
+ let expr = self
.tcx
.sess
.source_map()
.span_to_snippet(range_end.expr.span)
- .map(|s| format!(" from `{s}`"))
- .unwrap_or_default();
- err.span_suggestion(
- range_start.span.shrink_to_hi(),
- &format!("to set the remaining fields{instead}, separate the last named field with a comma"),
- ",",
- Applicability::MaybeIncorrect,
- );
+ .ok()
+ .filter(|s| s.len() < 25 && !s.contains(|c: char| c.is_control()));
+
+ let fru_span = self
+ .tcx
+ .sess
+ .source_map()
+ .span_extend_while(range_start.span, |c| c.is_whitespace())
+ .unwrap_or(range_start.span)
+ .shrink_to_hi()
+ .to(range_end.span);
+
+ err.subdiagnostic(TypeMismatchFruTypo {
+ expr_span: range_start.span,
+ fru_span,
+ expr,
+ });
}
}
@@ -1961,7 +1992,11 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
expr_span: Span,
) {
if variant.is_recovered() {
- self.set_tainted_by_errors();
+ self.set_tainted_by_errors(
+ self.tcx
+ .sess
+ .delay_span_bug(expr_span, "parser recovered but no error was emitted"),
+ );
return;
}
let mut err = self.err_ctxt().type_error_struct_with_diag(
@@ -1991,8 +2026,8 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
);
let variant_ident_span = self.tcx.def_ident_span(variant.def_id).unwrap();
- match variant.ctor_kind {
- CtorKind::Fn => match ty.kind() {
+ match variant.ctor_kind() {
+ Some(CtorKind::Fn) => match ty.kind() {
ty::Adt(adt, ..) if adt.is_enum() => {
err.span_label(
variant_ident_span,
@@ -2300,12 +2335,9 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
base: &'tcx hir::Expr<'tcx>,
ty: Ty<'tcx>,
) {
- let output_ty = match self.get_impl_future_output_ty(ty) {
- Some(output_ty) => self.resolve_vars_if_possible(output_ty),
- _ => return,
- };
+ let Some(output_ty) = self.get_impl_future_output_ty(ty) else { return; };
let mut add_label = true;
- if let ty::Adt(def, _) = output_ty.skip_binder().kind() {
+ if let ty::Adt(def, _) = output_ty.kind() {
// no field access on enum type
if !def.is_enum() {
if def
@@ -2738,7 +2770,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
Some((index_ty, element_ty)) => {
// two-phase not needed because index_ty is never mutable
self.demand_coerce(idx, idx_t, index_ty, None, AllowTwoPhase::No);
- self.select_obligations_where_possible(false, |errors| {
+ self.select_obligations_where_possible(|errors| {
self.point_at_index_if_possible(errors, idx.span)
});
element_ty
@@ -2777,8 +2809,8 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
);
}
}
- err.emit();
- self.tcx.ty_error()
+ let reported = err.emit();
+ self.tcx.ty_error_with_guaranteed(reported)
}
}
}
@@ -2791,7 +2823,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
) {
for error in errors {
match error.obligation.predicate.kind().skip_binder() {
- ty::PredicateKind::Trait(predicate)
+ ty::PredicateKind::Clause(ty::Clause::Trait(predicate))
if self.tcx.is_diagnostic_item(sym::SliceIndex, predicate.trait_ref.def_id) => {
}
_ => continue,
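Several hunks above replace bare `tcx.ty_error()` calls with `ty_error_with_guaranteed(reported)`, threading an `ErrorGuaranteed` token from the emitted diagnostic into the error type. A standalone sketch of that proof-token pattern, with every type reimplemented locally rather than taken from rustc:

// The error type can only be constructed by presenting an `ErrorGuaranteed`,
// which in turn can only be obtained by actually emitting a diagnostic.
#[derive(Clone, Copy, Debug)]
struct ErrorGuaranteed(());

struct Diag {
    msg: String,
}

impl Diag {
    fn emit(self) -> ErrorGuaranteed {
        eprintln!("error: {}", self.msg);
        ErrorGuaranteed(())
    }
}

#[derive(Debug)]
enum Ty {
    I32,
    // The error variant carries the token, so an error type without a
    // reported diagnostic cannot be built by accident.
    Error(ErrorGuaranteed),
}

fn ty_error_with_guaranteed(guar: ErrorGuaranteed) -> Ty {
    Ty::Error(guar)
}

fn main() {
    let reported = Diag { msg: "type mismatch".into() }.emit();
    let ty = ty_error_with_guaranteed(reported);
    println!("{ty:?}");
    let _ = Ty::I32;
}

The design choice is that silent error types become unrepresentable: anywhere the checker wants an error type, it must first prove a diagnostic (or at least a delayed bug) was raised.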
diff --git a/compiler/rustc_hir_typeck/src/expr_use_visitor.rs b/compiler/rustc_hir_typeck/src/expr_use_visitor.rs
index fce2a5888..03b174c77 100644
--- a/compiler/rustc_hir_typeck/src/expr_use_visitor.rs
+++ b/compiler/rustc_hir_typeck/src/expr_use_visitor.rs
@@ -252,11 +252,11 @@ impl<'a, 'tcx> ExprUseVisitor<'a, 'tcx> {
hir::ExprKind::Match(ref discr, arms, _) => {
let discr_place = return_if_err!(self.mc.cat_expr(discr));
- self.maybe_read_scrutinee(
+ return_if_err!(self.maybe_read_scrutinee(
discr,
discr_place.clone(),
arms.iter().map(|arm| arm.pat),
- );
+ ));
// treatment of the discriminant is handled while walking the arms.
for arm in arms {
@@ -352,8 +352,8 @@ impl<'a, 'tcx> ExprUseVisitor<'a, 'tcx> {
self.consume_expr(base);
}
- hir::ExprKind::Closure { .. } => {
- self.walk_captures(expr);
+ hir::ExprKind::Closure(closure) => {
+ self.walk_captures(closure);
}
hir::ExprKind::Box(ref base) => {
@@ -390,7 +390,7 @@ impl<'a, 'tcx> ExprUseVisitor<'a, 'tcx> {
discr: &Expr<'_>,
discr_place: PlaceWithHirId<'tcx>,
pats: impl Iterator<Item = &'t hir::Pat<'t>>,
- ) {
+ ) -> Result<(), ()> {
// Matching should not always be considered a use of the place, hence
// discr does not necessarily need to be borrowed.
// We only want to borrow discr if the pattern contain something other
@@ -398,7 +398,7 @@ impl<'a, 'tcx> ExprUseVisitor<'a, 'tcx> {
let ExprUseVisitor { ref mc, body_owner: _, delegate: _ } = *self;
let mut needs_to_be_read = false;
for pat in pats {
- return_if_err!(mc.cat_pattern(discr_place.clone(), pat, |place, pat| {
+ mc.cat_pattern(discr_place.clone(), pat, |place, pat| {
match &pat.kind {
PatKind::Binding(.., opt_sub_pat) => {
// If the opt_sub_pat is None, than the binding does not count as
@@ -453,7 +453,7 @@ impl<'a, 'tcx> ExprUseVisitor<'a, 'tcx> {
// examined
}
}
- }));
+ })?
}
if needs_to_be_read {
@@ -474,6 +474,7 @@ impl<'a, 'tcx> ExprUseVisitor<'a, 'tcx> {
// that the discriminant has been initialized.
self.walk_expr(discr);
}
+ Ok(())
}
fn walk_local<F>(
@@ -490,7 +491,11 @@ impl<'a, 'tcx> ExprUseVisitor<'a, 'tcx> {
f(self);
if let Some(els) = els {
// borrowing because we need to test the discriminant
- self.maybe_read_scrutinee(expr, expr_place.clone(), from_ref(pat).iter());
+ return_if_err!(self.maybe_read_scrutinee(
+ expr,
+ expr_place.clone(),
+ from_ref(pat).iter()
+ ));
self.walk_block(els)
}
self.walk_irrefutable_pat(&expr_place, &pat);
@@ -518,6 +523,11 @@ impl<'a, 'tcx> ExprUseVisitor<'a, 'tcx> {
// Consume the expressions supplying values for each field.
for field in fields {
self.consume_expr(field.expr);
+
+ // The struct path probably didn't resolve
+ if self.mc.typeck_results.opt_field_index(field.hir_id).is_none() {
+ self.tcx().sess.delay_span_bug(field.span, "couldn't resolve index for field");
+ }
}
let with_expr = match *opt_with {
@@ -535,9 +545,9 @@ impl<'a, 'tcx> ExprUseVisitor<'a, 'tcx> {
ty::Adt(adt, substs) if adt.is_struct() => {
// Consume those fields of the with expression that are needed.
for (f_index, with_field) in adt.non_enum_variant().fields.iter().enumerate() {
- let is_mentioned = fields.iter().any(|f| {
- self.tcx().field_index(f.hir_id, self.mc.typeck_results) == f_index
- });
+ let is_mentioned = fields
+ .iter()
+ .any(|f| self.mc.typeck_results.opt_field_index(f.hir_id) == Some(f_index));
if !is_mentioned {
let field_place = self.mc.cat_projection(
&*with_expr,
@@ -745,7 +755,7 @@ impl<'a, 'tcx> ExprUseVisitor<'a, 'tcx> {
///
/// - When reporting the Place back to the Delegate, ensure that the UpvarId uses the enclosing
/// closure as the DefId.
- fn walk_captures(&mut self, closure_expr: &hir::Expr<'_>) {
+ fn walk_captures(&mut self, closure_expr: &hir::Closure<'_>) {
fn upvar_is_local_variable<'tcx>(
upvars: Option<&'tcx FxIndexMap<hir::HirId, hir::Upvar>>,
upvar_id: hir::HirId,
@@ -757,7 +767,7 @@ impl<'a, 'tcx> ExprUseVisitor<'a, 'tcx> {
debug!("walk_captures({:?})", closure_expr);
let tcx = self.tcx();
- let closure_def_id = tcx.hir().local_def_id(closure_expr.hir_id);
+ let closure_def_id = closure_expr.def_id;
let upvars = tcx.upvars_mentioned(self.body_owner);
// For purposes of this function, generator and closures are equivalent.
@@ -829,10 +839,11 @@ impl<'a, 'tcx> ExprUseVisitor<'a, 'tcx> {
// be a local variable
PlaceBase::Local(*var_hir_id)
};
+ let closure_hir_id = tcx.hir().local_def_id_to_hir_id(closure_def_id);
let place_with_id = PlaceWithHirId::new(
- capture_info.path_expr_id.unwrap_or(
- capture_info.capture_kind_expr_id.unwrap_or(closure_expr.hir_id),
- ),
+ capture_info
+ .path_expr_id
+ .unwrap_or(capture_info.capture_kind_expr_id.unwrap_or(closure_hir_id)),
place.base_ty,
place_base,
place.projections.clone(),
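The visitor change above turns `maybe_read_scrutinee` into a `Result`-returning function so errors propagate with `?` instead of being swallowed by `return_if_err!` deep inside the walk. A standalone sketch of the same control-flow shape, with trivial stand-in functions in place of the real categorization logic:

fn cat_pattern(pat: &str) -> Result<(), ()> {
    // Stand-in for `mc.cat_pattern`; fails on an "unresolvable" pattern.
    if pat.is_empty() { Err(()) } else { Ok(()) }
}

fn maybe_read_scrutinee<'a>(pats: impl Iterator<Item = &'a str>) -> Result<(), ()> {
    for pat in pats {
        // `?` hands the error to the caller instead of returning from the
        // middle of the visitor.
        cat_pattern(pat)?;
    }
    Ok(())
}

fn walk_expr(pats: &[&str]) {
    // Caller-side equivalent of `return_if_err!(...)`.
    if maybe_read_scrutinee(pats.iter().copied()).is_err() {
        return;
    }
    println!("scrutinee read");
}

fn main() {
    walk_expr(&["Some(x)", "None"]);
    walk_expr(&[""]); // error path: bails quietly, as the visitor does
}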
diff --git a/compiler/rustc_hir_typeck/src/fallback.rs b/compiler/rustc_hir_typeck/src/fallback.rs
index 747ecb036..ac6b0924a 100644
--- a/compiler/rustc_hir_typeck/src/fallback.rs
+++ b/compiler/rustc_hir_typeck/src/fallback.rs
@@ -7,16 +7,16 @@ use rustc_data_structures::{
use rustc_middle::ty::{self, Ty};
impl<'tcx> FnCtxt<'_, 'tcx> {
- /// Performs type inference fallback, returning true if any fallback
- /// occurs.
- pub(super) fn type_inference_fallback(&self) -> bool {
+ /// Performs type inference fallback, setting `FnCtxt::fallback_has_occurred`
+ /// if fallback has occurred.
+ pub(super) fn type_inference_fallback(&self) {
debug!(
"type-inference-fallback start obligations: {:#?}",
self.fulfillment_cx.borrow_mut().pending_obligations()
);
// All type checking constraints were added, try to fallback unsolved variables.
- self.select_obligations_where_possible(false, |_| {});
+ self.select_obligations_where_possible(|_| {});
debug!(
"type-inference-fallback post selection obligations: {:#?}",
@@ -26,18 +26,17 @@ impl<'tcx> FnCtxt<'_, 'tcx> {
// Check if we have any unsolved variables. If not, no need for fallback.
let unsolved_variables = self.unsolved_variables();
if unsolved_variables.is_empty() {
- return false;
+ return;
}
let diverging_fallback = self.calculate_diverging_fallback(&unsolved_variables);
- let mut fallback_has_occurred = false;
// We do fallback in two passes, to try to generate
// better error messages.
// The first time, we do *not* replace opaque types.
for ty in unsolved_variables {
debug!("unsolved_variable = {:?}", ty);
- fallback_has_occurred |= self.fallback_if_possible(ty, &diverging_fallback);
+ self.fallback_if_possible(ty, &diverging_fallback);
}
// We now see if we can make progress. This might cause us to
@@ -63,9 +62,7 @@ impl<'tcx> FnCtxt<'_, 'tcx> {
// If we had tried to fallback the opaque inference variable to `MyType`,
// we will generate a confusing type-check error that does not explicitly
// refer to opaque types.
- self.select_obligations_where_possible(fallback_has_occurred, |_| {});
-
- fallback_has_occurred
+ self.select_obligations_where_possible(|_| {});
}
// Tries to apply a fallback to `ty` if it is an unsolved variable.
@@ -81,12 +78,13 @@ impl<'tcx> FnCtxt<'_, 'tcx> {
// Fallback becomes very dubious if we have encountered
// type-checking errors. In that case, fallback to Error.
//
- // The return value indicates whether fallback has occurred.
+ // Sets `FnCtxt::fallback_has_occurred` if fallback is performed
+ // during this call.
fn fallback_if_possible(
&self,
ty: Ty<'tcx>,
diverging_fallback: &FxHashMap<Ty<'tcx>, Ty<'tcx>>,
- ) -> bool {
+ ) {
// Careful: we do NOT shallow-resolve `ty`. We know that `ty`
// is an unsolved variable, and we determine its fallback
// based solely on how it was created, not what other type
@@ -106,12 +104,12 @@ impl<'tcx> FnCtxt<'_, 'tcx> {
// type, `?T` is not considered unsolved, but `?I` is. The
// same is true for float variables.)
let fallback = match ty.kind() {
- _ if self.is_tainted_by_errors() => self.tcx.ty_error(),
+ _ if let Some(e) = self.tainted_by_errors() => self.tcx.ty_error_with_guaranteed(e),
ty::Infer(ty::IntVar(_)) => self.tcx.types.i32,
ty::Infer(ty::FloatVar(_)) => self.tcx.types.f64,
_ => match diverging_fallback.get(&ty) {
Some(&fallback_ty) => fallback_ty,
- None => return false,
+ None => return,
},
};
debug!("fallback_if_possible(ty={:?}): defaulting to `{:?}`", ty, fallback);
@@ -122,7 +120,7 @@ impl<'tcx> FnCtxt<'_, 'tcx> {
.map(|origin| origin.span)
.unwrap_or(rustc_span::DUMMY_SP);
self.demand_eqtype(span, ty, fallback);
- true
+ self.fallback_has_occurred.set(true);
}
/// The "diverging fallback" system is rather complicated. This is
diff --git a/compiler/rustc_hir_typeck/src/fn_ctxt/_impl.rs b/compiler/rustc_hir_typeck/src/fn_ctxt/_impl.rs
index 6a1cffe3e..952d27262 100644
--- a/compiler/rustc_hir_typeck/src/fn_ctxt/_impl.rs
+++ b/compiler/rustc_hir_typeck/src/fn_ctxt/_impl.rs
@@ -16,13 +16,13 @@ use rustc_hir_analysis::astconv::{
};
use rustc_infer::infer::canonical::{Canonical, OriginalQueryValues, QueryResponse};
use rustc_infer::infer::error_reporting::TypeAnnotationNeeded::E0282;
-use rustc_infer::infer::{InferOk, InferResult};
+use rustc_infer::infer::InferResult;
use rustc_middle::ty::adjustment::{Adjust, Adjustment, AutoBorrow, AutoBorrowMutability};
+use rustc_middle::ty::error::TypeError;
use rustc_middle::ty::fold::TypeFoldable;
use rustc_middle::ty::visit::TypeVisitable;
use rustc_middle::ty::{
- self, AdtKind, CanonicalUserType, DefIdTree, EarlyBinder, GenericParamDefKind, ToPolyTraitRef,
- ToPredicate, Ty, UserType,
+ self, AdtKind, CanonicalUserType, DefIdTree, EarlyBinder, GenericParamDefKind, Ty, UserType,
};
use rustc_middle::ty::{GenericArgKind, InternalSubsts, SubstsRef, UserSelfTy, UserSubsts};
use rustc_session::lint;
@@ -30,11 +30,8 @@ use rustc_span::def_id::LocalDefId;
use rustc_span::hygiene::DesugaringKind;
use rustc_span::symbol::{kw, sym, Ident};
use rustc_span::{Span, DUMMY_SP};
-use rustc_trait_selection::infer::InferCtxtExt as _;
use rustc_trait_selection::traits::error_reporting::TypeErrCtxtExt as _;
-use rustc_trait_selection::traits::{
- self, ObligationCause, ObligationCauseCode, TraitEngine, TraitEngineExt,
-};
+use rustc_trait_selection::traits::{self, NormalizeExt, ObligationCauseCode, ObligationCtxt};
use std::collections::hash_map::Entry;
use std::slice;
@@ -106,7 +103,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
// possible. This can help substantially when there are
// indirect dependencies that don't seem worth tracking
// precisely.
- self.select_obligations_where_possible(false, mutate_fulfillment_errors);
+ self.select_obligations_where_possible(mutate_fulfillment_errors);
self.resolve_vars_if_possible(ty)
}
@@ -142,9 +139,8 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
debug!("write_ty({:?}, {:?}) in fcx {}", id, self.resolve_vars_if_possible(ty), self.tag());
self.typeck_results.borrow_mut().node_types_mut().insert(id, ty);
- if ty.references_error() {
- self.has_errors.set(true);
- self.set_tainted_by_errors();
+ if let Err(e) = ty.error_reported() {
+ self.set_tainted_by_errors(e);
}
}
@@ -346,7 +342,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
{
debug!("instantiate_type_scheme(value={:?}, substs={:?})", value, substs);
let value = EarlyBinder(value).subst(self.tcx, substs);
- let result = self.normalize_associated_types_in(span, value);
+ let result = self.normalize(span, value);
debug!("instantiate_type_scheme = {:?}", result);
result
}
@@ -362,7 +358,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
let bounds = self.tcx.predicates_of(def_id);
let spans: Vec<Span> = bounds.predicates.iter().map(|(_, span)| *span).collect();
let result = bounds.instantiate(self.tcx, substs);
- let result = self.normalize_associated_types_in(span, result);
+ let result = self.normalize(span, result);
debug!(
"instantiate_bounds(bounds={:?}, substs={:?}) = {:?}, {:?}",
bounds, substs, result, spans,
@@ -370,50 +366,12 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
(result, spans)
}
- pub(in super::super) fn normalize_associated_types_in<T>(&self, span: Span, value: T) -> T
- where
- T: TypeFoldable<'tcx>,
- {
- self.inh.normalize_associated_types_in(span, self.body_id, self.param_env, value)
- }
-
- pub(in super::super) fn normalize_associated_types_in_as_infer_ok<T>(
- &self,
- span: Span,
- value: T,
- ) -> InferOk<'tcx, T>
+ pub(in super::super) fn normalize<T>(&self, span: Span, value: T) -> T
where
T: TypeFoldable<'tcx>,
{
- self.inh.partially_normalize_associated_types_in(
- ObligationCause::misc(span, self.body_id),
- self.param_env,
- value,
- )
- }
-
- pub(in super::super) fn normalize_op_associated_types_in_as_infer_ok<T>(
- &self,
- span: Span,
- value: T,
- opt_input_expr: Option<&hir::Expr<'_>>,
- ) -> InferOk<'tcx, T>
- where
- T: TypeFoldable<'tcx>,
- {
- self.inh.partially_normalize_associated_types_in(
- ObligationCause::new(
- span,
- self.body_id,
- traits::BinOp {
- rhs_span: opt_input_expr.map(|expr| expr.span),
- is_lit: opt_input_expr
- .map_or(false, |expr| matches!(expr.kind, ExprKind::Lit(_))),
- output_ty: None,
- },
- ),
- self.param_env,
- value,
+ self.register_infer_ok_obligations(
+ self.at(&self.misc(span), self.param_env).normalize(value),
)
}
@@ -490,11 +448,10 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
match length {
&hir::ArrayLen::Infer(_, span) => self.ct_infer(self.tcx.types.usize, None, span),
hir::ArrayLen::Body(anon_const) => {
- let const_def_id = self.tcx.hir().local_def_id(anon_const.hir_id);
- let span = self.tcx.hir().span(anon_const.hir_id);
- let c = ty::Const::from_anon_const(self.tcx, const_def_id);
+ let span = self.tcx.def_span(anon_const.def_id);
+ let c = ty::Const::from_anon_const(self.tcx, anon_const.def_id);
self.register_wf_obligation(c.into(), span, ObligationCauseCode::WellFormed(None));
- self.normalize_associated_types_in(span, c)
+ self.normalize(span, c)
}
}
}
@@ -504,10 +461,8 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
ast_c: &hir::AnonConst,
param_def_id: DefId,
) -> ty::Const<'tcx> {
- let const_def = ty::WithOptConstParam {
- did: self.tcx.hir().local_def_id(ast_c.hir_id),
- const_param_did: Some(param_def_id),
- };
+ let const_def =
+ ty::WithOptConstParam { did: ast_c.def_id, const_param_did: Some(param_def_id) };
let c = ty::Const::from_opt_const_arg_anon_const(self.tcx, const_def);
self.register_wf_obligation(
c.into(),
@@ -534,7 +489,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
pub fn node_ty(&self, id: hir::HirId) -> Ty<'tcx> {
match self.typeck_results.borrow().node_types().get(id) {
Some(&t) => t,
- None if self.is_tainted_by_errors() => self.tcx.ty_error(),
+ None if let Some(e) = self.tainted_by_errors() => self.tcx.ty_error_with_guaranteed(e),
None => {
bug!(
"no type for node {}: {} in fcx {}",
@@ -549,7 +504,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
pub fn node_ty_opt(&self, id: hir::HirId) -> Option<Ty<'tcx>> {
match self.typeck_results.borrow().node_types().get(id) {
Some(&t) => Some(t),
- None if self.is_tainted_by_errors() => Some(self.tcx.ty_error()),
+ None if let Some(e) = self.tainted_by_errors() => Some(self.tcx.ty_error_with_guaranteed(e)),
None => None,
}
}
@@ -564,9 +519,10 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
// WF obligations never themselves fail, so no real need to give a detailed cause:
let cause = traits::ObligationCause::new(span, self.body_id, code);
self.register_predicate(traits::Obligation::new(
+ self.tcx,
cause,
self.param_env,
- ty::Binder::dummy(ty::PredicateKind::WellFormed(arg)).to_predicate(self.tcx),
+ ty::Binder::dummy(ty::PredicateKind::WellFormed(arg)),
));
}
@@ -588,7 +544,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
field: &'tcx ty::FieldDef,
substs: SubstsRef<'tcx>,
) -> Ty<'tcx> {
- self.normalize_associated_types_in(span, field.ty(self.tcx, substs))
+ self.normalize(span, field.ty(self.tcx, substs))
}
pub(in super::super) fn resolve_rvalue_scopes(&self, def_id: DefId) {
@@ -601,7 +557,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
pub(in super::super) fn resolve_generator_interiors(&self, def_id: DefId) {
let mut generators = self.deferred_generator_interiors.borrow_mut();
for (body_id, interior, kind) in generators.drain(..) {
- self.select_obligations_where_possible(false, |_| {});
+ self.select_obligations_where_possible(|_| {});
crate::generator_interior::resolve_interior(self, def_id, body_id, interior, kind);
}
}
@@ -612,25 +568,20 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
if !errors.is_empty() {
self.adjust_fulfillment_errors_for_expr_obligation(&mut errors);
- self.err_ctxt().report_fulfillment_errors(&errors, self.inh.body_id, false);
+ self.err_ctxt().report_fulfillment_errors(&errors, self.inh.body_id);
}
}
/// Select as many obligations as we can at present.
pub(in super::super) fn select_obligations_where_possible(
&self,
- fallback_has_occurred: bool,
mutate_fulfillment_errors: impl Fn(&mut Vec<traits::FulfillmentError<'tcx>>),
) {
let mut result = self.fulfillment_cx.borrow_mut().select_where_possible(self);
if !result.is_empty() {
mutate_fulfillment_errors(&mut result);
self.adjust_fulfillment_errors_for_expr_obligation(&mut result);
- self.err_ctxt().report_fulfillment_errors(
- &result,
- self.inh.body_id,
- fallback_has_occurred,
- );
+ self.err_ctxt().report_fulfillment_errors(&result, self.inh.body_id);
}
}
@@ -650,12 +601,8 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
}
#[instrument(skip(self), level = "debug")]
- fn self_type_matches_expected_vid(
- &self,
- trait_ref: ty::PolyTraitRef<'tcx>,
- expected_vid: ty::TyVid,
- ) -> bool {
- let self_ty = self.shallow_resolve(trait_ref.skip_binder().self_ty());
+ fn self_type_matches_expected_vid(&self, self_ty: Ty<'tcx>, expected_vid: ty::TyVid) -> bool {
+ let self_ty = self.shallow_resolve(self_ty);
debug!(?self_ty);
match *self_ty.kind() {
@@ -674,54 +621,64 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
pub(in super::super) fn obligations_for_self_ty<'b>(
&'b self,
self_ty: ty::TyVid,
- ) -> impl Iterator<Item = (ty::PolyTraitRef<'tcx>, traits::PredicateObligation<'tcx>)>
- + Captures<'tcx>
- + 'b {
+ ) -> impl DoubleEndedIterator<Item = traits::PredicateObligation<'tcx>> + Captures<'tcx> + 'b
+ {
// FIXME: consider using `sub_root_var` here so we
// can see through subtyping.
let ty_var_root = self.root_var(self_ty);
trace!("pending_obligations = {:#?}", self.fulfillment_cx.borrow().pending_obligations());
- self.fulfillment_cx
- .borrow()
- .pending_obligations()
- .into_iter()
- .filter_map(move |obligation| {
- let bound_predicate = obligation.predicate.kind();
- match bound_predicate.skip_binder() {
- ty::PredicateKind::Projection(data) => Some((
- bound_predicate.rebind(data).required_poly_trait_ref(self.tcx),
- obligation,
- )),
- ty::PredicateKind::Trait(data) => {
- Some((bound_predicate.rebind(data).to_poly_trait_ref(), obligation))
- }
- ty::PredicateKind::Subtype(..) => None,
- ty::PredicateKind::Coerce(..) => None,
- ty::PredicateKind::RegionOutlives(..) => None,
- ty::PredicateKind::TypeOutlives(..) => None,
- ty::PredicateKind::WellFormed(..) => None,
- ty::PredicateKind::ObjectSafe(..) => None,
- ty::PredicateKind::ConstEvaluatable(..) => None,
- ty::PredicateKind::ConstEquate(..) => None,
- // N.B., this predicate is created by breaking down a
- // `ClosureType: FnFoo()` predicate, where
- // `ClosureType` represents some `Closure`. It can't
- // possibly be referring to the current closure,
- // because we haven't produced the `Closure` for
- // this closure yet; this is exactly why the other
- // code is looking for a self type of an unresolved
- // inference variable.
- ty::PredicateKind::ClosureKind(..) => None,
- ty::PredicateKind::TypeWellFormedFromEnv(..) => None,
+ self.fulfillment_cx.borrow().pending_obligations().into_iter().filter_map(
+ move |obligation| match &obligation.predicate.kind().skip_binder() {
+ ty::PredicateKind::Clause(ty::Clause::Projection(data))
+ if self.self_type_matches_expected_vid(
+ data.projection_ty.self_ty(),
+ ty_var_root,
+ ) =>
+ {
+ Some(obligation)
}
- })
- .filter(move |(tr, _)| self.self_type_matches_expected_vid(*tr, ty_var_root))
+ ty::PredicateKind::Clause(ty::Clause::Trait(data))
+ if self.self_type_matches_expected_vid(data.self_ty(), ty_var_root) =>
+ {
+ Some(obligation)
+ }
+
+ ty::PredicateKind::Clause(ty::Clause::Trait(..))
+ | ty::PredicateKind::Clause(ty::Clause::Projection(..))
+ | ty::PredicateKind::Subtype(..)
+ | ty::PredicateKind::Coerce(..)
+ | ty::PredicateKind::Clause(ty::Clause::RegionOutlives(..))
+ | ty::PredicateKind::Clause(ty::Clause::TypeOutlives(..))
+ | ty::PredicateKind::WellFormed(..)
+ | ty::PredicateKind::ObjectSafe(..)
+ | ty::PredicateKind::ConstEvaluatable(..)
+ | ty::PredicateKind::ConstEquate(..)
+ // N.B., this predicate is created by breaking down a
+ // `ClosureType: FnFoo()` predicate, where
+ // `ClosureType` represents some `Closure`. It can't
+ // possibly be referring to the current closure,
+ // because we haven't produced the `Closure` for
+ // this closure yet; this is exactly why the other
+ // code is looking for a self type of an unresolved
+ // inference variable.
+ | ty::PredicateKind::ClosureKind(..)
+ | ty::PredicateKind::Ambiguous
+ | ty::PredicateKind::TypeWellFormedFromEnv(..) => None,
+ },
+ )
}
pub(in super::super) fn type_var_is_sized(&self, self_ty: ty::TyVid) -> bool {
- self.obligations_for_self_ty(self_ty)
- .any(|(tr, _)| Some(tr.def_id()) == self.tcx.lang_items().sized_trait())
+ let sized_did = self.tcx.lang_items().sized_trait();
+ self.obligations_for_self_ty(self_ty).any(|obligation| {
+ match obligation.predicate.kind().skip_binder() {
+ ty::PredicateKind::Clause(ty::Clause::Trait(data)) => {
+ Some(data.def_id()) == sized_did
+ }
+ _ => false,
+ }
+ })
}
pub(in super::super) fn err_args(&self, len: usize) -> Vec<Ty<'tcx>> {
@@ -769,34 +726,16 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
let expect_args = self
.fudge_inference_if_ok(|| {
+ let ocx = ObligationCtxt::new_in_snapshot(self);
+
// Attempt to apply a subtyping relationship between the formal
// return type (likely containing type variables if the function
// is polymorphic) and the expected return type.
// No argument expectations are produced if unification fails.
let origin = self.misc(call_span);
- let ures = self.at(&origin, self.param_env).sup(ret_ty, formal_ret);
-
- // FIXME(#27336) can't use ? here, Try::from_error doesn't default
- // to identity so the resulting type is not constrained.
- match ures {
- Ok(ok) => {
- // Process any obligations locally as much as
- // we can. We don't care if some things turn
- // out unconstrained or ambiguous, as we're
- // just trying to get hints here.
- let errors = self.save_and_restore_in_snapshot_flag(|_| {
- let mut fulfill = <dyn TraitEngine<'_>>::new(self.tcx);
- for obligation in ok.obligations {
- fulfill.register_predicate_obligation(self, obligation);
- }
- fulfill.select_where_possible(self)
- });
-
- if !errors.is_empty() {
- return Err(());
- }
- }
- Err(_) => return Err(()),
+ ocx.sup(&origin, self.param_env, ret_ty, formal_ret)?;
+ if !ocx.select_where_possible().is_empty() {
+ return Err(TypeError::Mismatch);
}
// Record all the argument types, with the substitutions
@@ -1132,7 +1071,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
if let Res::Local(hid) = res {
let ty = self.local_ty(span, hid).decl_ty;
- let ty = self.normalize_associated_types_in(span, ty);
+ let ty = self.normalize(span, ty);
self.write_ty(hir_id, ty);
return (ty, res);
}
@@ -1173,9 +1112,9 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
explicit_late_bound = ExplicitLateBound::Yes;
}
- if let Err(GenericArgCountMismatch { reported: Some(_), .. }) = arg_count.correct {
+ if let Err(GenericArgCountMismatch { reported: Some(e), .. }) = arg_count.correct {
infer_args_for_err.insert(index);
- self.set_tainted_by_errors(); // See issue #53251.
+ self.set_tainted_by_errors(e); // See issue #53251.
}
}
@@ -1189,11 +1128,8 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
match *ty.kind() {
ty::Adt(adt_def, substs) if adt_def.has_ctor() => {
let variant = adt_def.non_enum_variant();
- let ctor_def_id = variant.ctor_def_id.unwrap();
- (
- Res::Def(DefKind::Ctor(CtorOf::Struct, variant.ctor_kind), ctor_def_id),
- Some(substs),
- )
+ let (ctor_kind, ctor_def_id) = variant.ctor.unwrap();
+ (Res::Def(DefKind::Ctor(CtorOf::Struct, ctor_kind), ctor_def_id), Some(substs))
}
_ => {
let mut err = tcx.sess.struct_span_err(
@@ -1215,9 +1151,8 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
}
}
}
- err.emit();
-
- return (tcx.ty_error(), res);
+ let reported = err.emit();
+ return (tcx.ty_error_with_guaranteed(reported), res);
}
}
} else {
@@ -1466,12 +1401,12 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
if !ty.is_ty_var() {
ty
} else {
- if !self.is_tainted_by_errors() {
+ let e = self.tainted_by_errors().unwrap_or_else(|| {
self.err_ctxt()
.emit_inference_failure_err((**self).body_id, sp, ty.into(), E0282, true)
- .emit();
- }
- let err = self.tcx.ty_error();
+ .emit()
+ });
+ let err = self.tcx.ty_error_with_guaranteed(e);
self.demand_suptype(sp, err, ty);
err
}
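The `set_tainted_by_errors(e)` and `ty_error_with_guaranteed(reported)` calls above thread through a proof that a diagnostic was actually emitted. A hedged standalone sketch of that pattern follows; `ErrorToken`, `Diag`, and `SimpleTy` are invented names for illustration, not rustc types.

    // Sketch of the "proof token" pattern: emitting a diagnostic returns a
    // zero-sized token, and the error placeholder can only be built from such
    // a token, so "error type without a reported error" is unrepresentable.
    #[derive(Clone, Copy, Debug)]
    struct ErrorToken(());

    struct Diag {
        msg: String,
    }

    impl Diag {
        fn emit(self) -> ErrorToken {
            eprintln!("error: {}", self.msg);
            ErrorToken(())
        }
    }

    #[derive(Debug)]
    enum SimpleTy {
        Int,
        // The error variant carries the proof that a diagnostic was emitted.
        Error(ErrorToken),
    }

    fn check(input: &str) -> SimpleTy {
        if input == "i32" {
            SimpleTy::Int
        } else {
            let token = Diag { msg: format!("unknown type `{input}`") }.emit();
            SimpleTy::Error(token)
        }
    }

    fn main() {
        println!("{:?}", check("i32"));
        println!("{:?}", check("wat"));
    }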
diff --git a/compiler/rustc_hir_typeck/src/fn_ctxt/checks.rs b/compiler/rustc_hir_typeck/src/fn_ctxt/checks.rs
index 8e0fcb56c..60fec05d3 100644
--- a/compiler/rustc_hir_typeck/src/fn_ctxt/checks.rs
+++ b/compiler/rustc_hir_typeck/src/fn_ctxt/checks.rs
@@ -73,7 +73,6 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
let ty = self.typeck_results.borrow().expr_ty_adjusted(expr);
let ty = self.resolve_vars_if_possible(ty);
if ty.has_non_region_infer() {
- assert!(self.is_tainted_by_errors());
self.tcx.ty_error()
} else {
self.tcx.erase_regions(ty)
@@ -136,6 +135,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
tuple_arguments,
Some(method.def_id),
);
+
method.sig.output()
}
@@ -214,7 +214,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
"cannot use call notation; the first type parameter \
for the function trait is neither a tuple nor unit"
)
- .emit();
+ .delay_as_bug();
(self.err_args(provided_args.len()), None)
}
}
@@ -344,7 +344,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
// an "opportunistic" trait resolution of any trait bounds on
// the call. This helps coercions.
if check_closures {
- self.select_obligations_where_possible(false, |_| {})
+ self.select_obligations_where_possible(|_| {})
}
// Check each argument, to satisfy the input it was provided for
@@ -511,8 +511,11 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
tys.into_iter().any(|ty| ty.references_error() || ty.is_ty_var())
}
- self.set_tainted_by_errors();
let tcx = self.tcx;
+ // FIXME: taint after emitting errors and pass through an `ErrorGuaranteed`
+ self.set_tainted_by_errors(
+ tcx.sess.delay_span_bug(call_span, "no errors reported for args"),
+ );
// Get the argument span in the context of the call span so that
// suggestions and labels are (more) correct when an arg is a
@@ -596,6 +599,18 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
}
};
+ let mk_trace = |span, (formal_ty, expected_ty), provided_ty| {
+ let mismatched_ty = if expected_ty == provided_ty {
+ // If expected == provided, then we must have failed to sup
+ // the formal type. Avoid printing out "expected Ty, found Ty"
+ // in that case.
+ formal_ty
+ } else {
+ expected_ty
+ };
+ TypeTrace::types(&self.misc(span), true, mismatched_ty, provided_ty)
+ };
+
// The algorithm here is inspired by levenshtein distance and longest common subsequence.
// We'll try to detect 4 different types of mistakes:
// - An extra parameter has been provided that doesn't satisfy *any* of the other inputs
@@ -660,10 +675,9 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
// A tuple wrap suggestion actually occurs within,
// so don't do anything special here.
err = self.err_ctxt().report_and_explain_type_error(
- TypeTrace::types(
- &self.misc(*lo),
- true,
- formal_and_expected_inputs[mismatch_idx.into()].1,
+ mk_trace(
+ *lo,
+ formal_and_expected_inputs[mismatch_idx.into()],
provided_arg_tys[mismatch_idx.into()].0,
),
terr,
@@ -747,9 +761,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
errors.drain_filter(|error| {
let Error::Invalid(provided_idx, expected_idx, Compatibility::Incompatible(Some(e))) = error else { return false };
let (provided_ty, provided_span) = provided_arg_tys[*provided_idx];
- let (expected_ty, _) = formal_and_expected_inputs[*expected_idx];
- let cause = &self.misc(provided_span);
- let trace = TypeTrace::types(cause, true, expected_ty, provided_ty);
+ let trace = mk_trace(provided_span, formal_and_expected_inputs[*expected_idx], provided_ty);
if !matches!(trace.cause.as_failure_code(*e), FailureCode::Error0308(_)) {
self.err_ctxt().report_and_explain_type_error(trace, *e).emit();
return true;
@@ -773,8 +785,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
{
let (formal_ty, expected_ty) = formal_and_expected_inputs[*expected_idx];
let (provided_ty, provided_arg_span) = provided_arg_tys[*provided_idx];
- let cause = &self.misc(provided_arg_span);
- let trace = TypeTrace::types(cause, true, expected_ty, provided_ty);
+ let trace = mk_trace(provided_arg_span, (formal_ty, expected_ty), provided_ty);
let mut err = self.err_ctxt().report_and_explain_type_error(trace, *err);
self.emit_coerce_suggestions(
&mut err,
@@ -846,8 +857,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
let (formal_ty, expected_ty) = formal_and_expected_inputs[expected_idx];
let (provided_ty, provided_span) = provided_arg_tys[provided_idx];
if let Compatibility::Incompatible(error) = compatibility {
- let cause = &self.misc(provided_span);
- let trace = TypeTrace::types(cause, true, expected_ty, provided_ty);
+ let trace = mk_trace(provided_span, (formal_ty, expected_ty), provided_ty);
if let Some(e) = error {
self.err_ctxt().note_type_err(
&mut err,
@@ -1200,7 +1210,9 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
let (def, ty) = self.finish_resolving_struct_path(qpath, path_span, hir_id);
let variant = match def {
Res::Err => {
- self.set_tainted_by_errors();
+ self.set_tainted_by_errors(
+ self.tcx.sess.delay_span_bug(path_span, "`Res::Err` but no error emitted"),
+ );
return None;
}
Res::Def(DefKind::Variant, _) => match ty.kind() {
@@ -1334,7 +1346,6 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
// Hide the outer diverging and `has_errors` flags.
let old_diverges = self.diverges.replace(Diverges::Maybe);
- let old_has_errors = self.has_errors.replace(false);
match stmt.kind {
hir::StmtKind::Local(l) => {
@@ -1364,7 +1375,6 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
// Combine the diverging and `has_error` flags.
self.diverges.set(self.diverges.get() | old_diverges);
- self.has_errors.set(self.has_errors.get() | old_has_errors);
}
pub fn check_block_no_value(&self, blk: &'tcx hir::Block<'tcx>) {
@@ -1383,8 +1393,6 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
blk: &'tcx hir::Block<'tcx>,
expected: Expectation<'tcx>,
) -> Ty<'tcx> {
- let prev = self.ps.replace(self.ps.get().recurse(blk));
-
// In some cases, blocks have just one exit, but other blocks
// can be targeted by multiple breaks. This can happen both
// with labeled blocks as well as when we desugar
@@ -1544,15 +1552,10 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
self.diverges.set(prev_diverges);
}
- let mut ty = ctxt.coerce.unwrap().complete(self);
-
- if self.has_errors.get() || ty.references_error() {
- ty = self.tcx.ty_error()
- }
+ let ty = ctxt.coerce.unwrap().complete(self);
self.write_ty(blk.hir_id, ty);
- self.ps.set(prev);
ty
}
@@ -1728,22 +1731,14 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
let hir = self.tcx.hir();
let hir::Node::Expr(expr) = hir.get(hir_id) else { return false; };
- // Skip over mentioning async lang item
- if Some(def_id) == self.tcx.lang_items().from_generator_fn()
- && error.obligation.cause.span.desugaring_kind()
- == Some(rustc_span::DesugaringKind::Async)
- {
- return false;
- }
-
let Some(unsubstituted_pred) =
self.tcx.predicates_of(def_id).instantiate_identity(self.tcx).predicates.into_iter().nth(idx)
else { return false; };
let generics = self.tcx.generics_of(def_id);
let predicate_substs = match unsubstituted_pred.kind().skip_binder() {
- ty::PredicateKind::Trait(pred) => pred.trait_ref.substs,
- ty::PredicateKind::Projection(pred) => pred.projection_ty.substs,
+ ty::PredicateKind::Clause(ty::Clause::Trait(pred)) => pred.trait_ref.substs,
+ ty::PredicateKind::Clause(ty::Clause::Projection(pred)) => pred.projection_ty.substs,
_ => ty::List::empty(),
};
@@ -1920,7 +1915,11 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
receiver: Option<&'tcx hir::Expr<'tcx>>,
args: &'tcx [hir::Expr<'tcx>],
) -> bool {
- let sig = self.tcx.fn_sig(def_id).skip_binder();
+ let ty = self.tcx.type_of(def_id);
+ if !ty.is_fn() {
+ return false;
+ }
+ let sig = ty.fn_sig(self.tcx).skip_binder();
let args_referencing_param: Vec<_> = sig
.inputs()
.iter()
@@ -2091,7 +2090,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
&& let maybe_trait_item_def_id = assoc_item.trait_item_def_id.unwrap_or(def_id)
&& let maybe_trait_def_id = self.tcx.parent(maybe_trait_item_def_id)
// Just an easy way to check "trait_def_id == Fn/FnMut/FnOnce"
- && let Some(call_kind) = ty::ClosureKind::from_def_id(self.tcx, maybe_trait_def_id)
+ && let Some(call_kind) = self.tcx.fn_trait_kind_from_def_id(maybe_trait_def_id)
&& let Some(callee_ty) = callee_ty
{
let callee_ty = callee_ty.peel_refs();
@@ -2115,9 +2114,9 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
for (predicate, span) in
std::iter::zip(instantiated.predicates, instantiated.spans)
{
- if let ty::PredicateKind::Trait(pred) = predicate.kind().skip_binder()
+ if let ty::PredicateKind::Clause(ty::Clause::Trait(pred)) = predicate.kind().skip_binder()
&& pred.self_ty().peel_refs() == callee_ty
- && ty::ClosureKind::from_def_id(self.tcx, pred.def_id()).is_some()
+ && self.tcx.is_fn_trait(pred.def_id())
{
err.span_note(span, "callable defined here");
return;
@@ -2148,13 +2147,10 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
),
);
let obligation = traits::Obligation::new(
+ self.tcx,
traits::ObligationCause::dummy(),
self.param_env,
- ty::Binder::dummy(ty::TraitPredicate {
- trait_ref,
- constness: ty::BoundConstness::NotConst,
- polarity: ty::ImplPolarity::Positive,
- }),
+ ty::Binder::dummy(trait_ref),
);
match SelectionContext::new(&self).select(&obligation) {
Ok(Some(traits::ImplSource::UserDefined(impl_source))) => {
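The new `mk_trace` closure above chooses which type to report when the expected and provided types are the same. A small standalone sketch of that selection, using plain strings in place of `Ty<'tcx>` (the helper below is illustrative only):

    // When "expected" and "provided" are equal, the mismatch must have been
    // against the formal (declared) type, so report that instead of the
    // unhelpful "expected `Foo`, found `Foo`".
    fn mismatched_label<'a>(
        formal: &'a str,
        expected: &'a str,
        provided: &'a str,
    ) -> (&'a str, &'a str) {
        let shown_expected = if expected == provided { formal } else { expected };
        (shown_expected, provided)
    }

    fn main() {
        // Expected and provided coincide: fall back to the formal type.
        assert_eq!(mismatched_label("&str", "String", "String"), ("&str", "String"));
        // Ordinary mismatch: keep the expected type.
        assert_eq!(mismatched_label("T", "u32", "i32"), ("u32", "i32"));
    }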
diff --git a/compiler/rustc_hir_typeck/src/fn_ctxt/mod.rs b/compiler/rustc_hir_typeck/src/fn_ctxt/mod.rs
index 0c600daf4..30b59da78 100644
--- a/compiler/rustc_hir_typeck/src/fn_ctxt/mod.rs
+++ b/compiler/rustc_hir_typeck/src/fn_ctxt/mod.rs
@@ -4,10 +4,11 @@ mod checks;
mod suggestions;
pub use _impl::*;
+use rustc_errors::ErrorGuaranteed;
pub use suggestions::*;
use crate::coercion::DynamicCoerceMany;
-use crate::{Diverges, EnclosingBreakables, Inherited, UnsafetyState};
+use crate::{Diverges, EnclosingBreakables, Inherited};
use rustc_hir as hir;
use rustc_hir::def_id::DefId;
use rustc_hir_analysis::astconv::AstConv;
@@ -21,7 +22,7 @@ use rustc_middle::ty::{self, Const, Ty, TyCtxt};
use rustc_session::Session;
use rustc_span::symbol::Ident;
use rustc_span::{self, Span};
-use rustc_trait_selection::traits::{ObligationCause, ObligationCauseCode};
+use rustc_trait_selection::traits::{ObligationCause, ObligationCauseCode, ObligationCtxt};
use std::cell::{Cell, RefCell};
use std::ops::Deref;
@@ -68,17 +69,11 @@ pub struct FnCtxt<'a, 'tcx> {
/// any).
pub(super) ret_coercion: Option<RefCell<DynamicCoerceMany<'tcx>>>,
- /// Used exclusively to reduce cost of advanced evaluation used for
- /// more helpful diagnostics.
- pub(super) in_tail_expr: bool,
-
/// First span of a return site that we find. Used in error messages.
pub(super) ret_coercion_span: Cell<Option<Span>>,
pub(super) resume_yield_tys: Option<(Ty<'tcx>, Ty<'tcx>)>,
- pub(super) ps: Cell<UnsafetyState>,
-
/// Whether the last checked node generates a divergence (e.g.,
/// `return` will set this to `Always`). In general, when entering
/// an expression or other node in the tree, the initial value
@@ -112,21 +107,11 @@ pub struct FnCtxt<'a, 'tcx> {
/// the diverges flag is set to something other than `Maybe`.
pub(super) diverges: Cell<Diverges>,
- /// Whether any child nodes have any type errors.
- pub(super) has_errors: Cell<bool>,
-
pub(super) enclosing_breakables: RefCell<EnclosingBreakables<'tcx>>,
pub(super) inh: &'a Inherited<'tcx>,
- /// True if the function or closure's return type is known before
- /// entering the function/closure, i.e. if the return type is
- /// either given explicitly or inferred from, say, an `Fn*` trait
- /// bound. Used for diagnostic purposes only.
- pub(super) return_type_pre_known: bool,
-
- /// True if the return type has an Opaque type
- pub(super) return_type_has_opaque: bool,
+ pub(super) fallback_has_occurred: Cell<bool>,
}
impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
@@ -140,19 +125,15 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
param_env,
err_count_on_creation: inh.tcx.sess.err_count(),
ret_coercion: None,
- in_tail_expr: false,
ret_coercion_span: Cell::new(None),
resume_yield_tys: None,
- ps: Cell::new(UnsafetyState::function(hir::Unsafety::Normal, hir::CRATE_HIR_ID)),
diverges: Cell::new(Diverges::Maybe),
- has_errors: Cell::new(false),
enclosing_breakables: RefCell::new(EnclosingBreakables {
stack: Vec::new(),
by_id: Default::default(),
}),
inh,
- return_type_pre_known: true,
- return_type_has_opaque: false,
+ fallback_has_occurred: Cell::new(false),
}
}
@@ -174,7 +155,28 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
///
/// [`InferCtxt::err_ctxt`]: infer::InferCtxt::err_ctxt
pub fn err_ctxt(&'a self) -> TypeErrCtxt<'a, 'tcx> {
- TypeErrCtxt { infcx: &self.infcx, typeck_results: Some(self.typeck_results.borrow()) }
+ TypeErrCtxt {
+ infcx: &self.infcx,
+ typeck_results: Some(self.typeck_results.borrow()),
+ fallback_has_occurred: self.fallback_has_occurred.get(),
+ normalize_fn_sig: Box::new(|fn_sig| {
+ if fn_sig.has_escaping_bound_vars() {
+ return fn_sig;
+ }
+ self.probe(|_| {
+ let ocx = ObligationCtxt::new_in_snapshot(self);
+ let normalized_fn_sig =
+ ocx.normalize(&ObligationCause::dummy(), self.param_env, fn_sig);
+ if ocx.select_all_or_error().is_empty() {
+ let normalized_fn_sig = self.resolve_vars_if_possible(normalized_fn_sig);
+ if !normalized_fn_sig.needs_infer() {
+ return normalized_fn_sig;
+ }
+ }
+ fn_sig
+ })
+ }),
+ }
}
pub fn errors_reported_since_creation(&self) -> bool {
@@ -194,8 +196,8 @@ impl<'a, 'tcx> AstConv<'tcx> for FnCtxt<'a, 'tcx> {
self.tcx
}
- fn item_def_id(&self) -> Option<DefId> {
- None
+ fn item_def_id(&self) -> DefId {
+ self.body_id.owner.to_def_id()
}
fn get_type_parameter_bounds(
@@ -213,7 +215,9 @@ impl<'a, 'tcx> AstConv<'tcx> for FnCtxt<'a, 'tcx> {
predicates: tcx.arena.alloc_from_iter(
self.param_env.caller_bounds().iter().filter_map(|predicate| {
match predicate.kind().skip_binder() {
- ty::PredicateKind::Trait(data) if data.self_ty().is_param(index) => {
+ ty::PredicateKind::Clause(ty::Clause::Trait(data))
+ if data.self_ty().is_param(index) =>
+ {
// HACK(eddyb) should get the original `Span`.
let span = tcx.def_span(def_id);
Some((predicate, span))
@@ -298,12 +302,12 @@ impl<'a, 'tcx> AstConv<'tcx> for FnCtxt<'a, 'tcx> {
if ty.has_escaping_bound_vars() {
ty // FIXME: normalization and escaping regions
} else {
- self.normalize_associated_types_in(span, ty)
+ self.normalize(span, ty)
}
}
- fn set_tainted_by_errors(&self) {
- self.infcx.set_tainted_by_errors()
+ fn set_tainted_by_errors(&self, e: ErrorGuaranteed) {
+ self.infcx.set_tainted_by_errors(e)
}
fn record_ty(&self, hir_id: hir::HirId, ty: Ty<'tcx>, _span: Span) {
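The `normalize_fn_sig` callback above exists so diagnostics can print signatures with associated types resolved. At the language level, normalization simply replaces a projection with the concrete type it denotes; a minimal sketch that does not touch rustc APIs:

    // The written return type is an associated-type projection; after
    // normalization the signature reads `fn(Vec<u8>) -> u8`.
    fn first(v: Vec<u8>) -> <Vec<u8> as IntoIterator>::Item {
        v.into_iter().next().unwrap_or(0)
    }

    fn main() {
        let byte: u8 = first(vec![7, 8, 9]); // the projected type is just u8
        println!("{byte}");
    }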
diff --git a/compiler/rustc_hir_typeck/src/fn_ctxt/suggestions.rs b/compiler/rustc_hir_typeck/src/fn_ctxt/suggestions.rs
index 4db9c56f9..4f92477b5 100644
--- a/compiler/rustc_hir_typeck/src/fn_ctxt/suggestions.rs
+++ b/compiler/rustc_hir_typeck/src/fn_ctxt/suggestions.rs
@@ -10,24 +10,35 @@ use rustc_hir::{
Expr, ExprKind, GenericBound, Node, Path, QPath, Stmt, StmtKind, TyKind, WherePredicate,
};
use rustc_hir_analysis::astconv::AstConv;
-use rustc_infer::infer::{self, TyCtxtInferExt};
+use rustc_infer::infer;
use rustc_infer::traits::{self, StatementAsExpression};
use rustc_middle::lint::in_external_macro;
-use rustc_middle::ty::{self, Binder, IsSuggestable, ToPredicate, Ty};
+use rustc_middle::ty::{self, Binder, DefIdTree, IsSuggestable, ToPredicate, Ty};
use rustc_session::errors::ExprParenthesesNeeded;
use rustc_span::symbol::sym;
use rustc_span::Span;
use rustc_trait_selection::infer::InferCtxtExt;
use rustc_trait_selection::traits::error_reporting::DefIdOrName;
use rustc_trait_selection::traits::query::evaluate_obligation::InferCtxtExt as _;
+use rustc_trait_selection::traits::NormalizeExt;
impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
+ pub(crate) fn body_fn_sig(&self) -> Option<ty::FnSig<'tcx>> {
+ self.typeck_results
+ .borrow()
+ .liberated_fn_sigs()
+ .get(self.tcx.hir().get_parent_node(self.body_id))
+ .copied()
+ }
+
pub(in super::super) fn suggest_semicolon_at_end(&self, span: Span, err: &mut Diagnostic) {
+ // This suggestion is incorrect for
+ // fn foo() -> bool { match () { () => true } || match () { () => true } }
err.span_suggestion_short(
span.shrink_to_hi(),
"consider using a semicolon here",
";",
- Applicability::MachineApplicable,
+ Applicability::MaybeIncorrect,
);
}
@@ -165,7 +176,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
}
ty::Opaque(def_id, substs) => {
self.tcx.bound_item_bounds(def_id).subst(self.tcx, substs).iter().find_map(|pred| {
- if let ty::PredicateKind::Projection(proj) = pred.kind().skip_binder()
+ if let ty::PredicateKind::Clause(ty::Clause::Projection(proj)) = pred.kind().skip_binder()
&& Some(proj.projection_ty.item_def_id) == self.tcx.lang_items().fn_once_output()
// args tuple will always be substs[1]
&& let ty::Tuple(args) = proj.projection_ty.substs.type_at(1).kind()
@@ -200,7 +211,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
ty::Param(param) => {
let def_id = self.tcx.generics_of(self.body_id.owner).type_param(&param, self.tcx).def_id;
self.tcx.predicates_of(self.body_id.owner).predicates.iter().find_map(|(pred, _)| {
- if let ty::PredicateKind::Projection(proj) = pred.kind().skip_binder()
+ if let ty::PredicateKind::Clause(ty::Clause::Projection(proj)) = pred.kind().skip_binder()
&& Some(proj.projection_ty.item_def_id) == self.tcx.lang_items().fn_once_output()
&& proj.projection_ty.self_ty() == found
// args tuple will always be substs[1]
@@ -237,7 +248,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
// implied by wf, but also because that would possibly result in
// erroneous errors later on.
let infer::InferOk { value: output, obligations: _ } =
- self.normalize_associated_types_in_as_infer_ok(expr.span, output);
+ self.at(&self.misc(expr.span), self.param_env).normalize(output);
if output.is_ty_var() { None } else { Some((def_id_or_name, output, inputs)) }
}
@@ -337,8 +348,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
}
if annotation {
let suggest_annotation = match expr.peel_drop_temps().kind {
- hir::ExprKind::AddrOf(hir::BorrowKind::Ref, hir::Mutability::Not, _) => "&",
- hir::ExprKind::AddrOf(hir::BorrowKind::Ref, hir::Mutability::Mut, _) => "&mut ",
+ hir::ExprKind::AddrOf(hir::BorrowKind::Ref, mutbl, _) => mutbl.ref_prefix_str(),
_ => return true,
};
let mut tuple_indexes = Vec::new();
@@ -366,7 +376,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
let annotation_span = ty.span;
err.span_suggestion(
annotation_span.with_hi(annotation_span.lo()),
- format!("alternatively, consider changing the type annotation"),
+ "alternatively, consider changing the type annotation",
suggest_annotation,
Applicability::MaybeIncorrect,
);
@@ -456,7 +466,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
ref_cnt += 1;
}
if let ty::Adt(adt, _) = peeled.kind()
- && self.tcx.is_diagnostic_item(sym::String, adt.did())
+ && Some(adt.did()) == self.tcx.lang_items().string()
{
err.span_suggestion_verbose(
expr.span.shrink_to_hi(),
@@ -752,7 +762,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
debug!("suggest_missing_return_type: expected type {:?}", ty);
let bound_vars = self.tcx.late_bound_vars(fn_id);
let ty = Binder::bind_with_vars(ty, bound_vars);
- let ty = self.normalize_associated_types_in(span, ty);
+ let ty = self.normalize(span, ty);
let ty = self.tcx.erase_late_bound_regions(ty);
if self.can_coerce(expected, ty) {
err.subdiagnostic(ExpectedReturnTypeLabel::Other { span, expected });
@@ -913,22 +923,13 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
let ty = <dyn AstConv<'_>>::ast_ty_to_ty(self, ty);
let bound_vars = self.tcx.late_bound_vars(fn_id);
let ty = self.tcx.erase_late_bound_regions(Binder::bind_with_vars(ty, bound_vars));
- let ty = self.normalize_associated_types_in(expr.span, ty);
let ty = match self.tcx.asyncness(fn_id.owner) {
- hir::IsAsync::Async => {
- let infcx = self.tcx.infer_ctxt().build();
- infcx
- .get_impl_future_output_ty(ty)
- .unwrap_or_else(|| {
- span_bug!(
- fn_decl.output.span(),
- "failed to get output type of async function"
- )
- })
- .skip_binder()
- }
+ hir::IsAsync::Async => self.get_impl_future_output_ty(ty).unwrap_or_else(|| {
+ span_bug!(fn_decl.output.span(), "failed to get output type of async function")
+ }),
hir::IsAsync::NotAsync => ty,
};
+ let ty = self.normalize(expr.span, ty);
if self.can_coerce(found, ty) {
err.multipart_suggestion(
"you might have meant to return this value",
@@ -1082,14 +1083,13 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
if let Some(into_def_id) = self.tcx.get_diagnostic_item(sym::Into)
&& self.predicate_must_hold_modulo_regions(&traits::Obligation::new(
+ self.tcx,
self.misc(expr.span),
self.param_env,
- ty::Binder::dummy(ty::TraitRef {
- def_id: into_def_id,
- substs: self.tcx.mk_substs_trait(expr_ty, &[expected_ty.into()]),
- })
- .to_poly_trait_predicate()
- .to_predicate(self.tcx),
+ ty::Binder::dummy(self.tcx.mk_trait_ref(
+ into_def_id,
+ [expr_ty, expected_ty]
+ )),
))
{
let sugg = if expr.precedence().order() >= PREC_POSTFIX {
@@ -1108,6 +1108,53 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
false
}
+ /// When expecting a `bool` and finding an `Option`, suggests using `let Some(..)` or `.is_some()`
+ pub(crate) fn suggest_option_to_bool(
+ &self,
+ diag: &mut Diagnostic,
+ expr: &hir::Expr<'_>,
+ expr_ty: Ty<'tcx>,
+ expected_ty: Ty<'tcx>,
+ ) -> bool {
+ if !expected_ty.is_bool() {
+ return false;
+ }
+
+ let ty::Adt(def, _) = expr_ty.peel_refs().kind() else { return false; };
+ if !self.tcx.is_diagnostic_item(sym::Option, def.did()) {
+ return false;
+ }
+
+ let hir = self.tcx.hir();
+ let cond_parent = hir.parent_iter(expr.hir_id).skip_while(|(_, node)| {
+ matches!(node, hir::Node::Expr(hir::Expr { kind: hir::ExprKind::Binary(op, _, _), .. }) if op.node == hir::BinOpKind::And)
+ }).next();
+ // Don't suggest:
+ // `let Some(_) = a.is_some() && b`
+ // ++++++++++
+ // since the user probably just misunderstood how `let else`
+ // and `&&` work together.
+ if let Some((_, hir::Node::Local(local))) = cond_parent
+ && let hir::PatKind::Path(qpath) | hir::PatKind::TupleStruct(qpath, _, _) = &local.pat.kind
+ && let hir::QPath::Resolved(None, path) = qpath
+ && let Some(did) = path.res.opt_def_id()
+ .and_then(|did| self.tcx.opt_parent(did))
+ .and_then(|did| self.tcx.opt_parent(did))
+ && self.tcx.is_diagnostic_item(sym::Option, did)
+ {
+ return false;
+ }
+
+ diag.span_suggestion(
+ expr.span.shrink_to_hi(),
+ "use `Option::is_some` to test if the `Option` has a value",
+ ".is_some()",
+ Applicability::MachineApplicable,
+ );
+
+ true
+ }
+
/// Suggest wrapping the block in square brackets instead of curly braces
/// in case the block was mistaken array syntax, e.g. `{ 1 }` -> `[ 1 ]`.
pub(crate) fn suggest_block_to_brackets(
@@ -1149,6 +1196,48 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
}
}
+ #[instrument(skip(self, err))]
+ pub(crate) fn suggest_floating_point_literal(
+ &self,
+ err: &mut Diagnostic,
+ expr: &hir::Expr<'_>,
+ expected_ty: Ty<'tcx>,
+ ) -> bool {
+ if !expected_ty.is_floating_point() {
+ return false;
+ }
+ match expr.kind {
+ ExprKind::Struct(QPath::LangItem(LangItem::Range, ..), [start, end], _) => {
+ err.span_suggestion_verbose(
+ start.span.shrink_to_hi().with_hi(end.span.lo()),
+ "remove the unnecessary `.` operator for a floating point literal",
+ '.',
+ Applicability::MaybeIncorrect,
+ );
+ true
+ }
+ ExprKind::Struct(QPath::LangItem(LangItem::RangeFrom, ..), [start], _) => {
+ err.span_suggestion_verbose(
+ expr.span.with_lo(start.span.hi()),
+ "remove the unnecessary `.` operator for a floating point literal",
+ '.',
+ Applicability::MaybeIncorrect,
+ );
+ true
+ }
+ ExprKind::Struct(QPath::LangItem(LangItem::RangeTo, ..), [end], _) => {
+ err.span_suggestion_verbose(
+ expr.span.until(end.span),
+ "remove the unnecessary `.` operator and add an integer part for a floating point literal",
+ "0.",
+ Applicability::MaybeIncorrect,
+ );
+ true
+ }
+ _ => false,
+ }
+ }
+
fn is_loop(&self, id: hir::HirId) -> bool {
let node = self.tcx.hir().get(id);
matches!(node, Node::Expr(Expr { kind: ExprKind::Loop(..), .. }))
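From the user's side, the new `suggest_floating_point_literal` above targets range expressions written where a float literal was meant. The commented-out lines below show the shapes that trigger it; the exact diagnostic wording is not reproduced here.

    fn main() {
        // let x: f64 = 1..2;  // error: expected `f64`, found `Range<{integer}>`;
        //                     // the suggestion is to drop the extra `.`
        let x: f64 = 1.2;
        // let y: f64 = ..2;   // error: the suggestion is to write `0.2`
        let y: f64 = 0.2;
        println!("{x} {y}");
    }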
diff --git a/compiler/rustc_hir_typeck/src/generator_interior/drop_ranges/cfg_build.rs b/compiler/rustc_hir_typeck/src/generator_interior/drop_ranges/cfg_build.rs
index 122ad7009..fd8ea1ad7 100644
--- a/compiler/rustc_hir_typeck/src/generator_interior/drop_ranges/cfg_build.rs
+++ b/compiler/rustc_hir_typeck/src/generator_interior/drop_ranges/cfg_build.rs
@@ -9,9 +9,10 @@ use hir::{
use rustc_data_structures::fx::{FxHashMap, FxHashSet};
use rustc_hir as hir;
use rustc_index::vec::IndexVec;
+use rustc_infer::infer::InferCtxt;
use rustc_middle::{
hir::map::Map,
- ty::{TyCtxt, TypeckResults},
+ ty::{ParamEnv, TyCtxt, TypeVisitable, TypeckResults},
};
use std::mem::swap;
@@ -21,20 +22,29 @@ use std::mem::swap;
/// The resulting structure still needs to be iterated to a fixed point, which
/// can be done with propagate_to_fixpoint in cfg_propagate.
pub(super) fn build_control_flow_graph<'tcx>(
- hir: Map<'tcx>,
- tcx: TyCtxt<'tcx>,
+ infcx: &InferCtxt<'tcx>,
typeck_results: &TypeckResults<'tcx>,
+ param_env: ParamEnv<'tcx>,
consumed_borrowed_places: ConsumedAndBorrowedPlaces,
body: &'tcx Body<'tcx>,
num_exprs: usize,
) -> (DropRangesBuilder, FxHashSet<HirId>) {
- let mut drop_range_visitor =
- DropRangeVisitor::new(hir, tcx, typeck_results, consumed_borrowed_places, num_exprs);
+ let mut drop_range_visitor = DropRangeVisitor::new(
+ infcx,
+ typeck_results,
+ param_env,
+ consumed_borrowed_places,
+ num_exprs,
+ );
intravisit::walk_body(&mut drop_range_visitor, body);
drop_range_visitor.drop_ranges.process_deferred_edges();
- if let Some(filename) = &tcx.sess.opts.unstable_opts.dump_drop_tracking_cfg {
- super::cfg_visualize::write_graph_to_file(&drop_range_visitor.drop_ranges, filename, tcx);
+ if let Some(filename) = &infcx.tcx.sess.opts.unstable_opts.dump_drop_tracking_cfg {
+ super::cfg_visualize::write_graph_to_file(
+ &drop_range_visitor.drop_ranges,
+ filename,
+ infcx.tcx,
+ );
}
(drop_range_visitor.drop_ranges, drop_range_visitor.places.borrowed_temporaries)
@@ -82,40 +92,44 @@ pub(super) fn build_control_flow_graph<'tcx>(
/// ```
struct DropRangeVisitor<'a, 'tcx> {
- hir: Map<'tcx>,
+ typeck_results: &'a TypeckResults<'tcx>,
+ infcx: &'a InferCtxt<'tcx>,
+ param_env: ParamEnv<'tcx>,
places: ConsumedAndBorrowedPlaces,
drop_ranges: DropRangesBuilder,
expr_index: PostOrderId,
- tcx: TyCtxt<'tcx>,
- typeck_results: &'a TypeckResults<'tcx>,
label_stack: Vec<(Option<rustc_ast::Label>, PostOrderId)>,
}
impl<'a, 'tcx> DropRangeVisitor<'a, 'tcx> {
fn new(
- hir: Map<'tcx>,
- tcx: TyCtxt<'tcx>,
+ infcx: &'a InferCtxt<'tcx>,
typeck_results: &'a TypeckResults<'tcx>,
+ param_env: ParamEnv<'tcx>,
places: ConsumedAndBorrowedPlaces,
num_exprs: usize,
) -> Self {
debug!("consumed_places: {:?}", places.consumed);
let drop_ranges = DropRangesBuilder::new(
places.consumed.iter().flat_map(|(_, places)| places.iter().cloned()),
- hir,
+ infcx.tcx.hir(),
num_exprs,
);
Self {
- hir,
+ infcx,
+ typeck_results,
+ param_env,
places,
drop_ranges,
expr_index: PostOrderId::from_u32(0),
- typeck_results,
- tcx,
label_stack: vec![],
}
}
+ fn tcx(&self) -> TyCtxt<'tcx> {
+ self.infcx.tcx
+ }
+
fn record_drop(&mut self, value: TrackedValue) {
if self.places.borrowed.contains(&value) {
debug!("not marking {:?} as dropped because it is borrowed at some point", value);
@@ -137,7 +151,7 @@ impl<'a, 'tcx> DropRangeVisitor<'a, 'tcx> {
.map_or(vec![], |places| places.iter().cloned().collect());
for place in places {
trace!(?place, "consuming place");
- for_each_consumable(self.hir, place, |value| self.record_drop(value));
+ for_each_consumable(self.tcx().hir(), place, |value| self.record_drop(value));
}
}
@@ -214,10 +228,15 @@ impl<'a, 'tcx> DropRangeVisitor<'a, 'tcx> {
/// return.
fn handle_uninhabited_return(&mut self, expr: &Expr<'tcx>) {
let ty = self.typeck_results.expr_ty(expr);
- let ty = self.tcx.erase_regions(ty);
- let m = self.tcx.parent_module(expr.hir_id).to_def_id();
- let param_env = self.tcx.param_env(m.expect_local());
- if self.tcx.is_ty_uninhabited_from(m, ty, param_env) {
+ let ty = self.infcx.resolve_vars_if_possible(ty);
+ if ty.has_non_region_infer() {
+ self.tcx()
+ .sess
+ .delay_span_bug(expr.span, format!("could not resolve infer vars in `{ty}`"));
+ }
+ let ty = self.tcx().erase_regions(ty);
+ let m = self.tcx().parent_module(expr.hir_id).to_def_id();
+ if !ty.is_inhabited_from(self.tcx(), m, self.param_env) {
// This function will not return. We model this fact as an infinite loop.
self.drop_ranges.add_control_edge(self.expr_index + 1, self.expr_index + 1);
}
@@ -238,7 +257,7 @@ impl<'a, 'tcx> DropRangeVisitor<'a, 'tcx> {
destination: hir::Destination,
) -> Result<HirId, LoopIdError> {
destination.target_id.map(|target| {
- let node = self.hir.get(target);
+ let node = self.tcx().hir().get(target);
match node {
hir::Node::Expr(_) => target,
hir::Node::Block(b) => find_last_block_expression(b),
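The updated `handle_uninhabited_return` above treats a call whose return type has no values as never returning. A standalone illustration of such a type (`Never` below is an ad-hoc empty enum, not a rustc type):

    enum Never {}

    fn never_returns() -> Never {
        // `Never` has no values, so this function can only diverge.
        panic!("diverges");
    }

    fn main() {
        if false {
            let n: Never = never_returns();
            // Nothing after the call can execute: a value of `Never` cannot exist,
            // which is why the CFG models such a call as never falling through.
            match n {} // exhaustive match over zero variants
        }
        println!("reachable");
    }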
diff --git a/compiler/rustc_hir_typeck/src/generator_interior/drop_ranges/mod.rs b/compiler/rustc_hir_typeck/src/generator_interior/drop_ranges/mod.rs
index 4f3bdfbe7..2abcadcc9 100644
--- a/compiler/rustc_hir_typeck/src/generator_interior/drop_ranges/mod.rs
+++ b/compiler/rustc_hir_typeck/src/generator_interior/drop_ranges/mod.rs
@@ -43,9 +43,9 @@ pub fn compute_drop_ranges<'a, 'tcx>(
let typeck_results = &fcx.typeck_results.borrow();
let num_exprs = fcx.tcx.region_scope_tree(def_id).body_expr_count(body.id()).unwrap_or(0);
let (mut drop_ranges, borrowed_temporaries) = build_control_flow_graph(
- fcx.tcx.hir(),
- fcx.tcx,
+ &fcx,
typeck_results,
+ fcx.param_env,
consumed_borrowed_places,
body,
num_exprs,
diff --git a/compiler/rustc_hir_typeck/src/generator_interior/mod.rs b/compiler/rustc_hir_typeck/src/generator_interior/mod.rs
index b7dd599cd..3b1518ff7 100644
--- a/compiler/rustc_hir_typeck/src/generator_interior/mod.rs
+++ b/compiler/rustc_hir_typeck/src/generator_interior/mod.rs
@@ -13,10 +13,13 @@ use rustc_hir::def_id::DefId;
use rustc_hir::hir_id::HirIdSet;
use rustc_hir::intravisit::{self, Visitor};
use rustc_hir::{Arm, Expr, ExprKind, Guard, HirId, Pat, PatKind};
+use rustc_infer::infer::RegionVariableOrigin;
use rustc_middle::middle::region::{self, Scope, ScopeData, YieldData};
-use rustc_middle::ty::{self, RvalueScopes, Ty, TyCtxt, TypeVisitable};
+use rustc_middle::ty::fold::FnMutDelegate;
+use rustc_middle::ty::{self, BoundVariableKind, RvalueScopes, Ty, TyCtxt, TypeVisitable};
use rustc_span::symbol::sym;
use rustc_span::Span;
+use smallvec::{smallvec, SmallVec};
mod drop_ranges;
@@ -95,8 +98,8 @@ impl<'a, 'tcx> InteriorVisitor<'a, 'tcx> {
expr, scope, ty, self.expr_count, yield_data.span
);
- if let Some((unresolved_type, unresolved_type_span)) =
- self.fcx.unresolved_type_vars(&ty)
+ if let Some((unresolved_term, unresolved_type_span)) =
+ self.fcx.first_unresolved_const_or_ty_var(&ty)
{
// If unresolved type isn't a ty_var then unresolved_type_span is None
let span = self
@@ -105,21 +108,22 @@ impl<'a, 'tcx> InteriorVisitor<'a, 'tcx> {
// If we encounter an int/float variable, then inference fallback didn't
// finish due to some other error. Don't emit spurious additional errors.
- if let ty::Infer(ty::InferTy::IntVar(_) | ty::InferTy::FloatVar(_)) =
- unresolved_type.kind()
+ if let Some(unresolved_ty) = unresolved_term.ty()
+ && let ty::Infer(ty::InferTy::IntVar(_) | ty::InferTy::FloatVar(_)) = unresolved_ty.kind()
{
self.fcx
.tcx
.sess
- .delay_span_bug(span, &format!("Encountered var {:?}", unresolved_type));
+ .delay_span_bug(span, &format!("Encountered var {:?}", unresolved_term));
} else {
let note = format!(
"the type is part of the {} because of this {}",
- self.kind, yield_data.source
+ self.kind.descr(),
+ yield_data.source
);
self.fcx
- .need_type_info_err_in_generator(self.kind, span, unresolved_type)
+ .need_type_info_err_in_generator(self.kind, span, unresolved_term)
.span_note(yield_data.span, &*note)
.emit();
}
@@ -159,7 +163,7 @@ impl<'a, 'tcx> InteriorVisitor<'a, 'tcx> {
expr.map(|e| e.span)
);
if let Some((unresolved_type, unresolved_type_span)) =
- self.fcx.unresolved_type_vars(&ty)
+ self.fcx.first_unresolved_const_or_ty_var(&ty)
{
debug!(
"remained unresolved_type = {:?}, unresolved_type_span: {:?}",
@@ -211,31 +215,57 @@ pub fn resolve_interior<'a, 'tcx>(
debug!("types in generator {:?}, span = {:?}", types, body.value.span);
- let mut counter = 0;
+ // We want to deduplicate if the lifetimes are the same modulo some non-informative counter.
+ // So, we need to actually do two passes: first by type to anonymize (preserving information
+ // required for diagnostics), then a second pass over all captured types to reassign disjoint
+ // region indices.
let mut captured_tys = FxHashSet::default();
let type_causes: Vec<_> = types
.into_iter()
.filter_map(|mut cause| {
- // Erase regions and canonicalize late-bound regions to deduplicate as many types as we
- // can.
- let ty = fcx.normalize_associated_types_in(cause.span, cause.ty);
- let erased = fcx.tcx.erase_regions(ty);
- if captured_tys.insert(erased) {
- // Replace all regions inside the generator interior with late bound regions.
- // Note that each region slot in the types gets a new fresh late bound region,
- // which means that none of the regions inside relate to any other, even if
- // typeck had previously found constraints that would cause them to be related.
- let folded = fcx.tcx.fold_regions(erased, |_, current_depth| {
- let br = ty::BoundRegion {
- var: ty::BoundVar::from_u32(counter),
- kind: ty::BrAnon(counter),
- };
- let r = fcx.tcx.mk_region(ty::ReLateBound(current_depth, br));
- counter += 1;
- r
- });
-
- cause.ty = folded;
+ // Replace all regions inside the generator interior with late bound regions.
+ // Note that each region slot in the types gets a new fresh late bound region,
+ // which means that none of the regions inside relate to any other, even if
+ // typeck had previously found constraints that would cause them to be related.
+
+ let mut counter = 0;
+ let mut mk_bound_region = |span| {
+ let kind = ty::BrAnon(counter, span);
+ let var = ty::BoundVar::from_u32(counter);
+ counter += 1;
+ ty::BoundRegion { var, kind }
+ };
+ let ty = fcx.normalize(cause.span, cause.ty);
+ let ty = fcx.tcx.fold_regions(ty, |region, current_depth| {
+ let br = match region.kind() {
+ ty::ReVar(vid) => {
+ let origin = fcx.region_var_origin(vid);
+ match origin {
+ RegionVariableOrigin::EarlyBoundRegion(span, _) => {
+ mk_bound_region(Some(span))
+ }
+ _ => mk_bound_region(None),
+ }
+ }
+ // FIXME: these should use `BrNamed`
+ ty::ReEarlyBound(region) => {
+ mk_bound_region(Some(fcx.tcx.def_span(region.def_id)))
+ }
+ ty::ReLateBound(_, ty::BoundRegion { kind, .. })
+ | ty::ReFree(ty::FreeRegion { bound_region: kind, .. }) => match kind {
+ ty::BoundRegionKind::BrAnon(_, span) => mk_bound_region(span),
+ ty::BoundRegionKind::BrNamed(def_id, _) => {
+ mk_bound_region(Some(fcx.tcx.def_span(def_id)))
+ }
+ ty::BoundRegionKind::BrEnv => mk_bound_region(None),
+ },
+ _ => mk_bound_region(None),
+ };
+ let r = fcx.tcx.mk_region(ty::ReLateBound(current_depth, br));
+ r
+ });
+ if captured_tys.insert(ty) {
+ cause.ty = ty;
Some(cause)
} else {
None
@@ -243,11 +273,38 @@ pub fn resolve_interior<'a, 'tcx>(
})
.collect();
+ let mut bound_vars: SmallVec<[BoundVariableKind; 4]> = smallvec![];
+ let mut counter = 0;
+ // Optimization: If there is only one captured type, then we don't actually
+ // need to fold and reindex (since the first type doesn't change).
+ let type_causes = if captured_tys.len() > 0 {
+ // Optimization: Use `replace_escaping_bound_vars_uncached` instead of
+ // `fold_regions`, since we only have late bound regions, and it skips
+ // types without bound regions.
+ fcx.tcx.replace_escaping_bound_vars_uncached(
+ type_causes,
+ FnMutDelegate {
+ regions: &mut |br| {
+ let kind = match br.kind {
+ ty::BrAnon(_, span) => ty::BrAnon(counter, span),
+ _ => br.kind,
+ };
+ let var = ty::BoundVar::from_usize(bound_vars.len());
+ bound_vars.push(ty::BoundVariableKind::Region(kind));
+ counter += 1;
+ fcx.tcx.mk_region(ty::ReLateBound(ty::INNERMOST, ty::BoundRegion { var, kind }))
+ },
+ types: &mut |b| bug!("unexpected bound ty in binder: {b:?}"),
+ consts: &mut |b, ty| bug!("unexpected bound ct in binder: {b:?} {ty}"),
+ },
+ )
+ } else {
+ type_causes
+ };
+
// Extract type components to build the witness type.
let type_list = fcx.tcx.mk_type_list(type_causes.iter().map(|cause| cause.ty));
- let bound_vars = fcx.tcx.mk_bound_variable_kinds(
- (0..counter).map(|i| ty::BoundVariableKind::Region(ty::BrAnon(i))),
- );
+ let bound_vars = fcx.tcx.mk_bound_variable_kinds(bound_vars.iter());
let witness =
fcx.tcx.mk_generator_witness(ty::Binder::bind_with_vars(type_list, bound_vars.clone()));
@@ -486,10 +543,10 @@ fn check_must_not_suspend_ty<'tcx>(
data: SuspendCheckData<'_, 'tcx>,
) -> bool {
if ty.is_unit()
- // FIXME: should this check `is_ty_uninhabited_from`. This query is not available in this stage
+ // FIXME: should this check `Ty::is_inhabited_from`. This query is not available in this stage
// of typeck (before ReVar and RePlaceholder are removed), but may remove noise, like in
// `must_use`
- // || fcx.tcx.is_ty_uninhabited_from(fcx.tcx.parent_module(hir_id).to_def_id(), ty, fcx.param_env)
+ // || !ty.is_inhabited_from(fcx.tcx, fcx.tcx.parent_module(hir_id).to_def_id(), fcx.param_env)
{
return false;
}
@@ -510,7 +567,7 @@ fn check_must_not_suspend_ty<'tcx>(
let mut has_emitted = false;
for &(predicate, _) in fcx.tcx.explicit_item_bounds(def) {
// We only look at the `DefId`, so it is safe to skip the binder here.
- if let ty::PredicateKind::Trait(ref poly_trait_predicate) =
+ if let ty::PredicateKind::Clause(ty::Clause::Trait(ref poly_trait_predicate)) =
predicate.kind().skip_binder()
{
let def_id = poly_trait_predicate.trait_ref.def_id;
diff --git a/compiler/rustc_hir_typeck/src/inherited.rs b/compiler/rustc_hir_typeck/src/inherited.rs
index 0fb7651b3..b33e7b8d6 100644
--- a/compiler/rustc_hir_typeck/src/inherited.rs
+++ b/compiler/rustc_hir_typeck/src/inherited.rs
@@ -1,21 +1,16 @@
use super::callee::DeferredCallResolution;
use rustc_data_structures::fx::FxHashSet;
-use rustc_data_structures::sync::Lrc;
use rustc_hir as hir;
use rustc_hir::def_id::LocalDefId;
use rustc_hir::HirIdMap;
use rustc_infer::infer;
use rustc_infer::infer::{DefiningAnchor, InferCtxt, InferOk, TyCtxtInferExt};
-use rustc_middle::ty::fold::TypeFoldable;
use rustc_middle::ty::visit::TypeVisitable;
use rustc_middle::ty::{self, Ty, TyCtxt};
use rustc_span::def_id::LocalDefIdMap;
use rustc_span::{self, Span};
-use rustc_trait_selection::infer::InferCtxtExt as _;
-use rustc_trait_selection::traits::{
- self, ObligationCause, ObligationCtxt, TraitEngine, TraitEngineExt as _,
-};
+use rustc_trait_selection::traits::{self, TraitEngine, TraitEngineExt as _};
use std::cell::RefCell;
use std::ops::Deref;
@@ -38,19 +33,19 @@ pub struct Inherited<'tcx> {
pub(super) fulfillment_cx: RefCell<Box<dyn TraitEngine<'tcx>>>,
- // Some additional `Sized` obligations badly affect type inference.
- // These obligations are added in a later stage of typeck.
- // Removing these may also cause additional complications, see #101066.
+ /// Some additional `Sized` obligations badly affect type inference.
+ /// These obligations are added in a later stage of typeck.
+ /// Removing these may also cause additional complications, see #101066.
pub(super) deferred_sized_obligations:
RefCell<Vec<(Ty<'tcx>, Span, traits::ObligationCauseCode<'tcx>)>>,
- // When we process a call like `c()` where `c` is a closure type,
- // we may not have decided yet whether `c` is a `Fn`, `FnMut`, or
- // `FnOnce` closure. In that case, we defer full resolution of the
- // call until upvar inference can kick in and make the
- // decision. We keep these deferred resolutions grouped by the
- // def-id of the closure, so that once we decide, we can easily go
- // back and process them.
+ /// When we process a call like `c()` where `c` is a closure type,
+ /// we may not have decided yet whether `c` is a `Fn`, `FnMut`, or
+ /// `FnOnce` closure. In that case, we defer full resolution of the
+ /// call until upvar inference can kick in and make the
+ /// decision. We keep these deferred resolutions grouped by the
+ /// def-id of the closure, so that once we decide, we can easily go
+ /// back and process them.
pub(super) deferred_call_resolutions: RefCell<LocalDefIdMap<Vec<DeferredCallResolution<'tcx>>>>,
pub(super) deferred_cast_checks: RefCell<Vec<super::cast::CastCheck<'tcx>>>,
@@ -94,29 +89,7 @@ impl<'tcx> Inherited<'tcx> {
infcx: tcx
.infer_ctxt()
.ignoring_regions()
- .with_opaque_type_inference(DefiningAnchor::Bind(hir_owner.def_id))
- .with_normalize_fn_sig_for_diagnostic(Lrc::new(move |infcx, fn_sig| {
- if fn_sig.has_escaping_bound_vars() {
- return fn_sig;
- }
- infcx.probe(|_| {
- let ocx = ObligationCtxt::new_in_snapshot(infcx);
- let normalized_fn_sig = ocx.normalize(
- ObligationCause::dummy(),
- // FIXME(compiler-errors): This is probably not the right param-env...
- infcx.tcx.param_env(def_id),
- fn_sig,
- );
- if ocx.select_all_or_error().is_empty() {
- let normalized_fn_sig =
- infcx.resolve_vars_if_possible(normalized_fn_sig);
- if !normalized_fn_sig.needs_infer() {
- return normalized_fn_sig;
- }
- }
- fn_sig
- })
- })),
+ .with_opaque_type_inference(DefiningAnchor::Bind(hir_owner.def_id)),
def_id,
typeck_results: RefCell::new(ty::TypeckResults::new(hir_owner)),
}
@@ -179,35 +152,4 @@ impl<'tcx> Inherited<'tcx> {
self.register_predicates(infer_ok.obligations);
infer_ok.value
}
-
- pub(super) fn normalize_associated_types_in<T>(
- &self,
- span: Span,
- body_id: hir::HirId,
- param_env: ty::ParamEnv<'tcx>,
- value: T,
- ) -> T
- where
- T: TypeFoldable<'tcx>,
- {
- self.normalize_associated_types_in_with_cause(
- ObligationCause::misc(span, body_id),
- param_env,
- value,
- )
- }
-
- pub(super) fn normalize_associated_types_in_with_cause<T>(
- &self,
- cause: ObligationCause<'tcx>,
- param_env: ty::ParamEnv<'tcx>,
- value: T,
- ) -> T
- where
- T: TypeFoldable<'tcx>,
- {
- let ok = self.partially_normalize_associated_types_in(cause, param_env, value);
- debug!(?ok);
- self.register_infer_ok_obligations(ok)
- }
}
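
The doc comment promoted above describes why calls like `c()` on a not-yet-classified closure are deferred until upvar inference decides between `Fn`, `FnMut`, and `FnOnce`. A minimal stand-alone example of the situation it describes (assumed illustration, not taken from the patch):

    fn main() {
        let mut count = 0;
        // When `bump()` is type-checked, the closure's kind is still open; the
        // call is deferred until upvar inference sees the `count += 1` capture
        // and settles on `FnMut`.
        let mut bump = || count += 1;
        bump();
        bump();
        assert_eq!(count, 2);
    }
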
diff --git a/compiler/rustc_hir_typeck/src/intrinsicck.rs b/compiler/rustc_hir_typeck/src/intrinsicck.rs
index 9812d96fc..c2dc14024 100644
--- a/compiler/rustc_hir_typeck/src/intrinsicck.rs
+++ b/compiler/rustc_hir_typeck/src/intrinsicck.rs
@@ -3,7 +3,7 @@ use rustc_errors::struct_span_err;
use rustc_hir as hir;
use rustc_index::vec::Idx;
use rustc_middle::ty::layout::{LayoutError, SizeSkeleton};
-use rustc_middle::ty::{self, Ty, TyCtxt};
+use rustc_middle::ty::{self, Ty, TyCtxt, TypeVisitable};
use rustc_target::abi::{Pointer, VariantIdx};
use super::FnCtxt;
@@ -46,7 +46,10 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
let from = normalize(from);
let to = normalize(to);
trace!(?from, ?to);
-
+ if from.has_non_region_infer() || to.has_non_region_infer() {
+ tcx.sess.delay_span_bug(span, "argument to transmute has inference variables");
+ return;
+ }
// Transmutes that are only changing lifetimes are always ok.
if from == to {
return;
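
The added guard bails out with a delayed bug when the transmuted types still contain inference variables, before the size-skeleton comparison runs. For reference, the property that comparison ultimately enforces is the usual "equal size" rule for `transmute` (plain example, assumed):

    fn main() {
        // Accepted: `i32` and `u32` have the same size.
        let bits: u32 = unsafe { std::mem::transmute(-1i32) };
        assert_eq!(bits, u32::MAX);
        // A transmute between differently sized types (say `u8` -> `u32`)
        // is rejected by this same intrinsic check at compile time.
    }
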
diff --git a/compiler/rustc_hir_typeck/src/lib.rs b/compiler/rustc_hir_typeck/src/lib.rs
index 959c54866..5b2352cda 100644
--- a/compiler/rustc_hir_typeck/src/lib.rs
+++ b/compiler/rustc_hir_typeck/src/lib.rs
@@ -53,9 +53,9 @@ use crate::check::check_fn;
use crate::coercion::DynamicCoerceMany;
use crate::gather_locals::GatherLocalsVisitor;
use rustc_data_structures::unord::UnordSet;
-use rustc_errors::{struct_span_err, MultiSpan};
+use rustc_errors::{struct_span_err, DiagnosticId, ErrorGuaranteed, MultiSpan};
use rustc_hir as hir;
-use rustc_hir::def::Res;
+use rustc_hir::def::{DefKind, Res};
use rustc_hir::intravisit::Visitor;
use rustc_hir::{HirIdMap, Node};
use rustc_hir_analysis::astconv::AstConv;
@@ -89,38 +89,6 @@ pub struct LocalTy<'tcx> {
revealed_ty: Ty<'tcx>,
}
-#[derive(Copy, Clone)]
-pub struct UnsafetyState {
- pub def: hir::HirId,
- pub unsafety: hir::Unsafety,
- from_fn: bool,
-}
-
-impl UnsafetyState {
- pub fn function(unsafety: hir::Unsafety, def: hir::HirId) -> UnsafetyState {
- UnsafetyState { def, unsafety, from_fn: true }
- }
-
- pub fn recurse(self, blk: &hir::Block<'_>) -> UnsafetyState {
- use hir::BlockCheckMode;
- match self.unsafety {
- // If this unsafe, then if the outer function was already marked as
- // unsafe we shouldn't attribute the unsafe'ness to the block. This
- // way the block can be warned about instead of ignoring this
- // extraneous block (functions are never warned about).
- hir::Unsafety::Unsafe if self.from_fn => self,
-
- unsafety => {
- let (unsafety, def) = match blk.rules {
- BlockCheckMode::UnsafeBlock(..) => (hir::Unsafety::Unsafe, blk.hir_id),
- BlockCheckMode::DefaultBlock => (unsafety, self.def),
- };
- UnsafetyState { def, unsafety, from_fn: false }
- }
- }
- }
-}
-
/// If this `DefId` is a "primary tables entry", returns
/// `Some((body_id, body_ty, fn_sig))`. Otherwise, returns `None`.
///
@@ -209,6 +177,7 @@ fn diagnostic_only_typeck<'tcx>(tcx: TyCtxt<'tcx>, def_id: LocalDefId) -> &ty::T
typeck_with_fallback(tcx, def_id, fallback)
}
+#[instrument(level = "debug", skip(tcx, fallback), ret)]
fn typeck_with_fallback<'tcx>(
tcx: TyCtxt<'tcx>,
def_id: LocalDefId,
@@ -232,9 +201,10 @@ fn typeck_with_fallback<'tcx>(
let typeck_results = Inherited::build(tcx, def_id).enter(|inh| {
let param_env = tcx.param_env(def_id);
- let mut fcx = if let Some(hir::FnSig { header, decl, .. }) = fn_sig {
+ let mut fcx = FnCtxt::new(&inh, param_env, body.value.hir_id);
+
+ if let Some(hir::FnSig { header, decl, .. }) = fn_sig {
let fn_sig = if rustc_hir_analysis::collect::get_infer_ret_ty(&decl.output).is_some() {
- let fcx = FnCtxt::new(&inh, param_env, body.value.hir_id);
<dyn AstConv<'_>>::ty_of_fn(&fcx, id, header.unsafety, header.abi, decl, None, None)
} else {
tcx.fn_sig(def_id)
@@ -244,15 +214,10 @@ fn typeck_with_fallback<'tcx>(
// Compute the function signature from point of view of inside the fn.
let fn_sig = tcx.liberate_late_bound_regions(def_id.to_def_id(), fn_sig);
- let fn_sig = inh.normalize_associated_types_in(
- body.value.span,
- body_id.hir_id,
- param_env,
- fn_sig,
- );
- check_fn(&inh, param_env, fn_sig, decl, id, body, None, true).0
+ let fn_sig = fcx.normalize(body.value.span, fn_sig);
+
+ check_fn(&mut fcx, fn_sig, decl, def_id, body, None);
} else {
- let fcx = FnCtxt::new(&inh, param_env, body.value.hir_id);
let expected_type = body_ty
.and_then(|ty| match ty.kind {
hir::TyKind::Infer => Some(<dyn AstConv<'_>>::ast_ty_to_ty(&fcx, ty)),
@@ -303,7 +268,7 @@ fn typeck_with_fallback<'tcx>(
_ => fallback(),
});
- let expected_type = fcx.normalize_associated_types_in(body.value.span, expected_type);
+ let expected_type = fcx.normalize(body.value.span, expected_type);
fcx.require_type_is_sized(expected_type, body.value.span, traits::ConstSized);
// Gather locals in statics (because of block expressions).
@@ -312,16 +277,14 @@ fn typeck_with_fallback<'tcx>(
fcx.check_expr_coercable_to_type(&body.value, expected_type, None);
fcx.write_ty(id, expected_type);
-
- fcx
};
- let fallback_has_occurred = fcx.type_inference_fallback();
+ fcx.type_inference_fallback();
// Even though coercion casts provide type hints, we check casts after fallback for
// backwards compatibility. This makes fallback a stronger type hint than a cast coercion.
fcx.check_casts();
- fcx.select_obligations_where_possible(fallback_has_occurred, |_| {});
+ fcx.select_obligations_where_possible(|_| {});
// Closure and generator analysis may run after fallback
// because they don't constrain other type variables.
@@ -343,7 +306,7 @@ fn typeck_with_fallback<'tcx>(
fcx.select_all_obligations_or_error();
- if !fcx.infcx.is_tainted_by_errors() {
+ if let None = fcx.infcx.tainted_by_errors() {
fcx.check_transmutes();
}
@@ -427,16 +390,33 @@ impl<'tcx> EnclosingBreakables<'tcx> {
}
}
-fn report_unexpected_variant_res(tcx: TyCtxt<'_>, res: Res, qpath: &hir::QPath<'_>, span: Span) {
- struct_span_err!(
- tcx.sess,
+fn report_unexpected_variant_res(
+ tcx: TyCtxt<'_>,
+ res: Res,
+ qpath: &hir::QPath<'_>,
+ span: Span,
+ err_code: &str,
+ expected: &str,
+) -> ErrorGuaranteed {
+ let res_descr = match res {
+ Res::Def(DefKind::Variant, _) => "struct variant",
+ _ => res.descr(),
+ };
+ let path_str = rustc_hir_pretty::qpath_to_string(qpath);
+ let mut err = tcx.sess.struct_span_err_with_code(
span,
- E0533,
- "expected unit struct, unit variant or constant, found {} `{}`",
- res.descr(),
- rustc_hir_pretty::qpath_to_string(qpath),
- )
- .emit();
+ format!("expected {expected}, found {res_descr} `{path_str}`"),
+ DiagnosticId::Error(err_code.into()),
+ );
+ match res {
+ Res::Def(DefKind::Fn | DefKind::AssocFn, _) if err_code == "E0164" => {
+ let patterns_url = "https://doc.rust-lang.org/book/ch18-00-patterns.html";
+ err.span_label(span, "`fn` calls are not allowed in patterns");
+ err.help(format!("for more information, visit {patterns_url}"))
+ }
+ _ => err.span_label(span, format!("not a {expected}")),
+ }
+ .emit()
}
/// Controls whether the arguments are tupled. This is used for the call
@@ -458,7 +438,7 @@ fn report_unexpected_variant_res(tcx: TyCtxt<'_>, res: Res, qpath: &hir::QPath<'
/// # fn f(x: (isize, isize)) {}
/// f((1, 2));
/// ```
-#[derive(Clone, Eq, PartialEq)]
+#[derive(Copy, Clone, Eq, PartialEq)]
enum TupleArgumentsFlag {
DontTupleArguments,
TupleArguments,
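
`report_unexpected_variant_res` now takes the error code and expected-description from its callers and, for E0164, labels the span with "`fn` calls are not allowed in patterns". A small program of the kind that should hit that path (assumed reproduction; it is rejected by the compiler, which is the point of the example):

    struct S(u8);

    impl S {
        fn new() -> S {
            S(0)
        }
    }

    fn main() {
        match S::new() {
            // Rejected with E0164: an associated function is not a tuple struct
            // or tuple variant, so it cannot be called in a pattern.
            S::new() => {}
        }
    }
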
diff --git a/compiler/rustc_hir_typeck/src/mem_categorization.rs b/compiler/rustc_hir_typeck/src/mem_categorization.rs
index 362f1c343..0b5dc946c 100644
--- a/compiler/rustc_hir_typeck/src/mem_categorization.rs
+++ b/compiler/rustc_hir_typeck/src/mem_categorization.rs
@@ -133,7 +133,7 @@ impl<'a, 'tcx> MemCategorizationContext<'a, 'tcx> {
}
fn is_tainted_by_errors(&self) -> bool {
- self.infcx.is_tainted_by_errors()
+ self.infcx.tainted_by_errors().is_some()
}
fn resolve_type_vars_or_error(
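
`is_tainted_by_errors` now goes through `tainted_by_errors()`, which returns an `Option` whose `Some` value proves a diagnostic was already emitted. A sketch of that API shape with local stand-in types (`Guar` and `Ctx` are assumptions, not the compiler's names):

    struct Guar; // stand-in for `ErrorGuaranteed`

    struct Ctx {
        tainted: Option<Guar>,
    }

    impl Ctx {
        // Returns the proof of a prior error, if any.
        fn tainted_by_errors(&self) -> Option<&Guar> {
            self.tainted.as_ref()
        }
        // Callers that only need the flag reduce the proof to a bool.
        fn is_tainted_by_errors(&self) -> bool {
            self.tainted_by_errors().is_some()
        }
    }

    fn main() {
        let ctx = Ctx { tainted: None };
        assert!(!ctx.is_tainted_by_errors());
    }
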
diff --git a/compiler/rustc_hir_typeck/src/method/confirm.rs b/compiler/rustc_hir_typeck/src/method/confirm.rs
index be4ea9986..03d0e7926 100644
--- a/compiler/rustc_hir_typeck/src/method/confirm.rs
+++ b/compiler/rustc_hir_typeck/src/method/confirm.rs
@@ -106,7 +106,7 @@ impl<'a, 'tcx> ConfirmContext<'a, 'tcx> {
// traits, no trait system method can be called before this point because they
// could alter our Self-type, except for normalizing the receiver from the
// signature (which is also done during probing).
- let method_sig_rcvr = self.normalize_associated_types_in(self.span, method_sig.inputs()[0]);
+ let method_sig_rcvr = self.normalize(self.span, method_sig.inputs()[0]);
debug!(
"confirm: self_ty={:?} method_sig_rcvr={:?} method_sig={:?} method_predicates={:?}",
self_ty, method_sig_rcvr, method_sig, method_predicates
@@ -114,7 +114,7 @@ impl<'a, 'tcx> ConfirmContext<'a, 'tcx> {
self.unify_receivers(self_ty, method_sig_rcvr, &pick, all_substs);
let (method_sig, method_predicates) =
- self.normalize_associated_types_in(self.span, (method_sig, method_predicates));
+ self.normalize(self.span, (method_sig, method_predicates));
let method_sig = ty::Binder::dummy(method_sig);
// Make sure nobody calls `drop()` explicitly.
@@ -151,8 +151,7 @@ impl<'a, 'tcx> ConfirmContext<'a, 'tcx> {
) -> Ty<'tcx> {
// Commit the autoderefs by calling `autoderef` again, but this
// time writing the results into the various typeck results.
- let mut autoderef =
- self.autoderef_overloaded_span(self.span, unadjusted_self_ty, self.call_expr.span);
+ let mut autoderef = self.autoderef(self.call_expr.span, unadjusted_self_ty);
let Some((ty, n)) = autoderef.nth(pick.autoderefs) else {
return self.tcx.ty_error_with_message(
rustc_span::DUMMY_SP,
@@ -171,14 +170,11 @@ impl<'a, 'tcx> ConfirmContext<'a, 'tcx> {
let base_ty = target;
target = self.tcx.mk_ref(region, ty::TypeAndMut { mutbl, ty: target });
- let mutbl = match mutbl {
- hir::Mutability::Not => AutoBorrowMutability::Not,
- hir::Mutability::Mut => AutoBorrowMutability::Mut {
- // Method call receivers are the primary use case
- // for two-phase borrows.
- allow_two_phase_borrow: AllowTwoPhase::Yes,
- },
- };
+
+ // Method call receivers are the primary use case
+ // for two-phase borrows.
+ let mutbl = AutoBorrowMutability::new(mutbl, AllowTwoPhase::Yes);
+
adjustments.push(Adjustment {
kind: Adjust::Borrow(AutoBorrow::Ref(region, mutbl)),
target,
@@ -203,7 +199,7 @@ impl<'a, 'tcx> ConfirmContext<'a, 'tcx> {
Some(probe::AutorefOrPtrAdjustment::ToConstPtr) => {
target = match target.kind() {
&ty::RawPtr(ty::TypeAndMut { ty, mutbl }) => {
- assert_eq!(mutbl, hir::Mutability::Mut);
+ assert!(mutbl.is_mut());
self.tcx.mk_ptr(ty::TypeAndMut { mutbl: hir::Mutability::Not, ty })
}
other => panic!("Cannot adjust receiver type {:?} to const ptr", other),
@@ -532,7 +528,9 @@ impl<'a, 'tcx> ConfirmContext<'a, 'tcx> {
traits::elaborate_predicates(self.tcx, predicates.predicates.iter().copied())
// We don't care about regions here.
.filter_map(|obligation| match obligation.predicate.kind().skip_binder() {
- ty::PredicateKind::Trait(trait_pred) if trait_pred.def_id() == sized_def_id => {
+ ty::PredicateKind::Clause(ty::Clause::Trait(trait_pred))
+ if trait_pred.def_id() == sized_def_id =>
+ {
let span = iter::zip(&predicates.predicates, &predicates.spans)
.find_map(
|(p, span)| {
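
The receiver adjustment above now goes through `AutoBorrowMutability::new(mutbl, AllowTwoPhase::Yes)`, keeping method-call receivers eligible for two-phase borrows. The classic user-level example that only compiles because the receiver's `&mut` borrow is two-phase (ordinary Rust, not from this patch):

    fn main() {
        let mut v = vec![1, 2, 3];
        // `v.push(..)` needs `&mut v` while the argument still reads `v` via `v.len()`.
        // The receiver's mutable borrow is reserved here and only activated after
        // the argument is evaluated, so this compiles.
        v.push(v.len());
        assert_eq!(v, [1, 2, 3, 3]);
    }
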
diff --git a/compiler/rustc_hir_typeck/src/method/mod.rs b/compiler/rustc_hir_typeck/src/method/mod.rs
index a1278edef..a2ca5c3b7 100644
--- a/compiler/rustc_hir_typeck/src/method/mod.rs
+++ b/compiler/rustc_hir_typeck/src/method/mod.rs
@@ -10,6 +10,7 @@ mod suggest;
pub use self::suggest::SelfSource;
pub use self::MethodError::*;
+use crate::errors::OpMethodGenericParams;
use crate::{Expectation, FnCtxt};
use rustc_data_structures::sync::Lrc;
use rustc_errors::{Applicability, Diagnostic};
@@ -19,11 +20,11 @@ use rustc_hir::def_id::DefId;
use rustc_infer::infer::{self, InferOk};
use rustc_middle::traits::ObligationCause;
use rustc_middle::ty::subst::{InternalSubsts, SubstsRef};
-use rustc_middle::ty::{self, DefIdTree, GenericParamDefKind, ToPredicate, Ty, TypeVisitable};
+use rustc_middle::ty::{self, GenericParamDefKind, Ty, TypeVisitable};
use rustc_span::symbol::Ident;
use rustc_span::Span;
-use rustc_trait_selection::traits;
use rustc_trait_selection::traits::query::evaluate_obligation::InferCtxtExt;
+use rustc_trait_selection::traits::{self, NormalizeExt};
use self::probe::{IsSuggestion, ProbeScope};
@@ -55,8 +56,7 @@ pub enum MethodError<'tcx> {
// not-in-scope traits which may work.
PrivateMatch(DefKind, DefId, Vec<DefId>),
- // Found a `Self: Sized` bound where `Self` is a trait object, also the caller may have
- // forgotten to import a trait.
+ // Found a `Self: Sized` bound where `Self` is a trait object.
IllegalSizedBound(Vec<DefId>, bool, Span),
// Found a match, but the return type is wrong
@@ -93,17 +93,22 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
call_expr_id: hir::HirId,
allow_private: bool,
) -> bool {
- let mode = probe::Mode::MethodCall;
match self.probe_for_name(
- method_name.span,
- mode,
+ probe::Mode::MethodCall,
method_name,
IsSuggestion(false),
self_ty,
call_expr_id,
ProbeScope::TraitsInScope,
) {
- Ok(..) => true,
+ Ok(pick) => {
+ pick.maybe_emit_unstable_name_collision_hint(
+ self.tcx,
+ method_name.span,
+ call_expr_id,
+ );
+ true
+ }
Err(NoMatch(..)) => false,
Err(Ambiguity(..)) => true,
Err(PrivateMatch(..)) => allow_private,
@@ -125,10 +130,9 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
) {
let params = self
.probe_for_name(
- method_name.span,
probe::Mode::MethodCall,
method_name,
- IsSuggestion(false),
+ IsSuggestion(true),
self_ty,
call_expr.hir_id,
ProbeScope::TraitsInScope,
@@ -175,7 +179,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
args: &'tcx [hir::Expr<'tcx>],
) -> Result<MethodCallee<'tcx>, MethodError<'tcx>> {
let pick =
- self.lookup_probe(span, segment.ident, self_ty, call_expr, ProbeScope::TraitsInScope)?;
+ self.lookup_probe(segment.ident, self_ty, call_expr, ProbeScope::TraitsInScope)?;
self.lint_dot_call_from_2018(self_ty, segment, span, call_expr, self_expr, &pick, args);
@@ -200,13 +204,12 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
.mk_ref(*region, ty::TypeAndMut { ty: *t_type, mutbl: mutability.invert() });
// We probe again to see if there might be a borrow mutability discrepancy.
match self.lookup_probe(
- span,
segment.ident,
trait_type,
call_expr,
ProbeScope::TraitsInScope,
) {
- Ok(ref new_pick) if *new_pick != pick => {
+ Ok(ref new_pick) if pick.differs_from(new_pick) => {
needs_mut = true;
}
_ => {}
@@ -214,29 +217,25 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
}
// We probe again, taking all traits into account (not only those in scope).
- let mut candidates = match self.lookup_probe(
- span,
- segment.ident,
- self_ty,
- call_expr,
- ProbeScope::AllTraits,
- ) {
- // If we find a different result the caller probably forgot to import a trait.
- Ok(ref new_pick) if *new_pick != pick => vec![new_pick.item.container_id(self.tcx)],
- Err(Ambiguity(ref sources)) => sources
- .iter()
- .filter_map(|source| {
- match *source {
- // Note: this cannot come from an inherent impl,
- // because the first probing succeeded.
- CandidateSource::Impl(def) => self.tcx.trait_id_of_impl(def),
- CandidateSource::Trait(_) => None,
- }
- })
- .collect(),
- _ => Vec::new(),
- };
- candidates.retain(|candidate| *candidate != self.tcx.parent(result.callee.def_id));
+ let candidates =
+ match self.lookup_probe(segment.ident, self_ty, call_expr, ProbeScope::AllTraits) {
+ // If we find a different result the caller probably forgot to import a trait.
+ Ok(ref new_pick) if pick.differs_from(new_pick) => {
+ vec![new_pick.item.container_id(self.tcx)]
+ }
+ Err(Ambiguity(ref sources)) => sources
+ .iter()
+ .filter_map(|source| {
+ match *source {
+ // Note: this cannot come from an inherent impl,
+ // because the first probing succeeded.
+ CandidateSource::Impl(def) => self.tcx.trait_id_of_impl(def),
+ CandidateSource::Trait(_) => None,
+ }
+ })
+ .collect(),
+ _ => Vec::new(),
+ };
return Err(IllegalSizedBound(candidates, needs_mut, span));
}
@@ -247,23 +246,21 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
#[instrument(level = "debug", skip(self, call_expr))]
pub fn lookup_probe(
&self,
- span: Span,
method_name: Ident,
self_ty: Ty<'tcx>,
call_expr: &'tcx hir::Expr<'tcx>,
scope: ProbeScope,
) -> probe::PickResult<'tcx> {
- let mode = probe::Mode::MethodCall;
- let self_ty = self.resolve_vars_if_possible(self_ty);
- self.probe_for_name(
- span,
- mode,
+ let pick = self.probe_for_name(
+ probe::Mode::MethodCall,
method_name,
IsSuggestion(false),
self_ty,
call_expr.hir_id,
scope,
- )
+ )?;
+ pick.maybe_emit_unstable_name_collision_hint(self.tcx, method_name.span, call_expr.hir_id);
+ Ok(pick)
}
pub(super) fn obligation_for_method(
@@ -295,10 +292,11 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
let poly_trait_ref = ty::Binder::dummy(trait_ref);
(
traits::Obligation::misc(
+ self.tcx,
span,
self.body_id,
self.param_env,
- poly_trait_ref.without_const().to_predicate(self.tcx),
+ poly_trait_ref.without_const(),
),
substs,
)
@@ -337,6 +335,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
(
traits::Obligation::new(
+ self.tcx,
traits::ObligationCause::new(
span,
self.body_id,
@@ -348,7 +347,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
},
),
self.param_env,
- poly_trait_ref.without_const().to_predicate(self.tcx),
+ poly_trait_ref,
),
substs,
)
@@ -444,7 +443,13 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
};
let def_id = method_item.def_id;
let generics = tcx.generics_of(def_id);
- assert_eq!(generics.params.len(), 0);
+
+ if generics.params.len() != 0 {
+ tcx.sess.emit_fatal(OpMethodGenericParams {
+ span: tcx.def_span(method_item.def_id),
+ method_name: m_name.to_string(),
+ });
+ }
debug!("lookup_in_trait_adjusted: method_item={:?}", method_item);
let mut obligations = vec![];
@@ -459,11 +464,22 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
let fn_sig = fn_sig.subst(self.tcx, substs);
let fn_sig = self.replace_bound_vars_with_fresh_vars(span, infer::FnCall, fn_sig);
- let InferOk { value, obligations: o } = if is_op {
- self.normalize_op_associated_types_in_as_infer_ok(span, fn_sig, opt_input_expr)
+ let cause = if is_op {
+ ObligationCause::new(
+ span,
+ self.body_id,
+ traits::BinOp {
+ rhs_span: opt_input_expr.map(|expr| expr.span),
+ is_lit: opt_input_expr
+ .map_or(false, |expr| matches!(expr.kind, hir::ExprKind::Lit(_))),
+ output_ty: None,
+ },
+ )
} else {
- self.normalize_associated_types_in_as_infer_ok(span, fn_sig)
+ traits::ObligationCause::misc(span, self.body_id)
};
+
+ let InferOk { value, obligations: o } = self.at(&cause, self.param_env).normalize(fn_sig);
let fn_sig = {
obligations.extend(o);
value
@@ -479,11 +495,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
// any late-bound regions appearing in its bounds.
let bounds = self.tcx.predicates_of(def_id).instantiate(self.tcx, substs);
- let InferOk { value, obligations: o } = if is_op {
- self.normalize_op_associated_types_in_as_infer_ok(span, bounds, opt_input_expr)
- } else {
- self.normalize_associated_types_in_as_infer_ok(span, bounds)
- };
+ let InferOk { value, obligations: o } = self.at(&cause, self.param_env).normalize(bounds);
let bounds = {
obligations.extend(o);
value
@@ -491,20 +503,6 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
assert!(!bounds.has_escaping_bound_vars());
- let cause = if is_op {
- ObligationCause::new(
- span,
- self.body_id,
- traits::BinOp {
- rhs_span: opt_input_expr.map(|expr| expr.span),
- is_lit: opt_input_expr
- .map_or(false, |expr| matches!(expr.kind, hir::ExprKind::Lit(_))),
- output_ty: None,
- },
- )
- } else {
- traits::ObligationCause::misc(span, self.body_id)
- };
let predicates_cause = cause.clone();
obligations.extend(traits::predicates_for_generics(
move |_, _| predicates_cause.clone(),
@@ -519,9 +517,10 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
method_ty, obligation
);
obligations.push(traits::Obligation::new(
+ tcx,
cause,
self.param_env,
- ty::Binder::dummy(ty::PredicateKind::WellFormed(method_ty.into())).to_predicate(tcx),
+ ty::Binder::dummy(ty::PredicateKind::WellFormed(method_ty.into())),
));
let callee = MethodCallee { def_id, substs, sig: fn_sig };
@@ -559,6 +558,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
let tcx = self.tcx;
// Check if we have an enum variant.
+ let mut struct_variant = None;
if let ty::Adt(adt_def, _) = self_ty.kind() {
if adt_def.is_enum() {
let variant_def = adt_def
@@ -566,29 +566,36 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
.iter()
.find(|vd| tcx.hygienic_eq(method_name, vd.ident(tcx), adt_def.did()));
if let Some(variant_def) = variant_def {
- // Braced variants generate unusable names in value namespace (reserved for
- // possible future use), so variants resolved as associated items may refer to
- // them as well. It's ok to use the variant's id as a ctor id since an
- // error will be reported on any use of such resolution anyway.
- let ctor_def_id = variant_def.ctor_def_id.unwrap_or(variant_def.def_id);
- tcx.check_stability(ctor_def_id, Some(expr_id), span, Some(method_name.span));
- return Ok((
- DefKind::Ctor(CtorOf::Variant, variant_def.ctor_kind),
- ctor_def_id,
- ));
+ if let Some((ctor_kind, ctor_def_id)) = variant_def.ctor {
+ tcx.check_stability(
+ ctor_def_id,
+ Some(expr_id),
+ span,
+ Some(method_name.span),
+ );
+ return Ok((DefKind::Ctor(CtorOf::Variant, ctor_kind), ctor_def_id));
+ } else {
+ struct_variant = Some((DefKind::Variant, variant_def.def_id));
+ }
}
}
}
let pick = self.probe_for_name(
- span,
probe::Mode::Path,
method_name,
IsSuggestion(false),
self_ty,
expr_id,
ProbeScope::TraitsInScope,
- )?;
+ );
+ let pick = match (pick, struct_variant) {
+ // Fall back to a resolution that will produce an error later.
+ (Err(_), Some(res)) => return Ok(res),
+ (pick, _) => pick?,
+ };
+
+ pick.maybe_emit_unstable_name_collision_hint(self.tcx, span, expr_id);
self.lint_fully_qualified_call_from_2018(
span,
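
`resolve_fully_qualified_call` now distinguishes variants that have a constructor in the value namespace (via `variant_def.ctor`) from braced variants, which are resolved as `DefKind::Variant` so a later error can point at the misuse. In surface Rust the distinction looks like this (assumed illustration):

    enum Shape {
        Circle(f64),
        Square { side: f64 },
    }

    fn main() {
        // A tuple variant has a constructor in the value namespace, so the
        // type-relative path `Shape::Circle` names a callable value.
        let c = Shape::Circle(1.0);
        // A braced variant has no such constructor; it must be built with struct
        // syntax, and using `Shape::Square` as a bare value is an error.
        let s = Shape::Square { side: 2.0 };
        let _ = (c, s);
    }
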
diff --git a/compiler/rustc_hir_typeck/src/method/prelude2021.rs b/compiler/rustc_hir_typeck/src/method/prelude2021.rs
index 3c98a2aa3..dea14dd93 100644
--- a/compiler/rustc_hir_typeck/src/method/prelude2021.rs
+++ b/compiler/rustc_hir_typeck/src/method/prelude2021.rs
@@ -5,10 +5,9 @@ use crate::{
use hir::def_id::DefId;
use hir::HirId;
use hir::ItemKind;
-use rustc_ast::Mutability;
use rustc_errors::Applicability;
use rustc_hir as hir;
-use rustc_middle::ty::subst::InternalSubsts;
+use rustc_infer::infer::type_variable::{TypeVariableOrigin, TypeVariableOriginKind};
use rustc_middle::ty::{Adt, Array, Ref, Ty};
use rustc_session::lint::builtin::RUST_2021_PRELUDE_COLLISIONS;
use rustc_span::symbol::kw::{Empty, Underscore};
@@ -88,14 +87,9 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
let derefs = "*".repeat(pick.autoderefs);
let autoref = match pick.autoref_or_ptr_adjustment {
- Some(probe::AutorefOrPtrAdjustment::Autoref {
- mutbl: Mutability::Mut,
- ..
- }) => "&mut ",
- Some(probe::AutorefOrPtrAdjustment::Autoref {
- mutbl: Mutability::Not,
- ..
- }) => "&",
+ Some(probe::AutorefOrPtrAdjustment::Autoref { mutbl, .. }) => {
+ mutbl.ref_prefix_str()
+ }
Some(probe::AutorefOrPtrAdjustment::ToConstPtr) | None => "",
};
if let Ok(self_expr) = self.sess().source_map().span_to_snippet(self_expr.span)
@@ -227,14 +221,13 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
// If we know it does not, we don't need to warn.
if method_name.name == sym::from_iter {
if let Some(trait_def_id) = self.tcx.get_diagnostic_item(sym::FromIterator) {
+ let any_type = self.infcx.next_ty_var(TypeVariableOrigin {
+ kind: TypeVariableOriginKind::MiscVariable,
+ span,
+ });
if !self
.infcx
- .type_implements_trait(
- trait_def_id,
- self_ty,
- InternalSubsts::empty(),
- self.param_env,
- )
+ .type_implements_trait(trait_def_id, [self_ty, any_type], self.param_env)
.may_apply()
{
return;
@@ -387,8 +380,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
let derefs = "*".repeat(pick.autoderefs);
let autoref = match pick.autoref_or_ptr_adjustment {
- Some(probe::AutorefOrPtrAdjustment::Autoref { mutbl: Mutability::Mut, .. }) => "&mut ",
- Some(probe::AutorefOrPtrAdjustment::Autoref { mutbl: Mutability::Not, .. }) => "&",
+ Some(probe::AutorefOrPtrAdjustment::Autoref { mutbl, .. }) => mutbl.ref_prefix_str(),
Some(probe::AutorefOrPtrAdjustment::ToConstPtr) | None => "",
};
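
Both hunks above drop the explicit match on `Mutability` in favour of `ref_prefix_str` when rendering the `&` / `&mut ` prefix of a suggestion. A self-contained sketch of what such a helper does (the enum and method here are local stand-ins, not the compiler's types):

    #[derive(Clone, Copy)]
    enum Mutability {
        Not,
        Mut,
    }

    impl Mutability {
        // Stand-in for the helper the patch switches to: the string that prefixes
        // a reference of this mutability in rendered suggestions.
        fn ref_prefix_str(self) -> &'static str {
            match self {
                Mutability::Not => "&",
                Mutability::Mut => "&mut ",
            }
        }
    }

    fn main() {
        assert_eq!(Mutability::Not.ref_prefix_str(), "&");
        assert_eq!(Mutability::Mut.ref_prefix_str(), "&mut ");
    }
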
diff --git a/compiler/rustc_hir_typeck/src/method/probe.rs b/compiler/rustc_hir_typeck/src/method/probe.rs
index 28aa2302f..ae299cc9d 100644
--- a/compiler/rustc_hir_typeck/src/method/probe.rs
+++ b/compiler/rustc_hir_typeck/src/method/probe.rs
@@ -9,7 +9,6 @@ use rustc_data_structures::fx::FxHashSet;
use rustc_errors::Applicability;
use rustc_hir as hir;
use rustc_hir::def::DefKind;
-use rustc_hir::def::Namespace;
use rustc_infer::infer::canonical::OriginalQueryValues;
use rustc_infer::infer::canonical::{Canonical, QueryResponse};
use rustc_infer::infer::type_variable::{TypeVariableOrigin, TypeVariableOriginKind};
@@ -17,8 +16,10 @@ use rustc_infer::infer::{self, InferOk, TyCtxtInferExt};
use rustc_middle::infer::unify_key::{ConstVariableOrigin, ConstVariableOriginKind};
use rustc_middle::middle::stability;
use rustc_middle::ty::fast_reject::{simplify_type, TreatParams};
+use rustc_middle::ty::AssocItem;
use rustc_middle::ty::GenericParamDefKind;
-use rustc_middle::ty::{self, ParamEnvAnd, ToPredicate, Ty, TyCtxt, TypeFoldable, TypeVisitable};
+use rustc_middle::ty::ToPredicate;
+use rustc_middle::ty::{self, ParamEnvAnd, Ty, TyCtxt, TypeFoldable, TypeVisitable};
use rustc_middle::ty::{InternalSubsts, SubstsRef};
use rustc_session::lint;
use rustc_span::def_id::DefId;
@@ -35,6 +36,7 @@ use rustc_trait_selection::traits::query::method_autoderef::{
CandidateStep, MethodAutoderefStepsResult,
};
use rustc_trait_selection::traits::query::CanonicalTyGoal;
+use rustc_trait_selection::traits::NormalizeExt;
use rustc_trait_selection::traits::{self, ObligationCause};
use std::cmp::max;
use std::iter;
@@ -83,8 +85,6 @@ struct ProbeContext<'a, 'tcx> {
unsatisfied_predicates:
Vec<(ty::Predicate<'tcx>, Option<ty::Predicate<'tcx>>, Option<ObligationCause<'tcx>>)>,
- is_suggestion: IsSuggestion,
-
scope_expr_id: hir::HirId,
}
@@ -193,7 +193,7 @@ impl AutorefOrPtrAdjustment {
}
}
-#[derive(Debug, PartialEq, Clone)]
+#[derive(Debug, Clone)]
pub struct Pick<'tcx> {
pub item: ty::AssocItem,
pub kind: PickKind<'tcx>,
@@ -209,6 +209,9 @@ pub struct Pick<'tcx> {
/// `*mut T`, convert it to `*const T`.
pub autoref_or_ptr_adjustment: Option<AutorefOrPtrAdjustment>,
pub self_ty: Ty<'tcx>,
+
+ /// Unstable candidates alongside the stable ones.
+ unstable_candidates: Vec<(Candidate<'tcx>, Symbol)>,
}
#[derive(Clone, Debug, PartialEq, Eq)]
@@ -298,7 +301,6 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
#[instrument(level = "debug", skip(self))]
pub fn probe_for_name(
&self,
- span: Span,
mode: Mode,
item_name: Ident,
is_suggestion: IsSuggestion,
@@ -307,7 +309,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
scope: ProbeScope,
) -> PickResult<'tcx> {
self.probe_op(
- span,
+ item_name.span,
mode,
Some(item_name),
None,
@@ -340,10 +342,9 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
&mut orig_values,
);
- let steps = if mode == Mode::MethodCall {
- self.tcx.method_autoderef_steps(param_env_and_self_ty)
- } else {
- self.probe(|_| {
+ let steps = match mode {
+ Mode::MethodCall => self.tcx.method_autoderef_steps(param_env_and_self_ty),
+ Mode::Path => self.probe(|_| {
// Mode::Path - the deref steps is "trivial". This turns
// our CanonicalQuery into a "trivial" QueryResponse. This
// is a bit inefficient, but I don't think that writing
@@ -372,7 +373,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
opt_bad_ty: None,
reached_recursion_limit: false,
}
- })
+ }),
};
// If our autoderef loop had reached the recursion limit,
@@ -446,7 +447,6 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
return_type,
orig_values,
steps.steps,
- is_suggestion,
scope_expr_id,
);
@@ -475,10 +475,9 @@ fn method_autoderef_steps<'tcx>(
let (ref infcx, goal, inference_vars) = tcx.infer_ctxt().build_with_canonical(DUMMY_SP, &goal);
let ParamEnvAnd { param_env, value: self_ty } = goal;
- let mut autoderef =
- Autoderef::new(infcx, param_env, hir::CRATE_HIR_ID, DUMMY_SP, self_ty, DUMMY_SP)
- .include_raw_pointers()
- .silence_errors();
+ let mut autoderef = Autoderef::new(infcx, param_env, hir::CRATE_HIR_ID, DUMMY_SP, self_ty)
+ .include_raw_pointers()
+ .silence_errors();
let mut reached_raw_pointer = false;
let mut steps: Vec<_> = autoderef
.by_ref()
@@ -542,7 +541,6 @@ impl<'a, 'tcx> ProbeContext<'a, 'tcx> {
return_type: Option<Ty<'tcx>>,
orig_steps_var_values: OriginalQueryValues<'tcx>,
steps: &'tcx [CandidateStep<'tcx>],
- is_suggestion: IsSuggestion,
scope_expr_id: hir::HirId,
) -> ProbeContext<'a, 'tcx> {
ProbeContext {
@@ -560,7 +558,6 @@ impl<'a, 'tcx> ProbeContext<'a, 'tcx> {
allow_similar_names: false,
private_candidate: None,
unsatisfied_predicates: Vec::new(),
- is_suggestion,
scope_expr_id,
}
}
@@ -718,9 +715,9 @@ impl<'a, 'tcx> ProbeContext<'a, 'tcx> {
// maybe shouldn't include `Param`s, but rather fresh variables or be canonicalized,
// see issue #89650
let cause = traits::ObligationCause::misc(self.span, self.body_id);
- let selcx = &mut traits::SelectionContext::new(self.fcx);
- let traits::Normalized { value: xform_self_ty, obligations } =
- traits::normalize(selcx, self.param_env, cause, xform_self_ty);
+ let InferOk { value: xform_self_ty, obligations } =
+ self.fcx.at(&cause, self.param_env).normalize(xform_self_ty);
+
debug!(
"assemble_inherent_impl_probe after normalization: xform_self_ty = {:?}/{:?}",
xform_self_ty, xform_ret_ty
@@ -787,7 +784,7 @@ impl<'a, 'tcx> ProbeContext<'a, 'tcx> {
let bounds = self.param_env.caller_bounds().iter().filter_map(|predicate| {
let bound_predicate = predicate.kind();
match bound_predicate.skip_binder() {
- ty::PredicateKind::Trait(trait_predicate) => {
+ ty::PredicateKind::Clause(ty::Clause::Trait(trait_predicate)) => {
match *trait_predicate.trait_ref.self_ty().kind() {
ty::Param(p) if p == param_ty => {
Some(bound_predicate.rebind(trait_predicate.trait_ref))
@@ -797,14 +794,15 @@ impl<'a, 'tcx> ProbeContext<'a, 'tcx> {
}
ty::PredicateKind::Subtype(..)
| ty::PredicateKind::Coerce(..)
- | ty::PredicateKind::Projection(..)
- | ty::PredicateKind::RegionOutlives(..)
+ | ty::PredicateKind::Clause(ty::Clause::Projection(..))
+ | ty::PredicateKind::Clause(ty::Clause::RegionOutlives(..))
| ty::PredicateKind::WellFormed(..)
| ty::PredicateKind::ObjectSafe(..)
| ty::PredicateKind::ClosureKind(..)
- | ty::PredicateKind::TypeOutlives(..)
+ | ty::PredicateKind::Clause(ty::Clause::TypeOutlives(..))
| ty::PredicateKind::ConstEvaluatable(..)
| ty::PredicateKind::ConstEquate(..)
+ | ty::PredicateKind::Ambiguous
| ty::PredicateKind::TypeWellFormedFromEnv(..) => None,
}
});
@@ -882,7 +880,7 @@ impl<'a, 'tcx> ProbeContext<'a, 'tcx> {
}
}
- pub fn matches_return_type(
+ fn matches_return_type(
&self,
method: &ty::AssocItem,
self_ty: Option<Ty<'tcx>>,
@@ -1019,7 +1017,6 @@ impl<'a, 'tcx> ProbeContext<'a, 'tcx> {
let out_of_scope_traits = match self.pick_core() {
Some(Ok(p)) => vec![p.item.container_id(self.tcx)],
- //Some(Ok(p)) => p.iter().map(|p| p.item.container().id()).collect(),
Some(Err(MethodError::Ambiguity(v))) => v
.into_iter()
.map(|source| match source {
@@ -1054,26 +1051,17 @@ impl<'a, 'tcx> ProbeContext<'a, 'tcx> {
}
fn pick_core(&mut self) -> Option<PickResult<'tcx>> {
- let mut unstable_candidates = Vec::new();
- let pick = self.pick_all_method(Some(&mut unstable_candidates));
+ let pick = self.pick_all_method(Some(&mut vec![]));
// In this case unstable picking is done by `pick_method`.
if !self.tcx.sess.opts.unstable_opts.pick_stable_methods_before_any_unstable {
return pick;
}
- match pick {
- // Emit a lint if there are unstable candidates alongside the stable ones.
- //
- // We suppress warning if we're picking the method only because it is a
- // suggestion.
- Some(Ok(ref p)) if !self.is_suggestion.0 && !unstable_candidates.is_empty() => {
- self.emit_unstable_name_collision_hint(p, &unstable_candidates);
- pick
- }
- Some(_) => pick,
- None => self.pick_all_method(None),
+ if pick.is_none() {
+ return self.pick_all_method(None);
}
+ pick
}
fn pick_all_method(
@@ -1218,7 +1206,6 @@ impl<'a, 'tcx> ProbeContext<'a, 'tcx> {
debug!("pick_method_with_unstable(self_ty={})", self.ty_to_string(self_ty));
let mut possibly_unsatisfied_predicates = Vec::new();
- let mut unstable_candidates = Vec::new();
for (kind, candidates) in
&[("inherent", &self.inherent_candidates), ("extension", &self.extension_candidates)]
@@ -1228,26 +1215,17 @@ impl<'a, 'tcx> ProbeContext<'a, 'tcx> {
self_ty,
candidates.iter(),
&mut possibly_unsatisfied_predicates,
- Some(&mut unstable_candidates),
+ Some(&mut vec![]),
);
- if let Some(pick) = res {
- if !self.is_suggestion.0 && !unstable_candidates.is_empty() {
- if let Ok(p) = &pick {
- // Emit a lint if there are unstable candidates alongside the stable ones.
- //
- // We suppress warning if we're picking the method only because it is a
- // suggestion.
- self.emit_unstable_name_collision_hint(p, &unstable_candidates);
- }
- }
- return Some(pick);
+ if res.is_some() {
+ return res;
}
}
debug!("searching unstable candidates");
let res = self.consider_candidates(
self_ty,
- unstable_candidates.iter().map(|(c, _)| c),
+ self.inherent_candidates.iter().chain(&self.extension_candidates),
&mut possibly_unsatisfied_predicates,
None,
);
@@ -1302,7 +1280,7 @@ impl<'a, 'tcx> ProbeContext<'a, 'tcx> {
Option<ty::Predicate<'tcx>>,
Option<ObligationCause<'tcx>>,
)>,
- unstable_candidates: Option<&mut Vec<(Candidate<'tcx>, Symbol)>>,
+ mut unstable_candidates: Option<&mut Vec<(Candidate<'tcx>, Symbol)>>,
) -> Option<PickResult<'tcx>>
where
ProbesIter: Iterator<Item = &'b Candidate<'tcx>> + Clone,
@@ -1326,7 +1304,7 @@ impl<'a, 'tcx> ProbeContext<'a, 'tcx> {
}
}
- if let Some(uc) = unstable_candidates {
+ if let Some(uc) = &mut unstable_candidates {
applicable_candidates.retain(|&(p, _)| {
if let stability::EvalResult::Deny { feature, .. } =
self.tcx.eval_stability(p.item.def_id, None, self.span, None)
@@ -1345,30 +1323,63 @@ impl<'a, 'tcx> ProbeContext<'a, 'tcx> {
applicable_candidates.pop().map(|(probe, status)| {
if status == ProbeResult::Match {
- Ok(probe.to_unadjusted_pick(self_ty))
+ Ok(probe
+ .to_unadjusted_pick(self_ty, unstable_candidates.cloned().unwrap_or_default()))
} else {
Err(MethodError::BadReturnType)
}
})
}
+}
+
+impl<'tcx> Pick<'tcx> {
+ /// In case there were unstable name collisions, emit them as a lint.
+ /// Checks whether two picks do not refer to the same trait item for the same `Self` type.
+ /// Only useful for comparisons of picks in order to improve diagnostics.
+ /// Do not use for type checking.
+ pub fn differs_from(&self, other: &Self) -> bool {
+ let Self {
+ item:
+ AssocItem {
+ def_id,
+ name: _,
+ kind: _,
+ container: _,
+ trait_item_def_id: _,
+ fn_has_self_parameter: _,
+ },
+ kind: _,
+ import_ids: _,
+ autoderefs: _,
+ autoref_or_ptr_adjustment: _,
+ self_ty,
+ unstable_candidates: _,
+ } = *self;
+ self_ty != other.self_ty || def_id != other.item.def_id
+ }
- fn emit_unstable_name_collision_hint(
+ /// In case there were unstable name collisions, emit them as a lint.
+ pub fn maybe_emit_unstable_name_collision_hint(
&self,
- stable_pick: &Pick<'_>,
- unstable_candidates: &[(Candidate<'tcx>, Symbol)],
+ tcx: TyCtxt<'tcx>,
+ span: Span,
+ scope_expr_id: hir::HirId,
) {
- let def_kind = stable_pick.item.kind.as_def_kind();
- self.tcx.struct_span_lint_hir(
+ if self.unstable_candidates.is_empty() {
+ return;
+ }
+ let def_kind = self.item.kind.as_def_kind();
+ tcx.struct_span_lint_hir(
lint::builtin::UNSTABLE_NAME_COLLISIONS,
- self.scope_expr_id,
- self.span,
+ scope_expr_id,
+ span,
format!(
"{} {} with this name may be added to the standard library in the future",
def_kind.article(),
- def_kind.descr(stable_pick.item.def_id),
+ def_kind.descr(self.item.def_id),
),
|lint| {
- match (stable_pick.item.kind, stable_pick.item.container) {
+ match (self.item.kind, self.item.container) {
(ty::AssocKind::Fn, _) => {
// FIXME: This should be a `span_suggestion` instead of `help`
// However `self.span` only
@@ -1377,31 +1388,31 @@ impl<'a, 'tcx> ProbeContext<'a, 'tcx> {
lint.help(&format!(
"call with fully qualified syntax `{}(...)` to keep using the current \
method",
- self.tcx.def_path_str(stable_pick.item.def_id),
+ tcx.def_path_str(self.item.def_id),
));
}
(ty::AssocKind::Const, ty::AssocItemContainer::TraitContainer) => {
- let def_id = stable_pick.item.container_id(self.tcx);
+ let def_id = self.item.container_id(tcx);
lint.span_suggestion(
- self.span,
+ span,
"use the fully qualified path to the associated const",
format!(
"<{} as {}>::{}",
- stable_pick.self_ty,
- self.tcx.def_path_str(def_id),
- stable_pick.item.name
+ self.self_ty,
+ tcx.def_path_str(def_id),
+ self.item.name
),
Applicability::MachineApplicable,
);
}
_ => {}
}
- if self.tcx.sess.is_nightly_build() {
- for (candidate, feature) in unstable_candidates {
+ if tcx.sess.is_nightly_build() {
+ for (candidate, feature) in &self.unstable_candidates {
lint.help(&format!(
"add `#![feature({})]` to the crate attributes to enable `{}`",
feature,
- self.tcx.def_path_str(candidate.item.def_id),
+ tcx.def_path_str(candidate.item.def_id),
));
}
}
@@ -1410,14 +1421,16 @@ impl<'a, 'tcx> ProbeContext<'a, 'tcx> {
},
);
}
+}
+impl<'a, 'tcx> ProbeContext<'a, 'tcx> {
fn select_trait_candidate(
&self,
trait_ref: ty::TraitRef<'tcx>,
) -> traits::SelectionResult<'tcx, traits::Selection<'tcx>> {
let cause = traits::ObligationCause::misc(self.span, self.body_id);
- let predicate = ty::Binder::dummy(trait_ref).to_poly_trait_predicate();
- let obligation = traits::Obligation::new(cause, self.param_env, predicate);
+ let predicate = ty::Binder::dummy(trait_ref);
+ let obligation = traits::Obligation::new(self.tcx, cause, self.param_env, predicate);
traits::SelectionContext::new(self).select(&obligation)
}
@@ -1476,7 +1489,6 @@ impl<'a, 'tcx> ProbeContext<'a, 'tcx> {
let mut xform_ret_ty = probe.xform_ret_ty;
debug!(?xform_ret_ty);
- let selcx = &mut traits::SelectionContext::new(self);
let cause = traits::ObligationCause::misc(self.span, self.body_id);
let mut parent_pred = None;
@@ -1490,10 +1502,10 @@ impl<'a, 'tcx> ProbeContext<'a, 'tcx> {
// `xform_ret_ty` hasn't been normalized yet, only `xform_self_ty`,
// see the reasons mentioned in the comments in `assemble_inherent_impl_probe`
// for why this is necessary
- let traits::Normalized {
+ let InferOk {
value: normalized_xform_ret_ty,
obligations: normalization_obligations,
- } = traits::normalize(selcx, self.param_env, cause.clone(), probe.xform_ret_ty);
+ } = self.fcx.at(&cause, self.param_env).normalize(probe.xform_ret_ty);
xform_ret_ty = normalized_xform_ret_ty;
debug!("xform_ret_ty after normalization: {:?}", xform_ret_ty);
@@ -1501,8 +1513,9 @@ impl<'a, 'tcx> ProbeContext<'a, 'tcx> {
let impl_def_id = probe.item.container_id(self.tcx);
let impl_bounds = self.tcx.predicates_of(impl_def_id);
let impl_bounds = impl_bounds.instantiate(self.tcx, substs);
- let traits::Normalized { value: impl_bounds, obligations: norm_obligations } =
- traits::normalize(selcx, self.param_env, cause.clone(), impl_bounds);
+
+ let InferOk { value: impl_bounds, obligations: norm_obligations } =
+ self.fcx.at(&cause, self.param_env).normalize(impl_bounds);
// Convert the bounds into obligations.
let impl_obligations = traits::predicates_for_generics(
@@ -1548,7 +1561,8 @@ impl<'a, 'tcx> ProbeContext<'a, 'tcx> {
let predicate =
ty::Binder::dummy(trait_ref).without_const().to_predicate(self.tcx);
parent_pred = Some(predicate);
- let obligation = traits::Obligation::new(cause, self.param_env, predicate);
+ let obligation =
+ traits::Obligation::new(self.tcx, cause, self.param_env, predicate);
if !self.predicate_may_hold(&obligation) {
result = ProbeResult::NoMatch;
if self.probe(|_| {
@@ -1669,6 +1683,7 @@ impl<'a, 'tcx> ProbeContext<'a, 'tcx> {
autoderefs: 0,
autoref_or_ptr_adjustment: None,
self_ty,
+ unstable_candidates: vec![],
})
}
@@ -1688,7 +1703,6 @@ impl<'a, 'tcx> ProbeContext<'a, 'tcx> {
self.return_type,
self.orig_steps_var_values.clone(),
steps,
- IsSuggestion(true),
self.scope_expr_id,
);
pcx.allow_similar_names = true;
@@ -1861,6 +1875,15 @@ impl<'a, 'tcx> ProbeContext<'a, 'tcx> {
self.tcx.erase_late_bound_regions(value)
}
+ /// Determine if the given associated item type is relevant in the current context.
+ fn is_relevant_kind_for_mode(&self, kind: ty::AssocKind) -> bool {
+ match (self.mode, kind) {
+ (Mode::MethodCall, ty::AssocKind::Fn) => true,
+ (Mode::Path, ty::AssocKind::Const | ty::AssocKind::Fn) => true,
+ _ => false,
+ }
+ }
+
/// Finds the method with the appropriate name (or return type, as the case may be). If
/// `allow_similar_names` is set, find methods with close-matching names.
// The length of the returned iterator is nearly always 0 or 1 and this
@@ -1873,7 +1896,7 @@ impl<'a, 'tcx> ProbeContext<'a, 'tcx> {
.associated_items(def_id)
.in_definition_order()
.filter(|x| {
- if x.kind.namespace() != Namespace::ValueNS {
+ if !self.is_relevant_kind_for_mode(x.kind) {
return false;
}
match lev_distance_with_substrings(name.as_str(), x.name.as_str(), max_dist)
@@ -1887,16 +1910,26 @@ impl<'a, 'tcx> ProbeContext<'a, 'tcx> {
} else {
self.fcx
.associated_value(def_id, name)
+ .filter(|x| self.is_relevant_kind_for_mode(x.kind))
.map_or_else(SmallVec::new, |x| SmallVec::from_buf([x]))
}
} else {
- self.tcx.associated_items(def_id).in_definition_order().copied().collect()
+ self.tcx
+ .associated_items(def_id)
+ .in_definition_order()
+ .filter(|x| self.is_relevant_kind_for_mode(x.kind))
+ .copied()
+ .collect()
}
}
}
impl<'tcx> Candidate<'tcx> {
- fn to_unadjusted_pick(&self, self_ty: Ty<'tcx>) -> Pick<'tcx> {
+ fn to_unadjusted_pick(
+ &self,
+ self_ty: Ty<'tcx>,
+ unstable_candidates: Vec<(Candidate<'tcx>, Symbol)>,
+ ) -> Pick<'tcx> {
Pick {
item: self.item,
kind: match self.kind {
@@ -1921,6 +1954,7 @@ impl<'tcx> Candidate<'tcx> {
autoderefs: 0,
autoref_or_ptr_adjustment: None,
self_ty,
+ unstable_candidates,
}
}
}
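
With `maybe_emit_unstable_name_collision_hint` now living on `Pick`, the lint fires at each call site where a picked extension method shares its name with an unstable library item, and its suggestion is fully qualified syntax. The shape of that workaround in user code (generic example; no actual collision with an unstable item is claimed here):

    trait NextPowerOfTen {
        fn next_power_of_ten(&self) -> u32;
    }

    impl NextPowerOfTen for u32 {
        fn next_power_of_ten(&self) -> u32 {
            let mut p = 1;
            while p < *self {
                p *= 10;
            }
            p
        }
    }

    fn main() {
        let n: u32 = 250;
        // Method-call syntax picks the extension method today; if `u32` ever gained
        // an unstable inherent `next_power_of_ten`, the lint would fire here and
        // suggest the fully qualified call below, which stays unambiguous.
        assert_eq!(n.next_power_of_ten(), 1000);
        assert_eq!(NextPowerOfTen::next_power_of_ten(&n), 1000);
    }
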
diff --git a/compiler/rustc_hir_typeck/src/method/suggest.rs b/compiler/rustc_hir_typeck/src/method/suggest.rs
index 6c21ed902..db93cfab2 100644
--- a/compiler/rustc_hir_typeck/src/method/suggest.rs
+++ b/compiler/rustc_hir_typeck/src/method/suggest.rs
@@ -5,6 +5,7 @@ use crate::errors;
use crate::FnCtxt;
use rustc_ast::ast::Mutability;
use rustc_data_structures::fx::{FxHashMap, FxHashSet};
+use rustc_errors::StashKey;
use rustc_errors::{
pluralize, struct_span_err, Applicability, Diagnostic, DiagnosticBuilder, ErrorGuaranteed,
MultiSpan,
@@ -13,27 +14,35 @@ use rustc_hir as hir;
use rustc_hir::def::DefKind;
use rustc_hir::def_id::DefId;
use rustc_hir::lang_items::LangItem;
+use rustc_hir::PatKind::Binding;
+use rustc_hir::PathSegment;
use rustc_hir::{ExprKind, Node, QPath};
-use rustc_infer::infer::type_variable::{TypeVariableOrigin, TypeVariableOriginKind};
+use rustc_infer::infer::{
+ type_variable::{TypeVariableOrigin, TypeVariableOriginKind},
+ RegionVariableOrigin,
+};
+use rustc_middle::infer::unify_key::{ConstVariableOrigin, ConstVariableOriginKind};
use rustc_middle::traits::util::supertraits;
+use rustc_middle::ty::fast_reject::DeepRejectCtxt;
use rustc_middle::ty::fast_reject::{simplify_type, TreatParams};
use rustc_middle::ty::print::with_crate_prefix;
-use rustc_middle::ty::{self, DefIdTree, ToPredicate, Ty, TyCtxt, TypeVisitable};
+use rustc_middle::ty::{self, DefIdTree, GenericArgKind, Ty, TyCtxt, TypeVisitable};
use rustc_middle::ty::{IsSuggestable, ToPolyTraitRef};
use rustc_span::symbol::{kw, sym, Ident};
use rustc_span::Symbol;
use rustc_span::{lev_distance, source_map, ExpnKind, FileName, MacroKind, Span};
+use rustc_trait_selection::traits::error_reporting::on_unimplemented::OnUnimplementedNote;
use rustc_trait_selection::traits::error_reporting::on_unimplemented::TypeErrCtxtExt as _;
use rustc_trait_selection::traits::query::evaluate_obligation::InferCtxtExt as _;
use rustc_trait_selection::traits::{
- FulfillmentError, Obligation, ObligationCause, ObligationCauseCode, OnUnimplementedNote,
+ FulfillmentError, Obligation, ObligationCause, ObligationCauseCode,
};
-use std::cmp::Ordering;
-use std::iter;
-
use super::probe::{AutorefOrPtrAdjustment, IsSuggestion, Mode, ProbeScope};
use super::{CandidateSource, MethodError, NoMatchData};
+use rustc_hir::intravisit::Visitor;
+use std::cmp::Ordering;
+use std::iter;
impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
fn is_fn_ty(&self, ty: Ty<'tcx>, span: Span) -> bool {
@@ -62,22 +71,23 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
self.autoderef(span, ty).any(|(ty, _)| {
info!("check deref {:?} impl FnOnce", ty);
self.probe(|_| {
- let fn_once_substs = tcx.mk_substs_trait(
- ty,
- &[self
- .next_ty_var(TypeVariableOrigin {
+ let trait_ref = tcx.mk_trait_ref(
+ fn_once,
+ [
+ ty,
+ self.next_ty_var(TypeVariableOrigin {
kind: TypeVariableOriginKind::MiscVariable,
span,
- })
- .into()],
+ }),
+ ],
);
- let trait_ref = ty::TraitRef::new(fn_once, fn_once_substs);
let poly_trait_ref = ty::Binder::dummy(trait_ref);
let obligation = Obligation::misc(
+ tcx,
span,
self.body_id,
self.param_env,
- poly_trait_ref.without_const().to_predicate(tcx),
+ poly_trait_ref.without_const(),
);
self.predicate_may_hold(&obligation)
})
@@ -107,7 +117,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
let report_candidates = |span: Span,
err: &mut Diagnostic,
sources: &mut Vec<CandidateSource>,
- sugg_span: Span| {
+ sugg_span: Option<Span>| {
sources.sort();
sources.dedup();
// Dynamic limit to avoid hiding just one candidate, which is silly.
@@ -168,7 +178,8 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
} else {
err.note(&note_str);
}
- if let Some(trait_ref) = self.tcx.impl_trait_ref(impl_did) {
+ if let Some(sugg_span) = sugg_span
+ && let Some(trait_ref) = self.tcx.impl_trait_ref(impl_did) {
let path = self.tcx.def_path_str(trait_ref.def_id);
let ty = match item.kind {
@@ -217,20 +228,22 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
err.span_note(item_span, msg);
None
};
- let path = self.tcx.def_path_str(trait_did);
- print_disambiguation_help(
- item_name,
- args,
- err,
- path,
- rcvr_ty,
- item.kind,
- item.def_id,
- sugg_span,
- idx,
- self.tcx.sess.source_map(),
- item.fn_has_self_parameter,
- );
+ if let Some(sugg_span) = sugg_span {
+ let path = self.tcx.def_path_str(trait_did);
+ print_disambiguation_help(
+ item_name,
+ args,
+ err,
+ path,
+ rcvr_ty,
+ item.kind,
+ item.def_id,
+ sugg_span,
+ idx,
+ self.tcx.sess.source_map(),
+ item.fn_has_self_parameter,
+ );
+ }
}
}
}
@@ -248,7 +261,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
match error {
MethodError::NoMatch(NoMatchData {
- static_candidates: mut static_sources,
+ mut static_candidates,
unsatisfied_predicates,
out_of_scope_traits,
lev_candidate,
@@ -256,15 +269,15 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
}) => {
let tcx = self.tcx;
- let actual = self.resolve_vars_if_possible(rcvr_ty);
- let ty_str = self.ty_to_string(actual);
+ let rcvr_ty = self.resolve_vars_if_possible(rcvr_ty);
+ let ty_str = self.ty_to_string(rcvr_ty);
let is_method = mode == Mode::MethodCall;
let item_kind = if is_method {
"method"
- } else if actual.is_enum() {
+ } else if rcvr_ty.is_enum() {
"variant or associated item"
} else {
- match (item_name.as_str().chars().next(), actual.is_fresh_ty()) {
+ match (item_name.as_str().chars().next(), rcvr_ty.is_fresh_ty()) {
(Some(name), false) if name.is_lowercase() => "function or associated item",
(Some(_), false) => "associated item",
(Some(_), true) | (None, false) => "variant or associated item",
@@ -273,24 +286,23 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
};
if self.suggest_wrapping_range_with_parens(
- tcx, actual, source, span, item_name, &ty_str,
+ tcx, rcvr_ty, source, span, item_name, &ty_str,
) || self.suggest_constraining_numerical_ty(
- tcx, actual, source, span, item_kind, item_name, &ty_str,
+ tcx, rcvr_ty, source, span, item_kind, item_name, &ty_str,
) {
return None;
}
-
span = item_name.span;
// Don't show generic arguments when the method can't be found in any implementation (#81576).
let mut ty_str_reported = ty_str.clone();
- if let ty::Adt(_, generics) = actual.kind() {
+ if let ty::Adt(_, generics) = rcvr_ty.kind() {
if generics.len() > 0 {
- let mut autoderef = self.autoderef(span, actual);
+ let mut autoderef = self.autoderef(span, rcvr_ty);
let candidate_found = autoderef.any(|(ty, _)| {
- if let ty::Adt(adt_deref, _) = ty.kind() {
+ if let ty::Adt(adt_def, _) = ty.kind() {
self.tcx
- .inherent_impls(adt_deref.did())
+ .inherent_impls(adt_def.did())
.iter()
.filter_map(|def_id| self.associated_value(*def_id, item_name))
.count()
@@ -315,16 +327,16 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
"no {} named `{}` found for {} `{}` in the current scope",
item_kind,
item_name,
- actual.prefix_string(self.tcx),
+ rcvr_ty.prefix_string(self.tcx),
ty_str_reported,
);
- if actual.references_error() {
+ if rcvr_ty.references_error() {
err.downgrade_to_delayed_bug();
}
if let Mode::MethodCall = mode && let SelfSource::MethodCall(cal) = source {
self.suggest_await_before_method(
- &mut err, item_name, actual, cal, span,
+ &mut err, item_name, rcvr_ty, cal, span,
);
}
if let Some(span) = tcx.resolutions(()).confused_type_with_std_module.get(&span) {
@@ -335,7 +347,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
Applicability::MachineApplicable,
);
}
- if let ty::RawPtr(_) = &actual.kind() {
+ if let ty::RawPtr(_) = &rcvr_ty.kind() {
err.note(
"try using `<*const T>::as_ref()` to get a reference to the \
type behind the pointer: https://doc.rust-lang.org/std/\
@@ -347,22 +359,19 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
);
}
- let ty_span = match actual.kind() {
- ty::Param(param_type) => {
- let generics = self.tcx.generics_of(self.body_id.owner.to_def_id());
- let type_param = generics.type_param(param_type, self.tcx);
- Some(self.tcx.def_span(type_param.def_id))
- }
+ let ty_span = match rcvr_ty.kind() {
+ ty::Param(param_type) => Some(
+ param_type.span_from_generics(self.tcx, self.body_id.owner.to_def_id()),
+ ),
ty::Adt(def, _) if def.did().is_local() => Some(tcx.def_span(def.did())),
_ => None,
};
-
if let Some(span) = ty_span {
err.span_label(
span,
format!(
"{item_kind} `{item_name}` not found for this {}",
- actual.prefix_string(self.tcx)
+ rcvr_ty.prefix_string(self.tcx)
),
);
}
@@ -374,7 +383,6 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
.hir()
.expect_expr(self.tcx.hir().get_parent_node(rcvr_expr.hir_id));
let probe = self.lookup_probe(
- span,
item_name,
output_ty,
call_expr,
@@ -386,7 +394,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
let mut custom_span_label = false;
- if !static_sources.is_empty() {
+ if !static_candidates.is_empty() {
err.note(
"found the following associated functions; to be used as methods, \
functions must have a `self` parameter",
@@ -394,43 +402,26 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
err.span_label(span, "this is an associated function, not a method");
custom_span_label = true;
}
- if static_sources.len() == 1 {
- let ty_str =
- if let Some(CandidateSource::Impl(impl_did)) = static_sources.get(0) {
- // When the "method" is resolved through dereferencing, we really want the
- // original type that has the associated function for accurate suggestions.
- // (#61411)
- let ty = tcx.at(span).type_of(*impl_did);
- match (&ty.peel_refs().kind(), &actual.peel_refs().kind()) {
- (ty::Adt(def, _), ty::Adt(def_actual, _)) if def == def_actual => {
- // Use `actual` as it will have more `substs` filled in.
- self.ty_to_value_string(actual.peel_refs())
- }
- _ => self.ty_to_value_string(ty.peel_refs()),
- }
- } else {
- self.ty_to_value_string(actual.peel_refs())
- };
- if let SelfSource::MethodCall(expr) = source {
- err.span_suggestion(
- expr.span.to(span),
- "use associated function syntax instead",
- format!("{}::{}", ty_str, item_name),
- Applicability::MachineApplicable,
- );
- } else {
- err.help(&format!("try with `{}::{}`", ty_str, item_name,));
- }
+ if static_candidates.len() == 1 {
+ self.suggest_associated_call_syntax(
+ &mut err,
+ &static_candidates,
+ rcvr_ty,
+ source,
+ item_name,
+ args,
+ sugg_span,
+ );
- report_candidates(span, &mut err, &mut static_sources, sugg_span);
- } else if static_sources.len() > 1 {
- report_candidates(span, &mut err, &mut static_sources, sugg_span);
+ report_candidates(span, &mut err, &mut static_candidates, None);
+ } else if static_candidates.len() > 1 {
+ report_candidates(span, &mut err, &mut static_candidates, Some(sugg_span));
}
let mut bound_spans = vec![];
let mut restrict_type_params = false;
let mut unsatisfied_bounds = false;
- if item_name.name == sym::count && self.is_slice_ty(actual, span) {
+ if item_name.name == sym::count && self.is_slice_ty(rcvr_ty, span) {
let msg = "consider using `len` instead";
if let SelfSource::MethodCall(_expr) = source {
err.span_suggestion_short(
@@ -444,7 +435,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
}
if let Some(iterator_trait) = self.tcx.get_diagnostic_item(sym::Iterator) {
let iterator_trait = self.tcx.def_path_str(iterator_trait);
- err.note(&format!("`count` is defined on `{iterator_trait}`, which `{actual}` does not implement"));
+ err.note(&format!("`count` is defined on `{iterator_trait}`, which `{rcvr_ty}` does not implement"));
}
} else if !unsatisfied_predicates.is_empty() {
let mut type_params = FxHashMap::default();
@@ -454,7 +445,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
let mut unimplemented_traits = FxHashMap::default();
let mut unimplemented_traits_only = true;
for (predicate, _parent_pred, cause) in &unsatisfied_predicates {
- if let (ty::PredicateKind::Trait(p), Some(cause)) =
+ if let (ty::PredicateKind::Clause(ty::Clause::Trait(p)), Some(cause)) =
(predicate.kind().skip_binder(), cause.as_ref())
{
if p.trait_ref.self_ty() != rcvr_ty {
@@ -481,7 +472,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
// because of some non-Clone item being iterated over.
for (predicate, _parent_pred, _cause) in &unsatisfied_predicates {
match predicate.kind().skip_binder() {
- ty::PredicateKind::Trait(p)
+ ty::PredicateKind::Clause(ty::Clause::Trait(p))
if unimplemented_traits.contains_key(&p.trait_ref.def_id) => {}
_ => {
unimplemented_traits_only = false;
@@ -493,27 +484,21 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
let mut collect_type_param_suggestions =
|self_ty: Ty<'tcx>, parent_pred: ty::Predicate<'tcx>, obligation: &str| {
// We don't care about regions here, so it's fine to skip the binder here.
- if let (ty::Param(_), ty::PredicateKind::Trait(p)) =
+ if let (ty::Param(_), ty::PredicateKind::Clause(ty::Clause::Trait(p))) =
(self_ty.kind(), parent_pred.kind().skip_binder())
{
+ let hir = self.tcx.hir();
let node = match p.trait_ref.self_ty().kind() {
ty::Param(_) => {
// Account for `fn` items like in `issue-35677.rs` to
// suggest restricting its type params.
- let did = self.tcx.hir().body_owner_def_id(hir::BodyId {
- hir_id: self.body_id,
- });
- Some(
- self.tcx
- .hir()
- .get(self.tcx.hir().local_def_id_to_hir_id(did)),
- )
+ let parent_body =
+ hir.body_owner(hir::BodyId { hir_id: self.body_id });
+ Some(hir.get(parent_body))
+ }
+ ty::Adt(def, _) => {
+ def.did().as_local().map(|def_id| hir.get_by_def_id(def_id))
}
- ty::Adt(def, _) => def.did().as_local().map(|def_id| {
- self.tcx
- .hir()
- .get(self.tcx.hir().local_def_id_to_hir_id(def_id))
- }),
_ => None,
};
if let Some(hir::Node::Item(hir::Item { kind, .. })) = node {
@@ -562,7 +547,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
let mut format_pred = |pred: ty::Predicate<'tcx>| {
let bound_predicate = pred.kind();
match bound_predicate.skip_binder() {
- ty::PredicateKind::Projection(pred) => {
+ ty::PredicateKind::Clause(ty::Clause::Projection(pred)) => {
let pred = bound_predicate.rebind(pred);
// `<Foo as Iterator>::Item = String`.
let projection_ty = pred.skip_binder().projection_ty;
@@ -585,7 +570,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
bound_span_label(projection_ty.self_ty(), &obligation, &quiet);
Some((obligation, projection_ty.self_ty()))
}
- ty::PredicateKind::Trait(poly_trait_ref) => {
+ ty::PredicateKind::Clause(ty::Clause::Trait(poly_trait_ref)) => {
let p = poly_trait_ref.trait_ref;
let self_ty = p.self_ty();
let path = p.print_only_trait_path();
@@ -605,7 +590,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
.iter()
.filter_map(|(p, parent, c)| c.as_ref().map(|c| (p, parent, c)))
.filter_map(|(p, parent, c)| match c.code() {
- ObligationCauseCode::ImplDerivedObligation(ref data) => {
+ ObligationCauseCode::ImplDerivedObligation(data) => {
Some((&data.derived, p, parent, data.impl_def_id, data))
}
_ => None,
@@ -621,22 +606,6 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
// Unmet obligation comes from a `derive` macro, point at it once to
// avoid multiple span labels pointing at the same place.
Some(Node::Item(hir::Item {
- kind: hir::ItemKind::Trait(..),
- ident,
- ..
- })) if matches!(
- ident.span.ctxt().outer_expn_data().kind,
- ExpnKind::Macro(MacroKind::Derive, _)
- ) =>
- {
- let span = ident.span.ctxt().outer_expn_data().call_site;
- let mut spans: MultiSpan = span.into();
- spans.push_span_label(span, derive_msg);
- let entry = spanned_predicates.entry(spans);
- entry.or_insert_with(|| (path, tr_self_ty, Vec::new())).2.push(p);
- }
-
- Some(Node::Item(hir::Item {
kind: hir::ItemKind::Impl(hir::Impl { of_trait, self_ty, .. }),
..
})) if matches!(
@@ -659,34 +628,6 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
entry.or_insert_with(|| (path, tr_self_ty, Vec::new())).2.push(p);
}
- // Unmet obligation coming from a `trait`.
- Some(Node::Item(hir::Item {
- kind: hir::ItemKind::Trait(..),
- ident,
- span: item_span,
- ..
- })) if !matches!(
- ident.span.ctxt().outer_expn_data().kind,
- ExpnKind::Macro(MacroKind::Derive, _)
- ) =>
- {
- if let Some(pred) = parent_p {
- // Done to add the "doesn't satisfy" `span_label`.
- let _ = format_pred(*pred);
- }
- skip_list.insert(p);
- let mut spans = if cause.span != *item_span {
- let mut spans: MultiSpan = cause.span.into();
- spans.push_span_label(cause.span, unsatisfied_msg);
- spans
- } else {
- ident.span.into()
- };
- spans.push_span_label(ident.span, "in this trait");
- let entry = spanned_predicates.entry(spans);
- entry.or_insert_with(|| (path, tr_self_ty, Vec::new())).2.push(p);
- }
-
// Unmet obligation coming from an `impl`.
Some(Node::Item(hir::Item {
kind:
@@ -695,23 +636,11 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
}),
span: item_span,
..
- })) if !matches!(
- self_ty.span.ctxt().outer_expn_data().kind,
- ExpnKind::Macro(MacroKind::Derive, _)
- ) && !matches!(
- of_trait.as_ref().map(|t| t
- .path
- .span
- .ctxt()
- .outer_expn_data()
- .kind),
- Some(ExpnKind::Macro(MacroKind::Derive, _))
- ) =>
- {
+ })) => {
let sized_pred =
unsatisfied_predicates.iter().any(|(pred, _, _)| {
match pred.kind().skip_binder() {
- ty::PredicateKind::Trait(pred) => {
+ ty::PredicateKind::Clause(ty::Clause::Trait(pred)) => {
Some(pred.def_id())
== self.tcx.lang_items().sized_trait()
&& pred.polarity == ty::ImplPolarity::Positive
@@ -759,7 +688,8 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
let entry = spanned_predicates.entry(spans);
entry.or_insert_with(|| (path, tr_self_ty, Vec::new())).2.push(p);
}
- _ => {}
+ Some(_) => unreachable!(),
+ None => (),
}
}
let mut spanned_predicates: Vec<_> = spanned_predicates.into_iter().collect();
@@ -844,7 +774,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
.map(|(_, path)| path)
.collect::<Vec<_>>()
.join("\n");
- let actual_prefix = actual.prefix_string(self.tcx);
+ let actual_prefix = rcvr_ty.prefix_string(self.tcx);
info!("unimplemented_traits.len() == {}", unimplemented_traits.len());
let (primary_message, label) =
if unimplemented_traits.len() == 1 && unimplemented_traits_only {
@@ -853,7 +783,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
.next()
.map(|(_, (trait_ref, obligation))| {
if trait_ref.self_ty().references_error()
- || actual.references_error()
+ || rcvr_ty.references_error()
{
// Avoid crashing.
return (None, None);
@@ -863,7 +793,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
.on_unimplemented_note(trait_ref, &obligation);
(message, label)
})
- .unwrap_or((None, None))
+ .unwrap()
} else {
(None, None)
};
@@ -889,15 +819,15 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
let label_span_not_found = |err: &mut Diagnostic| {
if unsatisfied_predicates.is_empty() {
err.span_label(span, format!("{item_kind} not found in `{ty_str}`"));
- let is_string_or_ref_str = match actual.kind() {
+ let is_string_or_ref_str = match rcvr_ty.kind() {
ty::Ref(_, ty, _) => {
ty.is_str()
|| matches!(
ty.kind(),
- ty::Adt(adt, _) if self.tcx.is_diagnostic_item(sym::String, adt.did())
+ ty::Adt(adt, _) if Some(adt.did()) == self.tcx.lang_items().string()
)
}
- ty::Adt(adt, _) => self.tcx.is_diagnostic_item(sym::String, adt.did()),
+ ty::Adt(adt, _) => Some(adt.did()) == self.tcx.lang_items().string(),
_ => false,
};
if is_string_or_ref_str && item_name.name == sym::iter {
@@ -925,7 +855,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
// different from the received one
// So we avoid suggestion method with Box<Self>
// for instance
- self.tcx.at(span).type_of(*def_id) != actual
+ self.tcx.at(span).type_of(*def_id) != rcvr_ty
&& self.tcx.at(span).type_of(*def_id) != rcvr_ty
}
(Mode::Path, false, _) => true,
@@ -972,7 +902,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
// If the method name is the name of a field with a function or closure type,
// give a helping note that it has to be called as `(x.f)(...)`.
if let SelfSource::MethodCall(expr) = source {
- if !self.suggest_field_call(span, rcvr_ty, expr, item_name, &mut err)
+ if !self.suggest_calling_field_as_fn(span, rcvr_ty, expr, item_name, &mut err)
&& lev_candidate.is_none()
&& !custom_span_label
{
@@ -982,13 +912,15 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
label_span_not_found(&mut err);
}
- // Don't suggest (for example) `expr.field.method()` if `expr.method()`
- // doesn't exist due to unsatisfied predicates.
+ // Don't suggest (for example) `expr.field.clone()` if `expr.clone()`
+ // can't be called due to `typeof(expr): Clone` not holding.
if unsatisfied_predicates.is_empty() {
- self.check_for_field_method(&mut err, source, span, actual, item_name);
+ self.suggest_calling_method_on_field(
+ &mut err, source, span, rcvr_ty, item_name,
+ );
}
- self.check_for_inner_self(&mut err, source, span, actual, item_name);
+ self.check_for_inner_self(&mut err, source, rcvr_ty, item_name);
bound_spans.sort();
bound_spans.dedup();
@@ -996,7 +928,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
err.span_label(span, &msg);
}
- if actual.is_numeric() && actual.is_fresh() || restrict_type_params {
+ if rcvr_ty.is_numeric() && rcvr_ty.is_fresh() || restrict_type_params {
} else {
self.suggest_traits_to_import(
&mut err,
@@ -1007,15 +939,15 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
source,
out_of_scope_traits,
&unsatisfied_predicates,
- &static_sources,
+ &static_candidates,
unsatisfied_bounds,
);
}
// Don't emit a suggestion if we found an actual method
// that had unsatisfied trait bounds
- if unsatisfied_predicates.is_empty() && actual.is_enum() {
- let adt_def = actual.ty_adt_def().expect("enum is not an ADT");
+ if unsatisfied_predicates.is_empty() && rcvr_ty.is_enum() {
+ let adt_def = rcvr_ty.ty_adt_def().expect("enum is not an ADT");
if let Some(suggestion) = lev_distance::find_best_match_for_name(
&adt_def.variants().iter().map(|s| s.name).collect::<Vec<_>>(),
item_name.name,
@@ -1030,7 +962,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
}
}
- if item_name.name == sym::as_str && actual.peel_refs().is_str() {
+ if item_name.name == sym::as_str && rcvr_ty.peel_refs().is_str() {
let msg = "remove this method call";
let mut fallback_span = true;
if let SelfSource::MethodCall(expr) = source {
@@ -1089,7 +1021,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
);
err.span_label(item_name.span, format!("multiple `{}` found", item_name));
- report_candidates(span, &mut err, &mut sources, sugg_span);
+ report_candidates(span, &mut err, &mut sources, Some(sugg_span));
err.emit();
}
@@ -1146,7 +1078,133 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
None
}
- fn suggest_field_call(
+ /// Suggest calling `Ty::method` if `.method()` isn't found because the method
+ /// doesn't take a `self` receiver.
+ fn suggest_associated_call_syntax(
+ &self,
+ err: &mut Diagnostic,
+ static_candidates: &Vec<CandidateSource>,
+ rcvr_ty: Ty<'tcx>,
+ source: SelfSource<'tcx>,
+ item_name: Ident,
+ args: Option<(&hir::Expr<'tcx>, &[hir::Expr<'tcx>])>,
+ sugg_span: Span,
+ ) {
+ let mut has_unsuggestable_args = false;
+ let ty_str = if let Some(CandidateSource::Impl(impl_did)) = static_candidates.get(0) {
+ // When the "method" is resolved through dereferencing, we really want the
+ // original type that has the associated function for accurate suggestions.
+ // (#61411)
+ let impl_ty = self.tcx.type_of(*impl_did);
+ let target_ty = self
+ .autoderef(sugg_span, rcvr_ty)
+ .find(|(rcvr_ty, _)| {
+ DeepRejectCtxt { treat_obligation_params: TreatParams::AsInfer }
+ .types_may_unify(*rcvr_ty, impl_ty)
+ })
+ .map_or(impl_ty, |(ty, _)| ty)
+ .peel_refs();
+ if let ty::Adt(def, substs) = target_ty.kind() {
+ // If there are any inferred arguments (e.g. `{integer}`), we should replace
+ // them with underscores to allow the compiler to infer them
+ let infer_substs = self.tcx.mk_substs(substs.into_iter().map(|arg| {
+ if !arg.is_suggestable(self.tcx, true) {
+ has_unsuggestable_args = true;
+ match arg.unpack() {
+ GenericArgKind::Lifetime(_) => self
+ .next_region_var(RegionVariableOrigin::MiscVariable(
+ rustc_span::DUMMY_SP,
+ ))
+ .into(),
+ GenericArgKind::Type(_) => self
+ .next_ty_var(TypeVariableOrigin {
+ span: rustc_span::DUMMY_SP,
+ kind: TypeVariableOriginKind::MiscVariable,
+ })
+ .into(),
+ GenericArgKind::Const(arg) => self
+ .next_const_var(
+ arg.ty(),
+ ConstVariableOrigin {
+ span: rustc_span::DUMMY_SP,
+ kind: ConstVariableOriginKind::MiscVariable,
+ },
+ )
+ .into(),
+ }
+ } else {
+ arg
+ }
+ }));
+
+ self.tcx.value_path_str_with_substs(def.did(), infer_substs)
+ } else {
+ self.ty_to_value_string(target_ty)
+ }
+ } else {
+ self.ty_to_value_string(rcvr_ty.peel_refs())
+ };
+ if let SelfSource::MethodCall(_) = source {
+ let first_arg = if let Some(CandidateSource::Impl(impl_did)) = static_candidates.get(0)
+ && let Some(assoc) = self.associated_value(*impl_did, item_name)
+ && assoc.kind == ty::AssocKind::Fn
+ {
+ let sig = self.tcx.fn_sig(assoc.def_id);
+ sig.inputs().skip_binder().get(0).and_then(|first| if first.peel_refs() == rcvr_ty.peel_refs() {
+ None
+ } else {
+ Some(first.ref_mutability().map_or("", |mutbl| mutbl.ref_prefix_str()))
+ })
+ } else {
+ None
+ };
+ let mut applicability = Applicability::MachineApplicable;
+ let args = if let Some((receiver, args)) = args {
+ // The first arg is the same kind as the receiver
+ let explicit_args = if first_arg.is_some() {
+ std::iter::once(receiver).chain(args.iter()).collect::<Vec<_>>()
+ } else {
+ // There is no `Self` kind to infer the arguments from
+ if has_unsuggestable_args {
+ applicability = Applicability::HasPlaceholders;
+ }
+ args.iter().collect()
+ };
+ format!(
+ "({}{})",
+ first_arg.unwrap_or(""),
+ explicit_args
+ .iter()
+ .map(|arg| self
+ .tcx
+ .sess
+ .source_map()
+ .span_to_snippet(arg.span)
+ .unwrap_or_else(|_| {
+ applicability = Applicability::HasPlaceholders;
+ "_".to_owned()
+ }))
+ .collect::<Vec<_>>()
+ .join(", "),
+ )
+ } else {
+ applicability = Applicability::HasPlaceholders;
+ "(...)".to_owned()
+ };
+ err.span_suggestion(
+ sugg_span,
+ "use associated function syntax instead",
+ format!("{}::{}{}", ty_str, item_name, args),
+ applicability,
+ );
+ } else {
+ err.help(&format!("try with `{}::{}`", ty_str, item_name,));
+ }
+ }
+
+ /// Suggest calling a field with a type that implements the `Fn*` traits instead of a method with
+ /// the same name as the field i.e. `(a.my_fn_ptr)(10)` instead of `a.my_fn_ptr(10)`.
+ fn suggest_calling_field_as_fn(
&self,
span: Span,
rcvr_ty: Ty<'tcx>,
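
As an illustrative sketch only (the type and method names are hypothetical), this is the kind of call the new `suggest_associated_call_syntax` helper targets:

    struct Foo;

    impl Foo {
        // Associated function: no `self` receiver.
        fn create() -> Foo { Foo }
    }

    fn main() {
        let foo = Foo;
        // error[E0599]: no method named `create` found for struct `Foo`
        // The suggestion rewrites the call with associated function
        // syntax, roughly: `Foo::create()`.
        let _ = foo.create();
    }
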
@@ -1261,7 +1319,6 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
self.tcx.bound_type_of(range_def_id).subst(self.tcx, &[actual.into()]);
let pick = self.probe_for_name(
- span,
Mode::MethodCall,
item_name,
IsSuggestion(true),
@@ -1408,7 +1465,66 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
false
}
- fn check_for_field_method(
+ /// For code `rect::area(...)`,
+ /// if `rect` is a local variable and `area` is a valid assoc method for it,
+ /// we try to suggest `rect.area()`
+ pub(crate) fn suggest_assoc_method_call(&self, segs: &[PathSegment<'_>]) {
+ debug!("suggest_assoc_method_call segs: {:?}", segs);
+ let [seg1, seg2] = segs else { return; };
+ let Some(mut diag) =
+ self.tcx.sess.diagnostic().steal_diagnostic(seg1.ident.span, StashKey::CallAssocMethod)
+ else { return };
+
+ let map = self.infcx.tcx.hir();
+ let body = map.body(rustc_hir::BodyId { hir_id: self.body_id });
+ struct LetVisitor<'a> {
+ result: Option<&'a hir::Expr<'a>>,
+ ident_name: Symbol,
+ }
+
+ // FIXME: This really should be taking scoping, etc into account.
+ impl<'v> Visitor<'v> for LetVisitor<'v> {
+ fn visit_stmt(&mut self, ex: &'v hir::Stmt<'v>) {
+ if let hir::StmtKind::Local(hir::Local { pat, init, .. }) = &ex.kind
+ && let Binding(_, _, ident, ..) = pat.kind
+ && ident.name == self.ident_name
+ {
+ self.result = *init;
+ } else {
+ hir::intravisit::walk_stmt(self, ex);
+ }
+ }
+ }
+
+ let mut visitor = LetVisitor { result: None, ident_name: seg1.ident.name };
+ visitor.visit_body(&body);
+
+ let parent = self.tcx.hir().get_parent_node(seg1.hir_id);
+ if let Some(Node::Expr(call_expr)) = self.tcx.hir().find(parent)
+ && let Some(expr) = visitor.result
+ && let Some(self_ty) = self.node_ty_opt(expr.hir_id)
+ {
+ let probe = self.lookup_probe(
+ seg2.ident,
+ self_ty,
+ call_expr,
+ ProbeScope::TraitsInScope,
+ );
+ if probe.is_ok() {
+ let sm = self.infcx.tcx.sess.source_map();
+ diag.span_suggestion_verbose(
+ sm.span_extend_while(seg1.ident.span.shrink_to_hi(), |c| c == ':').unwrap(),
+ "you may have meant to call an instance method",
+ ".".to_string(),
+ Applicability::MaybeIncorrect,
+ );
+ }
+ }
+ diag.emit();
+ }
+
+ /// Suggest calling a method on a field i.e. `a.field.bar()` instead of `a.bar()`
+ fn suggest_calling_method_on_field(
&self,
err: &mut Diagnostic,
source: SelfSource<'tcx>,
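
A minimal sketch of the scenario handled by `suggest_assoc_method_call` above (hypothetical names):

    struct Rect { w: f64, h: f64 }

    impl Rect {
        fn area(&self) -> f64 { self.w * self.h }
    }

    fn main() {
        let rect = Rect { w: 2.0, h: 3.0 };
        // `rect` is a local variable, not a module or type, so `rect::area`
        // fails to resolve; the stashed diagnostic gains a suggestion to
        // replace `::` with `.` and call the instance method: `rect.area()`.
        let _ = rect::area();
    }
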
@@ -1439,7 +1555,6 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
span,
&|_, field_ty| {
self.lookup_probe(
- span,
item_name,
field_ty,
call_expr,
@@ -1487,7 +1602,6 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
&self,
err: &mut Diagnostic,
source: SelfSource<'tcx>,
- span: Span,
actual: Ty<'tcx>,
item_name: Ident,
) {
@@ -1510,15 +1624,9 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
return None;
}
- self.lookup_probe(
- span,
- item_name,
- field_ty,
- call_expr,
- ProbeScope::TraitsInScope,
- )
- .ok()
- .map(|pick| (variant, field, pick))
+ self.lookup_probe(item_name, field_ty, call_expr, ProbeScope::TraitsInScope)
+ .ok()
+ .map(|pick| (variant, field, pick))
})
.collect();
@@ -1583,12 +1691,11 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
let [first] = ***substs else { return; };
let ty::GenericArgKind::Type(ty) = first.unpack() else { return; };
let Ok(pick) = self.lookup_probe(
- span,
- item_name,
- ty,
- call_expr,
- ProbeScope::TraitsInScope,
- ) else { return; };
+ item_name,
+ ty,
+ call_expr,
+ ProbeScope::TraitsInScope,
+ ) else { return; };
let name = self.ty_to_value_string(actual);
let inner_id = kind.did();
@@ -1668,7 +1775,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
) {
let all_local_types_needing_impls =
errors.iter().all(|e| match e.obligation.predicate.kind().skip_binder() {
- ty::PredicateKind::Trait(pred) => match pred.self_ty().kind() {
+ ty::PredicateKind::Clause(ty::Clause::Trait(pred)) => match pred.self_ty().kind() {
ty::Adt(def, _) => def.did().is_local(),
_ => false,
},
@@ -1677,7 +1784,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
let mut preds: Vec<_> = errors
.iter()
.filter_map(|e| match e.obligation.predicate.kind().skip_binder() {
- ty::PredicateKind::Trait(pred) => Some(pred),
+ ty::PredicateKind::Clause(ty::Clause::Trait(pred)) => Some(pred),
_ => None,
})
.collect();
@@ -1748,7 +1855,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
let mut derives = Vec::<(String, Span, Symbol)>::new();
let mut traits = Vec::<Span>::new();
for (pred, _, _) in unsatisfied_predicates {
- let ty::PredicateKind::Trait(trait_pred) = pred.kind().skip_binder() else { continue };
+ let ty::PredicateKind::Clause(ty::Clause::Trait(trait_pred)) = pred.kind().skip_binder() else { continue };
let adt = match trait_pred.self_ty().ty_adt_def() {
Some(adt) if adt.did().is_local() => adt,
_ => continue,
@@ -1838,7 +1945,6 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
let SelfSource::QPath(ty) = self_source else { return; };
for (deref_ty, _) in self.autoderef(rustc_span::DUMMY_SP, rcvr_ty).skip(1) {
if let Ok(pick) = self.probe_for_name(
- ty.span,
Mode::Path,
item_name,
IsSuggestion(true),
@@ -1865,6 +1971,12 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
| ty::Str
| ty::Projection(_)
| ty::Param(_) => format!("{deref_ty}"),
+ // We need to handle cases like `<&[_]>::len`, `<(&[u32])>::len`
+ // and `Vec::function()`:
+ // `<&[_]>::len` or `<&[u32]>::len` doesn't need an extra pair of `<>`,
+ // but for an ADT type like `Vec` we would suggest `<[_]>::function()`.
+ _ if self.tcx.sess.source_map().span_wrapped_by_angle_or_parentheses(ty.span) => format!("{deref_ty}"),
_ => format!("<{deref_ty}>"),
};
err.span_suggestion_verbose(
@@ -1887,7 +1999,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
/// Print out the type for use in value namespace.
fn ty_to_value_string(&self, ty: Ty<'tcx>) -> String {
match ty.kind() {
- ty::Adt(def, substs) => format!("{}", ty::Instance::new(def.did(), substs)),
+ ty::Adt(def, substs) => self.tcx.def_path_str_with_substs(def.did(), substs),
_ => self.ty_to_string(ty),
}
}
@@ -1901,7 +2013,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
span: Span,
) {
let output_ty = match self.get_impl_future_output_ty(ty) {
- Some(output_ty) => self.resolve_vars_if_possible(output_ty).skip_binder(),
+ Some(output_ty) => self.resolve_vars_if_possible(output_ty),
_ => return,
};
let method_exists = self.method_exists(item_name, output_ty, call.hir_id, true);
@@ -2021,7 +2133,10 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
) {
let mut alt_rcvr_sugg = false;
if let (SelfSource::MethodCall(rcvr), false) = (source, unsatisfied_bounds) {
- debug!(?span, ?item_name, ?rcvr_ty, ?rcvr);
+ debug!(
+ "suggest_traits_to_import: span={:?}, item_name={:?}, rcvr_ty={:?}, rcvr={:?}",
+ span, item_name, rcvr_ty, rcvr
+ );
let skippable = [
self.tcx.lang_items().clone_trait(),
self.tcx.lang_items().deref_trait(),
@@ -2037,7 +2152,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
(self.tcx.mk_mut_ref(self.tcx.lifetimes.re_erased, rcvr_ty), "&mut "),
(self.tcx.mk_imm_ref(self.tcx.lifetimes.re_erased, rcvr_ty), "&"),
] {
- match self.lookup_probe(span, item_name, *rcvr_ty, rcvr, ProbeScope::AllTraits) {
+ match self.lookup_probe(item_name, *rcvr_ty, rcvr, ProbeScope::AllTraits) {
Ok(pick) => {
// If the method is defined for the receiver we have, it likely wasn't `use`d.
// We point at the method, but we just skip the rest of the check for arbitrary
@@ -2060,7 +2175,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
// suggestions are generally misleading (see #94218).
break;
}
- _ => {}
+ Err(_) => (),
}
for (rcvr_ty, pre) in &[
@@ -2071,7 +2186,6 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
] {
if let Some(new_rcvr_t) = *rcvr_ty
&& let Ok(pick) = self.lookup_probe(
- span,
item_name,
new_rcvr_t,
rcvr,
@@ -2151,8 +2265,10 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
match p.kind().skip_binder() {
// Hide traits if they are present in predicates as they can be fixed without
// having to implement them.
- ty::PredicateKind::Trait(t) => t.def_id() == info.def_id,
- ty::PredicateKind::Projection(p) => {
+ ty::PredicateKind::Clause(ty::Clause::Trait(t)) => {
+ t.def_id() == info.def_id
+ }
+ ty::PredicateKind::Clause(ty::Clause::Projection(p)) => {
p.projection_ty.item_def_id == info.def_id
}
_ => false,
@@ -2452,7 +2568,6 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
span: method_name.span,
};
let probe = self.lookup_probe(
- expr.span,
new_name,
self_ty,
self_expr,
@@ -2565,11 +2680,7 @@ fn print_disambiguation_help<'tcx>(
let (span, sugg) = if let (ty::AssocKind::Fn, Some((receiver, args))) = (kind, args) {
let args = format!(
"({}{})",
- if rcvr_ty.is_region_ptr() {
- if rcvr_ty.is_mutable_ptr() { "&mut " } else { "&" }
- } else {
- ""
- },
+ rcvr_ty.ref_mutability().map_or("", |mutbl| mutbl.ref_prefix_str()),
std::iter::once(receiver)
.chain(args.iter())
.map(|arg| source_map.span_to_snippet(arg.span).unwrap_or_else(|_| {
diff --git a/compiler/rustc_hir_typeck/src/op.rs b/compiler/rustc_hir_typeck/src/op.rs
index 895739976..b12d84af4 100644
--- a/compiler/rustc_hir_typeck/src/op.rs
+++ b/compiler/rustc_hir_typeck/src/op.rs
@@ -19,7 +19,7 @@ use rustc_span::symbol::{sym, Ident};
use rustc_span::Span;
use rustc_trait_selection::infer::InferCtxtExt;
use rustc_trait_selection::traits::error_reporting::suggestions::TypeErrCtxtExt as _;
-use rustc_trait_selection::traits::{FulfillmentError, TraitEngine, TraitEngineExt};
+use rustc_trait_selection::traits::FulfillmentError;
use rustc_type_ir::sty::TyKind::*;
impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
@@ -263,14 +263,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
let by_ref_binop = !op.node.is_by_value();
if is_assign == IsAssign::Yes || by_ref_binop {
if let ty::Ref(region, _, mutbl) = method.sig.inputs()[0].kind() {
- let mutbl = match mutbl {
- hir::Mutability::Not => AutoBorrowMutability::Not,
- hir::Mutability::Mut => AutoBorrowMutability::Mut {
- // Allow two-phase borrows for binops in initial deployment
- // since they desugar to methods
- allow_two_phase_borrow: AllowTwoPhase::Yes,
- },
- };
+ let mutbl = AutoBorrowMutability::new(*mutbl, AllowTwoPhase::Yes);
let autoref = Adjustment {
kind: Adjust::Borrow(AutoBorrow::Ref(*region, mutbl)),
target: method.sig.inputs()[0],
@@ -280,14 +273,10 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
}
if by_ref_binop {
if let ty::Ref(region, _, mutbl) = method.sig.inputs()[1].kind() {
- let mutbl = match mutbl {
- hir::Mutability::Not => AutoBorrowMutability::Not,
- hir::Mutability::Mut => AutoBorrowMutability::Mut {
- // Allow two-phase borrows for binops in initial deployment
- // since they desugar to methods
- allow_two_phase_borrow: AllowTwoPhase::Yes,
- },
- };
+ // Allow two-phase borrows for binops in initial deployment
+ // since they desugar to methods
+ let mutbl = AutoBorrowMutability::new(*mutbl, AllowTwoPhase::Yes);
+
let autoref = Adjustment {
kind: Adjust::Borrow(AutoBorrow::Ref(*region, mutbl)),
target: method.sig.inputs()[1],
@@ -529,8 +518,8 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
}
}
}
- err.emit();
- self.tcx.ty_error()
+ let reported = err.emit();
+ self.tcx.ty_error_with_guaranteed(reported)
}
};
@@ -556,9 +545,9 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
let rm_borrow_msg = "remove the borrow to obtain an owned `String`";
let to_owned_msg = "create an owned `String` from a string reference";
+ let string_type = self.tcx.lang_items().string();
let is_std_string = |ty: Ty<'tcx>| {
- ty.ty_adt_def()
- .map_or(false, |ty_def| self.tcx.is_diagnostic_item(sym::String, ty_def.did()))
+ ty.ty_adt_def().map_or(false, |ty_def| Some(ty_def.did()) == string_type)
};
match (lhs_ty.kind(), rhs_ty.kind()) {
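
For context, a small sketch of the string-addition diagnostic these messages feed (exact wording may differ):

    fn main() {
        // error[E0369]: cannot add `&str` to `&str`
        // One of the suggestions offered is `to_owned_msg` above,
        // roughly: `"Hello, ".to_owned() + "world"`.
        let _s = "Hello, " + "world";
    }
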
@@ -772,7 +761,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
match (method, trait_did) {
(Some(ok), _) => {
let method = self.register_infer_ok_obligations(ok);
- self.select_obligations_where_possible(false, |_| {});
+ self.select_obligations_where_possible(|_| {});
Ok(method)
}
(None, None) => Err(vec![]),
@@ -785,9 +774,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
other_ty_expr,
expected,
);
- let mut fulfill = <dyn TraitEngine<'_>>::new(self.tcx);
- fulfill.register_predicate_obligation(self, obligation);
- Err(fulfill.select_where_possible(&self.infcx))
+ Err(rustc_trait_selection::traits::fully_solve_obligation(self, obligation))
}
}
}
diff --git a/compiler/rustc_hir_typeck/src/pat.rs b/compiler/rustc_hir_typeck/src/pat.rs
index ea90da4a6..decd317d9 100644
--- a/compiler/rustc_hir_typeck/src/pat.rs
+++ b/compiler/rustc_hir_typeck/src/pat.rs
@@ -19,7 +19,6 @@ use rustc_span::lev_distance::find_best_match_for_name;
use rustc_span::source_map::{Span, Spanned};
use rustc_span::symbol::{kw, sym, Ident};
use rustc_span::{BytePos, DUMMY_SP};
-use rustc_trait_selection::autoderef::Autoderef;
use rustc_trait_selection::traits::{ObligationCause, Pattern};
use ty::VariantDef;
@@ -402,6 +401,16 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
}
}
+ if self.tcx.features().string_deref_patterns && let hir::ExprKind::Lit(Spanned { node: ast::LitKind::Str(..), .. }) = lt.kind {
+ let tcx = self.tcx;
+ let expected = self.resolve_vars_if_possible(expected);
+ pat_ty = match expected.kind() {
+ ty::Adt(def, _) if Some(def.did()) == tcx.lang_items().string() => expected,
+ ty::Str => tcx.mk_static_str(),
+ _ => pat_ty,
+ };
+ }
+
// Somewhat surprising: in this case, the subtyping relation goes the
// opposite way as the other cases. Actually what we really want is not
// a subtyping relation at all but rather that there exists a LUB
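
Under the unstable `string_deref_patterns` feature checked above, a string literal pattern can be checked against a `String` scrutinee; a minimal sketch:

    #![feature(string_deref_patterns)]

    fn classify(s: String) -> &'static str {
        match s {
            // With the feature gate enabled, the literal pattern's type is
            // taken to be the expected `String` rather than `&'static str`.
            "hello" => "greeting",
            _ => "other",
        }
    }

    fn main() {
        assert_eq!(classify(String::from("hello")), "greeting");
    }
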
@@ -693,7 +702,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
};
let mut_var_suggestion = 'block: {
- if !matches!(mutbl, ast::Mutability::Mut) {
+ if mutbl.is_not() {
break 'block None;
}
@@ -740,7 +749,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
format!("to take parameter `{binding}` by reference, move `&{mutability}` to the type"),
vec![
(pat.span.until(inner.span), "".to_owned()),
- (ty_span.shrink_to_lo(), format!("&{}", mutbl.prefix_str())),
+ (ty_span.shrink_to_lo(), mutbl.ref_prefix_str().to_owned()),
],
Applicability::MachineApplicable
);
@@ -840,12 +849,14 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
let (res, opt_ty, segments) = path_resolution;
match res {
Res::Err => {
- self.set_tainted_by_errors();
- return tcx.ty_error();
+ let e = tcx.sess.delay_span_bug(qpath.span(), "`Res::Err` but no error emitted");
+ self.set_tainted_by_errors(e);
+ return tcx.ty_error_with_guaranteed(e);
}
- Res::Def(DefKind::AssocFn | DefKind::Ctor(_, CtorKind::Fictive | CtorKind::Fn), _) => {
- report_unexpected_variant_res(tcx, res, qpath, pat.span);
- return tcx.ty_error();
+ Res::Def(DefKind::AssocFn | DefKind::Ctor(_, CtorKind::Fn) | DefKind::Variant, _) => {
+ let expected = "unit struct, unit variant or constant";
+ let e = report_unexpected_variant_res(tcx, res, qpath, pat.span, "E0533", expected);
+ return tcx.ty_error_with_guaranteed(e);
}
Res::SelfCtor(..)
| Res::Def(
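
A small sketch of a pattern that takes the E0533 path above (hypothetical enum):

    enum Shape {
        Circle(f64),
        Square(f64),
    }

    fn is_circle(s: Shape) -> bool {
        match s {
            // error[E0533]: expected unit struct, unit variant or constant,
            // found tuple variant `Shape::Circle`
            Shape::Circle => true,
            _ => false,
        }
    }
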
@@ -986,65 +997,46 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
ti: TopInfo<'tcx>,
) -> Ty<'tcx> {
let tcx = self.tcx;
- let on_error = || {
+ let on_error = |e| {
for pat in subpats {
- self.check_pat(pat, tcx.ty_error(), def_bm, ti);
+ self.check_pat(pat, tcx.ty_error_with_guaranteed(e), def_bm, ti);
}
};
let report_unexpected_res = |res: Res| {
- let sm = tcx.sess.source_map();
- let path_str = sm
- .span_to_snippet(sm.span_until_char(pat.span, '('))
- .map_or_else(|_| String::new(), |s| format!(" `{}`", s.trim_end()));
- let msg = format!(
- "expected tuple struct or tuple variant, found {}{}",
- res.descr(),
- path_str
- );
-
- let mut err = struct_span_err!(tcx.sess, pat.span, E0164, "{msg}");
- match res {
- Res::Def(DefKind::Fn | DefKind::AssocFn, _) => {
- err.span_label(pat.span, "`fn` calls are not allowed in patterns");
- err.help(
- "for more information, visit \
- https://doc.rust-lang.org/book/ch18-00-patterns.html",
- );
- }
- _ => {
- err.span_label(pat.span, "not a tuple variant or struct");
- }
- }
- err.emit();
- on_error();
+ let expected = "tuple struct or tuple variant";
+ let e = report_unexpected_variant_res(tcx, res, qpath, pat.span, "E0164", expected);
+ on_error(e);
+ e
};
// Resolve the path and check the definition for errors.
let (res, opt_ty, segments) =
self.resolve_ty_and_res_fully_qualified_call(qpath, pat.hir_id, pat.span);
if res == Res::Err {
- self.set_tainted_by_errors();
- on_error();
- return self.tcx.ty_error();
+ let e = tcx.sess.delay_span_bug(pat.span, "`Res::Err` but no error emitted");
+ self.set_tainted_by_errors(e);
+ on_error(e);
+ return tcx.ty_error_with_guaranteed(e);
}
// Type-check the path.
let (pat_ty, res) =
self.instantiate_value_path(segments, opt_ty, res, pat.span, pat.hir_id);
if !pat_ty.is_fn() {
- report_unexpected_res(res);
- return tcx.ty_error();
+ let e = report_unexpected_res(res);
+ return tcx.ty_error_with_guaranteed(e);
}
let variant = match res {
Res::Err => {
- self.set_tainted_by_errors();
- on_error();
- return tcx.ty_error();
+ let e = tcx.sess.delay_span_bug(pat.span, "`Res::Err` but no error emitted");
+ self.set_tainted_by_errors(e);
+ on_error(e);
+ return tcx.ty_error_with_guaranteed(e);
}
Res::Def(DefKind::AssocConst | DefKind::AssocFn, _) => {
- report_unexpected_res(res);
- return tcx.ty_error();
+ let e = report_unexpected_res(res);
+ return tcx.ty_error_with_guaranteed(e);
}
Res::Def(DefKind::Ctor(_, CtorKind::Fn), _) => tcx.expect_variant_res(res),
_ => bug!("unexpected pattern resolution: {:?}", res),
@@ -1083,9 +1075,9 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
}
} else {
// Pattern has wrong number of fields.
- self.e0023(pat.span, res, qpath, subpats, &variant.fields, expected, had_err);
- on_error();
- return tcx.ty_error();
+ let e = self.e0023(pat.span, res, qpath, subpats, &variant.fields, expected, had_err);
+ on_error(e);
+ return tcx.ty_error_with_guaranteed(e);
}
pat_ty
}
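
For reference, the wrong-arity case that `e0023` reports (hypothetical enum):

    enum Event {
        Click(i32, i32),
    }

    fn handle(e: Event) {
        match e {
            // error[E0023]: this pattern has 1 field, but the corresponding
            // tuple variant has 2 fields
            Event::Click(x) => println!("{x}"),
        }
    }
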
@@ -1099,7 +1091,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
fields: &'tcx [ty::FieldDef],
expected: Ty<'tcx>,
had_err: bool,
- ) {
+ ) -> ErrorGuaranteed {
let subpats_ending = pluralize!(subpats.len());
let fields_ending = pluralize!(fields.len());
@@ -1246,7 +1238,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
}
}
- err.emit();
+ err.emit()
}
fn check_pat_tuple(
@@ -1278,12 +1270,12 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
let element_tys = tcx.mk_type_list(element_tys_iter);
let pat_ty = tcx.mk_ty(ty::Tuple(element_tys));
if let Some(mut err) = self.demand_eqtype_pat_diag(span, expected, pat_ty, ti) {
- err.emit();
+ let reported = err.emit();
// Walk subpatterns with an expected type of `err` in this case to silence
// further errors being emitted when using the bindings. #50333
- let element_tys_iter = (0..max_len).map(|_| tcx.ty_error());
+ let element_tys_iter = (0..max_len).map(|_| tcx.ty_error_with_guaranteed(reported));
for (_, elem) in elements.iter().enumerate_and_adjust(max_len, ddpos) {
- self.check_pat(elem, tcx.ty_error(), def_bm, ti);
+ self.check_pat(elem, tcx.ty_error_with_guaranteed(reported), def_bm, ti);
}
tcx.mk_tup(element_tys_iter)
} else {
@@ -1468,8 +1460,8 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
// if this is a tuple struct, then all field names will be numbers
// so if any fields in a struct pattern use shorthand syntax, they will
// be invalid identifiers (for example, Foo { 0, 1 }).
- if let (CtorKind::Fn, PatKind::Struct(qpath, field_patterns, ..)) =
- (variant.ctor_kind, &pat.kind)
+ if let (Some(CtorKind::Fn), PatKind::Struct(qpath, field_patterns, ..)) =
+ (variant.ctor_kind(), &pat.kind)
{
let has_shorthand_field_name = field_patterns.iter().any(|field| field.is_shorthand);
if has_shorthand_field_name {
@@ -1646,7 +1638,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
fields: &'tcx [hir::PatField<'tcx>],
variant: &ty::VariantDef,
) -> Option<DiagnosticBuilder<'tcx, ErrorGuaranteed>> {
- if let (CtorKind::Fn, PatKind::Struct(qpath, ..)) = (variant.ctor_kind, &pat.kind) {
+ if let (Some(CtorKind::Fn), PatKind::Struct(qpath, ..)) = (variant.ctor_kind(), &pat.kind) {
let path = rustc_hir_pretty::to_string(rustc_hir_pretty::NO_ANN, |s| {
s.print_qpath(qpath, false)
});
@@ -2132,7 +2124,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
&& let ty::Array(..) | ty::Slice(..) = ty.kind()
{
err.help("the semantics of slice patterns changed recently; see issue #62254");
- } else if Autoderef::new(&self.infcx, self.param_env, self.body_id, span, expected_ty, span)
+ } else if self.autoderef(span, expected_ty)
.any(|(ty, _)| matches!(ty.kind(), ty::Slice(..) | ty::Array(..)))
&& let (Some(span), true) = (ti.span, ti.origin_expr)
&& let Ok(snippet) = self.tcx.sess.source_map().span_to_snippet(span)
diff --git a/compiler/rustc_hir_typeck/src/place_op.rs b/compiler/rustc_hir_typeck/src/place_op.rs
index ba8cf6926..952ea1488 100644
--- a/compiler/rustc_hir_typeck/src/place_op.rs
+++ b/compiler/rustc_hir_typeck/src/place_op.rs
@@ -90,8 +90,11 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
Applicability::MachineApplicable,
);
}
- err.emit();
- Some((self.tcx.ty_error(), self.tcx.ty_error()))
+ let reported = err.emit();
+ Some((
+ self.tcx.ty_error_with_guaranteed(reported),
+ self.tcx.ty_error_with_guaranteed(reported),
+ ))
}
/// To type-check `base_expr[index_expr]`, we progressively autoderef
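
A brief sketch of the indexing autoderef described in that comment, from the user's side (hypothetical type):

    use std::ops::Index;

    struct Grid(Vec<i32>);

    impl Index<usize> for Grid {
        type Output = i32;
        fn index(&self, i: usize) -> &i32 { &self.0[i] }
    }

    fn main() {
        let g = Grid(vec![1, 2, 3]);
        // `g[1]` is type-checked by probing `Index`/`IndexMut` impls,
        // autoderef-ing the base type until one applies; here it resolves
        // to `<Grid as Index<usize>>::index(&g, 1)`.
        assert_eq!(g[1], 2);
    }
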
diff --git a/compiler/rustc_hir_typeck/src/upvar.rs b/compiler/rustc_hir_typeck/src/upvar.rs
index 4dea40829..0f4697201 100644
--- a/compiler/rustc_hir_typeck/src/upvar.rs
+++ b/compiler/rustc_hir_typeck/src/upvar.rs
@@ -970,12 +970,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
check_trait
.map(|check_trait| {
self.infcx
- .type_implements_trait(
- check_trait,
- ty,
- self.tcx.mk_substs_trait(ty, &[]),
- self.param_env,
- )
+ .type_implements_trait(check_trait, [ty], self.param_env)
.must_apply_modulo_regions()
})
.unwrap_or(false),
@@ -999,12 +994,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
check_trait
.map(|check_trait| {
self.infcx
- .type_implements_trait(
- check_trait,
- ty,
- self.tcx.mk_substs_trait(ty, &[]),
- self.param_env,
- )
+ .type_implements_trait(check_trait, [ty], self.param_env)
.must_apply_modulo_regions()
})
.unwrap_or(false),
@@ -1347,14 +1337,8 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
let is_drop_defined_for_ty = |ty: Ty<'tcx>| {
let drop_trait = self.tcx.require_lang_item(hir::LangItem::Drop, Some(closure_span));
- let ty_params = self.tcx.mk_substs_trait(base_path_ty, &[]);
self.infcx
- .type_implements_trait(
- drop_trait,
- ty,
- ty_params,
- self.tcx.param_env(closure_def_id),
- )
+ .type_implements_trait(drop_trait, [ty], self.tcx.param_env(closure_def_id))
.must_apply_modulo_regions()
};
@@ -2184,7 +2168,7 @@ fn determine_place_ancestry_relation<'tcx>(
place_a: &Place<'tcx>,
place_b: &Place<'tcx>,
) -> PlaceAncestryRelation {
- // If Place A and Place B, don't start off from the same root variable, they are divergent.
+ // If Place A and Place B don't start off from the same root variable, they are divergent.
if place_a.base != place_b.base {
return PlaceAncestryRelation::Divergent;
}
diff --git a/compiler/rustc_hir_typeck/src/writeback.rs b/compiler/rustc_hir_typeck/src/writeback.rs
index 1e26daa9c..52ffd286e 100644
--- a/compiler/rustc_hir_typeck/src/writeback.rs
+++ b/compiler/rustc_hir_typeck/src/writeback.rs
@@ -83,10 +83,8 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
wbcx.typeck_results.treat_byte_string_as_slice =
mem::take(&mut self.typeck_results.borrow_mut().treat_byte_string_as_slice);
- if self.is_tainted_by_errors() {
- // FIXME(eddyb) keep track of `ErrorGuaranteed` from where the error was emitted.
- wbcx.typeck_results.tainted_by_errors =
- Some(ErrorGuaranteed::unchecked_claim_error_was_emitted());
+ if let Some(e) = self.tainted_by_errors() {
+ wbcx.typeck_results.tainted_by_errors = Some(e);
}
debug!("writeback: typeck results for {:?} are {:#?}", item_def_id, wbcx.typeck_results);
@@ -363,9 +361,12 @@ impl<'cx, 'tcx> Visitor<'tcx> for WritebackCx<'cx, 'tcx> {
fn visit_ty(&mut self, hir_ty: &'tcx hir::Ty<'tcx>) {
intravisit::walk_ty(self, hir_ty);
- let ty = self.fcx.node_ty(hir_ty.hir_id);
- let ty = self.resolve(ty, &hir_ty.span);
- self.write_ty_to_typeck_results(hir_ty.hir_id, ty);
+ // If there are type checking errors, the type privacy pass will stop,
+ // so we may not get the type from `hir_id`; see #104513
+ if let Some(ty) = self.fcx.node_ty_opt(hir_ty.hir_id) {
+ let ty = self.resolve(ty, &hir_ty.span);
+ self.write_ty_to_typeck_results(hir_ty.hir_id, ty);
+ }
}
fn visit_infer(&mut self, inf: &'tcx hir::InferArg) {
@@ -514,7 +515,7 @@ impl<'cx, 'tcx> WritebackCx<'cx, 'tcx> {
for (&def_id, c_sig) in fcx_typeck_results.user_provided_sigs.iter() {
if cfg!(debug_assertions) && c_sig.needs_infer() {
span_bug!(
- self.fcx.tcx.hir().span_if_local(def_id).unwrap(),
+ self.fcx.tcx.def_span(def_id),
"writeback: `{:?}` has inference variables",
c_sig
);
@@ -564,7 +565,6 @@ impl<'cx, 'tcx> WritebackCx<'cx, 'tcx> {
opaque_type_key,
self.fcx.infcx.tcx,
true,
- decl.origin,
);
self.typeck_results.concrete_opaque_types.insert(opaque_type_key.def_id, hidden_type);
@@ -674,10 +674,8 @@ impl<'cx, 'tcx> WritebackCx<'cx, 'tcx> {
// We may have introduced e.g. `ty::Error`, if inference failed, make sure
// to mark the `TypeckResults` as tainted in that case, so that downstream
// users of the typeck results don't produce extra errors, or worse, ICEs.
- if resolver.replaced_with_error {
- // FIXME(eddyb) keep track of `ErrorGuaranteed` from where the error was emitted.
- self.typeck_results.tainted_by_errors =
- Some(ErrorGuaranteed::unchecked_claim_error_was_emitted());
+ if let Some(e) = resolver.replaced_with_error {
+ self.typeck_results.tainted_by_errors = Some(e);
}
x
@@ -708,8 +706,8 @@ struct Resolver<'cx, 'tcx> {
span: &'cx dyn Locatable,
body: &'tcx hir::Body<'tcx>,
- /// Set to `true` if any `Ty` or `ty::Const` had to be replaced with an `Error`.
- replaced_with_error: bool,
+ /// Set to `Some` if any `Ty` or `ty::Const` had to be replaced with an `Error`.
+ replaced_with_error: Option<ErrorGuaranteed>,
}
impl<'cx, 'tcx> Resolver<'cx, 'tcx> {
@@ -718,12 +716,14 @@ impl<'cx, 'tcx> Resolver<'cx, 'tcx> {
span: &'cx dyn Locatable,
body: &'tcx hir::Body<'tcx>,
) -> Resolver<'cx, 'tcx> {
- Resolver { tcx: fcx.tcx, infcx: fcx, span, body, replaced_with_error: false }
+ Resolver { tcx: fcx.tcx, infcx: fcx, span, body, replaced_with_error: None }
}
- fn report_error(&self, p: impl Into<ty::GenericArg<'tcx>>) {
- if !self.tcx.sess.has_errors().is_some() {
- self.infcx
+ fn report_error(&self, p: impl Into<ty::GenericArg<'tcx>>) -> ErrorGuaranteed {
+ match self.tcx.sess.has_errors() {
+ Some(e) => e,
+ None => self
+ .infcx
.err_ctxt()
.emit_inference_failure_err(
Some(self.body.id()),
@@ -732,7 +732,7 @@ impl<'cx, 'tcx> Resolver<'cx, 'tcx> {
E0282,
false,
)
- .emit();
+ .emit(),
}
}
}
@@ -773,9 +773,9 @@ impl<'cx, 'tcx> TypeFolder<'tcx> for Resolver<'cx, 'tcx> {
}
Err(_) => {
debug!("Resolver::fold_ty: input type `{:?}` not fully resolvable", t);
- self.report_error(t);
- self.replaced_with_error = true;
- self.tcx().ty_error()
+ let e = self.report_error(t);
+ self.replaced_with_error = Some(e);
+ self.tcx().ty_error_with_guaranteed(e)
}
}
}
@@ -790,9 +790,9 @@ impl<'cx, 'tcx> TypeFolder<'tcx> for Resolver<'cx, 'tcx> {
Ok(ct) => self.tcx.erase_regions(ct),
Err(_) => {
debug!("Resolver::fold_const: input const `{:?}` not fully resolvable", ct);
- self.report_error(ct);
- self.replaced_with_error = true;
- self.tcx().const_error(ct.ty())
+ let e = self.report_error(ct);
+ self.replaced_with_error = Some(e);
+ self.tcx().const_error_with_guaranteed(ct.ty(), e)
}
}
}
diff --git a/compiler/rustc_incremental/src/persist/dirty_clean.rs b/compiler/rustc_incremental/src/persist/dirty_clean.rs
index 79e2d371e..a8acaf659 100644
--- a/compiler/rustc_incremental/src/persist/dirty_clean.rs
+++ b/compiler/rustc_incremental/src/persist/dirty_clean.rs
@@ -438,9 +438,9 @@ fn expect_associated_value(tcx: TyCtxt<'_>, item: &NestedMetaItem) -> Symbol {
}
}
-// A visitor that collects all #[rustc_clean] attributes from
-// the HIR. It is used to verify that we really ran checks for all annotated
-// nodes.
+/// A visitor that collects all `#[rustc_clean]` attributes from
+/// the HIR. It is used to verify that we really ran checks for all annotated
+/// nodes.
pub struct FindAllAttrs<'tcx> {
tcx: TyCtxt<'tcx>,
found_attrs: Vec<&'tcx Attribute>,
diff --git a/compiler/rustc_incremental/src/persist/fs.rs b/compiler/rustc_incremental/src/persist/fs.rs
index 25c1b2e1c..97ebed058 100644
--- a/compiler/rustc_incremental/src/persist/fs.rs
+++ b/compiler/rustc_incremental/src/persist/fs.rs
@@ -109,6 +109,7 @@ use rustc_data_structures::{base_n, flock};
use rustc_errors::ErrorGuaranteed;
use rustc_fs_util::{link_or_copy, LinkOrCopy};
use rustc_session::{Session, StableCrateId};
+use rustc_span::Symbol;
use std::fs as std_fs;
use std::io::{self, ErrorKind};
@@ -202,7 +203,7 @@ pub fn in_incr_comp_dir(incr_comp_session_dir: &Path, file_name: &str) -> PathBu
/// [`rustc_interface::queries::dep_graph`]: ../../rustc_interface/struct.Queries.html#structfield.dep_graph
pub fn prepare_session_directory(
sess: &Session,
- crate_name: &str,
+ crate_name: Symbol,
stable_crate_id: StableCrateId,
) -> Result<(), ErrorGuaranteed> {
if sess.opts.incremental.is_none() {
@@ -322,7 +323,7 @@ pub fn finalize_session_directory(sess: &Session, svh: Svh) {
let incr_comp_session_dir: PathBuf = sess.incr_comp_session_dir().clone();
- if sess.has_errors_or_delayed_span_bugs() {
+ if let Some(_) = sess.has_errors_or_delayed_span_bugs() {
// If there have been any errors during compilation, we don't want to
// publish this session directory. Rather, we'll just delete it.
@@ -657,7 +658,7 @@ fn string_to_timestamp(s: &str) -> Result<SystemTime, ()> {
Ok(UNIX_EPOCH + duration)
}
-fn crate_path(sess: &Session, crate_name: &str, stable_crate_id: StableCrateId) -> PathBuf {
+fn crate_path(sess: &Session, crate_name: Symbol, stable_crate_id: StableCrateId) -> PathBuf {
let incr_dir = sess.opts.incremental.as_ref().unwrap().clone();
let stable_crate_id = base_n::encode(stable_crate_id.to_u64() as u128, INT_ENCODE_BASE);
diff --git a/compiler/rustc_incremental/src/persist/save.rs b/compiler/rustc_incremental/src/persist/save.rs
index 710350314..6e9dcdd98 100644
--- a/compiler/rustc_incremental/src/persist/save.rs
+++ b/compiler/rustc_incremental/src/persist/save.rs
@@ -28,7 +28,7 @@ pub fn save_dep_graph(tcx: TyCtxt<'_>) {
return;
}
// This is going to be deleted in finalize_session_directory, so let's not create it
- if sess.has_errors_or_delayed_span_bugs() {
+ if let Some(_) = sess.has_errors_or_delayed_span_bugs() {
return;
}
@@ -89,7 +89,7 @@ pub fn save_work_product_index(
return;
}
// This is going to be deleted in finalize_session_directory, so let's not create it
- if sess.has_errors_or_delayed_span_bugs() {
+ if let Some(_) = sess.has_errors_or_delayed_span_bugs() {
return;
}
diff --git a/compiler/rustc_index/Cargo.toml b/compiler/rustc_index/Cargo.toml
index d8ea5aa80..e1cda5a9e 100644
--- a/compiler/rustc_index/Cargo.toml
+++ b/compiler/rustc_index/Cargo.toml
@@ -7,6 +7,10 @@ edition = "2021"
[dependencies]
arrayvec = { version = "0.7", default-features = false }
-rustc_serialize = { path = "../rustc_serialize" }
-rustc_macros = { path = "../rustc_macros" }
+rustc_serialize = { path = "../rustc_serialize", optional = true }
+rustc_macros = { path = "../rustc_macros", optional = true }
smallvec = "1.8.1"
+
+[features]
+default = ["nightly"]
+nightly = ["rustc_serialize", "rustc_macros"]
diff --git a/compiler/rustc_index/src/lib.rs b/compiler/rustc_index/src/lib.rs
index 23a4c1f06..db2c79152 100644
--- a/compiler/rustc_index/src/lib.rs
+++ b/compiler/rustc_index/src/lib.rs
@@ -1,17 +1,25 @@
#![deny(rustc::untranslatable_diagnostic)]
#![deny(rustc::diagnostic_outside_of_impl)]
-#![feature(allow_internal_unstable)]
-#![feature(extend_one)]
-#![feature(min_specialization)]
-#![feature(new_uninit)]
-#![feature(step_trait)]
-#![feature(stmt_expr_attributes)]
-#![feature(test)]
+#![cfg_attr(
+ feature = "nightly",
+ feature(
+ allow_internal_unstable,
+ extend_one,
+ min_specialization,
+ new_uninit,
+ step_trait,
+ stmt_expr_attributes,
+ test
+ )
+)]
+#[cfg(feature = "nightly")]
pub mod bit_set;
+#[cfg(feature = "nightly")]
pub mod interval;
pub mod vec;
+#[cfg(feature = "rustc_macros")]
pub use rustc_macros::newtype_index;
/// Type size assertion. The first argument is a type and the second argument is its expected size.
diff --git a/compiler/rustc_index/src/vec.rs b/compiler/rustc_index/src/vec.rs
index 4172ce3bb..39aa27a23 100644
--- a/compiler/rustc_index/src/vec.rs
+++ b/compiler/rustc_index/src/vec.rs
@@ -1,3 +1,4 @@
+#[cfg(feature = "rustc_serialize")]
use rustc_serialize::{Decodable, Decoder, Encodable, Encoder};
use std::fmt;
@@ -17,10 +18,12 @@ pub trait Idx: Copy + 'static + Eq + PartialEq + Debug + Hash {
fn index(self) -> usize;
+ #[inline]
fn increment_by(&mut self, amount: usize) {
*self = self.plus(amount);
}
+ #[inline]
fn plus(self, amount: usize) -> Self {
Self::new(self.index() + amount)
}
@@ -59,12 +62,14 @@ pub struct IndexVec<I: Idx, T> {
// not the phantom data.
unsafe impl<I: Idx, T> Send for IndexVec<I, T> where T: Send {}
+#[cfg(feature = "rustc_serialize")]
impl<S: Encoder, I: Idx, T: Encodable<S>> Encodable<S> for IndexVec<I, T> {
fn encode(&self, s: &mut S) {
Encodable::encode(&self.raw, s);
}
}
+#[cfg(feature = "rustc_serialize")]
impl<D: Decoder, I: Idx, T: Decodable<D>> Decodable<D> for IndexVec<I, T> {
fn decode(d: &mut D) -> Self {
IndexVec { raw: Decodable::decode(d), _marker: PhantomData }
@@ -357,11 +362,13 @@ impl<I: Idx, T> Extend<T> for IndexVec<I, T> {
}
#[inline]
+ #[cfg(feature = "nightly")]
fn extend_one(&mut self, item: T) {
self.raw.push(item);
}
#[inline]
+ #[cfg(feature = "nightly")]
fn extend_reserve(&mut self, additional: usize) {
self.raw.reserve(additional);
}
diff --git a/compiler/rustc_infer/src/errors/mod.rs b/compiler/rustc_infer/src/errors/mod.rs
index 2131d1906..74c4c65cc 100644
--- a/compiler/rustc_infer/src/errors/mod.rs
+++ b/compiler/rustc_infer/src/errors/mod.rs
@@ -109,8 +109,9 @@ pub struct InferenceBadError<'a> {
#[derive(Subdiagnostic)]
pub enum SourceKindSubdiag<'a> {
- #[suggestion_verbose(
+ #[suggestion(
infer_source_kind_subdiag_let,
+ style = "verbose",
code = ": {type_name}",
applicability = "has-placeholders"
)]
@@ -135,8 +136,9 @@ pub enum SourceKindSubdiag<'a> {
parent_prefix: String,
parent_name: String,
},
- #[suggestion_verbose(
+ #[suggestion(
infer_source_kind_subdiag_generic_suggestion,
+ style = "verbose",
code = "::<{args}>",
applicability = "has-placeholders"
)]
@@ -150,8 +152,9 @@ pub enum SourceKindSubdiag<'a> {
#[derive(Subdiagnostic)]
pub enum SourceKindMultiSuggestion<'a> {
- #[multipart_suggestion_verbose(
+ #[multipart_suggestion(
infer_source_kind_fully_qualified,
+ style = "verbose",
applicability = "has-placeholders"
)]
FullyQualified {
@@ -163,8 +166,9 @@ pub enum SourceKindMultiSuggestion<'a> {
adjustment: &'a str,
successor_pos: &'a str,
},
- #[multipart_suggestion_verbose(
+ #[multipart_suggestion(
infer_source_kind_closure_return,
+ style = "verbose",
applicability = "has-placeholders"
)]
ClosureReturn {
@@ -176,6 +180,18 @@ pub enum SourceKindMultiSuggestion<'a> {
},
}
+#[derive(Subdiagnostic)]
+#[suggestion(
+ infer_suggest_add_let_for_letchains,
+ style = "verbose",
+ applicability = "machine-applicable",
+ code = "let "
+)]
+pub(crate) struct SuggAddLetForLetChains {
+ #[primary_span]
+ pub span: Span,
+}
+
impl<'a> SourceKindMultiSuggestion<'a> {
pub fn new_fully_qualified(
span: Span,
@@ -359,7 +375,7 @@ impl AddToDiagnostic for AddLifetimeParamsSuggestion<'_> {
return false;
};
- if !lifetime_sub.name.is_anonymous() || !lifetime_sup.name.is_anonymous() {
+ if !lifetime_sub.is_anonymous() || !lifetime_sup.is_anonymous() {
return false;
};
@@ -391,20 +407,20 @@ impl AddToDiagnostic for AddLifetimeParamsSuggestion<'_> {
let suggestion_param_name =
suggestion_param_name.map(|n| n.to_string()).unwrap_or_else(|| "'a".to_owned());
- debug!(?lifetime_sup.span);
- debug!(?lifetime_sub.span);
- let make_suggestion = |span: rustc_span::Span| {
- if span.is_empty() {
- (span, format!("{}, ", suggestion_param_name))
- } else if let Ok("&") = self.tcx.sess.source_map().span_to_snippet(span).as_deref()
- {
- (span.shrink_to_hi(), format!("{} ", suggestion_param_name))
+ debug!(?lifetime_sup.ident.span);
+ debug!(?lifetime_sub.ident.span);
+ let make_suggestion = |ident: Ident| {
+ let sugg = if ident.name == kw::Empty {
+ format!("{}, ", suggestion_param_name)
+ } else if ident.name == kw::UnderscoreLifetime && ident.span.is_empty() {
+ format!("{} ", suggestion_param_name)
} else {
- (span, suggestion_param_name.clone())
- }
+ suggestion_param_name.clone()
+ };
+ (ident.span, sugg)
};
let mut suggestions =
- vec![make_suggestion(lifetime_sub.span), make_suggestion(lifetime_sup.span)];
+ vec![make_suggestion(lifetime_sub.ident), make_suggestion(lifetime_sup.ident)];
if introduce_new {
let new_param_suggestion = if let Some(first) =
@@ -478,8 +494,9 @@ pub enum ImplicitStaticLifetimeSubdiag {
#[primary_span]
span: Span,
},
- #[suggestion_verbose(
+ #[suggestion(
infer_implicit_static_lifetime_suggestion,
+ style = "verbose",
code = " + '_",
applicability = "maybe-incorrect"
)]
diff --git a/compiler/rustc_infer/src/errors/note_and_explain.rs b/compiler/rustc_infer/src/errors/note_and_explain.rs
index 6a29d8562..7aaa5ce2f 100644
--- a/compiler/rustc_infer/src/errors/note_and_explain.rs
+++ b/compiler/rustc_infer/src/errors/note_and_explain.rs
@@ -89,10 +89,13 @@ impl<'a> DescriptionCtx<'a> {
};
me.span = Some(sp);
}
- ty::BrAnon(idx) => {
+ ty::BrAnon(idx, span) => {
me.kind = "anon_num_here";
me.num_arg = idx+1;
- me.span = Some(tcx.def_span(scope));
+ me.span = match span {
+ Some(_) => span,
+ None => Some(tcx.def_span(scope)),
+ }
},
_ => {
me.kind = "defined_here_reg";
diff --git a/compiler/rustc_infer/src/infer/at.rs b/compiler/rustc_infer/src/infer/at.rs
index 5ff3779fa..4429e4f43 100644
--- a/compiler/rustc_infer/src/infer/at.rs
+++ b/compiler/rustc_infer/src/infer/at.rs
@@ -77,10 +77,7 @@ impl<'tcx> InferCtxt<'tcx> {
err_count_on_creation: self.err_count_on_creation,
in_snapshot: self.in_snapshot.clone(),
universe: self.universe.clone(),
- normalize_fn_sig_for_diagnostic: self
- .normalize_fn_sig_for_diagnostic
- .as_ref()
- .map(|f| f.clone()),
+ intercrate: self.intercrate,
}
}
}
diff --git a/compiler/rustc_infer/src/infer/canonical/canonicalizer.rs b/compiler/rustc_infer/src/infer/canonical/canonicalizer.rs
index a3ff70363..3dc0d60b1 100644
--- a/compiler/rustc_infer/src/infer/canonical/canonicalizer.rs
+++ b/compiler/rustc_infer/src/infer/canonical/canonicalizer.rs
@@ -495,7 +495,7 @@ impl<'cx, 'tcx> TypeFolder<'tcx> for Canonicalizer<'cx, 'tcx> {
}
ty::ConstKind::Bound(debruijn, _) => {
if debruijn >= self.binder_index {
- bug!("escaping bound type during canonicalization")
+ bug!("escaping bound const during canonicalization")
} else {
return ct;
}
@@ -738,7 +738,7 @@ impl<'cx, 'tcx> Canonicalizer<'cx, 'tcx> {
r: ty::Region<'tcx>,
) -> ty::Region<'tcx> {
let var = self.canonical_var(info, r.into());
- let br = ty::BoundRegion { var, kind: ty::BrAnon(var.as_u32()) };
+ let br = ty::BoundRegion { var, kind: ty::BrAnon(var.as_u32(), None) };
let region = ty::ReLateBound(self.binder_index, br);
self.tcx().mk_region(region)
}
@@ -773,10 +773,10 @@ impl<'cx, 'tcx> Canonicalizer<'cx, 'tcx> {
self.fold_const(bound_to)
} else {
let var = self.canonical_var(info, const_var.into());
- self.tcx().mk_const(ty::ConstS {
- kind: ty::ConstKind::Bound(self.binder_index, var),
- ty: self.fold_ty(const_var.ty()),
- })
+ self.tcx().mk_const(
+ ty::ConstKind::Bound(self.binder_index, var),
+ self.fold_ty(const_var.ty()),
+ )
}
}
}
diff --git a/compiler/rustc_infer/src/infer/canonical/mod.rs b/compiler/rustc_infer/src/infer/canonical/mod.rs
index 06ca2534d..e59715b70 100644
--- a/compiler/rustc_infer/src/infer/canonical/mod.rs
+++ b/compiler/rustc_infer/src/infer/canonical/mod.rs
@@ -41,9 +41,9 @@ impl<'tcx> InferCtxt<'tcx> {
/// inference variables and applies it to the canonical value.
/// Returns both the instantiated result *and* the substitution S.
///
- /// This is only meant to be invoked as part of constructing an
+ /// This can be invoked as part of constructing an
/// inference context at the start of a query (see
- /// `InferCtxtBuilder::enter_with_canonical`). It basically
+ /// `InferCtxtBuilder::build_with_canonical`). It basically
/// brings the canonical value "into scope" within your new infcx.
///
/// At the end of processing, the substitution S (once
@@ -63,8 +63,11 @@ impl<'tcx> InferCtxt<'tcx> {
// in them, so this code has no effect, but it is looking
// forward to the day when we *do* want to carry universes
// through into queries.
- let universes: IndexVec<ty::UniverseIndex, _> = std::iter::once(ty::UniverseIndex::ROOT)
- .chain((0..canonical.max_universe.as_u32()).map(|_| self.create_next_universe()))
+ //
+ // Instantiate the root-universe content into the current universe,
+ // and create fresh universes for the higher universes.
+ let universes: IndexVec<ty::UniverseIndex, _> = std::iter::once(self.universe())
+ .chain((1..=canonical.max_universe.as_u32()).map(|_| self.create_next_universe()))
.collect();
let canonical_inference_vars =
@@ -147,12 +150,7 @@ impl<'tcx> InferCtxt<'tcx> {
CanonicalVarKind::PlaceholderConst(ty::PlaceholderConst { universe, name }, ty) => {
let universe_mapped = universe_map(universe);
let placeholder_mapped = ty::PlaceholderConst { universe: universe_mapped, name };
- self.tcx
- .mk_const(ty::ConstS {
- kind: ty::ConstKind::Placeholder(placeholder_mapped),
- ty,
- })
- .into()
+ self.tcx.mk_const(placeholder_mapped, ty).into()
}
}
}
diff --git a/compiler/rustc_infer/src/infer/canonical/query_response.rs b/compiler/rustc_infer/src/infer/canonical/query_response.rs
index a299a3e57..996b1c40e 100644
--- a/compiler/rustc_infer/src/infer/canonical/query_response.rs
+++ b/compiler/rustc_infer/src/infer/canonical/query_response.rs
@@ -23,11 +23,10 @@ use rustc_index::vec::Idx;
use rustc_index::vec::IndexVec;
use rustc_middle::arena::ArenaAllocatable;
use rustc_middle::mir::ConstraintCategory;
-use rustc_middle::ty::error::TypeError;
use rustc_middle::ty::fold::TypeFoldable;
use rustc_middle::ty::relate::TypeRelation;
use rustc_middle::ty::subst::{GenericArg, GenericArgKind};
-use rustc_middle::ty::{self, BoundVar, Const, ToPredicate, Ty, TyCtxt};
+use rustc_middle::ty::{self, BoundVar, ToPredicate, Ty, TyCtxt};
use rustc_span::Span;
use std::fmt::Debug;
use std::iter;
@@ -570,10 +569,10 @@ impl<'tcx> InferCtxt<'tcx> {
let atom = match k1.unpack() {
GenericArgKind::Lifetime(r1) => {
- ty::PredicateKind::RegionOutlives(ty::OutlivesPredicate(r1, r2))
+ ty::PredicateKind::Clause(ty::Clause::RegionOutlives(ty::OutlivesPredicate(r1, r2)))
}
GenericArgKind::Type(t1) => {
- ty::PredicateKind::TypeOutlives(ty::OutlivesPredicate(t1, r2))
+ ty::PredicateKind::Clause(ty::Clause::TypeOutlives(ty::OutlivesPredicate(t1, r2)))
}
GenericArgKind::Const(..) => {
// Consts cannot outlive one another, so we don't expect to
@@ -581,9 +580,9 @@ impl<'tcx> InferCtxt<'tcx> {
span_bug!(cause.span, "unexpected const outlives {:?}", predicate);
}
};
- let predicate = predicate.0.rebind(atom).to_predicate(self.tcx);
+ let predicate = predicate.0.rebind(atom);
- Obligation::new(cause, param_env, predicate)
+ Obligation::new(self.tcx, cause, param_env, predicate)
}
/// Given two sets of values for the same set of canonical variables, unify them.
@@ -721,18 +720,14 @@ impl<'tcx> TypeRelatingDelegate<'tcx> for QueryTypeRelatingDelegate<'_, 'tcx> {
self.obligations.push(Obligation {
cause: self.cause.clone(),
param_env: self.param_env,
- predicate: ty::Binder::dummy(ty::PredicateKind::RegionOutlives(ty::OutlivesPredicate(
- sup, sub,
+ predicate: ty::Binder::dummy(ty::PredicateKind::Clause(ty::Clause::RegionOutlives(
+ ty::OutlivesPredicate(sup, sub),
)))
.to_predicate(self.infcx.tcx),
recursion_depth: 0,
});
}
- fn const_equate(&mut self, _a: Const<'tcx>, _b: Const<'tcx>) {
- span_bug!(self.cause.span(), "generic_const_exprs: unreachable `const_equate`");
- }
-
fn normalization() -> NormalizationStrategy {
NormalizationStrategy::Eager
}
@@ -741,11 +736,7 @@ impl<'tcx> TypeRelatingDelegate<'tcx> for QueryTypeRelatingDelegate<'_, 'tcx> {
true
}
- fn register_opaque_type_obligations(
- &mut self,
- obligations: PredicateObligations<'tcx>,
- ) -> Result<(), TypeError<'tcx>> {
+ fn register_obligations(&mut self, obligations: PredicateObligations<'tcx>) {
self.obligations.extend(obligations);
- Ok(())
}
}
diff --git a/compiler/rustc_infer/src/infer/combine.rs b/compiler/rustc_infer/src/infer/combine.rs
index b5427f639..cf895ed0d 100644
--- a/compiler/rustc_infer/src/infer/combine.rs
+++ b/compiler/rustc_infer/src/infer/combine.rs
@@ -37,7 +37,7 @@ use rustc_middle::traits::ObligationCause;
use rustc_middle::ty::error::{ExpectedFound, TypeError};
use rustc_middle::ty::relate::{self, Relate, RelateResult, TypeRelation};
use rustc_middle::ty::subst::SubstsRef;
-use rustc_middle::ty::{self, InferConst, ToPredicate, Ty, TyCtxt, TypeVisitable};
+use rustc_middle::ty::{self, InferConst, Ty, TyCtxt, TypeVisitable};
use rustc_middle::ty::{IntType, UintType};
use rustc_span::{Span, DUMMY_SP};
@@ -347,10 +347,10 @@ impl<'infcx, 'tcx> CombineFields<'infcx, 'tcx> {
if needs_wf {
self.obligations.push(Obligation::new(
+ self.tcx(),
self.trace.cause.clone(),
self.param_env,
- ty::Binder::dummy(ty::PredicateKind::WellFormed(b_ty.into()))
- .to_predicate(self.infcx.tcx),
+ ty::Binder::dummy(ty::PredicateKind::WellFormed(b_ty.into())),
));
}
@@ -444,9 +444,19 @@ impl<'infcx, 'tcx> CombineFields<'infcx, 'tcx> {
ty::PredicateKind::ConstEquate(b, a)
};
self.obligations.push(Obligation::new(
+ self.tcx(),
self.trace.cause.clone(),
self.param_env,
- ty::Binder::dummy(predicate).to_predicate(self.tcx()),
+ ty::Binder::dummy(predicate),
+ ));
+ }
+
+ pub fn mark_ambiguous(&mut self) {
+ self.obligations.push(Obligation::new(
+ self.tcx(),
+ self.trace.cause.clone(),
+ self.param_env,
+ ty::Binder::dummy(ty::PredicateKind::Ambiguous),
));
}
}
@@ -520,6 +530,11 @@ impl<'tcx> TypeRelation<'tcx> for Generalizer<'_, 'tcx> {
fn tcx(&self) -> TyCtxt<'tcx> {
self.infcx.tcx
}
+
+ fn intercrate(&self) -> bool {
+ self.infcx.intercrate
+ }
+
fn param_env(&self) -> ty::ParamEnv<'tcx> {
self.param_env
}
@@ -532,6 +547,10 @@ impl<'tcx> TypeRelation<'tcx> for Generalizer<'_, 'tcx> {
true
}
+ fn mark_ambiguous(&mut self) {
+ span_bug!(self.cause.span, "opaque types are handled in `tys`");
+ }
+
fn binders<T>(
&mut self,
a: ty::Binder<'tcx, T>,
@@ -563,6 +582,7 @@ impl<'tcx> TypeRelation<'tcx> for Generalizer<'_, 'tcx> {
&opt_variances,
a_subst,
b_subst,
+ true,
)
}
}
@@ -655,6 +675,10 @@ impl<'tcx> TypeRelation<'tcx> for Generalizer<'_, 'tcx> {
// relatable.
Ok(t)
}
+ ty::Opaque(def_id, substs) => {
+ let s = self.relate(substs, substs)?;
+ Ok(if s == substs { t } else { self.infcx.tcx.mk_opaque(def_id, s) })
+ }
_ => relate::super_relate_tys(self, t, t),
}?;
@@ -729,7 +753,7 @@ impl<'tcx> TypeRelation<'tcx> for Generalizer<'_, 'tcx> {
origin: var_value.origin,
val: ConstVariableValue::Unknown { universe: self.for_universe },
});
- Ok(self.tcx().mk_const_var(new_var_id, c.ty()))
+ Ok(self.tcx().mk_const(new_var_id, c.ty()))
}
}
}
@@ -741,10 +765,7 @@ impl<'tcx> TypeRelation<'tcx> for Generalizer<'_, 'tcx> {
substs,
substs,
)?;
- Ok(self.tcx().mk_const(ty::ConstS {
- ty: c.ty(),
- kind: ty::ConstKind::Unevaluated(ty::UnevaluatedConst { def, substs }),
- }))
+ Ok(self.tcx().mk_const(ty::UnevaluatedConst { def, substs }, c.ty()))
}
_ => relate::super_relate_consts(self, c, c),
}
@@ -797,6 +818,11 @@ impl<'tcx> TypeRelation<'tcx> for ConstInferUnifier<'_, 'tcx> {
self.infcx.tcx
}
+ fn intercrate(&self) -> bool {
+ assert!(!self.infcx.intercrate);
+ false
+ }
+
fn param_env(&self) -> ty::ParamEnv<'tcx> {
self.param_env
}
@@ -809,6 +835,10 @@ impl<'tcx> TypeRelation<'tcx> for ConstInferUnifier<'_, 'tcx> {
true
}
+ fn mark_ambiguous(&mut self) {
+ bug!()
+ }
+
fn relate_with_variance<T: Relate<'tcx>>(
&mut self,
_variance: ty::Variance,
@@ -942,7 +972,7 @@ impl<'tcx> TypeRelation<'tcx> for ConstInferUnifier<'_, 'tcx> {
},
},
);
- Ok(self.tcx().mk_const_var(new_var_id, c.ty()))
+ Ok(self.tcx().mk_const(new_var_id, c.ty()))
}
}
}
@@ -955,10 +985,7 @@ impl<'tcx> TypeRelation<'tcx> for ConstInferUnifier<'_, 'tcx> {
substs,
)?;
- Ok(self.tcx().mk_const(ty::ConstS {
- ty: c.ty(),
- kind: ty::ConstKind::Unevaluated(ty::UnevaluatedConst { def, substs }),
- }))
+ Ok(self.tcx().mk_const(ty::UnevaluatedConst { def, substs }, c.ty()))
}
_ => relate::super_relate_consts(self, c, c),
}
diff --git a/compiler/rustc_infer/src/infer/equate.rs b/compiler/rustc_infer/src/infer/equate.rs
index 59728148a..8682f4d3b 100644
--- a/compiler/rustc_infer/src/infer/equate.rs
+++ b/compiler/rustc_infer/src/infer/equate.rs
@@ -32,6 +32,10 @@ impl<'tcx> TypeRelation<'tcx> for Equate<'_, '_, 'tcx> {
self.fields.tcx()
}
+ fn intercrate(&self) -> bool {
+ self.fields.infcx.intercrate
+ }
+
fn param_env(&self) -> ty::ParamEnv<'tcx> {
self.fields.param_env
}
@@ -40,6 +44,10 @@ impl<'tcx> TypeRelation<'tcx> for Equate<'_, '_, 'tcx> {
self.a_is_expected
}
+ fn mark_ambiguous(&mut self) {
+ self.fields.mark_ambiguous();
+ }
+
fn relate_item_substs(
&mut self,
_item_def_id: DefId,
diff --git a/compiler/rustc_infer/src/infer/error_reporting/mod.rs b/compiler/rustc_infer/src/infer/error_reporting/mod.rs
index 9ff703e52..987559d7e 100644
--- a/compiler/rustc_infer/src/infer/error_reporting/mod.rs
+++ b/compiler/rustc_infer/src/infer/error_reporting/mod.rs
@@ -1,3 +1,4 @@
+// ignore-tidy-filelength
//! Error Reporting Code for the inference engine
//!
//! Because of the way inference, and in particular region inference,
@@ -55,10 +56,9 @@ use crate::infer::ExpectedFound;
use crate::traits::error_reporting::report_object_safety_error;
use crate::traits::{
IfExpressionCause, MatchExpressionArmCause, ObligationCause, ObligationCauseCode,
- StatementAsExpression,
};
-use rustc_data_structures::fx::{FxHashMap, FxHashSet};
+use rustc_data_structures::fx::{FxIndexMap, FxIndexSet};
use rustc_errors::{pluralize, struct_span_err, Diagnostic, ErrorGuaranteed, IntoDiagnosticArg};
use rustc_errors::{Applicability, DiagnosticBuilder, DiagnosticStyledString, MultiSpan};
use rustc_hir as hir;
@@ -67,18 +67,19 @@ use rustc_hir::def_id::{DefId, LocalDefId};
use rustc_hir::lang_items::LangItem;
use rustc_hir::Node;
use rustc_middle::dep_graph::DepContext;
-use rustc_middle::ty::print::with_no_trimmed_paths;
use rustc_middle::ty::relate::{self, RelateResult, TypeRelation};
use rustc_middle::ty::{
- self, error::TypeError, Binder, List, Region, Ty, TyCtxt, TypeFoldable, TypeSuperVisitable,
+ self, error::TypeError, List, Region, Ty, TyCtxt, TypeFoldable, TypeSuperVisitable,
TypeVisitable,
};
use rustc_span::{sym, symbol::kw, BytePos, DesugaringKind, Pos, Span};
use rustc_target::spec::abi;
use std::ops::{ControlFlow, Deref};
+use std::path::PathBuf;
use std::{cmp, fmt, iter};
mod note;
+mod suggest;
pub(crate) mod need_type_info;
pub use need_type_info::TypeAnnotationNeeded;
@@ -91,6 +92,8 @@ pub mod nice_region_error;
pub struct TypeErrCtxt<'a, 'tcx> {
pub infcx: &'a InferCtxt<'tcx>,
pub typeck_results: Option<std::cell::Ref<'a, ty::TypeckResults<'tcx>>>,
+ pub normalize_fn_sig: Box<dyn Fn(ty::PolyFnSig<'tcx>) -> ty::PolyFnSig<'tcx> + 'a>,
+ pub fallback_has_occurred: bool,
}
impl TypeErrCtxt<'_, '_> {
@@ -206,9 +209,12 @@ fn msg_span_from_early_bound_and_free_regions<'tcx>(
};
(text, sp)
}
- ty::BrAnon(idx) => (
+ ty::BrAnon(idx, span) => (
format!("the anonymous lifetime #{} defined here", idx + 1),
- tcx.def_span(scope)
+ match span {
+ Some(span) => span,
+ None => tcx.def_span(scope)
+ }
),
_ => (
format!("the lifetime `{}` as defined here", region),
@@ -253,6 +259,7 @@ fn label_msg_span(
}
}
+#[instrument(level = "trace", skip(tcx))]
pub fn unexpected_hidden_region_diagnostic<'tcx>(
tcx: TyCtxt<'tcx>,
span: Span,
@@ -330,33 +337,36 @@ pub fn unexpected_hidden_region_diagnostic<'tcx>(
}
impl<'tcx> InferCtxt<'tcx> {
- pub fn get_impl_future_output_ty(&self, ty: Ty<'tcx>) -> Option<Binder<'tcx, Ty<'tcx>>> {
- if let ty::Opaque(def_id, substs) = ty.kind() {
- let future_trait = self.tcx.require_lang_item(LangItem::Future, None);
- // Future::Output
- let item_def_id = self.tcx.associated_item_def_ids(future_trait)[0];
+ pub fn get_impl_future_output_ty(&self, ty: Ty<'tcx>) -> Option<Ty<'tcx>> {
+ let (def_id, substs) = match *ty.kind() {
+ ty::Opaque(def_id, substs) => (def_id, substs),
+ ty::Projection(data)
+ if self.tcx.def_kind(data.item_def_id) == DefKind::ImplTraitPlaceholder =>
+ {
+ (data.item_def_id, data.substs)
+ }
+ _ => return None,
+ };
- let bounds = self.tcx.bound_explicit_item_bounds(*def_id);
+ let future_trait = self.tcx.require_lang_item(LangItem::Future, None);
+ let item_def_id = self.tcx.associated_item_def_ids(future_trait)[0];
- for (predicate, _) in bounds.subst_iter_copied(self.tcx, substs) {
- let output = predicate
+ self.tcx.bound_explicit_item_bounds(def_id).subst_iter_copied(self.tcx, substs).find_map(
+ |(predicate, _)| {
+ predicate
.kind()
.map_bound(|kind| match kind {
- ty::PredicateKind::Projection(projection_predicate)
+ ty::PredicateKind::Clause(ty::Clause::Projection(projection_predicate))
if projection_predicate.projection_ty.item_def_id == item_def_id =>
{
projection_predicate.term.ty()
}
_ => None,
})
- .transpose();
- if output.is_some() {
- // We don't account for multiple `Future::Output = Ty` constraints.
- return output;
- }
- }
- }
- None
+ .no_bound_vars()
+ .flatten()
+ },
+ )
}
}
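(Aside, not part of the patch: the rewritten `get_impl_future_output_ty` above scans an opaque type's item bounds and picks out the `Future::Output` projection with `find_map`. A rough standalone sketch of that scan follows; the `Bound` enum and helper are hypothetical stand-ins, not rustc types.)

enum Bound {
    // `Future<Output = T>`, with T rendered as a string for the sketch.
    FutureOutput(&'static str),
    // Any other bound, e.g. `Send`.
    Other,
}

fn impl_future_output(bounds: &[Bound]) -> Option<&'static str> {
    // Mirrors the `find_map` in the hunk above: stop at the first
    // `Future::Output` projection and ignore every other bound.
    bounds.iter().find_map(|b| match b {
        Bound::FutureOutput(ty) => Some(*ty),
        Bound::Other => None,
    })
}

fn main() {
    let bounds = [Bound::Other, Bound::FutureOutput("u32")];
    assert_eq!(impl_future_output(&bounds), Some("u32"));
}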
@@ -534,7 +544,7 @@ impl<'tcx> TypeErrCtxt<'_, 'tcx> {
fn print_dyn_existential(
self,
- _predicates: &'tcx ty::List<ty::Binder<'tcx, ty::ExistentialPredicate<'tcx>>>,
+ _predicates: &'tcx ty::List<ty::PolyExistentialPredicate<'tcx>>,
) -> Result<Self::DynExistential, Self::Error> {
Err(NonTrivialPath)
}
@@ -793,87 +803,6 @@ impl<'tcx> TypeErrCtxt<'_, 'tcx> {
}
}
- fn suggest_remove_semi_or_return_binding(
- &self,
- err: &mut Diagnostic,
- first_id: Option<hir::HirId>,
- first_ty: Ty<'tcx>,
- first_span: Span,
- second_id: Option<hir::HirId>,
- second_ty: Ty<'tcx>,
- second_span: Span,
- ) {
- let remove_semicolon = [
- (first_id, self.resolve_vars_if_possible(second_ty)),
- (second_id, self.resolve_vars_if_possible(first_ty)),
- ]
- .into_iter()
- .find_map(|(id, ty)| {
- let hir::Node::Block(blk) = self.tcx.hir().get(id?) else { return None };
- self.could_remove_semicolon(blk, ty)
- });
- match remove_semicolon {
- Some((sp, StatementAsExpression::NeedsBoxing)) => {
- err.multipart_suggestion(
- "consider removing this semicolon and boxing the expressions",
- vec![
- (first_span.shrink_to_lo(), "Box::new(".to_string()),
- (first_span.shrink_to_hi(), ")".to_string()),
- (second_span.shrink_to_lo(), "Box::new(".to_string()),
- (second_span.shrink_to_hi(), ")".to_string()),
- (sp, String::new()),
- ],
- Applicability::MachineApplicable,
- );
- }
- Some((sp, StatementAsExpression::CorrectType)) => {
- err.span_suggestion_short(
- sp,
- "consider removing this semicolon",
- "",
- Applicability::MachineApplicable,
- );
- }
- None => {
- for (id, ty) in [(first_id, second_ty), (second_id, first_ty)] {
- if let Some(id) = id
- && let hir::Node::Block(blk) = self.tcx.hir().get(id)
- && self.consider_returning_binding(blk, ty, err)
- {
- break;
- }
- }
- }
- }
- }
-
- fn suggest_boxing_for_return_impl_trait(
- &self,
- err: &mut Diagnostic,
- return_sp: Span,
- arm_spans: impl Iterator<Item = Span>,
- ) {
- err.multipart_suggestion(
- "you could change the return type to be a boxed trait object",
- vec![
- (return_sp.with_hi(return_sp.lo() + BytePos(4)), "Box<dyn".to_string()),
- (return_sp.shrink_to_hi(), ">".to_string()),
- ],
- Applicability::MaybeIncorrect,
- );
- let sugg = arm_spans
- .flat_map(|sp| {
- [(sp.shrink_to_lo(), "Box::new(".to_string()), (sp.shrink_to_hi(), ")".to_string())]
- .into_iter()
- })
- .collect::<Vec<_>>();
- err.multipart_suggestion(
- "if you change the return type to expect trait objects, box the returned expressions",
- sugg,
- Applicability::MaybeIncorrect,
- );
- }
-
/// Given that `other_ty` is the same as a type argument for `name` in `sub`, populate `value`
/// highlighting `name` and every type argument that isn't at `pos` (which is `other_ty`), and
/// populate `other_value` with `other_ty`.
@@ -1003,22 +932,14 @@ impl<'tcx> TypeErrCtxt<'_, 'tcx> {
}
}
- fn normalize_fn_sig_for_diagnostic(&self, sig: ty::PolyFnSig<'tcx>) -> ty::PolyFnSig<'tcx> {
- if let Some(normalize) = &self.normalize_fn_sig_for_diagnostic {
- normalize(self, sig)
- } else {
- sig
- }
- }
-
/// Given two `fn` signatures highlight only sub-parts that are different.
fn cmp_fn_sig(
&self,
sig1: &ty::PolyFnSig<'tcx>,
sig2: &ty::PolyFnSig<'tcx>,
) -> (DiagnosticStyledString, DiagnosticStyledString) {
- let sig1 = &self.normalize_fn_sig_for_diagnostic(*sig1);
- let sig2 = &self.normalize_fn_sig_for_diagnostic(*sig2);
+ let sig1 = &(self.normalize_fn_sig)(*sig1);
+ let sig2 = &(self.normalize_fn_sig)(*sig2);
let get_lifetimes = |sig| {
use rustc_hir::def::Namespace;
@@ -1258,7 +1179,7 @@ impl<'tcx> TypeErrCtxt<'_, 'tcx> {
let num_display_types = consts_offset - regions_len;
for (i, (ta1, ta2)) in type_arguments.take(num_display_types).enumerate() {
let i = i + regions_len;
- if ta1 == ta2 {
+ if ta1 == ta2 && !self.tcx.sess.verbose() {
values.0.push_normal("_");
values.1.push_normal("_");
} else {
@@ -1274,7 +1195,7 @@ impl<'tcx> TypeErrCtxt<'_, 'tcx> {
let const_arguments = sub1.consts().zip(sub2.consts());
for (i, (ca1, ca2)) in const_arguments.enumerate() {
let i = i + consts_offset;
- if ca1 == ca2 {
+ if ca1 == ca2 && !self.tcx.sess.verbose() {
values.0.push_normal("_");
values.1.push_normal("_");
} else {
@@ -1347,10 +1268,7 @@ impl<'tcx> TypeErrCtxt<'_, 'tcx> {
.map(|(mod_str, _)| mod_str.len() + separator_len)
.sum();
- debug!(
- "cmp: separator_len={}, split_idx={}, min_len={}",
- separator_len, split_idx, min_len
- );
+ debug!(?separator_len, ?split_idx, ?min_len, "cmp");
if split_idx >= min_len {
// paths are identical, highlight everything
@@ -1361,7 +1279,7 @@ impl<'tcx> TypeErrCtxt<'_, 'tcx> {
} else {
let (common, uniq1) = t1_str.split_at(split_idx);
let (_, uniq2) = t2_str.split_at(split_idx);
- debug!("cmp: common={}, uniq1={}, uniq2={}", common, uniq1, uniq2);
+ debug!(?common, ?uniq1, ?uniq2, "cmp");
values.0.push_normal(common);
values.0.push_highlighted(uniq1);
@@ -1453,7 +1371,7 @@ impl<'tcx> TypeErrCtxt<'_, 'tcx> {
(ty::FnPtr(sig1), ty::FnPtr(sig2)) => self.cmp_fn_sig(sig1, sig2),
_ => {
- if t1 == t2 {
+ if t1 == t2 && !self.tcx.sess.verbose() {
// The two types are the same, elide and don't highlight.
(DiagnosticStyledString::normal("_"), DiagnosticStyledString::normal("_"))
} else {
@@ -1498,9 +1416,9 @@ impl<'tcx> TypeErrCtxt<'_, 'tcx> {
values = None;
}
struct OpaqueTypesVisitor<'tcx> {
- types: FxHashMap<TyCategory, FxHashSet<Span>>,
- expected: FxHashMap<TyCategory, FxHashSet<Span>>,
- found: FxHashMap<TyCategory, FxHashSet<Span>>,
+ types: FxIndexMap<TyCategory, FxIndexSet<Span>>,
+ expected: FxIndexMap<TyCategory, FxIndexSet<Span>>,
+ found: FxIndexMap<TyCategory, FxIndexSet<Span>>,
ignore_span: Span,
tcx: TyCtxt<'tcx>,
}
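(Aside, not part of the patch: the `FxHashMap`/`FxHashSet` to `FxIndexMap`/`FxIndexSet` changes in this file are about deterministic diagnostics: an insertion-ordered map iterates in a stable order, so emitted notes do not depend on hashing. A minimal sketch using the `indexmap` crate as a stand-in for rustc's `FxIndexMap`; it assumes `indexmap` has been added as a dependency.)

use indexmap::IndexMap;

fn main() {
    let mut spans: IndexMap<&str, Vec<u32>> = IndexMap::new();
    spans.insert("opaque type", vec![10, 42]);
    spans.insert("closure", vec![7]);
    // Iteration follows insertion order, so the output is the same on every
    // run, unlike a hash map whose order depends on the hash values.
    let keys: Vec<_> = spans.keys().copied().collect();
    assert_eq!(keys, ["opaque type", "closure"]);
}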
@@ -1538,7 +1456,7 @@ impl<'tcx> TypeErrCtxt<'_, 'tcx> {
&self,
err: &mut Diagnostic,
target: &str,
- types: &FxHashMap<TyCategory, FxHashSet<Span>>,
+ types: &FxIndexMap<TyCategory, FxIndexSet<Span>>,
) {
for (key, values) in types.iter() {
let count = values.len();
@@ -1654,55 +1572,46 @@ impl<'tcx> TypeErrCtxt<'_, 'tcx> {
}
ValuePairs::Regions(_) => (false, Mismatch::Fixed("lifetime")),
};
- let vals = match self.values_str(values) {
- Some((expected, found)) => Some((expected, found)),
- None => {
- // Derived error. Cancel the emitter.
- // NOTE(eddyb) this was `.cancel()`, but `diag`
- // is borrowed, so we can't fully defuse it.
- diag.downgrade_to_delayed_bug();
- return;
- }
+ let Some(vals) = self.values_str(values) else {
+ // Derived error. Cancel the emitter.
+ // NOTE(eddyb) this was `.cancel()`, but `diag`
+ // is borrowed, so we can't fully defuse it.
+ diag.downgrade_to_delayed_bug();
+ return;
};
- (vals, exp_found, is_simple_error, Some(values))
+ (Some(vals), exp_found, is_simple_error, Some(values))
}
};
- match terr {
- // Ignore msg for object safe coercion
- // since E0038 message will be printed
- TypeError::ObjectUnsafeCoercion(_) => {}
- _ => {
- let mut label_or_note = |span: Span, msg: &str| {
- if (prefer_label && is_simple_error) || &[span] == diag.span.primary_spans() {
- diag.span_label(span, msg);
- } else {
- diag.span_note(span, msg);
- }
- };
- if let Some((sp, msg)) = secondary_span {
- if swap_secondary_and_primary {
- let terr = if let Some(infer::ValuePairs::Terms(infer::ExpectedFound {
- expected,
- ..
- })) = values
- {
- format!("expected this to be `{}`", expected)
- } else {
- terr.to_string()
- };
- label_or_note(sp, &terr);
- label_or_note(span, &msg);
- } else {
- label_or_note(span, &terr.to_string());
- label_or_note(sp, &msg);
- }
- } else {
- label_or_note(span, &terr.to_string());
- }
+ let mut label_or_note = |span: Span, msg: &str| {
+ if (prefer_label && is_simple_error) || &[span] == diag.span.primary_spans() {
+ diag.span_label(span, msg);
+ } else {
+ diag.span_note(span, msg);
}
};
- if let Some((expected, found)) = expected_found {
+ if let Some((sp, msg)) = secondary_span {
+ if swap_secondary_and_primary {
+ let terr = if let Some(infer::ValuePairs::Terms(infer::ExpectedFound {
+ expected,
+ ..
+ })) = values
+ {
+ format!("expected this to be `{}`", expected)
+ } else {
+ terr.to_string()
+ };
+ label_or_note(sp, &terr);
+ label_or_note(span, &msg);
+ } else {
+ label_or_note(span, &terr.to_string());
+ label_or_note(sp, &msg);
+ }
+ } else {
+ label_or_note(span, &terr.to_string());
+ }
+
+ if let Some((expected, found, exp_p, found_p)) = expected_found {
let (expected_label, found_label, exp_found) = match exp_found {
Mismatch::Variable(ef) => (
ef.expected.prefix_string(self.tcx),
@@ -1819,32 +1728,41 @@ impl<'tcx> TypeErrCtxt<'_, 'tcx> {
}
TypeError::Sorts(values) => {
let extra = expected == found;
- let sort_string = |ty: Ty<'tcx>| match (extra, ty.kind()) {
- (true, ty::Opaque(def_id, _)) => {
- let sm = self.tcx.sess.source_map();
- let pos = sm.lookup_char_pos(self.tcx.def_span(*def_id).lo());
- format!(
- " (opaque type at <{}:{}:{}>)",
- sm.filename_for_diagnostics(&pos.file.name),
- pos.line,
- pos.col.to_usize() + 1,
- )
- }
- (true, ty::Projection(proj))
- if self.tcx.def_kind(proj.item_def_id)
- == DefKind::ImplTraitPlaceholder =>
- {
- let sm = self.tcx.sess.source_map();
- let pos = sm.lookup_char_pos(self.tcx.def_span(proj.item_def_id).lo());
- format!(
- " (trait associated opaque type at <{}:{}:{}>)",
- sm.filename_for_diagnostics(&pos.file.name),
- pos.line,
- pos.col.to_usize() + 1,
- )
+ let sort_string = |ty: Ty<'tcx>, path: Option<PathBuf>| {
+ let mut s = match (extra, ty.kind()) {
+ (true, ty::Opaque(def_id, _)) => {
+ let sm = self.tcx.sess.source_map();
+ let pos = sm.lookup_char_pos(self.tcx.def_span(*def_id).lo());
+ format!(
+ " (opaque type at <{}:{}:{}>)",
+ sm.filename_for_diagnostics(&pos.file.name),
+ pos.line,
+ pos.col.to_usize() + 1,
+ )
+ }
+ (true, ty::Projection(proj))
+ if self.tcx.def_kind(proj.item_def_id)
+ == DefKind::ImplTraitPlaceholder =>
+ {
+ let sm = self.tcx.sess.source_map();
+ let pos = sm.lookup_char_pos(self.tcx.def_span(proj.item_def_id).lo());
+ format!(
+ " (trait associated opaque type at <{}:{}:{}>)",
+ sm.filename_for_diagnostics(&pos.file.name),
+ pos.line,
+ pos.col.to_usize() + 1,
+ )
+ }
+ (true, _) => format!(" ({})", ty.sort_string(self.tcx)),
+ (false, _) => "".to_string(),
+ };
+ if let Some(path) = path {
+ s.push_str(&format!(
+ "\nthe full type name has been written to '{}'",
+ path.display(),
+ ));
}
- (true, _) => format!(" ({})", ty.sort_string(self.tcx)),
- (false, _) => "".to_string(),
+ s
};
if !(values.expected.is_simple_text() && values.found.is_simple_text())
|| (exp_found.map_or(false, |ef| {
@@ -1866,14 +1784,11 @@ impl<'tcx> TypeErrCtxt<'_, 'tcx> {
expected,
&found_label,
found,
- &sort_string(values.expected),
- &sort_string(values.found),
+ &sort_string(values.expected, exp_p),
+ &sort_string(values.found, found_p),
);
}
}
- TypeError::ObjectUnsafeCoercion(_) => {
- diag.note_unsuccessful_coercion(found, expected);
- }
_ => {
debug!(
"note_type_err: exp_found={:?}, expected={:?} found={:?}",
@@ -1944,310 +1859,6 @@ impl<'tcx> TypeErrCtxt<'_, 'tcx> {
debug!(?diag);
}
- fn suggest_tuple_pattern(
- &self,
- cause: &ObligationCause<'tcx>,
- exp_found: &ty::error::ExpectedFound<Ty<'tcx>>,
- diag: &mut Diagnostic,
- ) {
- // Heavily inspired by `FnCtxt::suggest_compatible_variants`, with
- // some modifications due to that being in typeck and this being in infer.
- if let ObligationCauseCode::Pattern { .. } = cause.code() {
- if let ty::Adt(expected_adt, substs) = exp_found.expected.kind() {
- let compatible_variants: Vec<_> = expected_adt
- .variants()
- .iter()
- .filter(|variant| {
- variant.fields.len() == 1 && variant.ctor_kind == hir::def::CtorKind::Fn
- })
- .filter_map(|variant| {
- let sole_field = &variant.fields[0];
- let sole_field_ty = sole_field.ty(self.tcx, substs);
- if self.same_type_modulo_infer(sole_field_ty, exp_found.found) {
- let variant_path =
- with_no_trimmed_paths!(self.tcx.def_path_str(variant.def_id));
- // FIXME #56861: DRYer prelude filtering
- if let Some(path) = variant_path.strip_prefix("std::prelude::") {
- if let Some((_, path)) = path.split_once("::") {
- return Some(path.to_string());
- }
- }
- Some(variant_path)
- } else {
- None
- }
- })
- .collect();
- match &compatible_variants[..] {
- [] => {}
- [variant] => {
- diag.multipart_suggestion_verbose(
- &format!("try wrapping the pattern in `{}`", variant),
- vec![
- (cause.span.shrink_to_lo(), format!("{}(", variant)),
- (cause.span.shrink_to_hi(), ")".to_string()),
- ],
- Applicability::MaybeIncorrect,
- );
- }
- _ => {
- // More than one matching variant.
- diag.multipart_suggestions(
- &format!(
- "try wrapping the pattern in a variant of `{}`",
- self.tcx.def_path_str(expected_adt.did())
- ),
- compatible_variants.into_iter().map(|variant| {
- vec![
- (cause.span.shrink_to_lo(), format!("{}(", variant)),
- (cause.span.shrink_to_hi(), ")".to_string()),
- ]
- }),
- Applicability::MaybeIncorrect,
- );
- }
- }
- }
- }
- }
-
- /// A possible error is to forget to add `.await` when using futures:
- ///
- /// ```compile_fail,E0308
- /// async fn make_u32() -> u32 {
- /// 22
- /// }
- ///
- /// fn take_u32(x: u32) {}
- ///
- /// async fn foo() {
- /// let x = make_u32();
- /// take_u32(x);
- /// }
- /// ```
- ///
- /// This routine checks if the found type `T` implements `Future<Output=U>` where `U` is the
- /// expected type. If this is the case, and we are inside of an async body, it suggests adding
- /// `.await` to the tail of the expression.
- fn suggest_await_on_expect_found(
- &self,
- cause: &ObligationCause<'tcx>,
- exp_span: Span,
- exp_found: &ty::error::ExpectedFound<Ty<'tcx>>,
- diag: &mut Diagnostic,
- ) {
- debug!(
- "suggest_await_on_expect_found: exp_span={:?}, expected_ty={:?}, found_ty={:?}",
- exp_span, exp_found.expected, exp_found.found,
- );
-
- if let ObligationCauseCode::CompareImplItemObligation { .. } = cause.code() {
- return;
- }
-
- match (
- self.get_impl_future_output_ty(exp_found.expected).map(Binder::skip_binder),
- self.get_impl_future_output_ty(exp_found.found).map(Binder::skip_binder),
- ) {
- (Some(exp), Some(found)) if self.same_type_modulo_infer(exp, found) => match cause
- .code()
- {
- ObligationCauseCode::IfExpression(box IfExpressionCause { then_id, .. }) => {
- let then_span = self.find_block_span_from_hir_id(*then_id);
- diag.multipart_suggestion(
- "consider `await`ing on both `Future`s",
- vec![
- (then_span.shrink_to_hi(), ".await".to_string()),
- (exp_span.shrink_to_hi(), ".await".to_string()),
- ],
- Applicability::MaybeIncorrect,
- );
- }
- ObligationCauseCode::MatchExpressionArm(box MatchExpressionArmCause {
- prior_arms,
- ..
- }) => {
- if let [.., arm_span] = &prior_arms[..] {
- diag.multipart_suggestion(
- "consider `await`ing on both `Future`s",
- vec![
- (arm_span.shrink_to_hi(), ".await".to_string()),
- (exp_span.shrink_to_hi(), ".await".to_string()),
- ],
- Applicability::MaybeIncorrect,
- );
- } else {
- diag.help("consider `await`ing on both `Future`s");
- }
- }
- _ => {
- diag.help("consider `await`ing on both `Future`s");
- }
- },
- (_, Some(ty)) if self.same_type_modulo_infer(exp_found.expected, ty) => {
- diag.span_suggestion_verbose(
- exp_span.shrink_to_hi(),
- "consider `await`ing on the `Future`",
- ".await",
- Applicability::MaybeIncorrect,
- );
- }
- (Some(ty), _) if self.same_type_modulo_infer(ty, exp_found.found) => match cause.code()
- {
- ObligationCauseCode::Pattern { span: Some(then_span), .. } => {
- diag.span_suggestion_verbose(
- then_span.shrink_to_hi(),
- "consider `await`ing on the `Future`",
- ".await",
- Applicability::MaybeIncorrect,
- );
- }
- ObligationCauseCode::IfExpression(box IfExpressionCause { then_id, .. }) => {
- let then_span = self.find_block_span_from_hir_id(*then_id);
- diag.span_suggestion_verbose(
- then_span.shrink_to_hi(),
- "consider `await`ing on the `Future`",
- ".await",
- Applicability::MaybeIncorrect,
- );
- }
- ObligationCauseCode::MatchExpressionArm(box MatchExpressionArmCause {
- ref prior_arms,
- ..
- }) => {
- diag.multipart_suggestion_verbose(
- "consider `await`ing on the `Future`",
- prior_arms
- .iter()
- .map(|arm| (arm.shrink_to_hi(), ".await".to_string()))
- .collect(),
- Applicability::MaybeIncorrect,
- );
- }
- _ => {}
- },
- _ => {}
- }
- }
-
- fn suggest_accessing_field_where_appropriate(
- &self,
- cause: &ObligationCause<'tcx>,
- exp_found: &ty::error::ExpectedFound<Ty<'tcx>>,
- diag: &mut Diagnostic,
- ) {
- debug!(
- "suggest_accessing_field_where_appropriate(cause={:?}, exp_found={:?})",
- cause, exp_found
- );
- if let ty::Adt(expected_def, expected_substs) = exp_found.expected.kind() {
- if expected_def.is_enum() {
- return;
- }
-
- if let Some((name, ty)) = expected_def
- .non_enum_variant()
- .fields
- .iter()
- .filter(|field| field.vis.is_accessible_from(field.did, self.tcx))
- .map(|field| (field.name, field.ty(self.tcx, expected_substs)))
- .find(|(_, ty)| self.same_type_modulo_infer(*ty, exp_found.found))
- {
- if let ObligationCauseCode::Pattern { span: Some(span), .. } = *cause.code() {
- if let Ok(snippet) = self.tcx.sess.source_map().span_to_snippet(span) {
- let suggestion = if expected_def.is_struct() {
- format!("{}.{}", snippet, name)
- } else if expected_def.is_union() {
- format!("unsafe {{ {}.{} }}", snippet, name)
- } else {
- return;
- };
- diag.span_suggestion(
- span,
- &format!(
- "you might have meant to use field `{}` whose type is `{}`",
- name, ty
- ),
- suggestion,
- Applicability::MaybeIncorrect,
- );
- }
- }
- }
- }
- }
-
- /// When encountering a case where `.as_ref()` on a `Result` or `Option` would be appropriate,
- /// suggests it.
- fn suggest_as_ref_where_appropriate(
- &self,
- span: Span,
- exp_found: &ty::error::ExpectedFound<Ty<'tcx>>,
- diag: &mut Diagnostic,
- ) {
- if let Ok(snippet) = self.tcx.sess.source_map().span_to_snippet(span)
- && let Some(msg) = self.should_suggest_as_ref(exp_found.expected, exp_found.found)
- {
- diag.span_suggestion(
- span,
- msg,
- // HACK: fix issue# 100605, suggesting convert from &Option<T> to Option<&T>, remove the extra `&`
- format!("{}.as_ref()", snippet.trim_start_matches('&')),
- Applicability::MachineApplicable,
- );
- }
- }
-
- pub fn should_suggest_as_ref(&self, expected: Ty<'tcx>, found: Ty<'tcx>) -> Option<&str> {
- if let (ty::Adt(exp_def, exp_substs), ty::Ref(_, found_ty, _)) =
- (expected.kind(), found.kind())
- {
- if let ty::Adt(found_def, found_substs) = *found_ty.kind() {
- if exp_def == &found_def {
- let have_as_ref = &[
- (
- sym::Option,
- "you can convert from `&Option<T>` to `Option<&T>` using \
- `.as_ref()`",
- ),
- (
- sym::Result,
- "you can convert from `&Result<T, E>` to \
- `Result<&T, &E>` using `.as_ref()`",
- ),
- ];
- if let Some(msg) = have_as_ref.iter().find_map(|(name, msg)| {
- self.tcx.is_diagnostic_item(*name, exp_def.did()).then_some(msg)
- }) {
- let mut show_suggestion = true;
- for (exp_ty, found_ty) in
- iter::zip(exp_substs.types(), found_substs.types())
- {
- match *exp_ty.kind() {
- ty::Ref(_, exp_ty, _) => {
- match (exp_ty.kind(), found_ty.kind()) {
- (_, ty::Param(_))
- | (_, ty::Infer(_))
- | (ty::Param(_), _)
- | (ty::Infer(_), _) => {}
- _ if self.same_type_modulo_infer(exp_ty, found_ty) => {}
- _ => show_suggestion = false,
- };
- }
- ty::Param(_) | ty::Infer(_) => {}
- _ => show_suggestion = false,
- }
- }
- if show_suggestion {
- return Some(*msg);
- }
- }
- }
- }
- }
- None
- }
-
pub fn report_and_explain_type_error(
&self,
trace: TypeTrace<'tcx>,
@@ -2332,13 +1943,18 @@ impl<'tcx> TypeErrCtxt<'_, 'tcx> {
}
}
}
+ // For code like `if Some(..) = expr`, the type mismatch may be expected `bool` but found `()`;
+ // we try to suggest adding the missing `let` to make it `if let Some(..) = expr`
+ (ty::Bool, ty::Tuple(list)) => if list.len() == 0 {
+ self.suggest_let_for_letchains(&mut err, &trace.cause, span);
+ }
_ => {}
}
}
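(Aside, not part of the patch: a hypothetical example of the user code the `(ty::Bool, ty::Tuple(..))` arm just above targets: a condition written as an assignment, fixed by adding the missing `let`.)

fn main() {
    let opt = Some(1);
    // If the `let` were forgotten, e.g. `if Some(x) = opt { .. }`, the
    // condition would be an assignment rather than a `bool`, which is the
    // `bool`-vs-`()` mismatch the arm above checks for. The `let` form:
    if let Some(x) = opt {
        println!("{x}");
    }
}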
let code = trace.cause.code();
if let &MatchExpressionArm(box MatchExpressionArmCause { source, .. }) = code
&& let hir::MatchSource::TryDesugar = source
- && let Some((expected_ty, found_ty)) = self.values_str(trace.values)
+ && let Some((expected_ty, found_ty, _, _)) = self.values_str(trace.values)
{
err.note(&format!(
"`?` operator cannot convert from `{}` to `{}`",
@@ -2393,7 +2009,8 @@ impl<'tcx> TypeErrCtxt<'_, 'tcx> {
fn values_str(
&self,
values: ValuePairs<'tcx>,
- ) -> Option<(DiagnosticStyledString, DiagnosticStyledString)> {
+ ) -> Option<(DiagnosticStyledString, DiagnosticStyledString, Option<PathBuf>, Option<PathBuf>)>
+ {
match values {
infer::Regions(exp_found) => self.expected_found_str(exp_found),
infer::Terms(exp_found) => self.expected_found_str_term(exp_found),
@@ -2403,7 +2020,7 @@ impl<'tcx> TypeErrCtxt<'_, 'tcx> {
found: exp_found.found.print_only_trait_path(),
};
match self.expected_found_str(pretty_exp_found) {
- Some((expected, found)) if expected == found => {
+ Some((expected, found, _, _)) if expected == found => {
self.expected_found_str(exp_found)
}
ret => ret,
@@ -2415,7 +2032,7 @@ impl<'tcx> TypeErrCtxt<'_, 'tcx> {
found: exp_found.found.print_only_trait_path(),
};
match self.expected_found_str(pretty_exp_found) {
- Some((expected, found)) if expected == found => {
+ Some((expected, found, _, _)) if expected == found => {
self.expected_found_str(exp_found)
}
ret => ret,
@@ -2427,17 +2044,41 @@ impl<'tcx> TypeErrCtxt<'_, 'tcx> {
fn expected_found_str_term(
&self,
exp_found: ty::error::ExpectedFound<ty::Term<'tcx>>,
- ) -> Option<(DiagnosticStyledString, DiagnosticStyledString)> {
+ ) -> Option<(DiagnosticStyledString, DiagnosticStyledString, Option<PathBuf>, Option<PathBuf>)>
+ {
let exp_found = self.resolve_vars_if_possible(exp_found);
if exp_found.references_error() {
return None;
}
Some(match (exp_found.expected.unpack(), exp_found.found.unpack()) {
- (ty::TermKind::Ty(expected), ty::TermKind::Ty(found)) => self.cmp(expected, found),
+ (ty::TermKind::Ty(expected), ty::TermKind::Ty(found)) => {
+ let (mut exp, mut fnd) = self.cmp(expected, found);
+ // Use the terminal width as the basis to determine when to compress the printed
+ // out type, but give ourselves some leeway to avoid ending up creating a file for
+ // a type that is somewhat shorter than the path we'd write to.
+ let len = self.tcx.sess().diagnostic_width() + 40;
+ let exp_s = exp.content();
+ let fnd_s = fnd.content();
+ let mut exp_p = None;
+ let mut fnd_p = None;
+ if exp_s.len() > len {
+ let (exp_s, exp_path) = self.tcx.short_ty_string(expected);
+ exp = DiagnosticStyledString::highlighted(exp_s);
+ exp_p = exp_path;
+ }
+ if fnd_s.len() > len {
+ let (fnd_s, fnd_path) = self.tcx.short_ty_string(found);
+ fnd = DiagnosticStyledString::highlighted(fnd_s);
+ fnd_p = fnd_path;
+ }
+ (exp, fnd, exp_p, fnd_p)
+ }
_ => (
DiagnosticStyledString::highlighted(exp_found.expected.to_string()),
DiagnosticStyledString::highlighted(exp_found.found.to_string()),
+ None,
+ None,
),
})
}
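(Aside, not part of the patch: the width check added above shortens an overly long type name and tells the user which file the full name was written to. A standalone sketch of that length heuristic; the threshold mirrors the `diagnostic_width() + 40` above, and the file path and helper name are made up.)

use std::path::PathBuf;

fn maybe_shorten(rendered: String, diagnostic_width: usize) -> (String, Option<PathBuf>) {
    // Leeway past the terminal width, so we don't write a file for a type
    // name that is barely longer than the path we would print instead.
    let limit = diagnostic_width + 40;
    if rendered.len() > limit {
        // rustc uses a shortened form of the type plus a real temp-file path.
        ("<long type name elided>".to_string(), Some(PathBuf::from("long-type.txt")))
    } else {
        (rendered, None)
    }
}

fn main() {
    let (short, path) = maybe_shorten("u32".repeat(100), 80);
    assert!(path.is_some());
    println!("{short} (full type name written to {:?})", path.unwrap());
}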
@@ -2446,7 +2087,8 @@ impl<'tcx> TypeErrCtxt<'_, 'tcx> {
fn expected_found_str<T: fmt::Display + TypeFoldable<'tcx>>(
&self,
exp_found: ty::error::ExpectedFound<T>,
- ) -> Option<(DiagnosticStyledString, DiagnosticStyledString)> {
+ ) -> Option<(DiagnosticStyledString, DiagnosticStyledString, Option<PathBuf>, Option<PathBuf>)>
+ {
let exp_found = self.resolve_vars_if_possible(exp_found);
if exp_found.references_error() {
return None;
@@ -2455,6 +2097,8 @@ impl<'tcx> TypeErrCtxt<'_, 'tcx> {
Some((
DiagnosticStyledString::highlighted(exp_found.expected.to_string()),
DiagnosticStyledString::highlighted(exp_found.found.to_string()),
+ None,
+ None,
))
}
@@ -2788,36 +2432,29 @@ impl<'tcx> TypeErrCtxt<'_, 'tcx> {
debug!("report_sub_sup_conflict: sup_region={:?}", sup_region);
debug!("report_sub_sup_conflict: sup_origin={:?}", sup_origin);
- if let (&infer::Subtype(ref sup_trace), &infer::Subtype(ref sub_trace)) =
- (&sup_origin, &sub_origin)
+ if let infer::Subtype(ref sup_trace) = sup_origin
+ && let infer::Subtype(ref sub_trace) = sub_origin
+ && let Some((sup_expected, sup_found, _, _)) = self.values_str(sup_trace.values)
+ && let Some((sub_expected, sub_found, _, _)) = self.values_str(sub_trace.values)
+ && sub_expected == sup_expected
+ && sub_found == sup_found
{
- debug!("report_sub_sup_conflict: sup_trace={:?}", sup_trace);
- debug!("report_sub_sup_conflict: sub_trace={:?}", sub_trace);
- debug!("report_sub_sup_conflict: sup_trace.values={:?}", sup_trace.values);
- debug!("report_sub_sup_conflict: sub_trace.values={:?}", sub_trace.values);
-
- if let (Some((sup_expected, sup_found)), Some((sub_expected, sub_found))) =
- (self.values_str(sup_trace.values), self.values_str(sub_trace.values))
- {
- if sub_expected == sup_expected && sub_found == sup_found {
- note_and_explain_region(
- self.tcx,
- &mut err,
- "...but the lifetime must also be valid for ",
- sub_region,
- "...",
- None,
- );
- err.span_note(
- sup_trace.cause.span,
- &format!("...so that the {}", sup_trace.cause.as_requirement_str()),
- );
+ note_and_explain_region(
+ self.tcx,
+ &mut err,
+ "...but the lifetime must also be valid for ",
+ sub_region,
+ "...",
+ None,
+ );
+ err.span_note(
+ sup_trace.cause.span,
+ &format!("...so that the {}", sup_trace.cause.as_requirement_str()),
+ );
- err.note_expected_found(&"", sup_expected, &"", sup_found);
- err.emit();
- return;
- }
- }
+ err.note_expected_found(&"", sup_expected, &"", sup_found);
+ err.emit();
+ return;
}
self.note_region_origin(&mut err, &sup_origin);
@@ -2862,6 +2499,11 @@ impl<'tcx> TypeRelation<'tcx> for SameTypeModuloInfer<'_, 'tcx> {
self.0.tcx
}
+ fn intercrate(&self) -> bool {
+ assert!(!self.0.intercrate);
+ false
+ }
+
fn param_env(&self) -> ty::ParamEnv<'tcx> {
// Unused, only for consts which we treat as always equal
ty::ParamEnv::empty()
@@ -2875,6 +2517,10 @@ impl<'tcx> TypeRelation<'tcx> for SameTypeModuloInfer<'_, 'tcx> {
true
}
+ fn mark_ambiguous(&mut self) {
+ bug!()
+ }
+
fn relate_with_variance<T: relate::Relate<'tcx>>(
&mut self,
_variance: ty::Variance,
@@ -3043,7 +2689,6 @@ impl<'tcx> ObligationCauseExt<'tcx> for ObligationCause<'tcx> {
TypeError::IntrinsicCast => {
Error0308("cannot coerce intrinsics to function pointers")
}
- TypeError::ObjectUnsafeCoercion(did) => Error0038(did),
_ => Error0308("mismatched types"),
},
}
@@ -3158,211 +2803,3 @@ impl<'tcx> InferCtxt<'tcx> {
}
}
}
-
-impl<'tcx> TypeErrCtxt<'_, 'tcx> {
- /// Be helpful when the user wrote `{... expr; }` and taking the `;` off
- /// is enough to fix the error.
- pub fn could_remove_semicolon(
- &self,
- blk: &'tcx hir::Block<'tcx>,
- expected_ty: Ty<'tcx>,
- ) -> Option<(Span, StatementAsExpression)> {
- let blk = blk.innermost_block();
- // Do not suggest if we have a tail expr.
- if blk.expr.is_some() {
- return None;
- }
- let last_stmt = blk.stmts.last()?;
- let hir::StmtKind::Semi(ref last_expr) = last_stmt.kind else {
- return None;
- };
- let last_expr_ty = self.typeck_results.as_ref()?.expr_ty_opt(*last_expr)?;
- let needs_box = match (last_expr_ty.kind(), expected_ty.kind()) {
- _ if last_expr_ty.references_error() => return None,
- _ if self.same_type_modulo_infer(last_expr_ty, expected_ty) => {
- StatementAsExpression::CorrectType
- }
- (ty::Opaque(last_def_id, _), ty::Opaque(exp_def_id, _))
- if last_def_id == exp_def_id =>
- {
- StatementAsExpression::CorrectType
- }
- (ty::Opaque(last_def_id, last_bounds), ty::Opaque(exp_def_id, exp_bounds)) => {
- debug!(
- "both opaque, likely future {:?} {:?} {:?} {:?}",
- last_def_id, last_bounds, exp_def_id, exp_bounds
- );
-
- let last_local_id = last_def_id.as_local()?;
- let exp_local_id = exp_def_id.as_local()?;
-
- match (
- &self.tcx.hir().expect_item(last_local_id).kind,
- &self.tcx.hir().expect_item(exp_local_id).kind,
- ) {
- (
- hir::ItemKind::OpaqueTy(hir::OpaqueTy { bounds: last_bounds, .. }),
- hir::ItemKind::OpaqueTy(hir::OpaqueTy { bounds: exp_bounds, .. }),
- ) if iter::zip(*last_bounds, *exp_bounds).all(|(left, right)| {
- match (left, right) {
- (
- hir::GenericBound::Trait(tl, ml),
- hir::GenericBound::Trait(tr, mr),
- ) if tl.trait_ref.trait_def_id() == tr.trait_ref.trait_def_id()
- && ml == mr =>
- {
- true
- }
- (
- hir::GenericBound::LangItemTrait(langl, _, _, argsl),
- hir::GenericBound::LangItemTrait(langr, _, _, argsr),
- ) if langl == langr => {
- // FIXME: consider the bounds!
- debug!("{:?} {:?}", argsl, argsr);
- true
- }
- _ => false,
- }
- }) =>
- {
- StatementAsExpression::NeedsBoxing
- }
- _ => StatementAsExpression::CorrectType,
- }
- }
- _ => return None,
- };
- let span = if last_stmt.span.from_expansion() {
- let mac_call = rustc_span::source_map::original_sp(last_stmt.span, blk.span);
- self.tcx.sess.source_map().mac_call_stmt_semi_span(mac_call)?
- } else {
- last_stmt.span.with_lo(last_stmt.span.hi() - BytePos(1))
- };
- Some((span, needs_box))
- }
-
- /// Suggest returning a local binding with a compatible type if the block
- /// has no return expression.
- pub fn consider_returning_binding(
- &self,
- blk: &'tcx hir::Block<'tcx>,
- expected_ty: Ty<'tcx>,
- err: &mut Diagnostic,
- ) -> bool {
- let blk = blk.innermost_block();
- // Do not suggest if we have a tail expr.
- if blk.expr.is_some() {
- return false;
- }
- let mut shadowed = FxHashSet::default();
- let mut candidate_idents = vec![];
- let mut find_compatible_candidates = |pat: &hir::Pat<'_>| {
- if let hir::PatKind::Binding(_, hir_id, ident, _) = &pat.kind
- && let Some(pat_ty) = self
- .typeck_results
- .as_ref()
- .and_then(|typeck_results| typeck_results.node_type_opt(*hir_id))
- {
- let pat_ty = self.resolve_vars_if_possible(pat_ty);
- if self.same_type_modulo_infer(pat_ty, expected_ty)
- && !(pat_ty, expected_ty).references_error()
- && shadowed.insert(ident.name)
- {
- candidate_idents.push((*ident, pat_ty));
- }
- }
- true
- };
-
- let hir = self.tcx.hir();
- for stmt in blk.stmts.iter().rev() {
- let hir::StmtKind::Local(local) = &stmt.kind else { continue; };
- local.pat.walk(&mut find_compatible_candidates);
- }
- match hir.find(hir.get_parent_node(blk.hir_id)) {
- Some(hir::Node::Expr(hir::Expr { hir_id, .. })) => {
- match hir.find(hir.get_parent_node(*hir_id)) {
- Some(hir::Node::Arm(hir::Arm { pat, .. })) => {
- pat.walk(&mut find_compatible_candidates);
- }
- Some(
- hir::Node::Item(hir::Item { kind: hir::ItemKind::Fn(_, _, body), .. })
- | hir::Node::ImplItem(hir::ImplItem {
- kind: hir::ImplItemKind::Fn(_, body),
- ..
- })
- | hir::Node::TraitItem(hir::TraitItem {
- kind: hir::TraitItemKind::Fn(_, hir::TraitFn::Provided(body)),
- ..
- })
- | hir::Node::Expr(hir::Expr {
- kind: hir::ExprKind::Closure(hir::Closure { body, .. }),
- ..
- }),
- ) => {
- for param in hir.body(*body).params {
- param.pat.walk(&mut find_compatible_candidates);
- }
- }
- Some(hir::Node::Expr(hir::Expr {
- kind:
- hir::ExprKind::If(
- hir::Expr { kind: hir::ExprKind::Let(let_), .. },
- then_block,
- _,
- ),
- ..
- })) if then_block.hir_id == *hir_id => {
- let_.pat.walk(&mut find_compatible_candidates);
- }
- _ => {}
- }
- }
- _ => {}
- }
-
- match &candidate_idents[..] {
- [(ident, _ty)] => {
- let sm = self.tcx.sess.source_map();
- if let Some(stmt) = blk.stmts.last() {
- let stmt_span = sm.stmt_span(stmt.span, blk.span);
- let sugg = if sm.is_multiline(blk.span)
- && let Some(spacing) = sm.indentation_before(stmt_span)
- {
- format!("\n{spacing}{ident}")
- } else {
- format!(" {ident}")
- };
- err.span_suggestion_verbose(
- stmt_span.shrink_to_hi(),
- format!("consider returning the local binding `{ident}`"),
- sugg,
- Applicability::MaybeIncorrect,
- );
- } else {
- let sugg = if sm.is_multiline(blk.span)
- && let Some(spacing) = sm.indentation_before(blk.span.shrink_to_lo())
- {
- format!("\n{spacing} {ident}\n{spacing}")
- } else {
- format!(" {ident} ")
- };
- let left_span = sm.span_through_char(blk.span, '{').shrink_to_hi();
- err.span_suggestion_verbose(
- sm.span_extend_while(left_span, |c| c.is_whitespace()).unwrap_or(left_span),
- format!("consider returning the local binding `{ident}`"),
- sugg,
- Applicability::MaybeIncorrect,
- );
- }
- true
- }
- values if (1..3).contains(&values.len()) => {
- let spans = values.iter().map(|(ident, _)| ident.span).collect::<Vec<_>>();
- err.span_note(spans, "consider returning one of these bindings");
- true
- }
- _ => false,
- }
- }
-}
diff --git a/compiler/rustc_infer/src/infer/error_reporting/need_type_info.rs b/compiler/rustc_infer/src/infer/error_reporting/need_type_info.rs
index 7b3178e61..8ff1639a3 100644
--- a/compiler/rustc_infer/src/infer/error_reporting/need_type_info.rs
+++ b/compiler/rustc_infer/src/infer/error_reporting/need_type_info.rs
@@ -15,7 +15,7 @@ use rustc_hir::intravisit::{self, Visitor};
use rustc_hir::{Body, Closure, Expr, ExprKind, FnRetTy, HirId, Local, LocalSource};
use rustc_middle::hir::nested_filter;
use rustc_middle::infer::unify_key::{ConstVariableOrigin, ConstVariableOriginKind};
-use rustc_middle::ty::adjustment::{Adjust, Adjustment, AutoBorrow, AutoBorrowMutability};
+use rustc_middle::ty::adjustment::{Adjust, Adjustment, AutoBorrow};
use rustc_middle::ty::print::{FmtPrinter, PrettyPrinter, Print, Printer};
use rustc_middle::ty::{self, DefIdTree, InferConst};
use rustc_middle::ty::{GenericArg, GenericArgKind, SubstsRef};
@@ -508,10 +508,7 @@ impl<'tcx> TypeErrCtxt<'_, 'tcx> {
[
..,
Adjustment { kind: Adjust::Borrow(AutoBorrow::Ref(_, mut_)), target: _ },
- ] => match mut_ {
- AutoBorrowMutability::Mut { .. } => "&mut ",
- AutoBorrowMutability::Not => "&",
- },
+ ] => hir::Mutability::from(*mut_).ref_prefix_str(),
_ => "",
};
@@ -571,7 +568,7 @@ impl<'tcx> InferCtxt<'tcx> {
&self,
kind: hir::GeneratorKind,
span: Span,
- ty: Ty<'tcx>,
+ ty: ty::Term<'tcx>,
) -> DiagnosticBuilder<'tcx, ErrorGuaranteed> {
let ty = self.resolve_vars_if_possible(ty);
let data = self.extract_inference_diagnostics_data(ty.into(), None);
diff --git a/compiler/rustc_infer/src/infer/error_reporting/nice_region_error/mismatched_static_lifetime.rs b/compiler/rustc_infer/src/infer/error_reporting/nice_region_error/mismatched_static_lifetime.rs
index c5f2a1a3f..1067ccda2 100644
--- a/compiler/rustc_infer/src/infer/error_reporting/nice_region_error/mismatched_static_lifetime.rs
+++ b/compiler/rustc_infer/src/infer/error_reporting/nice_region_error/mismatched_static_lifetime.rs
@@ -9,7 +9,7 @@ use crate::infer::error_reporting::nice_region_error::NiceRegionError;
use crate::infer::lexical_region_resolve::RegionResolutionError;
use crate::infer::{SubregionOrigin, TypeTrace};
use crate::traits::ObligationCauseCode;
-use rustc_data_structures::fx::FxHashSet;
+use rustc_data_structures::fx::FxIndexSet;
use rustc_errors::{ErrorGuaranteed, MultiSpan};
use rustc_hir as hir;
use rustc_hir::intravisit::Visitor;
@@ -73,7 +73,7 @@ impl<'a, 'tcx> NiceRegionError<'a, 'tcx> {
// Next, let's figure out the set of trait objects with implicit static bounds
let ty = self.tcx().type_of(*impl_def_id);
- let mut v = super::static_impl_trait::TraitObjectVisitor(FxHashSet::default());
+ let mut v = super::static_impl_trait::TraitObjectVisitor(FxIndexSet::default());
v.visit_ty(ty);
let mut traits = vec![];
for matching_def_id in v.0 {
diff --git a/compiler/rustc_infer/src/infer/error_reporting/nice_region_error/mod.rs b/compiler/rustc_infer/src/infer/error_reporting/nice_region_error/mod.rs
index aaf5a7af0..8a0e332f9 100644
--- a/compiler/rustc_infer/src/infer/error_reporting/nice_region_error/mod.rs
+++ b/compiler/rustc_infer/src/infer/error_reporting/nice_region_error/mod.rs
@@ -10,6 +10,7 @@ pub mod find_anon_type;
mod mismatched_static_lifetime;
mod named_anon_conflict;
mod placeholder_error;
+mod placeholder_relation;
mod static_impl_trait;
mod trait_impl_difference;
mod util;
@@ -52,7 +53,9 @@ impl<'cx, 'tcx> NiceRegionError<'cx, 'tcx> {
pub fn try_report_from_nll(&self) -> Option<DiagnosticBuilder<'tcx, ErrorGuaranteed>> {
// Due to the improved diagnostics returned by the MIR borrow checker, only a subset of
// the nice region errors are required when running under the MIR borrow checker.
- self.try_report_named_anon_conflict().or_else(|| self.try_report_placeholder_conflict())
+ self.try_report_named_anon_conflict()
+ .or_else(|| self.try_report_placeholder_conflict())
+ .or_else(|| self.try_report_placeholder_relation())
}
pub fn try_report(&self) -> Option<ErrorGuaranteed> {
diff --git a/compiler/rustc_infer/src/infer/error_reporting/nice_region_error/named_anon_conflict.rs b/compiler/rustc_infer/src/infer/error_reporting/nice_region_error/named_anon_conflict.rs
index 76cb76d9f..3fe7c1598 100644
--- a/compiler/rustc_infer/src/infer/error_reporting/nice_region_error/named_anon_conflict.rs
+++ b/compiler/rustc_infer/src/infer/error_reporting/nice_region_error/named_anon_conflict.rs
@@ -68,7 +68,7 @@ impl<'a, 'tcx> NiceRegionError<'a, 'tcx> {
let is_impl_item = region_info.is_impl_item;
match br {
- ty::BrNamed(_, kw::UnderscoreLifetime) | ty::BrAnon(_) => {}
+ ty::BrNamed(_, kw::UnderscoreLifetime) | ty::BrAnon(..) => {}
_ => {
/* not an anonymous region */
debug!("try_report_named_anon_conflict: not an anonymous region");
diff --git a/compiler/rustc_infer/src/infer/error_reporting/nice_region_error/placeholder_error.rs b/compiler/rustc_infer/src/infer/error_reporting/nice_region_error/placeholder_error.rs
index a58516829..1f554c81e 100644
--- a/compiler/rustc_infer/src/infer/error_reporting/nice_region_error/placeholder_error.rs
+++ b/compiler/rustc_infer/src/infer/error_reporting/nice_region_error/placeholder_error.rs
@@ -399,10 +399,7 @@ impl<'tcx> NiceRegionError<'_, 'tcx> {
self_ty.highlight.maybe_highlighting_region(vid, actual_has_vid);
if self_ty.value.is_closure()
- && self
- .tcx()
- .fn_trait_kind_from_lang_item(expected_trait_ref.value.def_id)
- .is_some()
+ && self.tcx().is_fn_trait(expected_trait_ref.value.def_id)
{
let closure_sig = self_ty.map(|closure| {
if let ty::Closure(_, substs) = closure.kind() {
diff --git a/compiler/rustc_infer/src/infer/error_reporting/nice_region_error/placeholder_relation.rs b/compiler/rustc_infer/src/infer/error_reporting/nice_region_error/placeholder_relation.rs
new file mode 100644
index 000000000..c42240f21
--- /dev/null
+++ b/compiler/rustc_infer/src/infer/error_reporting/nice_region_error/placeholder_relation.rs
@@ -0,0 +1,79 @@
+use crate::infer::{
+ error_reporting::nice_region_error::NiceRegionError, RegionResolutionError, SubregionOrigin,
+};
+use rustc_data_structures::intern::Interned;
+use rustc_errors::{DiagnosticBuilder, ErrorGuaranteed};
+use rustc_middle::ty::{self, RePlaceholder, Region};
+
+impl<'tcx> NiceRegionError<'_, 'tcx> {
+ /// Emitted when given a `ConcreteFailure` that arises from relating two placeholders.
+ pub(super) fn try_report_placeholder_relation(
+ &self,
+ ) -> Option<DiagnosticBuilder<'tcx, ErrorGuaranteed>> {
+ match &self.error {
+ Some(RegionResolutionError::ConcreteFailure(
+ SubregionOrigin::RelateRegionParamBound(span),
+ Region(Interned(RePlaceholder(ty::Placeholder { name: sub_name, .. }), _)),
+ Region(Interned(RePlaceholder(ty::Placeholder { name: sup_name, .. }), _)),
+ )) => {
+ let msg = "lifetime bound not satisfied";
+ let mut err = self.tcx().sess.struct_span_err(*span, msg);
+ let (sub_span, sub_symbol) = match sub_name {
+ ty::BrNamed(def_id, symbol) => {
+ (Some(self.tcx().def_span(def_id)), Some(symbol))
+ }
+ ty::BrAnon(_, span) => (*span, None),
+ ty::BrEnv => (None, None),
+ };
+ let (sup_span, sup_symbol) = match sup_name {
+ ty::BrNamed(def_id, symbol) => {
+ (Some(self.tcx().def_span(def_id)), Some(symbol))
+ }
+ ty::BrAnon(_, span) => (*span, None),
+ ty::BrEnv => (None, None),
+ };
+ match (sub_span, sup_span, sub_symbol, sup_symbol) {
+ (Some(sub_span), Some(sup_span), Some(sub_symbol), Some(sup_symbol)) => {
+ err.span_note(
+ sub_span,
+ format!("the lifetime `{sub_symbol}` defined here..."),
+ );
+ err.span_note(
+ sup_span,
+ format!("...must outlive the lifetime `{sup_symbol}` defined here"),
+ );
+ }
+ (Some(sub_span), Some(sup_span), _, Some(sup_symbol)) => {
+ err.span_note(sub_span, format!("the lifetime defined here..."));
+ err.span_note(
+ sup_span,
+ format!("...must outlive the lifetime `{sup_symbol}` defined here"),
+ );
+ }
+ (Some(sub_span), Some(sup_span), Some(sub_symbol), _) => {
+ err.span_note(
+ sub_span,
+ format!("the lifetime `{sub_symbol}` defined here..."),
+ );
+ err.span_note(
+ sup_span,
+ format!("...must outlive the lifetime defined here"),
+ );
+ }
+ (Some(sub_span), Some(sup_span), _, _) => {
+ err.span_note(sub_span, format!("the lifetime defined here..."));
+ err.span_note(
+ sup_span,
+ format!("...must outlive the lifetime defined here"),
+ );
+ }
+ _ => {}
+ }
+ err.note("this is a known limitation that will be removed in the future (see issue #100013 <https://github.com/rust-lang/rust/issues/100013> for more information)");
+ Some(err)
+ }
+
+ _ => None,
+ }
+ }
+}
diff --git a/compiler/rustc_infer/src/infer/error_reporting/nice_region_error/static_impl_trait.rs b/compiler/rustc_infer/src/infer/error_reporting/nice_region_error/static_impl_trait.rs
index 9bf755d7f..09f9aa3c8 100644
--- a/compiler/rustc_infer/src/infer/error_reporting/nice_region_error/static_impl_trait.rs
+++ b/compiler/rustc_infer/src/infer/error_reporting/nice_region_error/static_impl_trait.rs
@@ -4,7 +4,7 @@ use crate::infer::error_reporting::nice_region_error::NiceRegionError;
use crate::infer::lexical_region_resolve::RegionResolutionError;
use crate::infer::{SubregionOrigin, TypeTrace};
use crate::traits::{ObligationCauseCode, UnifyReceiverContext};
-use rustc_data_structures::fx::FxHashSet;
+use rustc_data_structures::fx::FxIndexSet;
use rustc_errors::{struct_span_err, Applicability, Diagnostic, ErrorGuaranteed, MultiSpan};
use rustc_hir::def_id::DefId;
use rustc_hir::intravisit::{walk_ty, Visitor};
@@ -236,7 +236,7 @@ impl<'a, 'tcx> NiceRegionError<'a, 'tcx> {
// Same case of `impl Foo for dyn Bar { fn qux(&self) {} }` introducing a `'static`
// lifetime as above, but called using a fully-qualified path to the method:
// `Foo::qux(bar)`.
- let mut v = TraitObjectVisitor(FxHashSet::default());
+ let mut v = TraitObjectVisitor(FxIndexSet::default());
v.visit_ty(param.param_ty);
if let Some((ident, self_ty)) =
self.get_impl_ident_and_self_ty_from_trait(item_def_id, &v.0)
@@ -314,10 +314,10 @@ pub fn suggest_new_region_bound(
.iter()
.filter_map(|arg| match arg {
GenericBound::Outlives(Lifetime {
- name: LifetimeName::Static,
- span,
+ res: LifetimeName::Static,
+ ident,
..
- }) => Some(*span),
+ }) => Some(ident.span),
_ => None,
})
.next()
@@ -342,10 +342,10 @@ pub fn suggest_new_region_bound(
.bounds
.iter()
.filter_map(|arg| match arg {
- GenericBound::Outlives(Lifetime { name, span, .. })
- if name.ident().to_string() == lifetime_name =>
+ GenericBound::Outlives(Lifetime { ident, .. })
+ if ident.name.to_string() == lifetime_name =>
{
- Some(*span)
+ Some(ident.span)
}
_ => None,
})
@@ -361,8 +361,8 @@ pub fn suggest_new_region_bound(
);
}
}
- TyKind::TraitObject(_, lt, _) => match lt.name {
- LifetimeName::ImplicitObjectLifetimeDefault => {
+ TyKind::TraitObject(_, lt, _) => {
+ if let LifetimeName::ImplicitObjectLifetimeDefault = lt.res {
err.span_suggestion_verbose(
fn_return.span.shrink_to_hi(),
&format!(
@@ -374,15 +374,14 @@ pub fn suggest_new_region_bound(
&plus_lt,
Applicability::MaybeIncorrect,
);
- }
- name if name.ident().to_string() != lifetime_name => {
+ } else if lt.ident.name.to_string() != lifetime_name {
// With this check we avoid suggesting redundant bounds. This
// would happen if there are nested impl/dyn traits and only
// one of them has the bound we'd suggest already there, like
// in `impl Foo<X = dyn Bar> + '_`.
if let Some(explicit_static) = &explicit_static {
err.span_suggestion_verbose(
- lt.span,
+ lt.ident.span,
&format!("{} the trait object's {}", consider, explicit_static),
&lifetime_name,
Applicability::MaybeIncorrect,
@@ -397,8 +396,7 @@ pub fn suggest_new_region_bound(
);
}
}
- _ => {}
- },
+ }
_ => {}
}
}
@@ -408,7 +406,7 @@ impl<'a, 'tcx> NiceRegionError<'a, 'tcx> {
fn get_impl_ident_and_self_ty_from_trait(
&self,
def_id: DefId,
- trait_objects: &FxHashSet<DefId>,
+ trait_objects: &FxIndexSet<DefId>,
) -> Option<(Ident, &'tcx hir::Ty<'tcx>)> {
let tcx = self.tcx();
match tcx.hir().get_if_local(def_id) {
@@ -490,7 +488,7 @@ impl<'a, 'tcx> NiceRegionError<'a, 'tcx> {
return false;
};
- let mut v = TraitObjectVisitor(FxHashSet::default());
+ let mut v = TraitObjectVisitor(FxIndexSet::default());
v.visit_ty(ty);
// Get the `Ident` of the method being called and the corresponding `impl` (to point at
@@ -506,7 +504,7 @@ impl<'a, 'tcx> NiceRegionError<'a, 'tcx> {
fn suggest_constrain_dyn_trait_in_impl(
&self,
err: &mut Diagnostic,
- found_dids: &FxHashSet<DefId>,
+ found_dids: &FxIndexSet<DefId>,
ident: Ident,
self_ty: &hir::Ty<'_>,
) -> bool {
@@ -538,7 +536,7 @@ impl<'a, 'tcx> NiceRegionError<'a, 'tcx> {
}
/// Collect all the trait objects in a type that could have received an implicit `'static` lifetime.
-pub struct TraitObjectVisitor(pub FxHashSet<DefId>);
+pub struct TraitObjectVisitor(pub FxIndexSet<DefId>);
impl<'tcx> TypeVisitor<'tcx> for TraitObjectVisitor {
fn visit_ty(&mut self, t: Ty<'tcx>) -> ControlFlow<Self::BreakTy> {
@@ -561,7 +559,7 @@ impl<'a, 'tcx> Visitor<'tcx> for HirTraitObjectVisitor<'a> {
fn visit_ty(&mut self, t: &'tcx hir::Ty<'tcx>) {
if let TyKind::TraitObject(
poly_trait_refs,
- Lifetime { name: LifetimeName::ImplicitObjectLifetimeDefault, .. },
+ Lifetime { res: LifetimeName::ImplicitObjectLifetimeDefault, .. },
_,
) = t.kind
{
diff --git a/compiler/rustc_infer/src/infer/error_reporting/nice_region_error/util.rs b/compiler/rustc_infer/src/infer/error_reporting/nice_region_error/util.rs
index f1461d701..fd26d7d29 100644
--- a/compiler/rustc_infer/src/infer/error_reporting/nice_region_error/util.rs
+++ b/compiler/rustc_infer/src/infer/error_reporting/nice_region_error/util.rs
@@ -149,6 +149,8 @@ impl<'a, 'tcx> NiceRegionError<'a, 'tcx> {
region: ty::BoundRegionKind,
) -> bool {
let late_bound_regions = self.tcx().collect_referenced_late_bound_regions(&ty);
+ // We are only checking whether any region meets the condition, so order doesn't matter
+ #[allow(rustc::potential_query_instability)]
late_bound_regions.iter().any(|r| *r == region)
}
diff --git a/compiler/rustc_infer/src/infer/error_reporting/note.rs b/compiler/rustc_infer/src/infer/error_reporting/note.rs
index 41b115f33..d2dffa4a0 100644
--- a/compiler/rustc_infer/src/infer/error_reporting/note.rs
+++ b/compiler/rustc_infer/src/infer/error_reporting/note.rs
@@ -16,7 +16,7 @@ impl<'tcx> TypeErrCtxt<'_, 'tcx> {
infer::Subtype(ref trace) => RegionOriginNote::WithRequirement {
span: trace.cause.span,
requirement: ObligationCauseAsDiagArg(trace.cause.clone()),
- expected_found: self.values_str(trace.values),
+ expected_found: self.values_str(trace.values).map(|(e, f, _, _)| (e, f)),
}
.add_to_diagnostic(err),
infer::Reborrow(span) => {
diff --git a/compiler/rustc_infer/src/infer/error_reporting/note_region.rs b/compiler/rustc_infer/src/infer/error_reporting/note_region.rs
new file mode 100644
index 000000000..41b115f33
--- /dev/null
+++ b/compiler/rustc_infer/src/infer/error_reporting/note_region.rs
@@ -0,0 +1,427 @@
+use crate::errors::RegionOriginNote;
+use crate::infer::error_reporting::{note_and_explain_region, TypeErrCtxt};
+use crate::infer::{self, SubregionOrigin};
+use rustc_errors::{
+ fluent, struct_span_err, AddToDiagnostic, Diagnostic, DiagnosticBuilder, ErrorGuaranteed,
+};
+use rustc_middle::traits::ObligationCauseCode;
+use rustc_middle::ty::error::TypeError;
+use rustc_middle::ty::{self, Region};
+
+use super::ObligationCauseAsDiagArg;
+
+impl<'tcx> TypeErrCtxt<'_, 'tcx> {
+ pub(super) fn note_region_origin(&self, err: &mut Diagnostic, origin: &SubregionOrigin<'tcx>) {
+ match *origin {
+ infer::Subtype(ref trace) => RegionOriginNote::WithRequirement {
+ span: trace.cause.span,
+ requirement: ObligationCauseAsDiagArg(trace.cause.clone()),
+ expected_found: self.values_str(trace.values),
+ }
+ .add_to_diagnostic(err),
+ infer::Reborrow(span) => {
+ RegionOriginNote::Plain { span, msg: fluent::infer_reborrow }.add_to_diagnostic(err)
+ }
+ infer::ReborrowUpvar(span, ref upvar_id) => {
+ let var_name = self.tcx.hir().name(upvar_id.var_path.hir_id);
+ RegionOriginNote::WithName {
+ span,
+ msg: fluent::infer_reborrow,
+ name: &var_name.to_string(),
+ continues: false,
+ }
+ .add_to_diagnostic(err);
+ }
+ infer::RelateObjectBound(span) => {
+ RegionOriginNote::Plain { span, msg: fluent::infer_relate_object_bound }
+ .add_to_diagnostic(err);
+ }
+ infer::DataBorrowed(ty, span) => {
+ RegionOriginNote::WithName {
+ span,
+ msg: fluent::infer_data_borrowed,
+ name: &self.ty_to_string(ty),
+ continues: false,
+ }
+ .add_to_diagnostic(err);
+ }
+ infer::ReferenceOutlivesReferent(ty, span) => {
+ RegionOriginNote::WithName {
+ span,
+ msg: fluent::infer_reference_outlives_referent,
+ name: &self.ty_to_string(ty),
+ continues: false,
+ }
+ .add_to_diagnostic(err);
+ }
+ infer::RelateParamBound(span, ty, opt_span) => {
+ RegionOriginNote::WithName {
+ span,
+ msg: fluent::infer_relate_param_bound,
+ name: &self.ty_to_string(ty),
+ continues: opt_span.is_some(),
+ }
+ .add_to_diagnostic(err);
+ if let Some(span) = opt_span {
+ RegionOriginNote::Plain { span, msg: fluent::infer_relate_param_bound_2 }
+ .add_to_diagnostic(err);
+ }
+ }
+ infer::RelateRegionParamBound(span) => {
+ RegionOriginNote::Plain { span, msg: fluent::infer_relate_region_param_bound }
+ .add_to_diagnostic(err);
+ }
+ infer::CompareImplItemObligation { span, .. } => {
+ RegionOriginNote::Plain { span, msg: fluent::infer_compare_impl_item_obligation }
+ .add_to_diagnostic(err);
+ }
+ infer::CheckAssociatedTypeBounds { ref parent, .. } => {
+ self.note_region_origin(err, &parent);
+ }
+ infer::AscribeUserTypeProvePredicate(span) => {
+ RegionOriginNote::Plain {
+ span,
+ msg: fluent::infer_ascribe_user_type_prove_predicate,
+ }
+ .add_to_diagnostic(err);
+ }
+ }
+ }
+
+ pub(super) fn report_concrete_failure(
+ &self,
+ origin: SubregionOrigin<'tcx>,
+ sub: Region<'tcx>,
+ sup: Region<'tcx>,
+ ) -> DiagnosticBuilder<'tcx, ErrorGuaranteed> {
+ match origin {
+ infer::Subtype(box trace) => {
+ let terr = TypeError::RegionsDoesNotOutlive(sup, sub);
+ let mut err = self.report_and_explain_type_error(trace, terr);
+ match (*sub, *sup) {
+ (ty::RePlaceholder(_), ty::RePlaceholder(_)) => {}
+ (ty::RePlaceholder(_), _) => {
+ note_and_explain_region(
+ self.tcx,
+ &mut err,
+ "",
+ sup,
+ " doesn't meet the lifetime requirements",
+ None,
+ );
+ }
+ (_, ty::RePlaceholder(_)) => {
+ note_and_explain_region(
+ self.tcx,
+ &mut err,
+ "the required lifetime does not necessarily outlive ",
+ sub,
+ "",
+ None,
+ );
+ }
+ _ => {
+ note_and_explain_region(self.tcx, &mut err, "", sup, "...", None);
+ note_and_explain_region(
+ self.tcx,
+ &mut err,
+ "...does not necessarily outlive ",
+ sub,
+ "",
+ None,
+ );
+ }
+ }
+ err
+ }
+ infer::Reborrow(span) => {
+ let mut err = struct_span_err!(
+ self.tcx.sess,
+ span,
+ E0312,
+ "lifetime of reference outlives lifetime of borrowed content..."
+ );
+ note_and_explain_region(
+ self.tcx,
+ &mut err,
+ "...the reference is valid for ",
+ sub,
+ "...",
+ None,
+ );
+ note_and_explain_region(
+ self.tcx,
+ &mut err,
+ "...but the borrowed content is only valid for ",
+ sup,
+ "",
+ None,
+ );
+ err
+ }
+ infer::ReborrowUpvar(span, ref upvar_id) => {
+ let var_name = self.tcx.hir().name(upvar_id.var_path.hir_id);
+ let mut err = struct_span_err!(
+ self.tcx.sess,
+ span,
+ E0313,
+ "lifetime of borrowed pointer outlives lifetime of captured variable `{}`...",
+ var_name
+ );
+ note_and_explain_region(
+ self.tcx,
+ &mut err,
+ "...the borrowed pointer is valid for ",
+ sub,
+ "...",
+ None,
+ );
+ note_and_explain_region(
+ self.tcx,
+ &mut err,
+ &format!("...but `{}` is only valid for ", var_name),
+ sup,
+ "",
+ None,
+ );
+ err
+ }
+ infer::RelateObjectBound(span) => {
+ let mut err = struct_span_err!(
+ self.tcx.sess,
+ span,
+ E0476,
+ "lifetime of the source pointer does not outlive lifetime bound of the \
+ object type"
+ );
+ note_and_explain_region(
+ self.tcx,
+ &mut err,
+ "object type is valid for ",
+ sub,
+ "",
+ None,
+ );
+ note_and_explain_region(
+ self.tcx,
+ &mut err,
+ "source pointer is only valid for ",
+ sup,
+ "",
+ None,
+ );
+ err
+ }
+ infer::RelateParamBound(span, ty, opt_span) => {
+ let mut err = struct_span_err!(
+ self.tcx.sess,
+ span,
+ E0477,
+ "the type `{}` does not fulfill the required lifetime",
+ self.ty_to_string(ty)
+ );
+ match *sub {
+ ty::ReStatic => note_and_explain_region(
+ self.tcx,
+ &mut err,
+ "type must satisfy ",
+ sub,
+ if opt_span.is_some() { " as required by this binding" } else { "" },
+ opt_span,
+ ),
+ _ => note_and_explain_region(
+ self.tcx,
+ &mut err,
+ "type must outlive ",
+ sub,
+ if opt_span.is_some() { " as required by this binding" } else { "" },
+ opt_span,
+ ),
+ }
+ err
+ }
+ infer::RelateRegionParamBound(span) => {
+ let mut err =
+ struct_span_err!(self.tcx.sess, span, E0478, "lifetime bound not satisfied");
+ note_and_explain_region(
+ self.tcx,
+ &mut err,
+ "lifetime parameter instantiated with ",
+ sup,
+ "",
+ None,
+ );
+ note_and_explain_region(
+ self.tcx,
+ &mut err,
+ "but lifetime parameter must outlive ",
+ sub,
+ "",
+ None,
+ );
+ err
+ }
+ infer::DataBorrowed(ty, span) => {
+ let mut err = struct_span_err!(
+ self.tcx.sess,
+ span,
+ E0490,
+ "a value of type `{}` is borrowed for too long",
+ self.ty_to_string(ty)
+ );
+ note_and_explain_region(
+ self.tcx,
+ &mut err,
+ "the type is valid for ",
+ sub,
+ "",
+ None,
+ );
+ note_and_explain_region(
+ self.tcx,
+ &mut err,
+ "but the borrow lasts for ",
+ sup,
+ "",
+ None,
+ );
+ err
+ }
+ infer::ReferenceOutlivesReferent(ty, span) => {
+ let mut err = struct_span_err!(
+ self.tcx.sess,
+ span,
+ E0491,
+ "in type `{}`, reference has a longer lifetime than the data it references",
+ self.ty_to_string(ty)
+ );
+ note_and_explain_region(
+ self.tcx,
+ &mut err,
+ "the pointer is valid for ",
+ sub,
+ "",
+ None,
+ );
+ note_and_explain_region(
+ self.tcx,
+ &mut err,
+ "but the referenced data is only valid for ",
+ sup,
+ "",
+ None,
+ );
+ err
+ }
+ infer::CompareImplItemObligation { span, impl_item_def_id, trait_item_def_id } => self
+ .report_extra_impl_obligation(
+ span,
+ impl_item_def_id,
+ trait_item_def_id,
+ &format!("`{}: {}`", sup, sub),
+ ),
+ infer::CheckAssociatedTypeBounds { impl_item_def_id, trait_item_def_id, parent } => {
+ let mut err = self.report_concrete_failure(*parent, sub, sup);
+
+ let trait_item_span = self.tcx.def_span(trait_item_def_id);
+ let item_name = self.tcx.item_name(impl_item_def_id.to_def_id());
+ err.span_label(
+ trait_item_span,
+ format!("definition of `{}` from trait", item_name),
+ );
+
+ let trait_predicates = self.tcx.explicit_predicates_of(trait_item_def_id);
+ let impl_predicates = self.tcx.explicit_predicates_of(impl_item_def_id);
+
+ let impl_predicates: rustc_data_structures::fx::FxHashSet<_> =
+ impl_predicates.predicates.into_iter().map(|(pred, _)| pred).collect();
+ let clauses: Vec<_> = trait_predicates
+ .predicates
+ .into_iter()
+ .filter(|&(pred, _)| !impl_predicates.contains(pred))
+ .map(|(pred, _)| format!("{}", pred))
+ .collect();
+
+ if !clauses.is_empty() {
+ let generics = self.tcx.hir().get_generics(impl_item_def_id).unwrap();
+ let where_clause_span = generics.tail_span_for_predicate_suggestion();
+
+ let suggestion = format!(
+ "{} {}",
+ generics.add_where_or_trailing_comma(),
+ clauses.join(", "),
+ );
+ err.span_suggestion(
+ where_clause_span,
+ &format!(
+ "try copying {} from the trait",
+ if clauses.len() > 1 { "these clauses" } else { "this clause" }
+ ),
+ suggestion,
+ rustc_errors::Applicability::MaybeIncorrect,
+ );
+ }
+
+ err
+ }
+ infer::AscribeUserTypeProvePredicate(span) => {
+ let mut err =
+ struct_span_err!(self.tcx.sess, span, E0478, "lifetime bound not satisfied");
+ note_and_explain_region(
+ self.tcx,
+ &mut err,
+ "lifetime instantiated with ",
+ sup,
+ "",
+ None,
+ );
+ note_and_explain_region(
+ self.tcx,
+ &mut err,
+ "but lifetime must outlive ",
+ sub,
+ "",
+ None,
+ );
+ err
+ }
+ }
+ }
+
+ pub(super) fn report_placeholder_failure(
+ &self,
+ placeholder_origin: SubregionOrigin<'tcx>,
+ sub: Region<'tcx>,
+ sup: Region<'tcx>,
+ ) -> DiagnosticBuilder<'tcx, ErrorGuaranteed> {
+ // I can't think how to do better than this right now. -nikomatsakis
+ debug!(?placeholder_origin, ?sub, ?sup, "report_placeholder_failure");
+ match placeholder_origin {
+ infer::Subtype(box ref trace)
+ if matches!(
+ &trace.cause.code().peel_derives(),
+ ObligationCauseCode::BindingObligation(..)
+ | ObligationCauseCode::ExprBindingObligation(..)
+ ) =>
+ {
+ // Hack to get around the borrow checker because trace.cause has an `Rc`.
+ if let ObligationCauseCode::BindingObligation(_, span)
+ | ObligationCauseCode::ExprBindingObligation(_, span, ..) =
+ &trace.cause.code().peel_derives()
+ {
+ let span = *span;
+ let mut err = self.report_concrete_failure(placeholder_origin, sub, sup);
+ err.span_note(span, "the lifetime requirement is introduced here");
+ err
+ } else {
+ unreachable!()
+ }
+ }
+ infer::Subtype(box trace) => {
+ let terr = TypeError::RegionsPlaceholderMismatch;
+ return self.report_and_explain_type_error(trace, terr);
+ }
+ _ => return self.report_concrete_failure(placeholder_origin, sub, sup),
+ }
+ }
+}
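
As context for the concrete-failure reporting added above, here is a minimal, hypothetical example of user code that hits a `RelateParamBound`-style region failure: the borrowed argument cannot satisfy a `'static` bound, so one of the paths in `report_concrete_failure` ends up describing the unmet lifetime requirement (the exact wording and path depend on which region check rejects it). The snippet is intentionally a compile error.

pub fn require_static<T: 'static>(value: T) -> T {
    value
}

pub fn caller<'a>(s: &'a str) -> &'a str {
    // `T` is instantiated with `&'a str`, which is not `'static` here,
    // so this call is rejected with a lifetime error.
    require_static(s)
}
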
diff --git a/compiler/rustc_infer/src/infer/error_reporting/suggest.rs b/compiler/rustc_infer/src/infer/error_reporting/suggest.rs
new file mode 100644
index 000000000..73b5a2cc4
--- /dev/null
+++ b/compiler/rustc_infer/src/infer/error_reporting/suggest.rs
@@ -0,0 +1,672 @@
+use hir::def::CtorKind;
+use hir::intravisit::{walk_expr, walk_stmt, Visitor};
+use rustc_data_structures::fx::FxIndexSet;
+use rustc_errors::{Applicability, Diagnostic};
+use rustc_hir as hir;
+use rustc_middle::traits::{
+ IfExpressionCause, MatchExpressionArmCause, ObligationCause, ObligationCauseCode,
+ StatementAsExpression,
+};
+use rustc_middle::ty::print::with_no_trimmed_paths;
+use rustc_middle::ty::{self as ty, Ty, TypeVisitable};
+use rustc_span::{sym, BytePos, Span};
+
+use crate::errors::SuggAddLetForLetChains;
+
+use super::TypeErrCtxt;
+
+impl<'tcx> TypeErrCtxt<'_, 'tcx> {
+ pub(super) fn suggest_remove_semi_or_return_binding(
+ &self,
+ err: &mut Diagnostic,
+ first_id: Option<hir::HirId>,
+ first_ty: Ty<'tcx>,
+ first_span: Span,
+ second_id: Option<hir::HirId>,
+ second_ty: Ty<'tcx>,
+ second_span: Span,
+ ) {
+ let remove_semicolon = [
+ (first_id, self.resolve_vars_if_possible(second_ty)),
+ (second_id, self.resolve_vars_if_possible(first_ty)),
+ ]
+ .into_iter()
+ .find_map(|(id, ty)| {
+ let hir::Node::Block(blk) = self.tcx.hir().get(id?) else { return None };
+ self.could_remove_semicolon(blk, ty)
+ });
+ match remove_semicolon {
+ Some((sp, StatementAsExpression::NeedsBoxing)) => {
+ err.multipart_suggestion(
+ "consider removing this semicolon and boxing the expressions",
+ vec![
+ (first_span.shrink_to_lo(), "Box::new(".to_string()),
+ (first_span.shrink_to_hi(), ")".to_string()),
+ (second_span.shrink_to_lo(), "Box::new(".to_string()),
+ (second_span.shrink_to_hi(), ")".to_string()),
+ (sp, String::new()),
+ ],
+ Applicability::MachineApplicable,
+ );
+ }
+ Some((sp, StatementAsExpression::CorrectType)) => {
+ err.span_suggestion_short(
+ sp,
+ "consider removing this semicolon",
+ "",
+ Applicability::MachineApplicable,
+ );
+ }
+ None => {
+ for (id, ty) in [(first_id, second_ty), (second_id, first_ty)] {
+ if let Some(id) = id
+ && let hir::Node::Block(blk) = self.tcx.hir().get(id)
+ && self.consider_returning_binding(blk, ty, err)
+ {
+ break;
+ }
+ }
+ }
+ }
+ }
+
+ pub(super) fn suggest_boxing_for_return_impl_trait(
+ &self,
+ err: &mut Diagnostic,
+ return_sp: Span,
+ arm_spans: impl Iterator<Item = Span>,
+ ) {
+ err.multipart_suggestion(
+ "you could change the return type to be a boxed trait object",
+ vec![
+ (return_sp.with_hi(return_sp.lo() + BytePos(4)), "Box<dyn".to_string()),
+ (return_sp.shrink_to_hi(), ">".to_string()),
+ ],
+ Applicability::MaybeIncorrect,
+ );
+ let sugg = arm_spans
+ .flat_map(|sp| {
+ [(sp.shrink_to_lo(), "Box::new(".to_string()), (sp.shrink_to_hi(), ")".to_string())]
+ .into_iter()
+ })
+ .collect::<Vec<_>>();
+ err.multipart_suggestion(
+ "if you change the return type to expect trait objects, box the returned expressions",
+ sugg,
+ Applicability::MaybeIncorrect,
+ );
+ }
+
+ pub(super) fn suggest_tuple_pattern(
+ &self,
+ cause: &ObligationCause<'tcx>,
+ exp_found: &ty::error::ExpectedFound<Ty<'tcx>>,
+ diag: &mut Diagnostic,
+ ) {
+ // Heavily inspired by `FnCtxt::suggest_compatible_variants`, with
+ // some modifications due to that being in typeck and this being in infer.
+ if let ObligationCauseCode::Pattern { .. } = cause.code() {
+ if let ty::Adt(expected_adt, substs) = exp_found.expected.kind() {
+ let compatible_variants: Vec<_> = expected_adt
+ .variants()
+ .iter()
+ .filter(|variant| {
+ variant.fields.len() == 1 && variant.ctor_kind() == Some(CtorKind::Fn)
+ })
+ .filter_map(|variant| {
+ let sole_field = &variant.fields[0];
+ let sole_field_ty = sole_field.ty(self.tcx, substs);
+ if self.same_type_modulo_infer(sole_field_ty, exp_found.found) {
+ let variant_path =
+ with_no_trimmed_paths!(self.tcx.def_path_str(variant.def_id));
+ // FIXME #56861: DRYer prelude filtering
+ if let Some(path) = variant_path.strip_prefix("std::prelude::") {
+ if let Some((_, path)) = path.split_once("::") {
+ return Some(path.to_string());
+ }
+ }
+ Some(variant_path)
+ } else {
+ None
+ }
+ })
+ .collect();
+ match &compatible_variants[..] {
+ [] => {}
+ [variant] => {
+ diag.multipart_suggestion_verbose(
+ &format!("try wrapping the pattern in `{}`", variant),
+ vec![
+ (cause.span.shrink_to_lo(), format!("{}(", variant)),
+ (cause.span.shrink_to_hi(), ")".to_string()),
+ ],
+ Applicability::MaybeIncorrect,
+ );
+ }
+ _ => {
+ // More than one matching variant.
+ diag.multipart_suggestions(
+ &format!(
+ "try wrapping the pattern in a variant of `{}`",
+ self.tcx.def_path_str(expected_adt.did())
+ ),
+ compatible_variants.into_iter().map(|variant| {
+ vec![
+ (cause.span.shrink_to_lo(), format!("{}(", variant)),
+ (cause.span.shrink_to_hi(), ")".to_string()),
+ ]
+ }),
+ Applicability::MaybeIncorrect,
+ );
+ }
+ }
+ }
+ }
+ }
+
+ /// A possible error is to forget to add `.await` when using futures:
+ ///
+ /// ```compile_fail,E0308
+ /// async fn make_u32() -> u32 {
+ /// 22
+ /// }
+ ///
+ /// fn take_u32(x: u32) {}
+ ///
+ /// async fn foo() {
+ /// let x = make_u32();
+ /// take_u32(x);
+ /// }
+ /// ```
+ ///
+ /// This routine checks if the found type `T` implements `Future<Output=U>` where `U` is the
+ /// expected type. If this is the case, and we are inside of an async body, it suggests adding
+ /// `.await` to the tail of the expression.
+ pub(super) fn suggest_await_on_expect_found(
+ &self,
+ cause: &ObligationCause<'tcx>,
+ exp_span: Span,
+ exp_found: &ty::error::ExpectedFound<Ty<'tcx>>,
+ diag: &mut Diagnostic,
+ ) {
+ debug!(
+ "suggest_await_on_expect_found: exp_span={:?}, expected_ty={:?}, found_ty={:?}",
+ exp_span, exp_found.expected, exp_found.found,
+ );
+
+ if let ObligationCauseCode::CompareImplItemObligation { .. } = cause.code() {
+ return;
+ }
+
+ match (
+ self.get_impl_future_output_ty(exp_found.expected),
+ self.get_impl_future_output_ty(exp_found.found),
+ ) {
+ (Some(exp), Some(found)) if self.same_type_modulo_infer(exp, found) => match cause
+ .code()
+ {
+ ObligationCauseCode::IfExpression(box IfExpressionCause { then_id, .. }) => {
+ let then_span = self.find_block_span_from_hir_id(*then_id);
+ diag.multipart_suggestion(
+ "consider `await`ing on both `Future`s",
+ vec![
+ (then_span.shrink_to_hi(), ".await".to_string()),
+ (exp_span.shrink_to_hi(), ".await".to_string()),
+ ],
+ Applicability::MaybeIncorrect,
+ );
+ }
+ ObligationCauseCode::MatchExpressionArm(box MatchExpressionArmCause {
+ prior_arms,
+ ..
+ }) => {
+ if let [.., arm_span] = &prior_arms[..] {
+ diag.multipart_suggestion(
+ "consider `await`ing on both `Future`s",
+ vec![
+ (arm_span.shrink_to_hi(), ".await".to_string()),
+ (exp_span.shrink_to_hi(), ".await".to_string()),
+ ],
+ Applicability::MaybeIncorrect,
+ );
+ } else {
+ diag.help("consider `await`ing on both `Future`s");
+ }
+ }
+ _ => {
+ diag.help("consider `await`ing on both `Future`s");
+ }
+ },
+ (_, Some(ty)) if self.same_type_modulo_infer(exp_found.expected, ty) => {
+ diag.span_suggestion_verbose(
+ exp_span.shrink_to_hi(),
+ "consider `await`ing on the `Future`",
+ ".await",
+ Applicability::MaybeIncorrect,
+ );
+ }
+ (Some(ty), _) if self.same_type_modulo_infer(ty, exp_found.found) => match cause.code()
+ {
+ ObligationCauseCode::Pattern { span: Some(then_span), .. } => {
+ diag.span_suggestion_verbose(
+ then_span.shrink_to_hi(),
+ "consider `await`ing on the `Future`",
+ ".await",
+ Applicability::MaybeIncorrect,
+ );
+ }
+ ObligationCauseCode::IfExpression(box IfExpressionCause { then_id, .. }) => {
+ let then_span = self.find_block_span_from_hir_id(*then_id);
+ diag.span_suggestion_verbose(
+ then_span.shrink_to_hi(),
+ "consider `await`ing on the `Future`",
+ ".await",
+ Applicability::MaybeIncorrect,
+ );
+ }
+ ObligationCauseCode::MatchExpressionArm(box MatchExpressionArmCause {
+ ref prior_arms,
+ ..
+ }) => {
+ diag.multipart_suggestion_verbose(
+ "consider `await`ing on the `Future`",
+ prior_arms
+ .iter()
+ .map(|arm| (arm.shrink_to_hi(), ".await".to_string()))
+ .collect(),
+ Applicability::MaybeIncorrect,
+ );
+ }
+ _ => {}
+ },
+ _ => {}
+ }
+ }
+
+ pub(super) fn suggest_accessing_field_where_appropriate(
+ &self,
+ cause: &ObligationCause<'tcx>,
+ exp_found: &ty::error::ExpectedFound<Ty<'tcx>>,
+ diag: &mut Diagnostic,
+ ) {
+ debug!(
+ "suggest_accessing_field_where_appropriate(cause={:?}, exp_found={:?})",
+ cause, exp_found
+ );
+ if let ty::Adt(expected_def, expected_substs) = exp_found.expected.kind() {
+ if expected_def.is_enum() {
+ return;
+ }
+
+ if let Some((name, ty)) = expected_def
+ .non_enum_variant()
+ .fields
+ .iter()
+ .filter(|field| field.vis.is_accessible_from(field.did, self.tcx))
+ .map(|field| (field.name, field.ty(self.tcx, expected_substs)))
+ .find(|(_, ty)| self.same_type_modulo_infer(*ty, exp_found.found))
+ {
+ if let ObligationCauseCode::Pattern { span: Some(span), .. } = *cause.code() {
+ if let Ok(snippet) = self.tcx.sess.source_map().span_to_snippet(span) {
+ let suggestion = if expected_def.is_struct() {
+ format!("{}.{}", snippet, name)
+ } else if expected_def.is_union() {
+ format!("unsafe {{ {}.{} }}", snippet, name)
+ } else {
+ return;
+ };
+ diag.span_suggestion(
+ span,
+ &format!(
+ "you might have meant to use field `{}` whose type is `{}`",
+ name, ty
+ ),
+ suggestion,
+ Applicability::MaybeIncorrect,
+ );
+ }
+ }
+ }
+ }
+ }
+
+ /// When encountering a case where `.as_ref()` on a `Result` or `Option` would be appropriate,
+ /// suggests it.
+ pub(super) fn suggest_as_ref_where_appropriate(
+ &self,
+ span: Span,
+ exp_found: &ty::error::ExpectedFound<Ty<'tcx>>,
+ diag: &mut Diagnostic,
+ ) {
+ if let Ok(snippet) = self.tcx.sess.source_map().span_to_snippet(span)
+ && let Some(msg) = self.should_suggest_as_ref(exp_found.expected, exp_found.found)
+ {
+ diag.span_suggestion(
+ span,
+ msg,
+ // HACK: fix for issue #100605; when suggesting converting from `&Option<T>` to `Option<&T>`, remove the extra `&`
+ format!("{}.as_ref()", snippet.trim_start_matches('&')),
+ Applicability::MachineApplicable,
+ );
+ }
+ }
+
+ pub fn should_suggest_as_ref(&self, expected: Ty<'tcx>, found: Ty<'tcx>) -> Option<&str> {
+ if let (ty::Adt(exp_def, exp_substs), ty::Ref(_, found_ty, _)) =
+ (expected.kind(), found.kind())
+ {
+ if let ty::Adt(found_def, found_substs) = *found_ty.kind() {
+ if exp_def == &found_def {
+ let have_as_ref = &[
+ (
+ sym::Option,
+ "you can convert from `&Option<T>` to `Option<&T>` using \
+ `.as_ref()`",
+ ),
+ (
+ sym::Result,
+ "you can convert from `&Result<T, E>` to \
+ `Result<&T, &E>` using `.as_ref()`",
+ ),
+ ];
+ if let Some(msg) = have_as_ref.iter().find_map(|(name, msg)| {
+ self.tcx.is_diagnostic_item(*name, exp_def.did()).then_some(msg)
+ }) {
+ let mut show_suggestion = true;
+ for (exp_ty, found_ty) in
+ std::iter::zip(exp_substs.types(), found_substs.types())
+ {
+ match *exp_ty.kind() {
+ ty::Ref(_, exp_ty, _) => {
+ match (exp_ty.kind(), found_ty.kind()) {
+ (_, ty::Param(_))
+ | (_, ty::Infer(_))
+ | (ty::Param(_), _)
+ | (ty::Infer(_), _) => {}
+ _ if self.same_type_modulo_infer(exp_ty, found_ty) => {}
+ _ => show_suggestion = false,
+ };
+ }
+ ty::Param(_) | ty::Infer(_) => {}
+ _ => show_suggestion = false,
+ }
+ }
+ if show_suggestion {
+ return Some(*msg);
+ }
+ }
+ }
+ }
+ }
+ None
+ }
+
+ /// Try to find code with the pattern `if Some(..) = expr`:
+ /// use a visitor to mark the `if` whose span contains the given error span,
+ /// and then try to find an assignment in the `cond` part whose span equals the error span.
+ pub(super) fn suggest_let_for_letchains(
+ &self,
+ err: &mut Diagnostic,
+ cause: &ObligationCause<'_>,
+ span: Span,
+ ) {
+ let hir = self.tcx.hir();
+ let fn_hir_id = hir.get_parent_node(cause.body_id);
+ if let Some(node) = self.tcx.hir().find(fn_hir_id) &&
+ let hir::Node::Item(hir::Item {
+ kind: hir::ItemKind::Fn(_sig, _, body_id), ..
+ }) = node {
+ let body = hir.body(*body_id);
+
+ /// Find the if expression with given span
+ struct IfVisitor {
+ pub result: bool,
+ pub found_if: bool,
+ pub err_span: Span,
+ }
+
+ impl<'v> Visitor<'v> for IfVisitor {
+ fn visit_expr(&mut self, ex: &'v hir::Expr<'v>) {
+ if self.result { return; }
+ match ex.kind {
+ hir::ExprKind::If(cond, _, _) => {
+ self.found_if = true;
+ walk_expr(self, cond);
+ self.found_if = false;
+ }
+ _ => walk_expr(self, ex),
+ }
+ }
+
+ fn visit_stmt(&mut self, ex: &'v hir::Stmt<'v>) {
+ if let hir::StmtKind::Local(hir::Local {
+ span, pat: hir::Pat{..}, ty: None, init: Some(_), ..
+ }) = &ex.kind
+ && self.found_if
+ && span.eq(&self.err_span) {
+ self.result = true;
+ }
+ walk_stmt(self, ex);
+ }
+
+ fn visit_body(&mut self, body: &'v hir::Body<'v>) {
+ hir::intravisit::walk_body(self, body);
+ }
+ }
+
+ let mut visitor = IfVisitor { err_span: span, found_if: false, result: false };
+ visitor.visit_body(&body);
+ if visitor.result {
+ err.subdiagnostic(SuggAddLetForLetChains{span: span.shrink_to_lo()});
+ }
+ }
+ }
+}
+
+impl<'tcx> TypeErrCtxt<'_, 'tcx> {
+ /// Be helpful when the user wrote `{... expr; }` and taking the `;` off
+ /// is enough to fix the error.
+ pub fn could_remove_semicolon(
+ &self,
+ blk: &'tcx hir::Block<'tcx>,
+ expected_ty: Ty<'tcx>,
+ ) -> Option<(Span, StatementAsExpression)> {
+ let blk = blk.innermost_block();
+ // Do not suggest if we have a tail expr.
+ if blk.expr.is_some() {
+ return None;
+ }
+ let last_stmt = blk.stmts.last()?;
+ let hir::StmtKind::Semi(ref last_expr) = last_stmt.kind else {
+ return None;
+ };
+ let last_expr_ty = self.typeck_results.as_ref()?.expr_ty_opt(*last_expr)?;
+ let needs_box = match (last_expr_ty.kind(), expected_ty.kind()) {
+ _ if last_expr_ty.references_error() => return None,
+ _ if self.same_type_modulo_infer(last_expr_ty, expected_ty) => {
+ StatementAsExpression::CorrectType
+ }
+ (ty::Opaque(last_def_id, _), ty::Opaque(exp_def_id, _))
+ if last_def_id == exp_def_id =>
+ {
+ StatementAsExpression::CorrectType
+ }
+ (ty::Opaque(last_def_id, last_bounds), ty::Opaque(exp_def_id, exp_bounds)) => {
+ debug!(
+ "both opaque, likely future {:?} {:?} {:?} {:?}",
+ last_def_id, last_bounds, exp_def_id, exp_bounds
+ );
+
+ let last_local_id = last_def_id.as_local()?;
+ let exp_local_id = exp_def_id.as_local()?;
+
+ match (
+ &self.tcx.hir().expect_item(last_local_id).kind,
+ &self.tcx.hir().expect_item(exp_local_id).kind,
+ ) {
+ (
+ hir::ItemKind::OpaqueTy(hir::OpaqueTy { bounds: last_bounds, .. }),
+ hir::ItemKind::OpaqueTy(hir::OpaqueTy { bounds: exp_bounds, .. }),
+ ) if std::iter::zip(*last_bounds, *exp_bounds).all(|(left, right)| {
+ match (left, right) {
+ (
+ hir::GenericBound::Trait(tl, ml),
+ hir::GenericBound::Trait(tr, mr),
+ ) if tl.trait_ref.trait_def_id() == tr.trait_ref.trait_def_id()
+ && ml == mr =>
+ {
+ true
+ }
+ (
+ hir::GenericBound::LangItemTrait(langl, _, _, argsl),
+ hir::GenericBound::LangItemTrait(langr, _, _, argsr),
+ ) if langl == langr => {
+ // FIXME: consider the bounds!
+ debug!("{:?} {:?}", argsl, argsr);
+ true
+ }
+ _ => false,
+ }
+ }) =>
+ {
+ StatementAsExpression::NeedsBoxing
+ }
+ _ => StatementAsExpression::CorrectType,
+ }
+ }
+ _ => return None,
+ };
+ let span = if last_stmt.span.from_expansion() {
+ let mac_call = rustc_span::source_map::original_sp(last_stmt.span, blk.span);
+ self.tcx.sess.source_map().mac_call_stmt_semi_span(mac_call)?
+ } else {
+ last_stmt.span.with_lo(last_stmt.span.hi() - BytePos(1))
+ };
+ Some((span, needs_box))
+ }
+
+ /// Suggest returning a local binding with a compatible type if the block
+ /// has no return expression.
+ pub fn consider_returning_binding(
+ &self,
+ blk: &'tcx hir::Block<'tcx>,
+ expected_ty: Ty<'tcx>,
+ err: &mut Diagnostic,
+ ) -> bool {
+ let blk = blk.innermost_block();
+ // Do not suggest if we have a tail expr.
+ if blk.expr.is_some() {
+ return false;
+ }
+ let mut shadowed = FxIndexSet::default();
+ let mut candidate_idents = vec![];
+ let mut find_compatible_candidates = |pat: &hir::Pat<'_>| {
+ if let hir::PatKind::Binding(_, hir_id, ident, _) = &pat.kind
+ && let Some(pat_ty) = self
+ .typeck_results
+ .as_ref()
+ .and_then(|typeck_results| typeck_results.node_type_opt(*hir_id))
+ {
+ let pat_ty = self.resolve_vars_if_possible(pat_ty);
+ if self.same_type_modulo_infer(pat_ty, expected_ty)
+ && !(pat_ty, expected_ty).references_error()
+ && shadowed.insert(ident.name)
+ {
+ candidate_idents.push((*ident, pat_ty));
+ }
+ }
+ true
+ };
+
+ let hir = self.tcx.hir();
+ for stmt in blk.stmts.iter().rev() {
+ let hir::StmtKind::Local(local) = &stmt.kind else { continue; };
+ local.pat.walk(&mut find_compatible_candidates);
+ }
+ match hir.find(hir.get_parent_node(blk.hir_id)) {
+ Some(hir::Node::Expr(hir::Expr { hir_id, .. })) => {
+ match hir.find(hir.get_parent_node(*hir_id)) {
+ Some(hir::Node::Arm(hir::Arm { pat, .. })) => {
+ pat.walk(&mut find_compatible_candidates);
+ }
+ Some(
+ hir::Node::Item(hir::Item { kind: hir::ItemKind::Fn(_, _, body), .. })
+ | hir::Node::ImplItem(hir::ImplItem {
+ kind: hir::ImplItemKind::Fn(_, body),
+ ..
+ })
+ | hir::Node::TraitItem(hir::TraitItem {
+ kind: hir::TraitItemKind::Fn(_, hir::TraitFn::Provided(body)),
+ ..
+ })
+ | hir::Node::Expr(hir::Expr {
+ kind: hir::ExprKind::Closure(hir::Closure { body, .. }),
+ ..
+ }),
+ ) => {
+ for param in hir.body(*body).params {
+ param.pat.walk(&mut find_compatible_candidates);
+ }
+ }
+ Some(hir::Node::Expr(hir::Expr {
+ kind:
+ hir::ExprKind::If(
+ hir::Expr { kind: hir::ExprKind::Let(let_), .. },
+ then_block,
+ _,
+ ),
+ ..
+ })) if then_block.hir_id == *hir_id => {
+ let_.pat.walk(&mut find_compatible_candidates);
+ }
+ _ => {}
+ }
+ }
+ _ => {}
+ }
+
+ match &candidate_idents[..] {
+ [(ident, _ty)] => {
+ let sm = self.tcx.sess.source_map();
+ if let Some(stmt) = blk.stmts.last() {
+ let stmt_span = sm.stmt_span(stmt.span, blk.span);
+ let sugg = if sm.is_multiline(blk.span)
+ && let Some(spacing) = sm.indentation_before(stmt_span)
+ {
+ format!("\n{spacing}{ident}")
+ } else {
+ format!(" {ident}")
+ };
+ err.span_suggestion_verbose(
+ stmt_span.shrink_to_hi(),
+ format!("consider returning the local binding `{ident}`"),
+ sugg,
+ Applicability::MaybeIncorrect,
+ );
+ } else {
+ let sugg = if sm.is_multiline(blk.span)
+ && let Some(spacing) = sm.indentation_before(blk.span.shrink_to_lo())
+ {
+ format!("\n{spacing} {ident}\n{spacing}")
+ } else {
+ format!(" {ident} ")
+ };
+ let left_span = sm.span_through_char(blk.span, '{').shrink_to_hi();
+ err.span_suggestion_verbose(
+ sm.span_extend_while(left_span, |c| c.is_whitespace()).unwrap_or(left_span),
+ format!("consider returning the local binding `{ident}`"),
+ sugg,
+ Applicability::MaybeIncorrect,
+ );
+ }
+ true
+ }
+ values if (1..3).contains(&values.len()) => {
+ let spans = values.iter().map(|(ident, _)| ident.span).collect::<Vec<_>>();
+ err.span_note(spans, "consider returning one of these bindings");
+ true
+ }
+ _ => false,
+ }
+ }
+}
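
To illustrate the `&Option<T>` vs `Option<&T>` mismatch that `should_suggest_as_ref` looks for, here is a hypothetical user-code sketch (the function and variable names are made up): the call below does not type-check, and the fix the suggestion points at is `name.as_ref()`.

fn print_len(opt: Option<&String>) {
    if let Some(s) = opt {
        println!("{}", s.len());
    }
}

fn main() {
    let name: Option<String> = Some(String::from("ferris"));
    // Mismatched types: expected `Option<&String>`, found `&Option<String>`.
    // Replacing the argument with `name.as_ref()` makes this compile.
    print_len(&name);
}
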
diff --git a/compiler/rustc_infer/src/infer/free_regions.rs b/compiler/rustc_infer/src/infer/free_regions.rs
index 728d691a2..2402a7ea7 100644
--- a/compiler/rustc_infer/src/infer/free_regions.rs
+++ b/compiler/rustc_infer/src/infer/free_regions.rs
@@ -29,10 +29,10 @@ impl<'a, 'tcx> RegionRelations<'a, 'tcx> {
#[derive(Clone, Debug)]
pub struct FreeRegionMap<'tcx> {
- // Stores the relation `a < b`, where `a` and `b` are regions.
- //
- // Invariant: only free regions like `'x` or `'static` are stored
- // in this relation, not scopes.
+ /// Stores the relation `a < b`, where `a` and `b` are regions.
+ ///
+ /// Invariant: only free regions like `'x` or `'static` are stored
+ /// in this relation, not scopes.
pub(crate) relation: TransitiveRelation<Region<'tcx>>,
}
diff --git a/compiler/rustc_infer/src/infer/freshen.rs b/compiler/rustc_infer/src/infer/freshen.rs
index ff5d1a05a..f6946929b 100644
--- a/compiler/rustc_infer/src/infer/freshen.rs
+++ b/compiler/rustc_infer/src/infer/freshen.rs
@@ -102,7 +102,7 @@ impl<'a, 'tcx> TypeFreshener<'a, 'tcx> {
Entry::Vacant(entry) => {
let index = self.const_freshen_count;
self.const_freshen_count += 1;
- let ct = self.infcx.tcx.mk_const_infer(freshener(index), ty);
+ let ct = self.infcx.tcx.mk_const(freshener(index), ty);
entry.insert(ct);
ct
}
@@ -248,6 +248,7 @@ impl<'a, 'tcx> TypeFolder<'tcx> for TypeFreshener<'a, 'tcx> {
ty::ConstKind::Param(_)
| ty::ConstKind::Value(_)
| ty::ConstKind::Unevaluated(..)
+ | ty::ConstKind::Expr(..)
| ty::ConstKind::Error(_) => ct.super_fold_with(self),
}
}
diff --git a/compiler/rustc_infer/src/infer/glb.rs b/compiler/rustc_infer/src/infer/glb.rs
index 6ffefcb7a..7f27b35a5 100644
--- a/compiler/rustc_infer/src/infer/glb.rs
+++ b/compiler/rustc_infer/src/infer/glb.rs
@@ -30,6 +30,11 @@ impl<'tcx> TypeRelation<'tcx> for Glb<'_, '_, 'tcx> {
"Glb"
}
+ fn intercrate(&self) -> bool {
+ assert!(!self.fields.infcx.intercrate);
+ false
+ }
+
fn tcx(&self) -> TyCtxt<'tcx> {
self.fields.tcx()
}
@@ -42,6 +47,10 @@ impl<'tcx> TypeRelation<'tcx> for Glb<'_, '_, 'tcx> {
self.a_is_expected
}
+ fn mark_ambiguous(&mut self) {
+ bug!("mark_ambiguous used outside of coherence");
+ }
+
fn relate_with_variance<T: Relate<'tcx>>(
&mut self,
variance: ty::Variance,
diff --git a/compiler/rustc_infer/src/infer/higher_ranked/mod.rs b/compiler/rustc_infer/src/infer/higher_ranked/mod.rs
index 28c87a115..817ae10c7 100644
--- a/compiler/rustc_infer/src/infer/higher_ranked/mod.rs
+++ b/compiler/rustc_infer/src/infer/higher_ranked/mod.rs
@@ -94,13 +94,8 @@ impl<'tcx> InferCtxt<'tcx> {
}))
},
consts: &mut |bound_var: ty::BoundVar, ty| {
- self.tcx.mk_const(ty::ConstS {
- kind: ty::ConstKind::Placeholder(ty::PlaceholderConst {
- universe: next_universe,
- name: bound_var,
- }),
- ty,
- })
+ self.tcx
+ .mk_const(ty::PlaceholderConst { universe: next_universe, name: bound_var }, ty)
},
};
diff --git a/compiler/rustc_infer/src/infer/lexical_region_resolve/mod.rs b/compiler/rustc_infer/src/infer/lexical_region_resolve/mod.rs
index 5f13b2b3d..ba990acfe 100644
--- a/compiler/rustc_infer/src/infer/lexical_region_resolve/mod.rs
+++ b/compiler/rustc_infer/src/infer/lexical_region_resolve/mod.rs
@@ -842,6 +842,9 @@ impl<'cx, 'tcx> LexicalResolver<'cx, 'tcx> {
// are placeholders as upper bounds, but the universe of the
// variable `'a`, or some variable that `'a` has to outlive, doesn't
// permit those placeholders.
+ //
+ // We only iterate to find the min, which means it doesn't cause reproducibility issues
+ #[allow(rustc::potential_query_instability)]
let min_universe = lower_vid_bounds
.into_iter()
.map(|vid| self.var_infos[vid].universe)
diff --git a/compiler/rustc_infer/src/infer/lub.rs b/compiler/rustc_infer/src/infer/lub.rs
index d6e56fcb7..97ed4729b 100644
--- a/compiler/rustc_infer/src/infer/lub.rs
+++ b/compiler/rustc_infer/src/infer/lub.rs
@@ -30,6 +30,11 @@ impl<'tcx> TypeRelation<'tcx> for Lub<'_, '_, 'tcx> {
"Lub"
}
+ fn intercrate(&self) -> bool {
+ assert!(!self.fields.infcx.intercrate);
+ false
+ }
+
fn tcx(&self) -> TyCtxt<'tcx> {
self.fields.tcx()
}
@@ -42,6 +47,10 @@ impl<'tcx> TypeRelation<'tcx> for Lub<'_, '_, 'tcx> {
self.a_is_expected
}
+ fn mark_ambiguous(&mut self) {
+ bug!("mark_ambiguous used outside of coherence");
+ }
+
fn relate_with_variance<T: Relate<'tcx>>(
&mut self,
variance: ty::Variance,
diff --git a/compiler/rustc_infer/src/infer/mod.rs b/compiler/rustc_infer/src/infer/mod.rs
index ffb020398..706cd4124 100644
--- a/compiler/rustc_infer/src/infer/mod.rs
+++ b/compiler/rustc_infer/src/infer/mod.rs
@@ -10,6 +10,7 @@ pub(crate) use self::undo_log::{InferCtxtUndoLogs, Snapshot, UndoLog};
use crate::traits::{self, ObligationCause, PredicateObligations, TraitEngine, TraitEngineExt};
+use rustc_data_structures::fx::FxIndexMap;
use rustc_data_structures::fx::{FxHashMap, FxHashSet};
use rustc_data_structures::sync::Lrc;
use rustc_data_structures::undo_log::Rollback;
@@ -22,7 +23,6 @@ use rustc_middle::infer::unify_key::{ConstVariableOrigin, ConstVariableOriginKin
use rustc_middle::mir::interpret::{ErrorHandled, EvalToValTreeResult};
use rustc_middle::mir::ConstraintCategory;
use rustc_middle::traits::select;
-use rustc_middle::ty::abstract_const::{AbstractConst, FailureKind};
use rustc_middle::ty::error::{ExpectedFound, TypeError};
use rustc_middle::ty::fold::BoundVarReplacerDelegate;
use rustc_middle::ty::fold::{TypeFoldable, TypeFolder, TypeSuperFoldable};
@@ -33,7 +33,7 @@ pub use rustc_middle::ty::IntVarValue;
use rustc_middle::ty::{self, GenericParamDefKind, InferConst, Ty, TyCtxt};
use rustc_middle::ty::{ConstVid, FloatVid, IntVid, TyVid};
use rustc_span::symbol::Symbol;
-use rustc_span::{Span, DUMMY_SP};
+use rustc_span::Span;
use std::cell::{Cell, RefCell};
use std::fmt;
@@ -80,7 +80,6 @@ pub struct InferOk<'tcx, T> {
}
pub type InferResult<'tcx, T> = Result<InferOk<'tcx, T>, TypeError<'tcx>>;
-pub type Bound<T> = Option<T>;
pub type UnitResult<'tcx> = RelateResult<'tcx, ()>; // "unify result"
pub type FixupResult<'tcx, T> = Result<T, FixupError<'tcx>>; // "fixup result"
@@ -294,7 +293,7 @@ pub struct InferCtxt<'tcx> {
/// the set of predicates on which errors have been reported, to
/// avoid reporting the same error twice.
- pub reported_trait_errors: RefCell<FxHashMap<Span, Vec<ty::Predicate<'tcx>>>>,
+ pub reported_trait_errors: RefCell<FxIndexMap<Span, Vec<ty::Predicate<'tcx>>>>,
pub reported_closure_mismatch: RefCell<FxHashSet<(Span, Option<Span>)>>,
@@ -334,8 +333,25 @@ pub struct InferCtxt<'tcx> {
/// bound.
universe: Cell<ty::UniverseIndex>,
- normalize_fn_sig_for_diagnostic:
- Option<Lrc<dyn Fn(&InferCtxt<'tcx>, ty::PolyFnSig<'tcx>) -> ty::PolyFnSig<'tcx>>>,
+ /// During coherence we have to assume that other crates may add
+ /// additional impls which we currently don't know about.
+ ///
+ /// To deal with this, evaluation should be conservative
+ /// and consider the possibility of impls from outside this crate.
+ /// This comes up primarily when resolving ambiguity. Imagine
+ /// there is some trait reference `$0: Bar` where `$0` is an
+ /// inference variable. If `intercrate` is true, then we can never
+ /// say for sure that this reference is not implemented, even if
+ /// there are *no impls at all for `Bar`*, because `$0` could be
+ /// bound to some type in a downstream crate that implements
+ /// `Bar`.
+ ///
+ /// Outside of coherence we set this to false because we are only
+ /// interested in types that the user could actually have written.
+ /// In other words, we consider `$0: Bar` to be unimplemented if
+ /// there is no type that the user could *actually name* that
+ /// would satisfy it. This avoids crippling inference, basically.
+ pub intercrate: bool,
}
/// See the `error_reporting` module for more details.
@@ -551,8 +567,8 @@ pub struct InferCtxtBuilder<'tcx> {
tcx: TyCtxt<'tcx>,
defining_use_anchor: DefiningAnchor,
considering_regions: bool,
- normalize_fn_sig_for_diagnostic:
- Option<Lrc<dyn Fn(&InferCtxt<'tcx>, ty::PolyFnSig<'tcx>) -> ty::PolyFnSig<'tcx>>>,
+ /// Whether we are in coherence mode.
+ intercrate: bool,
}
pub trait TyCtxtInferExt<'tcx> {
@@ -565,7 +581,7 @@ impl<'tcx> TyCtxtInferExt<'tcx> for TyCtxt<'tcx> {
tcx: self,
defining_use_anchor: DefiningAnchor::Error,
considering_regions: true,
- normalize_fn_sig_for_diagnostic: None,
+ intercrate: false,
}
}
}
@@ -582,16 +598,13 @@ impl<'tcx> InferCtxtBuilder<'tcx> {
self
}
- pub fn ignoring_regions(mut self) -> Self {
- self.considering_regions = false;
+ pub fn intercrate(mut self) -> Self {
+ self.intercrate = true;
self
}
- pub fn with_normalize_fn_sig_for_diagnostic(
- mut self,
- fun: Lrc<dyn Fn(&InferCtxt<'tcx>, ty::PolyFnSig<'tcx>) -> ty::PolyFnSig<'tcx>>,
- ) -> Self {
- self.normalize_fn_sig_for_diagnostic = Some(fun);
+ pub fn ignoring_regions(mut self) -> Self {
+ self.considering_regions = false;
self
}
@@ -616,12 +629,7 @@ impl<'tcx> InferCtxtBuilder<'tcx> {
}
pub fn build(&mut self) -> InferCtxt<'tcx> {
- let InferCtxtBuilder {
- tcx,
- defining_use_anchor,
- considering_regions,
- ref normalize_fn_sig_for_diagnostic,
- } = *self;
+ let InferCtxtBuilder { tcx, defining_use_anchor, considering_regions, intercrate } = *self;
InferCtxt {
tcx,
defining_use_anchor,
@@ -637,9 +645,7 @@ impl<'tcx> InferCtxtBuilder<'tcx> {
in_snapshot: Cell::new(false),
skip_leak_check: Cell::new(false),
universe: Cell::new(ty::UniverseIndex::ROOT),
- normalize_fn_sig_for_diagnostic: normalize_fn_sig_for_diagnostic
- .as_ref()
- .map(|f| f.clone()),
+ intercrate,
}
}
}
@@ -677,35 +683,14 @@ pub struct CombinedSnapshot<'tcx> {
impl<'tcx> InferCtxt<'tcx> {
/// Creates a `TypeErrCtxt` for emitting various inference errors.
- /// During typeck, use `FnCtxt::infer_err` instead.
+ /// During typeck, use `FnCtxt::err_ctxt` instead.
pub fn err_ctxt(&self) -> TypeErrCtxt<'_, 'tcx> {
- TypeErrCtxt { infcx: self, typeck_results: None }
- }
-
- /// calls `tcx.try_unify_abstract_consts` after
- /// canonicalizing the consts.
- #[instrument(skip(self), level = "debug")]
- pub fn try_unify_abstract_consts(
- &self,
- a: ty::UnevaluatedConst<'tcx>,
- b: ty::UnevaluatedConst<'tcx>,
- param_env: ty::ParamEnv<'tcx>,
- ) -> bool {
- // Reject any attempt to unify two unevaluated constants that contain inference
- // variables, since inference variables in queries lead to ICEs.
- if a.substs.has_non_region_infer()
- || b.substs.has_non_region_infer()
- || param_env.has_non_region_infer()
- {
- debug!("a or b or param_env contain infer vars in its substs -> cannot unify");
- return false;
+ TypeErrCtxt {
+ infcx: self,
+ typeck_results: None,
+ fallback_has_occurred: false,
+ normalize_fn_sig: Box::new(|fn_sig| fn_sig),
}
-
- let param_env_and = param_env.and((a, b));
- let erased = self.tcx.erase_regions(param_env_and);
- debug!("after erase_regions: {:?}", erased);
-
- self.tcx.try_unify_abstract_consts(erased)
}
pub fn is_in_snapshot(&self) -> bool {
@@ -777,32 +762,6 @@ impl<'tcx> InferCtxt<'tcx> {
}
}
- /// Clear the "currently in a snapshot" flag, invoke the closure,
- /// then restore the flag to its original value. This flag is a
- /// debugging measure designed to detect cases where we start a
- /// snapshot, create type variables, and register obligations
- /// which may involve those type variables in the fulfillment cx,
- /// potentially leaving "dangling type variables" behind.
- /// In such cases, an assertion will fail when attempting to
- /// register obligations, within a snapshot. Very useful, much
- /// better than grovelling through megabytes of `RUSTC_LOG` output.
- ///
- /// HOWEVER, in some cases the flag is unhelpful. In particular, we
- /// sometimes create a "mini-fulfilment-cx" in which we enroll
- /// obligations. As long as this fulfillment cx is fully drained
- /// before we return, this is not a problem, as there won't be any
- /// escaping obligations in the main cx. In those cases, you can
- /// use this function.
- pub fn save_and_restore_in_snapshot_flag<F, R>(&self, func: F) -> R
- where
- F: FnOnce(&Self) -> R,
- {
- let flag = self.in_snapshot.replace(false);
- let result = func(self);
- self.in_snapshot.set(flag);
- result
- }
-
fn start_snapshot(&self) -> CombinedSnapshot<'tcx> {
debug!("start_snapshot()");
@@ -1087,7 +1046,7 @@ impl<'tcx> InferCtxt<'tcx> {
}
pub fn next_const_var(&self, ty: Ty<'tcx>, origin: ConstVariableOrigin) -> ty::Const<'tcx> {
- self.tcx.mk_const_var(self.next_const_var_id(origin), ty)
+ self.tcx.mk_const(self.next_const_var_id(origin), ty)
}
pub fn next_const_var_in_universe(
@@ -1101,7 +1060,7 @@ impl<'tcx> InferCtxt<'tcx> {
.borrow_mut()
.const_unification_table()
.new_key(ConstVarValue { origin, val: ConstVariableValue::Unknown { universe } });
- self.tcx.mk_const_var(vid, ty)
+ self.tcx.mk_const(vid, ty)
}
pub fn next_const_var_id(&self, origin: ConstVariableOrigin) -> ConstVid<'tcx> {
@@ -1217,7 +1176,7 @@ impl<'tcx> InferCtxt<'tcx> {
origin,
val: ConstVariableValue::Unknown { universe: self.universe() },
});
- self.tcx.mk_const_var(const_var_id, self.tcx.type_of(param.def_id)).into()
+ self.tcx.mk_const(const_var_id, self.tcx.type_of(param.def_id)).into()
}
}
}
@@ -1233,7 +1192,8 @@ impl<'tcx> InferCtxt<'tcx> {
/// reporting errors that often occur as a result of earlier
/// errors, but where it's hard to be 100% sure (e.g., unresolved
/// inference variables, regionck errors).
- pub fn is_tainted_by_errors(&self) -> bool {
+ #[must_use = "this method does not have any side effects"]
+ pub fn tainted_by_errors(&self) -> Option<ErrorGuaranteed> {
debug!(
"is_tainted_by_errors(err_count={}, err_count_on_creation={}, \
tainted_by_errors={})",
@@ -1242,19 +1202,25 @@ impl<'tcx> InferCtxt<'tcx> {
self.tainted_by_errors.get().is_some()
);
+ if let Some(e) = self.tainted_by_errors.get() {
+ return Some(e);
+ }
+
if self.tcx.sess.err_count() > self.err_count_on_creation {
- return true; // errors reported since this infcx was made
+ // errors reported since this infcx was made
+ let e = self.tcx.sess.has_errors().unwrap();
+ self.set_tainted_by_errors(e);
+ return Some(e);
}
- self.tainted_by_errors.get().is_some()
+
+ None
}
/// Set the "tainted by errors" flag to true. We call this when we
/// observe an error from a prior pass.
- pub fn set_tainted_by_errors(&self) {
- debug!("set_tainted_by_errors()");
- self.tainted_by_errors.set(Some(
- self.tcx.sess.delay_span_bug(DUMMY_SP, "`InferCtxt` incorrectly tainted by errors"),
- ));
+ pub fn set_tainted_by_errors(&self, e: ErrorGuaranteed) {
+ debug!("set_tainted_by_errors(ErrorGuaranteed)");
+ self.tainted_by_errors.set(Some(e));
}
pub fn skip_region_resolution(&self) {
@@ -1295,7 +1261,7 @@ impl<'tcx> InferCtxt<'tcx> {
let mut inner = self.inner.borrow_mut();
let inner = &mut *inner;
assert!(
- self.is_tainted_by_errors() || inner.region_obligations.is_empty(),
+ self.tainted_by_errors().is_some() || inner.region_obligations.is_empty(),
"region_obligations not empty: {:#?}",
inner.region_obligations
);
@@ -1436,16 +1402,15 @@ impl<'tcx> InferCtxt<'tcx> {
value.fold_with(&mut r)
}
- /// Returns the first unresolved variable contained in `T`. In the
- /// process of visiting `T`, this will resolve (where possible)
- /// type variables in `T`, but it never constructs the final,
- /// resolved type, so it's more efficient than
- /// `resolve_vars_if_possible()`.
- pub fn unresolved_type_vars<T>(&self, value: &T) -> Option<(Ty<'tcx>, Option<Span>)>
+ /// Returns the first unresolved type or const variable contained in `T`.
+ pub fn first_unresolved_const_or_ty_var<T>(
+ &self,
+ value: &T,
+ ) -> Option<(ty::Term<'tcx>, Option<Span>)>
where
T: TypeVisitable<'tcx>,
{
- value.visit_with(&mut resolve::UnresolvedTypeFinder::new(self)).break_value()
+ value.visit_with(&mut resolve::UnresolvedTypeOrConstFinder::new(self)).break_value()
}
pub fn probe_const_var(
@@ -1596,7 +1561,7 @@ impl<'tcx> InferCtxt<'tcx> {
span: Option<Span>,
) -> Result<ty::Const<'tcx>, ErrorHandled> {
match self.const_eval_resolve(param_env, unevaluated, span) {
- Ok(Some(val)) => Ok(ty::Const::from_value(self.tcx, val, ty)),
+ Ok(Some(val)) => Ok(self.tcx.mk_const(val, ty)),
Ok(None) => {
let tcx = self.tcx;
let def_id = unevaluated.def.did;
@@ -1634,26 +1599,25 @@ impl<'tcx> InferCtxt<'tcx> {
// Postpone the evaluation of constants whose substs depend on inference
// variables
+ let tcx = self.tcx;
if substs.has_non_region_infer() {
- let ac = AbstractConst::new(self.tcx, unevaluated);
- match ac {
- Ok(None) => {
- substs = InternalSubsts::identity_for_item(self.tcx, unevaluated.def.did);
- param_env = self.tcx.param_env(unevaluated.def.did);
- }
- Ok(Some(ct)) => {
- if ct.unify_failure_kind(self.tcx) == FailureKind::Concrete {
- substs = replace_param_and_infer_substs_with_placeholder(self.tcx, substs);
- } else {
- return Err(ErrorHandled::TooGeneric);
- }
+ if let Some(ct) = tcx.bound_abstract_const(unevaluated.def)? {
+ let ct = tcx.expand_abstract_consts(ct.subst(tcx, substs));
+ if let Err(e) = ct.error_reported() {
+ return Err(ErrorHandled::Reported(e));
+ } else if ct.has_non_region_infer() || ct.has_non_region_param() {
+ return Err(ErrorHandled::TooGeneric);
+ } else {
+ substs = replace_param_and_infer_substs_with_placeholder(tcx, substs);
}
- Err(guar) => return Err(ErrorHandled::Reported(guar)),
+ } else {
+ substs = InternalSubsts::identity_for_item(tcx, unevaluated.def.did);
+ param_env = tcx.param_env(unevaluated.def.did);
}
}
- let param_env_erased = self.tcx.erase_regions(param_env);
- let substs_erased = self.tcx.erase_regions(substs);
+ let param_env_erased = tcx.erase_regions(param_env);
+ let substs_erased = tcx.erase_regions(substs);
debug!(?param_env_erased);
debug!(?substs_erased);
@@ -1661,7 +1625,7 @@ impl<'tcx> InferCtxt<'tcx> {
// The return value is the evaluated value which doesn't contain any reference to inference
// variables, thus we don't need to substitute back the original values.
- self.tcx.const_eval_resolve_for_typeck(param_env_erased, unevaluated, span)
+ tcx.const_eval_resolve_for_typeck(param_env_erased, unevaluated, span)
}
/// `ty_or_const_infer_var_changed` is equivalent to one of these two:
@@ -1729,10 +1693,10 @@ impl<'tcx> TypeErrCtxt<'_, 'tcx> {
&self,
generic_param_scope: LocalDefId,
outlives_env: &OutlivesEnvironment<'tcx>,
- ) {
+ ) -> Option<ErrorGuaranteed> {
let errors = self.resolve_regions(outlives_env);
- if !self.is_tainted_by_errors() {
+ if let None = self.tainted_by_errors() {
// As a heuristic, just skip reporting region errors
// altogether if other errors have been reported while
// this infcx was in use. This is totally hokey but
@@ -1740,6 +1704,10 @@ impl<'tcx> TypeErrCtxt<'_, 'tcx> {
// errors from silly ones.
self.report_region_errors(generic_param_scope, &errors);
}
+
+ (!errors.is_empty()).then(|| {
+ self.tcx.sess.delay_span_bug(rustc_span::DUMMY_SP, "error should have been emitted")
+ })
}
// [Note-Type-error-reporting]
@@ -2065,13 +2033,13 @@ fn replace_param_and_infer_substs_with_placeholder<'tcx>(
if ty.has_non_region_param() || ty.has_non_region_infer() {
bug!("const `{ct}`'s type should not reference params or types");
}
- tcx.mk_const(ty::ConstS {
- ty,
- kind: ty::ConstKind::Placeholder(ty::PlaceholderConst {
+ tcx.mk_const(
+ ty::PlaceholderConst {
universe: ty::UniverseIndex::ROOT,
name: ty::BoundVar::from_usize(idx),
- }),
- })
+ },
+ ty,
+ )
.into()
}
_ => arg,
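
A hedged sketch of the coherence situation the new `intercrate` flag documents (the names below are illustrative, not from the compiler): even though no `impl Bar` is visible in this crate, the obligation `T: Bar` cannot be treated as unimplementable while `intercrate` is true, because a downstream crate may still provide an impl.

pub trait Bar {}

pub fn assert_bar<T: Bar>(_value: T) {}

// A downstream crate can legally write:
//
//     struct Mine;
//     impl upstream::Bar for Mine {}
//     upstream::assert_bar(Mine);
//
// so, during coherence, "no impls visible here" is not proof that
// `T: Bar` can never hold.
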
diff --git a/compiler/rustc_infer/src/infer/nll_relate/mod.rs b/compiler/rustc_infer/src/infer/nll_relate/mod.rs
index 600f94f09..f6bc4db0d 100644
--- a/compiler/rustc_infer/src/infer/nll_relate/mod.rs
+++ b/compiler/rustc_infer/src/infer/nll_relate/mod.rs
@@ -25,7 +25,7 @@ use crate::infer::combine::ConstEquateRelation;
use crate::infer::InferCtxt;
use crate::infer::{ConstVarValue, ConstVariableValue};
use crate::infer::{TypeVariableOrigin, TypeVariableOriginKind};
-use crate::traits::PredicateObligation;
+use crate::traits::{Obligation, PredicateObligation};
use rustc_data_structures::fx::FxHashMap;
use rustc_middle::traits::ObligationCause;
use rustc_middle::ty::error::TypeError;
@@ -92,11 +92,7 @@ pub trait TypeRelatingDelegate<'tcx> {
info: ty::VarianceDiagInfo<'tcx>,
);
- fn const_equate(&mut self, a: ty::Const<'tcx>, b: ty::Const<'tcx>);
- fn register_opaque_type_obligations(
- &mut self,
- obligations: Vec<PredicateObligation<'tcx>>,
- ) -> Result<(), TypeError<'tcx>>;
+ fn register_obligations(&mut self, obligations: Vec<PredicateObligation<'tcx>>);
/// Creates a new universe index. Used when instantiating placeholders.
fn create_next_universe(&mut self) -> ty::UniverseIndex;
@@ -419,7 +415,7 @@ where
.infcx
.handle_opaque_type(a, b, true, &cause, self.delegate.param_env())?
.obligations;
- self.delegate.register_opaque_type_obligations(obligations)?;
+ self.delegate.register_obligations(obligations);
trace!(a = ?a.kind(), b = ?b.kind(), "opaque type instantiated");
Ok(a)
}
@@ -531,6 +527,10 @@ where
self.infcx.tcx
}
+ fn intercrate(&self) -> bool {
+ self.infcx.intercrate
+ }
+
fn param_env(&self) -> ty::ParamEnv<'tcx> {
self.delegate.param_env()
}
@@ -543,6 +543,17 @@ where
true
}
+ fn mark_ambiguous(&mut self) {
+ let cause = ObligationCause::dummy_with_span(self.delegate.span());
+ let param_env = self.delegate.param_env();
+ self.delegate.register_obligations(vec![Obligation::new(
+ self.tcx(),
+ cause,
+ param_env,
+ ty::Binder::dummy(ty::PredicateKind::Ambiguous),
+ )]);
+ }
+
#[instrument(skip(self, info), level = "trace", ret)]
fn relate_with_variance<T: Relate<'tcx>>(
&mut self,
@@ -556,8 +567,9 @@ where
self.ambient_variance_info = self.ambient_variance_info.xform(info);
debug!(?self.ambient_variance);
-
- let r = self.relate(a, b)?;
+ // In a bivariant context this always succeeds.
+ let r =
+ if self.ambient_variance == ty::Variance::Bivariant { a } else { self.relate(a, b)? };
self.ambient_variance = old_ambient_variance;
@@ -799,8 +811,12 @@ impl<'tcx, D> ConstEquateRelation<'tcx> for TypeRelating<'_, 'tcx, D>
where
D: TypeRelatingDelegate<'tcx>,
{
- fn const_equate_obligation(&mut self, a: ty::Const<'tcx>, b: ty::Const<'tcx>) {
- self.delegate.const_equate(a, b);
+ fn const_equate_obligation(&mut self, _a: ty::Const<'tcx>, _b: ty::Const<'tcx>) {
+ // We don't have to worry about the equality of consts during borrow checking
+ // as consts always have a static lifetime.
+ // FIXME(oli-obk): is this really true? We can at least have HKL and with
+ // inline consts we may have further lifetimes that may be unsound to treat as
+ // 'static.
}
}
@@ -897,6 +913,11 @@ where
self.infcx.tcx
}
+ fn intercrate(&self) -> bool {
+ assert!(!self.infcx.intercrate);
+ false
+ }
+
fn param_env(&self) -> ty::ParamEnv<'tcx> {
self.delegate.param_env()
}
@@ -909,6 +930,10 @@ where
true
}
+ fn mark_ambiguous(&mut self) {
+ bug!()
+ }
+
fn relate_with_variance<T: Relate<'tcx>>(
&mut self,
variance: ty::Variance,
@@ -1062,7 +1087,7 @@ where
origin: var_value.origin,
val: ConstVariableValue::Unknown { universe: self.universe },
});
- Ok(self.tcx().mk_const_var(new_var_id, a.ty()))
+ Ok(self.tcx().mk_const(new_var_id, a.ty()))
}
}
}
diff --git a/compiler/rustc_infer/src/infer/note.rs b/compiler/rustc_infer/src/infer/note.rs
new file mode 100644
index 000000000..2ccbd164f
--- /dev/null
+++ b/compiler/rustc_infer/src/infer/note.rs
@@ -0,0 +1,203 @@
+impl<'tcx> TypeErrCtxt<'_, 'tcx> {
+ fn note_error_origin(
+ &self,
+ err: &mut Diagnostic,
+ cause: &ObligationCause<'tcx>,
+ exp_found: Option<ty::error::ExpectedFound<Ty<'tcx>>>,
+ terr: TypeError<'tcx>,
+ ) {
+ match *cause.code() {
+ ObligationCauseCode::Pattern { origin_expr: true, span: Some(span), root_ty } => {
+ let ty = self.resolve_vars_if_possible(root_ty);
+ if !matches!(ty.kind(), ty::Infer(ty::InferTy::TyVar(_) | ty::InferTy::FreshTy(_)))
+ {
+ // don't show type `_`
+ if span.desugaring_kind() == Some(DesugaringKind::ForLoop)
+ && let ty::Adt(def, substs) = ty.kind()
+ && Some(def.did()) == self.tcx.get_diagnostic_item(sym::Option)
+ {
+ err.span_label(span, format!("this is an iterator with items of type `{}`", substs.type_at(0)));
+ } else {
+ err.span_label(span, format!("this expression has type `{}`", ty));
+ }
+ }
+ if let Some(ty::error::ExpectedFound { found, .. }) = exp_found
+ && ty.is_box() && ty.boxed_ty() == found
+ && let Ok(snippet) = self.tcx.sess.source_map().span_to_snippet(span)
+ {
+ err.span_suggestion(
+ span,
+ "consider dereferencing the boxed value",
+ format!("*{}", snippet),
+ Applicability::MachineApplicable,
+ );
+ }
+ }
+ ObligationCauseCode::Pattern { origin_expr: false, span: Some(span), .. } => {
+ err.span_label(span, "expected due to this");
+ }
+ ObligationCauseCode::MatchExpressionArm(box MatchExpressionArmCause {
+ arm_block_id,
+ arm_span,
+ arm_ty,
+ prior_arm_block_id,
+ prior_arm_span,
+ prior_arm_ty,
+ source,
+ ref prior_arms,
+ scrut_hir_id,
+ opt_suggest_box_span,
+ scrut_span,
+ ..
+ }) => match source {
+ hir::MatchSource::TryDesugar => {
+ if let Some(ty::error::ExpectedFound { expected, .. }) = exp_found {
+ let scrut_expr = self.tcx.hir().expect_expr(scrut_hir_id);
+ let scrut_ty = if let hir::ExprKind::Call(_, args) = &scrut_expr.kind {
+ let arg_expr = args.first().expect("try desugaring call w/out arg");
+ self.typeck_results.as_ref().and_then(|typeck_results| {
+ typeck_results.expr_ty_opt(arg_expr)
+ })
+ } else {
+ bug!("try desugaring w/out call expr as scrutinee");
+ };
+
+ match scrut_ty {
+ Some(ty) if expected == ty => {
+ let source_map = self.tcx.sess.source_map();
+ err.span_suggestion(
+ source_map.end_point(cause.span),
+ "try removing this `?`",
+ "",
+ Applicability::MachineApplicable,
+ );
+ }
+ _ => {}
+ }
+ }
+ }
+ _ => {
+ // `prior_arm_ty` can be `!`, `expected` will have better info when present.
+ let t = self.resolve_vars_if_possible(match exp_found {
+ Some(ty::error::ExpectedFound { expected, .. }) => expected,
+ _ => prior_arm_ty,
+ });
+ let source_map = self.tcx.sess.source_map();
+ let mut any_multiline_arm = source_map.is_multiline(arm_span);
+ if prior_arms.len() <= 4 {
+ for sp in prior_arms {
+ any_multiline_arm |= source_map.is_multiline(*sp);
+ err.span_label(*sp, format!("this is found to be of type `{}`", t));
+ }
+ } else if let Some(sp) = prior_arms.last() {
+ any_multiline_arm |= source_map.is_multiline(*sp);
+ err.span_label(
+ *sp,
+ format!("this and all prior arms are found to be of type `{}`", t),
+ );
+ }
+ let outer_error_span = if any_multiline_arm {
+ // Cover just `match` and the scrutinee expression, not
+ // the entire match body, to reduce diagram noise.
+ cause.span.shrink_to_lo().to(scrut_span)
+ } else {
+ cause.span
+ };
+ let msg = "`match` arms have incompatible types";
+ err.span_label(outer_error_span, msg);
+ self.suggest_remove_semi_or_return_binding(
+ err,
+ prior_arm_block_id,
+ prior_arm_ty,
+ prior_arm_span,
+ arm_block_id,
+ arm_ty,
+ arm_span,
+ );
+ if let Some(ret_sp) = opt_suggest_box_span {
+ // Get return type span and point to it.
+ self.suggest_boxing_for_return_impl_trait(
+ err,
+ ret_sp,
+ prior_arms.iter().chain(std::iter::once(&arm_span)).map(|s| *s),
+ );
+ }
+ }
+ },
+ ObligationCauseCode::IfExpression(box IfExpressionCause {
+ then_id,
+ else_id,
+ then_ty,
+ else_ty,
+ outer_span,
+ opt_suggest_box_span,
+ }) => {
+ let then_span = self.find_block_span_from_hir_id(then_id);
+ let else_span = self.find_block_span_from_hir_id(else_id);
+ err.span_label(then_span, "expected because of this");
+ if let Some(sp) = outer_span {
+ err.span_label(sp, "`if` and `else` have incompatible types");
+ }
+ self.suggest_remove_semi_or_return_binding(
+ err,
+ Some(then_id),
+ then_ty,
+ then_span,
+ Some(else_id),
+ else_ty,
+ else_span,
+ );
+ if let Some(ret_sp) = opt_suggest_box_span {
+ self.suggest_boxing_for_return_impl_trait(
+ err,
+ ret_sp,
+ [then_span, else_span].into_iter(),
+ );
+ }
+ }
+ ObligationCauseCode::LetElse => {
+ err.help("try adding a diverging expression, such as `return` or `panic!(..)`");
+ err.help("...or use `match` instead of `let...else`");
+ }
+ _ => {
+ if let ObligationCauseCode::BindingObligation(_, span)
+ | ObligationCauseCode::ExprBindingObligation(_, span, ..)
+ = cause.code().peel_derives()
+ && let TypeError::RegionsPlaceholderMismatch = terr
+ {
+ err.span_note( * span,
+ "the lifetime requirement is introduced here");
+ }
+ }
+ }
+ }
+}
+
+impl<'tcx> InferCtxt<'tcx> {
+ /// Given a [`hir::Block`], get the span of its last expression or
+ /// statement, peeling off any inner blocks.
+ pub fn find_block_span(&self, block: &'tcx hir::Block<'tcx>) -> Span {
+ let block = block.innermost_block();
+ if let Some(expr) = &block.expr {
+ expr.span
+ } else if let Some(stmt) = block.stmts.last() {
+ // possibly incorrect trailing `;` in the else arm
+ stmt.span
+ } else {
+ // empty block; point at its entirety
+ block.span
+ }
+ }
+
+ /// Given a [`hir::HirId`] for a block, get the span of its last expression
+ /// or statement, peeling off any inner blocks.
+ pub fn find_block_span_from_hir_id(&self, hir_id: hir::HirId) -> Span {
+ match self.tcx.hir().get(hir_id) {
+ hir::Node::Block(blk) => self.find_block_span(blk),
+ // The parser was in a weird state if either of these happens, but
+ // it's better not to panic.
+ hir::Node::Expr(e) => e.span,
+ _ => rustc_span::DUMMY_SP,
+ }
+ }
+}
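For context, a hypothetical snippet (not taken from the rustc test suite) that reaches the `TryDesugar` arm handled above. It intentionally fails to compile; because the operand of `?` already has the function's expected return type, the new code can offer the machine-applicable "try removing this `?`" fix.

```rust
// Intentionally does not compile: `x?` unwraps to `i32`, but the function
// returns `Result<i32, ()>`. Since `x` itself already has the expected type,
// the diagnostic suggests removing the `?`.
fn last_result() -> Result<i32, ()> {
    let x: Result<i32, ()> = Ok(1);
    x?
}

fn main() {
    let _ = last_result();
}
```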
diff --git a/compiler/rustc_infer/src/infer/opaque_types.rs b/compiler/rustc_infer/src/infer/opaque_types.rs
index a982f11f7..524f7a39e 100644
--- a/compiler/rustc_infer/src/infer/opaque_types.rs
+++ b/compiler/rustc_infer/src/infer/opaque_types.rs
@@ -332,32 +332,11 @@ impl<'tcx> InferCtxt<'tcx> {
concrete_ty: Ty<'tcx>,
span: Span,
) {
- let def_id = opaque_type_key.def_id;
-
- let tcx = self.tcx;
-
let concrete_ty = self.resolve_vars_if_possible(concrete_ty);
-
debug!(?concrete_ty);
- let first_own_region = match self.opaque_ty_origin_unchecked(def_id, span) {
- hir::OpaqueTyOrigin::FnReturn(..) | hir::OpaqueTyOrigin::AsyncFn(..) => {
- // We lower
- //
- // fn foo<'l0..'ln>() -> impl Trait<'l0..'lm>
- //
- // into
- //
- // type foo::<'p0..'pn>::Foo<'q0..'qm>
- // fn foo<l0..'ln>() -> foo::<'static..'static>::Foo<'l0..'lm>.
- //
- // For these types we only iterate over `'l0..lm` below.
- tcx.generics_of(def_id).parent_count
- }
- // These opaque type inherit all lifetime parameters from their
- // parent, so we have to check them all.
- hir::OpaqueTyOrigin::TyAlias => 0,
- };
+ let variances = self.tcx.variances_of(opaque_type_key.def_id);
+ debug!(?variances);
// For a case like `impl Foo<'a, 'b>`, we would generate a constraint
// `'r in ['a, 'b, 'static]` for each region `'r` that appears in the
@@ -370,9 +349,12 @@ impl<'tcx> InferCtxt<'tcx> {
// type can be equal to any of the region parameters of the
// opaque type definition.
let choice_regions: Lrc<Vec<ty::Region<'tcx>>> = Lrc::new(
- opaque_type_key.substs[first_own_region..]
+ opaque_type_key
+ .substs
.iter()
- .filter_map(|arg| match arg.unpack() {
+ .enumerate()
+ .filter(|(i, _)| variances[*i] == ty::Variance::Invariant)
+ .filter_map(|(_, arg)| match arg.unpack() {
GenericArgKind::Lifetime(r) => Some(r),
GenericArgKind::Type(_) | GenericArgKind::Const(_) => None,
})
@@ -381,6 +363,7 @@ impl<'tcx> InferCtxt<'tcx> {
);
concrete_ty.visit_with(&mut ConstrainOpaqueTypeRegionVisitor {
+ tcx: self.tcx,
op: |r| self.member_constraint(opaque_type_key, span, concrete_ty, r, &choice_regions),
});
}
@@ -427,24 +410,25 @@ impl<'tcx> InferCtxt<'tcx> {
}
}
-// Visitor that requires that (almost) all regions in the type visited outlive
-// `least_region`. We cannot use `push_outlives_components` because regions in
-// closure signatures are not included in their outlives components. We need to
-// ensure all regions outlive the given bound so that we don't end up with,
-// say, `ReVar` appearing in a return type and causing ICEs when other
-// functions end up with region constraints involving regions from other
-// functions.
-//
-// We also cannot use `for_each_free_region` because for closures it includes
-// the regions parameters from the enclosing item.
-//
-// We ignore any type parameters because impl trait values are assumed to
-// capture all the in-scope type parameters.
-struct ConstrainOpaqueTypeRegionVisitor<OP> {
- op: OP,
+/// Visitor that requires that (almost) all regions in the type visited outlive
+/// `least_region`. We cannot use `push_outlives_components` because regions in
+/// closure signatures are not included in their outlives components. We need to
+/// ensure all regions outlive the given bound so that we don't end up with,
+/// say, `ReVar` appearing in a return type and causing ICEs when other
+/// functions end up with region constraints involving regions from other
+/// functions.
+///
+/// We also cannot use `for_each_free_region` because for closures it includes
+/// the regions parameters from the enclosing item.
+///
+/// We ignore any type parameters because impl trait values are assumed to
+/// capture all the in-scope type parameters.
+pub struct ConstrainOpaqueTypeRegionVisitor<'tcx, OP: FnMut(ty::Region<'tcx>)> {
+ pub tcx: TyCtxt<'tcx>,
+ pub op: OP,
}
-impl<'tcx, OP> TypeVisitor<'tcx> for ConstrainOpaqueTypeRegionVisitor<OP>
+impl<'tcx, OP> TypeVisitor<'tcx> for ConstrainOpaqueTypeRegionVisitor<'tcx, OP>
where
OP: FnMut(ty::Region<'tcx>),
{
@@ -490,6 +474,31 @@ where
substs.as_generator().yield_ty().visit_with(self);
substs.as_generator().resume_ty().visit_with(self);
}
+
+ ty::Opaque(def_id, ref substs) => {
+ // Skip lifetime parameters that are not captured.
+ let variances = self.tcx.variances_of(*def_id);
+
+ for (v, s) in std::iter::zip(variances, substs.iter()) {
+ if *v != ty::Variance::Bivariant {
+ s.visit_with(self);
+ }
+ }
+ }
+
+ ty::Projection(proj)
+ if self.tcx.def_kind(proj.item_def_id) == DefKind::ImplTraitPlaceholder =>
+ {
+ // Skip lifetime parameters that are not captured.
+ let variances = self.tcx.variances_of(proj.item_def_id);
+
+ for (v, s) in std::iter::zip(variances, proj.substs.iter()) {
+ if *v != ty::Variance::Bivariant {
+ s.visit_with(self);
+ }
+ }
+ }
+
_ => {
ty.super_visit_with(self);
}
@@ -586,7 +595,9 @@ impl<'tcx> InferCtxt<'tcx> {
ct_op: |ct| ct,
});
- if let ty::PredicateKind::Projection(projection) = predicate.kind().skip_binder() {
+ if let ty::PredicateKind::Clause(ty::Clause::Projection(projection)) =
+ predicate.kind().skip_binder()
+ {
if projection.term.references_error() {
// No point on adding these obligations since there's a type error involved.
return Ok(InferOk { value: (), obligations: vec![] });
@@ -595,7 +606,12 @@ impl<'tcx> InferCtxt<'tcx> {
}
// Require that the predicate holds for the concrete type.
debug!(?predicate);
- obligations.push(traits::Obligation::new(cause.clone(), param_env, predicate));
+ obligations.push(traits::Obligation::new(
+ self.tcx,
+ cause.clone(),
+ param_env,
+ predicate,
+ ));
}
Ok(InferOk { value: (), obligations })
}
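A standalone sketch of the new selection logic, mirroring the `enumerate`/`filter` chain above. The `Variance` and `GenericArg` types below are toy stand-ins, not the rustc items: only substitutions whose variance is invariant contribute lifetimes to the member-constraint choice set.

```rust
// Toy model: keep only the lifetimes whose variance marks them as captured.
#[derive(Clone, Copy, PartialEq)]
enum Variance {
    Invariant,
    Bivariant,
}

enum GenericArg<'a> {
    Lifetime(&'a str),
    Type(&'a str),
}

fn choice_regions<'a>(variances: &[Variance], substs: &[GenericArg<'a>]) -> Vec<&'a str> {
    substs
        .iter()
        .enumerate()
        // Only invariant parameters are captured by the opaque type, so only
        // they may appear in member constraints.
        .filter(|(i, _)| variances[*i] == Variance::Invariant)
        .filter_map(|(_, arg)| match arg {
            GenericArg::Lifetime(r) => Some(*r),
            GenericArg::Type(_) => None,
        })
        .collect()
}

fn main() {
    let variances = [Variance::Invariant, Variance::Bivariant, Variance::Invariant];
    let substs = [
        GenericArg::Lifetime("'a"),
        GenericArg::Lifetime("'b"),
        GenericArg::Type("T"),
    ];
    // `'b` is bivariant (not captured), `T` is not a lifetime.
    assert_eq!(choice_regions(&variances, &substs), vec!["'a"]);
}
```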
diff --git a/compiler/rustc_infer/src/infer/opaque_types/table.rs b/compiler/rustc_infer/src/infer/opaque_types/table.rs
index 4d124554a..c146902d5 100644
--- a/compiler/rustc_infer/src/infer/opaque_types/table.rs
+++ b/compiler/rustc_infer/src/infer/opaque_types/table.rs
@@ -9,10 +9,10 @@ use super::{OpaqueTypeDecl, OpaqueTypeMap};
#[derive(Default, Debug, Clone)]
pub struct OpaqueTypeStorage<'tcx> {
- // Opaque types found in explicit return types and their
- // associated fresh inference variable. Writeback resolves these
- // variables to get the concrete type, which can be used to
- // 'de-opaque' OpaqueTypeDecl, after typeck is done with all functions.
+ /// Opaque types found in explicit return types and their
+ /// associated fresh inference variable. Writeback resolves these
+ /// variables to get the concrete type, which can be used to
+ /// 'de-opaque' OpaqueTypeDecl, after typeck is done with all functions.
pub opaque_types: OpaqueTypeMap<'tcx>,
}
diff --git a/compiler/rustc_infer/src/infer/outlives/mod.rs b/compiler/rustc_infer/src/infer/outlives/mod.rs
index 2d19d1823..4daa25767 100644
--- a/compiler/rustc_infer/src/infer/outlives/mod.rs
+++ b/compiler/rustc_infer/src/infer/outlives/mod.rs
@@ -19,19 +19,21 @@ pub fn explicit_outlives_bounds<'tcx>(
.map(ty::Predicate::kind)
.filter_map(ty::Binder::no_bound_vars)
.filter_map(move |kind| match kind {
- ty::PredicateKind::Projection(..)
- | ty::PredicateKind::Trait(..)
+ ty::PredicateKind::Clause(ty::Clause::Projection(..))
+ | ty::PredicateKind::Clause(ty::Clause::Trait(..))
| ty::PredicateKind::Coerce(..)
| ty::PredicateKind::Subtype(..)
| ty::PredicateKind::WellFormed(..)
| ty::PredicateKind::ObjectSafe(..)
| ty::PredicateKind::ClosureKind(..)
- | ty::PredicateKind::TypeOutlives(..)
+ | ty::PredicateKind::Clause(ty::Clause::TypeOutlives(..))
| ty::PredicateKind::ConstEvaluatable(..)
| ty::PredicateKind::ConstEquate(..)
+ | ty::PredicateKind::Ambiguous
| ty::PredicateKind::TypeWellFormedFromEnv(..) => None,
- ty::PredicateKind::RegionOutlives(ty::OutlivesPredicate(r_a, r_b)) => {
- Some(OutlivesBound::RegionSubRegion(r_b, r_a))
- }
+ ty::PredicateKind::Clause(ty::Clause::RegionOutlives(ty::OutlivesPredicate(
+ r_a,
+ r_b,
+ ))) => Some(OutlivesBound::RegionSubRegion(r_b, r_a)),
})
}
diff --git a/compiler/rustc_infer/src/infer/outlives/obligations.rs b/compiler/rustc_infer/src/infer/outlives/obligations.rs
index 6ca884799..abb46ce3b 100644
--- a/compiler/rustc_infer/src/infer/outlives/obligations.rs
+++ b/compiler/rustc_infer/src/infer/outlives/obligations.rs
@@ -68,6 +68,7 @@ use crate::infer::{
};
use crate::traits::{ObligationCause, ObligationCauseCode};
use rustc_data_structures::undo_log::UndoLogs;
+use rustc_errors::ErrorGuaranteed;
use rustc_hir::def_id::DefId;
use rustc_hir::def_id::LocalDefId;
use rustc_middle::mir::ConstraintCategory;
@@ -177,7 +178,7 @@ impl<'tcx> InferCtxt<'tcx> {
&self,
generic_param_scope: LocalDefId,
outlives_env: &OutlivesEnvironment<'tcx>,
- ) {
+ ) -> Option<ErrorGuaranteed> {
self.process_registered_region_obligations(
outlives_env.region_bound_pairs(),
outlives_env.param_env,
diff --git a/compiler/rustc_infer/src/infer/outlives/test_type_match.rs b/compiler/rustc_infer/src/infer/outlives/test_type_match.rs
index a5c21f0fb..10b474efd 100644
--- a/compiler/rustc_infer/src/infer/outlives/test_type_match.rs
+++ b/compiler/rustc_infer/src/infer/outlives/test_type_match.rs
@@ -136,6 +136,11 @@ impl<'tcx> TypeRelation<'tcx> for Match<'tcx> {
fn tag(&self) -> &'static str {
"Match"
}
+
+ fn intercrate(&self) -> bool {
+ false
+ }
+
fn tcx(&self) -> TyCtxt<'tcx> {
self.tcx
}
@@ -146,14 +151,21 @@ impl<'tcx> TypeRelation<'tcx> for Match<'tcx> {
true
} // irrelevant
+ fn mark_ambiguous(&mut self) {
+ bug!()
+ }
+
+ #[instrument(level = "trace", skip(self))]
fn relate_with_variance<T: Relate<'tcx>>(
&mut self,
- _: ty::Variance,
+ variance: ty::Variance,
_: ty::VarianceDiagInfo<'tcx>,
a: T,
b: T,
) -> RelateResult<'tcx, T> {
- self.relate(a, b)
+ // Opaque type substs have lifetime parameters.
+ // We must not check that they are equal, as we never insert anything to make them so.
+ if variance != ty::Bivariant { self.relate(a, b) } else { Ok(a) }
}
#[instrument(skip(self), level = "debug")]
diff --git a/compiler/rustc_infer/src/infer/projection.rs b/compiler/rustc_infer/src/infer/projection.rs
index 9f12bc972..eb6deee29 100644
--- a/compiler/rustc_infer/src/infer/projection.rs
+++ b/compiler/rustc_infer/src/infer/projection.rs
@@ -1,5 +1,5 @@
use rustc_middle::traits::ObligationCause;
-use rustc_middle::ty::{self, ToPredicate, Ty};
+use rustc_middle::ty::{self, Ty};
use crate::traits::{Obligation, PredicateObligation};
@@ -28,12 +28,8 @@ impl<'tcx> InferCtxt<'tcx> {
});
let projection =
ty::Binder::dummy(ty::ProjectionPredicate { projection_ty, term: ty_var.into() });
- let obligation = Obligation::with_depth(
- cause,
- recursion_depth,
- param_env,
- projection.to_predicate(self.tcx),
- );
+ let obligation =
+ Obligation::with_depth(self.tcx, cause, recursion_depth, param_env, projection);
obligations.push(obligation);
ty_var
}
diff --git a/compiler/rustc_infer/src/infer/region_constraints/leak_check.rs b/compiler/rustc_infer/src/infer/region_constraints/leak_check.rs
index 90858e307..22b4bbb17 100644
--- a/compiler/rustc_infer/src/infer/region_constraints/leak_check.rs
+++ b/compiler/rustc_infer/src/infer/region_constraints/leak_check.rs
@@ -1,6 +1,7 @@
use super::*;
use crate::infer::CombinedSnapshot;
use rustc_data_structures::{
+ fx::FxIndexMap,
graph::{scc::Sccs, vec_graph::VecGraph},
undo_log::UndoLogs,
};
@@ -371,7 +372,7 @@ rustc_index::newtype_index! {
/// an edge `R1 -> R2` in the graph.
struct MiniGraph<'tcx> {
/// Map from a region to the index of the node in the graph.
- nodes: FxHashMap<ty::Region<'tcx>, LeakCheckNode>,
+ nodes: FxIndexMap<ty::Region<'tcx>, LeakCheckNode>,
/// Map from node index to SCC, and stores the successors of each SCC. All
/// the regions in the same SCC are equal to one another, and if `S1 -> S2`,
@@ -388,7 +389,7 @@ impl<'tcx> MiniGraph<'tcx> {
where
'tcx: 'a,
{
- let mut nodes = FxHashMap::default();
+ let mut nodes = FxIndexMap::default();
let mut edges = Vec::new();
// Note that if `R2: R1`, we get a callback `r1, r2`, so `target` is first parameter.
@@ -438,7 +439,7 @@ impl<'tcx> MiniGraph<'tcx> {
}
fn add_node(
- nodes: &mut FxHashMap<ty::Region<'tcx>, LeakCheckNode>,
+ nodes: &mut FxIndexMap<ty::Region<'tcx>, LeakCheckNode>,
r: ty::Region<'tcx>,
) -> LeakCheckNode {
let l = nodes.len();
diff --git a/compiler/rustc_infer/src/infer/region_constraints/mod.rs b/compiler/rustc_infer/src/infer/region_constraints/mod.rs
index 67b3da687..985c5d360 100644
--- a/compiler/rustc_infer/src/infer/region_constraints/mod.rs
+++ b/compiler/rustc_infer/src/infer/region_constraints/mod.rs
@@ -7,7 +7,7 @@ use super::{
InferCtxtUndoLogs, MiscVariable, RegionVariableOrigin, Rollback, Snapshot, SubregionOrigin,
};
-use rustc_data_structures::fx::{FxHashMap, FxHashSet};
+use rustc_data_structures::fx::{FxHashMap, FxIndexSet};
use rustc_data_structures::intern::Interned;
use rustc_data_structures::sync::Lrc;
use rustc_data_structures::undo_log::UndoLogs;
@@ -125,7 +125,7 @@ pub struct RegionConstraintData<'tcx> {
/// we record the fact that `'a <= 'b` is implied by the fn
/// signature, and then ignore the constraint when solving
/// equations. This is a bit of a hack but seems to work.
- pub givens: FxHashSet<(Region<'tcx>, ty::RegionVid)>,
+ pub givens: FxIndexSet<(Region<'tcx>, ty::RegionVid)>,
}
/// Represents a constraint that influences the inference process.
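The `FxHashMap`/`FxHashSet` to `FxIndexMap`/`FxIndexSet` swaps here (and the removal of the `potential_query_instability` allow further down) are about deterministic iteration order. A rough sketch using the `indexmap` crate directly, which is what the `FxIndex*` aliases wrap (assumed dependency: `indexmap`; the region strings are invented):

```rust
// Unlike a HashMap, an IndexMap iterates in insertion order, so anything
// derived from iteration (node numbering, diagnostics order) is stable
// across runs.
use indexmap::IndexMap;

fn main() {
    let mut nodes: IndexMap<&str, usize> = IndexMap::new();
    for region in ["'static", "'a", "'b"] {
        // Mirror of `add_node`: number each region in first-seen order.
        let idx = nodes.len();
        nodes.entry(region).or_insert(idx);
    }
    let order: Vec<_> = nodes.keys().copied().collect();
    assert_eq!(order, ["'static", "'a", "'b"]);
}
```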
diff --git a/compiler/rustc_infer/src/infer/resolve.rs b/compiler/rustc_infer/src/infer/resolve.rs
index 4db4ff238..8671f8d45 100644
--- a/compiler/rustc_infer/src/infer/resolve.rs
+++ b/compiler/rustc_infer/src/infer/resolve.rs
@@ -1,5 +1,6 @@
use super::type_variable::{TypeVariableOrigin, TypeVariableOriginKind};
use super::{FixupError, FixupResult, InferCtxt, Span};
+use rustc_middle::infer::unify_key::{ConstVariableOrigin, ConstVariableOriginKind};
use rustc_middle::ty::fold::{FallibleTypeFolder, TypeFolder, TypeSuperFoldable};
use rustc_middle::ty::visit::{TypeSuperVisitable, TypeVisitor};
use rustc_middle::ty::{self, Const, InferConst, Ty, TyCtxt, TypeFoldable, TypeVisitable};
@@ -110,48 +111,77 @@ impl<'a, 'tcx> TypeFolder<'tcx> for OpportunisticRegionResolver<'a, 'tcx> {
/// type variables that don't yet have a value. The first unresolved type is stored.
/// It does not construct the fully resolved type (which might
/// involve some hashing and so forth).
-pub struct UnresolvedTypeFinder<'a, 'tcx> {
+pub struct UnresolvedTypeOrConstFinder<'a, 'tcx> {
infcx: &'a InferCtxt<'tcx>,
}
-impl<'a, 'tcx> UnresolvedTypeFinder<'a, 'tcx> {
+impl<'a, 'tcx> UnresolvedTypeOrConstFinder<'a, 'tcx> {
pub fn new(infcx: &'a InferCtxt<'tcx>) -> Self {
- UnresolvedTypeFinder { infcx }
+ UnresolvedTypeOrConstFinder { infcx }
}
}
-impl<'a, 'tcx> TypeVisitor<'tcx> for UnresolvedTypeFinder<'a, 'tcx> {
- type BreakTy = (Ty<'tcx>, Option<Span>);
+impl<'a, 'tcx> TypeVisitor<'tcx> for UnresolvedTypeOrConstFinder<'a, 'tcx> {
+ type BreakTy = (ty::Term<'tcx>, Option<Span>);
fn visit_ty(&mut self, t: Ty<'tcx>) -> ControlFlow<Self::BreakTy> {
let t = self.infcx.shallow_resolve(t);
- if t.has_infer_types() {
- if let ty::Infer(infer_ty) = *t.kind() {
- // Since we called `shallow_resolve` above, this must
- // be an (as yet...) unresolved inference variable.
- let ty_var_span = if let ty::TyVar(ty_vid) = infer_ty {
- let mut inner = self.infcx.inner.borrow_mut();
- let ty_vars = &inner.type_variables();
- if let TypeVariableOrigin {
- kind: TypeVariableOriginKind::TypeParameterDefinition(_, _),
- span,
- } = *ty_vars.var_origin(ty_vid)
- {
- Some(span)
- } else {
- None
- }
+ if let ty::Infer(infer_ty) = *t.kind() {
+ // Since we called `shallow_resolve` above, this must
+ // be an (as yet...) unresolved inference variable.
+ let ty_var_span = if let ty::TyVar(ty_vid) = infer_ty {
+ let mut inner = self.infcx.inner.borrow_mut();
+ let ty_vars = &inner.type_variables();
+ if let TypeVariableOrigin {
+ kind: TypeVariableOriginKind::TypeParameterDefinition(_, _),
+ span,
+ } = *ty_vars.var_origin(ty_vid)
+ {
+ Some(span)
} else {
None
- };
- ControlFlow::Break((t, ty_var_span))
+ }
} else {
- // Otherwise, visit its contents.
- t.super_visit_with(self)
- }
+ None
+ };
+ ControlFlow::Break((t.into(), ty_var_span))
+ } else if !t.has_non_region_infer() {
+ // All const/type variables in inference types must already be resolved,
+ // no need to visit the contents.
+ ControlFlow::CONTINUE
} else {
- // All type variables in inference types must already be resolved,
- // - no need to visit the contents, continue visiting.
+ // Otherwise, keep visiting.
+ t.super_visit_with(self)
+ }
+ }
+
+ fn visit_const(&mut self, ct: ty::Const<'tcx>) -> ControlFlow<Self::BreakTy> {
+ let ct = self.infcx.shallow_resolve(ct);
+ if let ty::ConstKind::Infer(i) = ct.kind() {
+ // Since we called `shallow_resolve` above, this must
+ // be an (as yet...) unresolved inference variable.
+ let ct_var_span = if let ty::InferConst::Var(vid) = i {
+ let mut inner = self.infcx.inner.borrow_mut();
+ let ct_vars = &mut inner.const_unification_table();
+ if let ConstVariableOrigin {
+ span,
+ kind: ConstVariableOriginKind::ConstParameterDefinition(_, _),
+ } = ct_vars.probe_value(vid).origin
+ {
+ Some(span)
+ } else {
+ None
+ }
+ } else {
+ None
+ };
+ ControlFlow::Break((ct.into(), ct_var_span))
+ } else if !ct.has_non_region_infer() {
+ // All const/type variables in inference types must already be resolved,
+ // no need to visit the contents.
ControlFlow::CONTINUE
+ } else {
+ // Otherwise, keep visiting.
+ ct.super_visit_with(self)
}
}
}
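A toy model of the renamed visitor (the `Term` enum below is invented for the sketch; it is not rustc's `Ty`): the walk breaks with the first unresolved type *or* const inference variable, which is the behavioural change behind the `UnresolvedTypeOrConstFinder` rename.

```rust
// Plain-Rust walker, not rustc's TypeVisitor: report the first unresolved
// inference variable found, whether it is a type or a const variable.
use std::ops::ControlFlow;

enum Term {
    InferTy(u32),     // stand-in for an unresolved type variable
    InferConst(u32),  // stand-in for an unresolved const variable
    Tuple(Vec<Term>), // structural node whose children get visited
}

fn find_unresolved(t: &Term) -> ControlFlow<String> {
    match t {
        Term::InferTy(vid) => ControlFlow::Break(format!("type var _{vid}")),
        Term::InferConst(vid) => ControlFlow::Break(format!("const var _{vid}")),
        Term::Tuple(elems) => {
            for e in elems {
                if let ControlFlow::Break(found) = find_unresolved(e) {
                    return ControlFlow::Break(found);
                }
            }
            ControlFlow::Continue(())
        }
    }
}

fn main() {
    let t = Term::Tuple(vec![Term::Tuple(vec![]), Term::InferConst(3)]);
    assert_eq!(find_unresolved(&t), ControlFlow::Break("const var _3".to_string()));
}
```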
diff --git a/compiler/rustc_infer/src/infer/sub.rs b/compiler/rustc_infer/src/infer/sub.rs
index 97354ba5d..2c6987cc3 100644
--- a/compiler/rustc_infer/src/infer/sub.rs
+++ b/compiler/rustc_infer/src/infer/sub.rs
@@ -6,7 +6,7 @@ use crate::traits::Obligation;
use rustc_middle::ty::relate::{Cause, Relate, RelateResult, TypeRelation};
use rustc_middle::ty::visit::TypeVisitable;
use rustc_middle::ty::TyVar;
-use rustc_middle::ty::{self, ToPredicate, Ty, TyCtxt};
+use rustc_middle::ty::{self, Ty, TyCtxt};
use std::mem;
/// Ensures `a` is made a subtype of `b`. Returns `a` on success.
@@ -35,6 +35,11 @@ impl<'tcx> TypeRelation<'tcx> for Sub<'_, '_, 'tcx> {
fn tag(&self) -> &'static str {
"Sub"
}
+
+ fn intercrate(&self) -> bool {
+ self.fields.infcx.intercrate
+ }
+
fn tcx(&self) -> TyCtxt<'tcx> {
self.fields.infcx.tcx
}
@@ -47,6 +52,10 @@ impl<'tcx> TypeRelation<'tcx> for Sub<'_, '_, 'tcx> {
self.a_is_expected
}
+ fn mark_ambiguous(&mut self) {
+ self.fields.mark_ambiguous()
+ }
+
fn with_cause<F, R>(&mut self, cause: Cause, f: F) -> R
where
F: FnOnce(&mut Self) -> R,
@@ -95,14 +104,14 @@ impl<'tcx> TypeRelation<'tcx> for Sub<'_, '_, 'tcx> {
// can't make progress on `A <: B` if both A and B are
// type variables, so record an obligation.
self.fields.obligations.push(Obligation::new(
+ self.tcx(),
self.fields.trace.cause.clone(),
self.fields.param_env,
ty::Binder::dummy(ty::PredicateKind::Subtype(ty::SubtypePredicate {
a_is_expected: self.a_is_expected,
a,
b,
- }))
- .to_predicate(self.tcx()),
+ })),
));
Ok(a)
@@ -116,9 +125,9 @@ impl<'tcx> TypeRelation<'tcx> for Sub<'_, '_, 'tcx> {
Ok(a)
}
- (&ty::Error(_), _) | (_, &ty::Error(_)) => {
- infcx.set_tainted_by_errors();
- Ok(self.tcx().ty_error())
+ (&ty::Error(e), _) | (_, &ty::Error(e)) => {
+ infcx.set_tainted_by_errors(e);
+ Ok(self.tcx().ty_error_with_guaranteed(e))
}
(&ty::Opaque(a_def_id, _), &ty::Opaque(b_def_id, _)) if a_def_id == b_def_id => {
diff --git a/compiler/rustc_infer/src/lib.rs b/compiler/rustc_infer/src/lib.rs
index e040634ed..4c119a443 100644
--- a/compiler/rustc_infer/src/lib.rs
+++ b/compiler/rustc_infer/src/lib.rs
@@ -12,7 +12,6 @@
//!
//! This API is completely unstable and subject to change.
-#![allow(rustc::potential_query_instability)]
#![doc(html_root_url = "https://doc.rust-lang.org/nightly/nightly-rustc/")]
#![feature(box_patterns)]
#![feature(control_flow_enum)]
diff --git a/compiler/rustc_infer/src/traits/engine.rs b/compiler/rustc_infer/src/traits/engine.rs
index b2b985a22..d3519f4b3 100644
--- a/compiler/rustc_infer/src/traits/engine.rs
+++ b/compiler/rustc_infer/src/traits/engine.rs
@@ -8,14 +8,6 @@ use super::FulfillmentError;
use super::{ObligationCause, PredicateObligation};
pub trait TraitEngine<'tcx>: 'tcx {
- fn normalize_projection_type(
- &mut self,
- infcx: &InferCtxt<'tcx>,
- param_env: ty::ParamEnv<'tcx>,
- projection_ty: ty::ProjectionTy<'tcx>,
- cause: ObligationCause<'tcx>,
- ) -> Ty<'tcx>;
-
/// Requires that `ty` must implement the trait with `def_id` in
/// the given environment. This trait must not have any type
/// parameters (except for `Self`).
@@ -27,7 +19,7 @@ pub trait TraitEngine<'tcx>: 'tcx {
def_id: DefId,
cause: ObligationCause<'tcx>,
) {
- let trait_ref = ty::TraitRef { def_id, substs: infcx.tcx.mk_substs_trait(ty, &[]) };
+ let trait_ref = infcx.tcx.mk_trait_ref(def_id, [ty]);
self.register_predicate_obligation(
infcx,
Obligation {
diff --git a/compiler/rustc_infer/src/traits/error_reporting/mod.rs b/compiler/rustc_infer/src/traits/error_reporting/mod.rs
index f8b5009a5..4d5351958 100644
--- a/compiler/rustc_infer/src/traits/error_reporting/mod.rs
+++ b/compiler/rustc_infer/src/traits/error_reporting/mod.rs
@@ -1,7 +1,7 @@
use super::ObjectSafetyViolation;
use crate::infer::InferCtxt;
-use rustc_data_structures::fx::FxHashSet;
+use rustc_data_structures::fx::FxIndexSet;
use rustc_errors::{struct_span_err, DiagnosticBuilder, ErrorGuaranteed, MultiSpan};
use rustc_hir as hir;
use rustc_hir::def_id::{DefId, LocalDefId};
@@ -56,7 +56,7 @@ pub fn report_object_safety_error<'tcx>(
);
err.span_label(span, format!("`{}` cannot be made into an object", trait_str));
- let mut reported_violations = FxHashSet::default();
+ let mut reported_violations = FxIndexSet::default();
let mut multi_span = vec![];
let mut messages = vec![];
for violation in violations {
diff --git a/compiler/rustc_infer/src/traits/mod.rs b/compiler/rustc_infer/src/traits/mod.rs
index c8600ded9..026713b6a 100644
--- a/compiler/rustc_infer/src/traits/mod.rs
+++ b/compiler/rustc_infer/src/traits/mod.rs
@@ -10,7 +10,7 @@ pub mod util;
use rustc_hir as hir;
use rustc_middle::ty::error::{ExpectedFound, TypeError};
-use rustc_middle::ty::{self, Const, Ty, TyCtxt};
+use rustc_middle::ty::{self, Const, ToPredicate, Ty, TyCtxt};
use rustc_span::Span;
pub use self::FulfillmentErrorCode::*;
@@ -70,8 +70,8 @@ impl<'tcx> PredicateObligation<'tcx> {
pub fn without_const(mut self, tcx: TyCtxt<'tcx>) -> PredicateObligation<'tcx> {
self.param_env = self.param_env.without_const();
- if let ty::PredicateKind::Trait(trait_pred) = self.predicate.kind().skip_binder() && trait_pred.is_const_if_const() {
- self.predicate = tcx.mk_predicate(self.predicate.kind().map_bound(|_| ty::PredicateKind::Trait(trait_pred.without_const())));
+ if let ty::PredicateKind::Clause(ty::Clause::Trait(trait_pred)) = self.predicate.kind().skip_binder() && trait_pred.is_const_if_const() {
+ self.predicate = tcx.mk_predicate(self.predicate.kind().map_bound(|_| ty::PredicateKind::Clause(ty::Clause::Trait(trait_pred.without_const()))));
}
self
}
@@ -124,38 +124,41 @@ pub enum FulfillmentErrorCode<'tcx> {
impl<'tcx, O> Obligation<'tcx, O> {
pub fn new(
+ tcx: TyCtxt<'tcx>,
cause: ObligationCause<'tcx>,
param_env: ty::ParamEnv<'tcx>,
- predicate: O,
+ predicate: impl ToPredicate<'tcx, O>,
) -> Obligation<'tcx, O> {
- Obligation { cause, param_env, recursion_depth: 0, predicate }
+ Self::with_depth(tcx, cause, 0, param_env, predicate)
}
pub fn with_depth(
+ tcx: TyCtxt<'tcx>,
cause: ObligationCause<'tcx>,
recursion_depth: usize,
param_env: ty::ParamEnv<'tcx>,
- predicate: O,
+ predicate: impl ToPredicate<'tcx, O>,
) -> Obligation<'tcx, O> {
+ let predicate = predicate.to_predicate(tcx);
Obligation { cause, param_env, recursion_depth, predicate }
}
pub fn misc(
+ tcx: TyCtxt<'tcx>,
span: Span,
body_id: hir::HirId,
param_env: ty::ParamEnv<'tcx>,
- trait_ref: O,
+ trait_ref: impl ToPredicate<'tcx, O>,
) -> Obligation<'tcx, O> {
- Obligation::new(ObligationCause::misc(span, body_id), param_env, trait_ref)
+ Obligation::new(tcx, ObligationCause::misc(span, body_id), param_env, trait_ref)
}
- pub fn with<P>(&self, value: P) -> Obligation<'tcx, P> {
- Obligation {
- cause: self.cause.clone(),
- param_env: self.param_env,
- recursion_depth: self.recursion_depth,
- predicate: value,
- }
+ pub fn with<P>(
+ &self,
+ tcx: TyCtxt<'tcx>,
+ value: impl ToPredicate<'tcx, P>,
+ ) -> Obligation<'tcx, P> {
+ Obligation::with_depth(tcx, self.cause.clone(), self.recursion_depth, self.param_env, value)
}
}
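A minimal, self-contained sketch of the constructor change (the `Tcx`, `Predicate`, and `ToPredicate` items below are stand-ins, not the real rustc types): conversion to the stored predicate moves into `with_depth`, so callers can hand `new`/`with_depth` anything convertible instead of calling `.to_predicate(tcx)` themselves.

```rust
struct Tcx;

#[derive(Debug, PartialEq)]
struct Predicate(String);

// Stand-in for rustc's ToPredicate<'tcx, O>.
trait ToPredicate<O> {
    fn to_predicate(self, tcx: &Tcx) -> O;
}

impl ToPredicate<Predicate> for Predicate {
    fn to_predicate(self, _tcx: &Tcx) -> Predicate {
        self
    }
}

impl ToPredicate<Predicate> for &str {
    fn to_predicate(self, _tcx: &Tcx) -> Predicate {
        Predicate(self.to_string())
    }
}

struct Obligation<O> {
    recursion_depth: usize,
    predicate: O,
}

impl<O> Obligation<O> {
    fn new(tcx: &Tcx, predicate: impl ToPredicate<O>) -> Self {
        Self::with_depth(tcx, 0, predicate)
    }

    fn with_depth(tcx: &Tcx, recursion_depth: usize, predicate: impl ToPredicate<O>) -> Self {
        // Conversion happens here, once, instead of at every call site.
        Obligation { recursion_depth, predicate: predicate.to_predicate(tcx) }
    }
}

fn main() {
    let o: Obligation<Predicate> = Obligation::new(&Tcx, "T: Sized");
    assert_eq!(o.predicate, Predicate("T: Sized".into()));
    assert_eq!(o.recursion_depth, 0);
}
```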
diff --git a/compiler/rustc_infer/src/traits/util.rs b/compiler/rustc_infer/src/traits/util.rs
index e12c069dc..512e6079f 100644
--- a/compiler/rustc_infer/src/traits/util.rs
+++ b/compiler/rustc_infer/src/traits/util.rs
@@ -141,7 +141,7 @@ impl<'tcx> Elaborator<'tcx> {
let bound_predicate = obligation.predicate.kind();
match bound_predicate.skip_binder() {
- ty::PredicateKind::Trait(data) => {
+ ty::PredicateKind::Clause(ty::Clause::Trait(data)) => {
// Get predicates declared on the trait.
let predicates = tcx.super_predicates_of(data.def_id());
@@ -184,7 +184,7 @@ impl<'tcx> Elaborator<'tcx> {
// Currently, we do not "elaborate" predicates like `X -> Y`,
// though conceivably we might.
}
- ty::PredicateKind::Projection(..) => {
+ ty::PredicateKind::Clause(ty::Clause::Projection(..)) => {
// Nothing to elaborate in a projection predicate.
}
ty::PredicateKind::ClosureKind(..) => {
@@ -198,10 +198,13 @@ impl<'tcx> Elaborator<'tcx> {
// Currently, we do not elaborate const-equate
// predicates.
}
- ty::PredicateKind::RegionOutlives(..) => {
+ ty::PredicateKind::Clause(ty::Clause::RegionOutlives(..)) => {
// Nothing to elaborate from `'a: 'b`.
}
- ty::PredicateKind::TypeOutlives(ty::OutlivesPredicate(ty_max, r_min)) => {
+ ty::PredicateKind::Clause(ty::Clause::TypeOutlives(ty::OutlivesPredicate(
+ ty_max,
+ r_min,
+ ))) => {
// We know that `T: 'a` for some type `T`. We can
// often elaborate this. For example, if we know that
// `[U]: 'a`, that implies that `U: 'a`. Similarly, if
@@ -231,16 +234,16 @@ impl<'tcx> Elaborator<'tcx> {
if r.is_late_bound() {
None
} else {
- Some(ty::PredicateKind::RegionOutlives(ty::OutlivesPredicate(
- r, r_min,
+ Some(ty::PredicateKind::Clause(ty::Clause::RegionOutlives(
+ ty::OutlivesPredicate(r, r_min),
)))
}
}
Component::Param(p) => {
let ty = tcx.mk_ty_param(p.index, p.name);
- Some(ty::PredicateKind::TypeOutlives(ty::OutlivesPredicate(
- ty, r_min,
+ Some(ty::PredicateKind::Clause(ty::Clause::TypeOutlives(
+ ty::OutlivesPredicate(ty, r_min),
)))
}
@@ -248,8 +251,8 @@ impl<'tcx> Elaborator<'tcx> {
Component::Opaque(def_id, substs) => {
let ty = tcx.mk_opaque(def_id, substs);
- Some(ty::PredicateKind::TypeOutlives(ty::OutlivesPredicate(
- ty, r_min,
+ Some(ty::PredicateKind::Clause(ty::Clause::TypeOutlives(
+ ty::OutlivesPredicate(ty, r_min),
)))
}
@@ -258,8 +261,8 @@ impl<'tcx> Elaborator<'tcx> {
// With this, we can deduce that `<Bar as Baz>::Assoc: 'a`.
let ty =
tcx.mk_projection(projection.item_def_id, projection.substs);
- Some(ty::PredicateKind::TypeOutlives(ty::OutlivesPredicate(
- ty, r_min,
+ Some(ty::PredicateKind::Clause(ty::Clause::TypeOutlives(
+ ty::OutlivesPredicate(ty, r_min),
)))
}
@@ -285,6 +288,7 @@ impl<'tcx> Elaborator<'tcx> {
ty::PredicateKind::TypeWellFormedFromEnv(..) => {
// Nothing to elaborate
}
+ ty::PredicateKind::Ambiguous => {}
}
}
}
diff --git a/compiler/rustc_interface/Cargo.toml b/compiler/rustc_interface/Cargo.toml
index 6a4c5b4d3..e67dec31d 100644
--- a/compiler/rustc_interface/Cargo.toml
+++ b/compiler/rustc_interface/Cargo.toml
@@ -48,15 +48,9 @@ rustc_resolve = { path = "../rustc_resolve" }
rustc_trait_selection = { path = "../rustc_trait_selection" }
rustc_ty_utils = { path = "../rustc_ty_utils" }
-[target.'cfg(unix)'.dependencies]
-libc = "0.2"
-
-[target.'cfg(windows)'.dependencies]
-winapi = { version = "0.3", features = ["libloaderapi"] }
-
[dev-dependencies]
rustc_target = { path = "../rustc_target" }
[features]
llvm = ['rustc_codegen_llvm']
-rustc_use_parallel_compiler = ['rayon', 'rustc-rayon-core', 'rustc_query_impl/rustc_use_parallel_compiler']
+rustc_use_parallel_compiler = ['rayon', 'rustc-rayon-core', 'rustc_query_impl/rustc_use_parallel_compiler', 'rustc_errors/rustc_use_parallel_compiler']
diff --git a/compiler/rustc_interface/src/interface.rs b/compiler/rustc_interface/src/interface.rs
index 89aaa0b95..4c22ab68a 100644
--- a/compiler/rustc_interface/src/interface.rs
+++ b/compiler/rustc_interface/src/interface.rs
@@ -194,7 +194,7 @@ pub fn parse_check_cfg(specs: Vec<String>) -> CheckCfg {
for val in values {
if let Some(LitKind::Str(s, _)) =
- val.literal().map(|lit| &lit.kind)
+ val.lit().map(|lit| &lit.kind)
{
ident_values.insert(s.to_string());
} else {
@@ -304,7 +304,7 @@ pub fn run_compiler<R: Send>(config: Config, f: impl FnOnce(&Compiler) -> R + Se
parse_sess_created(&mut sess.parse_sess);
}
- let temps_dir = sess.opts.unstable_opts.temps_dir.as_ref().map(|o| PathBuf::from(&o));
+ let temps_dir = sess.opts.unstable_opts.temps_dir.as_deref().map(PathBuf::from);
let compiler = Compiler {
sess: Lrc::new(sess),
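The `temps_dir` change above (and the `codegen_backend` one later in `util.rs`) is the same `Option::as_deref` idiom; a small standalone illustration, with the path value invented for the sketch:

```rust
use std::path::PathBuf;

fn main() {
    let temps_dir: Option<String> = Some("/tmp/rustc-temps".to_string());

    // Before: borrow the String and build a PathBuf through a closure.
    let before: Option<PathBuf> = temps_dir.as_ref().map(|o| PathBuf::from(&o));
    // After: deref to &str first, so PathBuf::from can be passed directly.
    let after: Option<PathBuf> = temps_dir.as_deref().map(PathBuf::from);

    assert_eq!(before, after);
}
```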
diff --git a/compiler/rustc_interface/src/lib.rs b/compiler/rustc_interface/src/lib.rs
index a41a749ee..542b638bb 100644
--- a/compiler/rustc_interface/src/lib.rs
+++ b/compiler/rustc_interface/src/lib.rs
@@ -1,4 +1,5 @@
#![feature(box_patterns)]
+#![feature(decl_macro)]
#![feature(internal_output_capture)]
#![feature(thread_spawn_unchecked)]
#![feature(once_cell)]
diff --git a/compiler/rustc_interface/src/passes.rs b/compiler/rustc_interface/src/passes.rs
index 7f1d21bf1..f808c1438 100644
--- a/compiler/rustc_interface/src/passes.rs
+++ b/compiler/rustc_interface/src/passes.rs
@@ -12,6 +12,7 @@ use rustc_ast::{self as ast, visit};
use rustc_borrowck as mir_borrowck;
use rustc_codegen_ssa::traits::CodegenBackend;
use rustc_data_structures::parallel;
+use rustc_data_structures::steal::Steal;
use rustc_data_structures::sync::{Lrc, OnceCell, WorkerLocal};
use rustc_errors::{ErrorGuaranteed, PResult};
use rustc_expand::base::{ExtCtxt, LintStoreExpand, ResolverExpand};
@@ -157,7 +158,7 @@ pub fn create_resolver(
sess: Lrc<Session>,
metadata_loader: Box<MetadataLoaderDyn>,
krate: &ast::Crate,
- crate_name: &str,
+ crate_name: Symbol,
) -> BoxedResolver {
trace!("create_resolver");
BoxedResolver::new(sess, move |sess, resolver_arenas| {
@@ -170,7 +171,7 @@ pub fn register_plugins<'a>(
metadata_loader: &'a dyn MetadataLoader,
register_lints: impl Fn(&Session, &mut LintStore),
mut krate: ast::Crate,
- crate_name: &str,
+ crate_name: Symbol,
) -> Result<(ast::Crate, LintStore)> {
krate = sess.time("attributes_injection", || {
rustc_builtin_macros::cmdline_attrs::inject(
@@ -207,10 +208,7 @@ pub fn register_plugins<'a>(
});
}
- let mut lint_store = rustc_lint::new_lint_store(
- sess.opts.unstable_opts.no_interleave_lints,
- sess.enable_internal_lints(),
- );
+ let mut lint_store = rustc_lint::new_lint_store(sess.enable_internal_lints());
register_lints(sess, &mut lint_store);
let registrars =
@@ -230,19 +228,21 @@ fn pre_expansion_lint<'a>(
lint_store: &LintStore,
registered_tools: &RegisteredTools,
check_node: impl EarlyCheckNode<'a>,
- node_name: &str,
+ node_name: Symbol,
) {
- sess.prof.generic_activity_with_arg("pre_AST_expansion_lint_checks", node_name).run(|| {
- rustc_lint::check_ast_node(
- sess,
- true,
- lint_store,
- registered_tools,
- None,
- rustc_lint::BuiltinCombinedPreExpansionLintPass::new(),
- check_node,
- );
- });
+ sess.prof.generic_activity_with_arg("pre_AST_expansion_lint_checks", node_name.as_str()).run(
+ || {
+ rustc_lint::check_ast_node(
+ sess,
+ true,
+ lint_store,
+ registered_tools,
+ None,
+ rustc_lint::BuiltinCombinedPreExpansionLintPass::new(),
+ check_node,
+ );
+ },
+ );
}
// Cannot implement directly for `LintStore` due to trait coherence.
@@ -256,7 +256,7 @@ impl LintStoreExpand for LintStoreExpandImpl<'_> {
node_id: ast::NodeId,
attrs: &[ast::Attribute],
items: &[rustc_ast::ptr::P<ast::Item>],
- name: &str,
+ name: Symbol,
) {
pre_expansion_lint(sess, self.0, registered_tools, (node_id, attrs, items), name);
}
@@ -270,7 +270,7 @@ pub fn configure_and_expand(
sess: &Session,
lint_store: &LintStore,
mut krate: ast::Crate,
- crate_name: &str,
+ crate_name: Symbol,
resolver: &mut Resolver<'_>,
) -> Result<ast::Crate> {
trace!("configure_and_expand");
@@ -464,7 +464,7 @@ fn generated_output_paths(
sess: &Session,
outputs: &OutputFilenames,
exact_name: bool,
- crate_name: &str,
+ crate_name: Symbol,
) -> Vec<PathBuf> {
let mut out_filenames = Vec::new();
for output_type in sess.opts.output_types.keys() {
@@ -663,7 +663,7 @@ pub fn prepare_outputs(
compiler: &Compiler,
krate: &ast::Crate,
boxed_resolver: &RefCell<BoxedResolver>,
- crate_name: &str,
+ crate_name: Symbol,
) -> Result<OutputFilenames> {
let _timer = sess.timer("prepare_outputs");
@@ -773,7 +773,7 @@ pub fn create_global_ctxt<'tcx>(
dep_graph: DepGraph,
resolver: Rc<RefCell<BoxedResolver>>,
outputs: OutputFilenames,
- crate_name: &str,
+ crate_name: Symbol,
queries: &'tcx OnceCell<TcxQueries<'tcx>>,
global_ctxt: &'tcx OnceCell<GlobalCtxt<'tcx>>,
arena: &'tcx WorkerLocal<Arena<'tcx>>,
@@ -804,6 +804,12 @@ pub fn create_global_ctxt<'tcx>(
TcxQueries::new(local_providers, extern_providers, query_result_on_disk_cache)
});
+ let ty::ResolverOutputs {
+ definitions,
+ global_ctxt: untracked_resolutions,
+ ast_lowering: untracked_resolver_for_lowering,
+ } = resolver_outputs;
+
let gcx = sess.time("setup_global_ctxt", || {
global_ctxt.get_or_init(move || {
TyCtxt::create_global_ctxt(
@@ -811,7 +817,8 @@ pub fn create_global_ctxt<'tcx>(
lint_store,
arena,
hir_arena,
- resolver_outputs,
+ definitions,
+ untracked_resolutions,
krate,
dep_graph,
queries.on_disk_cache.as_ref().map(OnDiskCache::as_dyn),
@@ -823,7 +830,12 @@ pub fn create_global_ctxt<'tcx>(
})
});
- QueryContext { gcx }
+ let mut qcx = QueryContext { gcx };
+ qcx.enter(|tcx| {
+ tcx.feed_unit_query()
+ .resolver_for_lowering(tcx.arena.alloc(Steal::new(untracked_resolver_for_lowering)))
+ });
+ qcx
}
/// Runs the resolution, type-checking, region checking and other
@@ -968,12 +980,10 @@ fn analysis(tcx: TyCtxt<'_>, (): ()) -> Result<()> {
pub fn start_codegen<'tcx>(
codegen_backend: &dyn CodegenBackend,
tcx: TyCtxt<'tcx>,
- outputs: &OutputFilenames,
) -> Box<dyn Any> {
info!("Pre-codegen\n{:?}", tcx.debug_stats());
- let (metadata, need_metadata_module) =
- rustc_metadata::fs::encode_and_write_metadata(tcx, outputs);
+ let (metadata, need_metadata_module) = rustc_metadata::fs::encode_and_write_metadata(tcx);
let codegen = tcx.sess.time("codegen_crate", move || {
codegen_backend.codegen_crate(tcx, metadata, need_metadata_module)
@@ -989,7 +999,7 @@ pub fn start_codegen<'tcx>(
info!("Post-codegen\n{:?}", tcx.debug_stats());
if tcx.sess.opts.output_types.contains_key(&OutputType::Mir) {
- if let Err(error) = rustc_mir_transform::dump_mir::emit_mir(tcx, outputs) {
+ if let Err(error) = rustc_mir_transform::dump_mir::emit_mir(tcx) {
tcx.sess.emit_err(CantEmitMIR { error });
tcx.sess.abort_if_errors();
}
diff --git a/compiler/rustc_interface/src/proc_macro_decls.rs b/compiler/rustc_interface/src/proc_macro_decls.rs
index 4c236c693..9bf7778bf 100644
--- a/compiler/rustc_interface/src/proc_macro_decls.rs
+++ b/compiler/rustc_interface/src/proc_macro_decls.rs
@@ -4,21 +4,16 @@ use rustc_middle::ty::TyCtxt;
use rustc_span::symbol::sym;
fn proc_macro_decls_static(tcx: TyCtxt<'_>, (): ()) -> Option<LocalDefId> {
- let mut finder = Finder { tcx, decls: None };
+ let mut decls = None;
for id in tcx.hir().items() {
- let attrs = finder.tcx.hir().attrs(id.hir_id());
- if finder.tcx.sess.contains_name(attrs, sym::rustc_proc_macro_decls) {
- finder.decls = Some(id.owner_id.def_id);
+ let attrs = tcx.hir().attrs(id.hir_id());
+ if tcx.sess.contains_name(attrs, sym::rustc_proc_macro_decls) {
+ decls = Some(id.owner_id.def_id);
}
}
- finder.decls
-}
-
-struct Finder<'tcx> {
- tcx: TyCtxt<'tcx>,
- decls: Option<LocalDefId>,
+ decls
}
pub(crate) fn provide(providers: &mut Providers) {
diff --git a/compiler/rustc_interface/src/queries.rs b/compiler/rustc_interface/src/queries.rs
index 91d180e1e..39e1f2204 100644
--- a/compiler/rustc_interface/src/queries.rs
+++ b/compiler/rustc_interface/src/queries.rs
@@ -17,9 +17,11 @@ use rustc_query_impl::Queries as TcxQueries;
use rustc_session::config::{self, OutputFilenames, OutputType};
use rustc_session::{output::find_crate_name, Session};
use rustc_span::symbol::sym;
+use rustc_span::Symbol;
use std::any::Any;
use std::cell::{Ref, RefCell, RefMut};
use std::rc::Rc;
+use std::sync::Arc;
/// Represent the result of a query.
///
@@ -33,11 +35,7 @@ pub struct Query<T> {
impl<T> Query<T> {
fn compute<F: FnOnce() -> Result<T>>(&self, f: F) -> Result<&Query<T>> {
- let mut result = self.result.borrow_mut();
- if result.is_none() {
- *result = Some(f());
- }
- result.as_ref().unwrap().as_ref().map(|_| self).map_err(|err| *err)
+ self.result.borrow_mut().get_or_insert_with(f).as_ref().map(|_| self).map_err(|&err| err)
}
/// Takes ownership of the query result. Further attempts to take or peek the query
@@ -77,7 +75,7 @@ pub struct Queries<'tcx> {
dep_graph_future: Query<Option<DepGraphFuture>>,
parse: Query<ast::Crate>,
- crate_name: Query<String>,
+ crate_name: Query<Symbol>,
register_plugins: Query<(ast::Crate, Lrc<LintStore>)>,
expansion: Query<(Lrc<ast::Crate>, Rc<RefCell<BoxedResolver>>, Lrc<LintStore>)>,
dep_graph: Query<DepGraph>,
@@ -138,7 +136,7 @@ impl<'tcx> Queries<'tcx> {
&*self.codegen_backend().metadata_loader(),
self.compiler.register_lints.as_deref().unwrap_or_else(|| empty),
krate,
- &crate_name,
+ crate_name,
)?;
// Compute the dependency graph (in the background). We want to do
@@ -152,7 +150,7 @@ impl<'tcx> Queries<'tcx> {
})
}
- pub fn crate_name(&self) -> Result<&Query<String>> {
+ pub fn crate_name(&self) -> Result<&Query<Symbol>> {
self.crate_name.compute(|| {
Ok({
let parse_result = self.parse()?;
@@ -168,7 +166,7 @@ impl<'tcx> Queries<'tcx> {
) -> Result<&Query<(Lrc<ast::Crate>, Rc<RefCell<BoxedResolver>>, Lrc<LintStore>)>> {
trace!("expansion");
self.expansion.compute(|| {
- let crate_name = self.crate_name()?.peek().clone();
+ let crate_name = *self.crate_name()?.peek();
let (krate, lint_store) = self.register_plugins()?.take();
let _timer = self.session().timer("configure_and_expand");
let sess = self.session();
@@ -176,10 +174,10 @@ impl<'tcx> Queries<'tcx> {
sess.clone(),
self.codegen_backend().metadata_loader(),
&krate,
- &crate_name,
+ crate_name,
);
let krate = resolver.access(|resolver| {
- passes::configure_and_expand(sess, &lint_store, krate, &crate_name, resolver)
+ passes::configure_and_expand(sess, &lint_store, krate, crate_name, resolver)
})?;
Ok((Lrc::new(krate), Rc::new(RefCell::new(resolver)), lint_store))
})
@@ -204,21 +202,21 @@ impl<'tcx> Queries<'tcx> {
pub fn prepare_outputs(&self) -> Result<&Query<OutputFilenames>> {
self.prepare_outputs.compute(|| {
let (krate, boxed_resolver, _) = &*self.expansion()?.peek();
- let crate_name = self.crate_name()?.peek();
+ let crate_name = *self.crate_name()?.peek();
passes::prepare_outputs(
self.session(),
self.compiler,
krate,
&*boxed_resolver,
- &crate_name,
+ crate_name,
)
})
}
pub fn global_ctxt(&'tcx self) -> Result<&Query<QueryContext<'tcx>>> {
self.global_ctxt.compute(|| {
- let crate_name = self.crate_name()?.peek().clone();
- let outputs = self.prepare_outputs()?.peek().clone();
+ let crate_name = *self.crate_name()?.peek();
+ let outputs = self.prepare_outputs()?.take();
let dep_graph = self.dep_graph()?.peek().clone();
let (krate, resolver, lint_store) = self.expansion()?.take();
Ok(passes::create_global_ctxt(
@@ -228,7 +226,7 @@ impl<'tcx> Queries<'tcx> {
dep_graph,
resolver,
outputs,
- &crate_name,
+ crate_name,
&self.queries,
&self.gcx,
&self.arena,
@@ -239,7 +237,6 @@ impl<'tcx> Queries<'tcx> {
pub fn ongoing_codegen(&'tcx self) -> Result<&Query<Box<dyn Any>>> {
self.ongoing_codegen.compute(|| {
- let outputs = self.prepare_outputs()?;
self.global_ctxt()?.peek_mut().enter(|tcx| {
tcx.analysis(()).ok();
@@ -253,7 +250,7 @@ impl<'tcx> Queries<'tcx> {
// Hook for UI tests.
Self::check_for_rustc_errors_attr(tcx);
- Ok(passes::start_codegen(&***self.codegen_backend(), tcx, &*outputs.peek()))
+ Ok(passes::start_codegen(&***self.codegen_backend(), tcx))
})
})
}
@@ -297,8 +294,10 @@ impl<'tcx> Queries<'tcx> {
let codegen_backend = self.codegen_backend().clone();
let dep_graph = self.dep_graph()?.peek().clone();
- let prepare_outputs = self.prepare_outputs()?.take();
- let crate_hash = self.global_ctxt()?.peek_mut().enter(|tcx| tcx.crate_hash(LOCAL_CRATE));
+ let (crate_hash, prepare_outputs) = self
+ .global_ctxt()?
+ .peek_mut()
+ .enter(|tcx| (tcx.crate_hash(LOCAL_CRATE), tcx.output_filenames(()).clone()));
let ongoing_codegen = self.ongoing_codegen()?.take();
Ok(Linker {
@@ -320,7 +319,7 @@ pub struct Linker {
// compilation outputs
dep_graph: DepGraph,
- prepare_outputs: OutputFilenames,
+ prepare_outputs: Arc<OutputFilenames>,
crate_hash: Svh,
ongoing_codegen: Box<dyn Any>,
}
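The simplified `Query::compute` above is plain `std` code. As a rough standalone model (the field and error types are invented for the sketch), the same `get_or_insert_with` pattern memoizes a fallible computation so the closure runs at most once:

```rust
use std::cell::RefCell;

struct Query<T> {
    result: RefCell<Option<Result<T, String>>>,
}

impl<T: Clone> Query<T> {
    fn new() -> Self {
        Query { result: RefCell::new(None) }
    }

    fn compute<F: FnOnce() -> Result<T, String>>(&self, f: F) -> Result<T, String> {
        // get_or_insert_with fills the Option only if it is still None.
        self.result.borrow_mut().get_or_insert_with(f).clone()
    }
}

fn main() {
    let q = Query::new();
    let mut runs = 0;
    let a = q.compute(|| {
        runs += 1;
        Ok::<i32, String>(42)
    });
    let b = q.compute(|| {
        runs += 1;
        Ok(0) // never executed: the result is already cached
    });
    assert_eq!((a, b, runs), (Ok(42), Ok(42), 1));
}
```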
diff --git a/compiler/rustc_interface/src/tests.rs b/compiler/rustc_interface/src/tests.rs
index eb8e65a6d..2b8f6557c 100644
--- a/compiler/rustc_interface/src/tests.rs
+++ b/compiler/rustc_interface/src/tests.rs
@@ -666,7 +666,6 @@ fn test_unstable_options_tracking_hash() {
untracked!(mir_pretty_relative_line_numbers, true);
untracked!(nll_facts, true);
untracked!(no_analysis, true);
- untracked!(no_interleave_lints, true);
untracked!(no_leak_check, true);
untracked!(no_parallel_llvm, true);
untracked!(parse_only, true);
@@ -690,6 +689,7 @@ fn test_unstable_options_tracking_hash() {
untracked!(time_llvm_passes, true);
untracked!(time_passes, true);
untracked!(trace_macros, true);
+ untracked!(track_diagnostics, true);
untracked!(trim_diagnostic_paths, false);
untracked!(ui_testing, true);
untracked!(unpretty, Some("expanded".to_string()));
@@ -747,6 +747,7 @@ fn test_unstable_options_tracking_hash() {
tracked!(link_only, true);
tracked!(llvm_plugins, vec![String::from("plugin_name")]);
tracked!(location_detail, LocationDetail { file: true, line: false, column: false });
+ tracked!(maximal_hir_to_mir_coverage, true);
tracked!(merge_functions, Some(MergeFunctions::Disabled));
tracked!(mir_emit_retag, true);
tracked!(mir_enable_passes, vec![("DestProp".to_string(), false)]);
diff --git a/compiler/rustc_interface/src/util.rs b/compiler/rustc_interface/src/util.rs
index 519b8a7fc..4142964a0 100644
--- a/compiler/rustc_interface/src/util.rs
+++ b/compiler/rustc_interface/src/util.rs
@@ -9,6 +9,7 @@ use rustc_session as session;
use rustc_session::config::CheckCfg;
use rustc_session::config::{self, CrateType};
use rustc_session::config::{ErrorOutputType, Input, OutputFilenames};
+use rustc_session::filesearch::sysroot_candidates;
use rustc_session::lint::{self, BuiltinLintDiagnostics, LintBuffer};
use rustc_session::parse::CrateConfig;
use rustc_session::{early_error, filesearch, output, Session};
@@ -67,10 +68,7 @@ pub fn create_session(
let codegen_backend = if let Some(make_codegen_backend) = make_codegen_backend {
make_codegen_backend(&sopts)
} else {
- get_codegen_backend(
- &sopts.maybe_sysroot,
- sopts.unstable_opts.codegen_backend.as_ref().map(|name| &name[..]),
- )
+ get_codegen_backend(&sopts.maybe_sysroot, sopts.unstable_opts.codegen_backend.as_deref())
};
// target_override is documented to be called before init(), so this is okay
@@ -78,7 +76,7 @@ pub fn create_session(
let bundle = match rustc_errors::fluent_bundle(
sopts.maybe_sysroot.clone(),
- sysroot_candidates(),
+ sysroot_candidates().to_vec(),
sopts.unstable_opts.translate_lang.clone(),
sopts.unstable_opts.translate_additional_ftl.as_deref(),
sopts.unstable_opts.translate_directionality_markers,
@@ -259,7 +257,7 @@ pub fn rustc_path<'a>() -> Option<&'a Path> {
const BIN_PATH: &str = env!("RUSTC_INSTALL_BINDIR");
- RUSTC_PATH.get_or_init(|| get_rustc_path_inner(BIN_PATH)).as_ref().map(|v| &**v)
+ RUSTC_PATH.get_or_init(|| get_rustc_path_inner(BIN_PATH)).as_deref()
}
fn get_rustc_path_inner(bin_path: &str) -> Option<PathBuf> {
@@ -273,100 +271,6 @@ fn get_rustc_path_inner(bin_path: &str) -> Option<PathBuf> {
})
}
-fn sysroot_candidates() -> Vec<PathBuf> {
- let target = session::config::host_triple();
- let mut sysroot_candidates = vec![filesearch::get_or_default_sysroot()];
- let path = current_dll_path().and_then(|s| s.canonicalize().ok());
- if let Some(dll) = path {
- // use `parent` twice to chop off the file name and then also the
- // directory containing the dll which should be either `lib` or `bin`.
- if let Some(path) = dll.parent().and_then(|p| p.parent()) {
- // The original `path` pointed at the `rustc_driver` crate's dll.
- // Now that dll should only be in one of two locations. The first is
- // in the compiler's libdir, for example `$sysroot/lib/*.dll`. The
- // other is the target's libdir, for example
- // `$sysroot/lib/rustlib/$target/lib/*.dll`.
- //
- // We don't know which, so let's assume that if our `path` above
- // ends in `$target` we *could* be in the target libdir, and always
- // assume that we may be in the main libdir.
- sysroot_candidates.push(path.to_owned());
-
- if path.ends_with(target) {
- sysroot_candidates.extend(
- path.parent() // chop off `$target`
- .and_then(|p| p.parent()) // chop off `rustlib`
- .and_then(|p| p.parent()) // chop off `lib`
- .map(|s| s.to_owned()),
- );
- }
- }
- }
-
- return sysroot_candidates;
-
- #[cfg(unix)]
- fn current_dll_path() -> Option<PathBuf> {
- use std::ffi::{CStr, OsStr};
- use std::os::unix::prelude::*;
-
- unsafe {
- let addr = current_dll_path as usize as *mut _;
- let mut info = mem::zeroed();
- if libc::dladdr(addr, &mut info) == 0 {
- info!("dladdr failed");
- return None;
- }
- if info.dli_fname.is_null() {
- info!("dladdr returned null pointer");
- return None;
- }
- let bytes = CStr::from_ptr(info.dli_fname).to_bytes();
- let os = OsStr::from_bytes(bytes);
- Some(PathBuf::from(os))
- }
- }
-
- #[cfg(windows)]
- fn current_dll_path() -> Option<PathBuf> {
- use std::ffi::OsString;
- use std::io;
- use std::os::windows::prelude::*;
- use std::ptr;
-
- use winapi::um::libloaderapi::{
- GetModuleFileNameW, GetModuleHandleExW, GET_MODULE_HANDLE_EX_FLAG_FROM_ADDRESS,
- };
-
- unsafe {
- let mut module = ptr::null_mut();
- let r = GetModuleHandleExW(
- GET_MODULE_HANDLE_EX_FLAG_FROM_ADDRESS,
- current_dll_path as usize as *mut _,
- &mut module,
- );
- if r == 0 {
- info!("GetModuleHandleExW failed: {}", io::Error::last_os_error());
- return None;
- }
- let mut space = Vec::with_capacity(1024);
- let r = GetModuleFileNameW(module, space.as_mut_ptr(), space.capacity() as u32);
- if r == 0 {
- info!("GetModuleFileNameW failed: {}", io::Error::last_os_error());
- return None;
- }
- let r = r as usize;
- if r >= space.capacity() {
- info!("our buffer was too small? {}", io::Error::last_os_error());
- return None;
- }
- space.set_len(r);
- let os = OsString::from_wide(&space);
- Some(PathBuf::from(os))
- }
- }
-}
-
fn get_codegen_sysroot(maybe_sysroot: &Option<PathBuf>, backend_name: &str) -> MakeBackendFn {
// For now we only allow this function to be called once as it'll dlopen a
// few things, which seems to work best if we only do that once. In
@@ -420,7 +324,7 @@ fn get_codegen_sysroot(maybe_sysroot: &Option<PathBuf>, backend_name: &str) -> M
let mut file: Option<PathBuf> = None;
let expected_names = &[
- format!("rustc_codegen_{}-{}", backend_name, release_str().expect("CFG_RELEASE")),
+ format!("rustc_codegen_{}-{}", backend_name, env!("CFG_RELEASE")),
format!("rustc_codegen_{}", backend_name),
];
for entry in d.filter_map(|e| e.ok()) {
@@ -647,22 +551,12 @@ pub fn build_output_filenames(
}
}
-/// Returns a version string such as "1.46.0 (04488afe3 2020-08-24)"
-pub fn version_str() -> Option<&'static str> {
+/// Returns a version string such as "1.46.0 (04488afe3 2020-08-24)" when invoked by an in-tree tool.
+pub macro version_str() {
option_env!("CFG_VERSION")
}
-/// Returns a version string such as "0.12.0-dev".
-pub fn release_str() -> Option<&'static str> {
- option_env!("CFG_RELEASE")
-}
-
-/// Returns the full SHA1 hash of HEAD of the Git repo from which rustc was built.
-pub fn commit_hash_str() -> Option<&'static str> {
- option_env!("CFG_VER_HASH")
-}
-
-/// Returns the "commit date" of HEAD of the Git repo from which rustc was built as a static string.
-pub fn commit_date_str() -> Option<&'static str> {
- option_env!("CFG_VER_DATE")
+/// Returns the version string for `rustc` itself (which may be different from a tool version).
+pub fn rustc_version_str() -> Option<&'static str> {
+ version_str!()
}
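A sketch of why `version_str` became a macro, written with stable `macro_rules!` rather than the nightly `decl_macro` form used in the patch: `option_env!` expands in the crate that *invokes* the macro, so an in-tree tool reads the `CFG_VERSION` set for its own build, whereas a plain `fn` would bake in whatever value was present when this crate itself was compiled.

```rust
macro_rules! version_str {
    () => {
        // Expands at the call site, so the invoking crate's build-time
        // environment is what gets captured.
        option_env!("CFG_VERSION")
    };
}

fn main() {
    // Prints None unless CFG_VERSION was set when compiling the invoking crate.
    println!("version: {:?}", version_str!());
}
```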
diff --git a/compiler/rustc_lexer/Cargo.toml b/compiler/rustc_lexer/Cargo.toml
index ad685c2ad..23294dc2e 100644
--- a/compiler/rustc_lexer/Cargo.toml
+++ b/compiler/rustc_lexer/Cargo.toml
@@ -19,4 +19,4 @@ unicode-xid = "0.2.0"
unic-emoji-char = "0.9.0"
[dev-dependencies]
-expect-test = "1.0"
+expect-test = "1.4.0"
diff --git a/compiler/rustc_lexer/src/lib.rs b/compiler/rustc_lexer/src/lib.rs
index 51515976e..3fbabbc63 100644
--- a/compiler/rustc_lexer/src/lib.rs
+++ b/compiler/rustc_lexer/src/lib.rs
@@ -88,7 +88,9 @@ pub enum TokenKind {
/// tokens.
UnknownPrefix,
- /// Examples: `"12_u8"`, `"1.0e-40"`, `b"123`.
+ /// Examples: `12u8`, `1.0e-40`, `b"123"`. Note that `_` is an invalid
+ /// suffix, but may be present here on string and float literals. Users of
+ /// this type will need to check for and reject that case.
///
/// See [LiteralKind] for more details.
Literal { kind: LiteralKind, suffix_start: u32 },
@@ -165,11 +167,15 @@ pub enum DocStyle {
Inner,
}
+// Note that the suffix is *not* considered when deciding the `LiteralKind` in
+// this type. This means that float literals like `1f32` are classified by this
+// type as `Int`. (Compare against `rustc_ast::token::LitKind` and
+// `rustc_ast::ast::LitKind`.)
#[derive(Clone, Copy, Debug, PartialEq, Eq, PartialOrd, Ord)]
pub enum LiteralKind {
- /// "12_u8", "0o100", "0b120i99"
+ /// "12_u8", "0o100", "0b120i99", "1f32".
Int { base: Base, empty_int: bool },
- /// "12.34f32", "0b100.100"
+ /// "12.34f32", "1e3", but not "1f32".
Float { base: Base, empty_exponent: bool },
/// "'a'", "'\\'", "'''", "';"
Char { terminated: bool },
@@ -203,13 +209,13 @@ pub enum RawStrError {
#[derive(Clone, Copy, Debug, PartialEq, Eq, PartialOrd, Ord)]
pub enum Base {
/// Literal starts with "0b".
- Binary,
+ Binary = 2,
/// Literal starts with "0o".
- Octal,
- /// Literal starts with "0x".
- Hexadecimal,
+ Octal = 8,
/// Literal doesn't contain a prefix.
- Decimal,
+ Decimal = 10,
+ /// Literal starts with "0x".
+ Hexadecimal = 16,
}
/// `rustc` allows files to have a shebang, e.g. "#!/usr/bin/rustrun",
@@ -840,12 +846,13 @@ impl Cursor<'_> {
self.eat_decimal_digits()
}
- // Eats the suffix of the literal, e.g. "_u8".
+ // Eats the suffix of the literal, e.g. "u8".
fn eat_literal_suffix(&mut self) {
self.eat_identifier();
}
- // Eats the identifier.
+ // Eats the identifier. Note: succeeds on `_`, which isn't a valid
+ // identifier.
fn eat_identifier(&mut self) {
if !is_id_start(self.first()) {
return;
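The explicit discriminants added to `Base` above let callers pass the base straight to radix-aware parsing. A self-contained sketch (the enum below is a local copy for illustration, not the `rustc_lexer` type):

```rust
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
enum Base {
    Binary = 2,
    Octal = 8,
    Decimal = 10,
    Hexadecimal = 16,
}

fn parse_int(digits: &str, base: Base) -> Result<u64, std::num::ParseIntError> {
    // The discriminant doubles as the radix.
    u64::from_str_radix(digits, base as u32)
}

fn main() {
    assert_eq!(parse_int("ff", Base::Hexadecimal), Ok(255));
    assert_eq!(parse_int("100", Base::Binary), Ok(4));
}
```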
diff --git a/compiler/rustc_lexer/src/unescape.rs b/compiler/rustc_lexer/src/unescape.rs
index 8f64b5f51..e405013dc 100644
--- a/compiler/rustc_lexer/src/unescape.rs
+++ b/compiler/rustc_lexer/src/unescape.rs
@@ -52,10 +52,8 @@ pub enum EscapeError {
/// Unicode escape code in byte literal.
UnicodeEscapeInByte,
- /// Non-ascii character in byte literal.
+ /// Non-ascii character in byte literal, byte string literal, or raw byte string literal.
NonAsciiCharInByte,
- /// Non-ascii character in byte string literal.
- NonAsciiCharInByteString,
/// After a line ending with '\', the next line contains whitespace
/// characters that are not skipped.
@@ -78,54 +76,33 @@ impl EscapeError {
/// Takes a contents of a literal (without quotes) and produces a
/// sequence of escaped characters or errors.
/// Values are returned through invoking of the provided callback.
-pub fn unescape_literal<F>(literal_text: &str, mode: Mode, callback: &mut F)
+pub fn unescape_literal<F>(src: &str, mode: Mode, callback: &mut F)
where
F: FnMut(Range<usize>, Result<char, EscapeError>),
{
match mode {
Mode::Char | Mode::Byte => {
- let mut chars = literal_text.chars();
- let result = unescape_char_or_byte(&mut chars, mode);
- // The Chars iterator moved forward.
- callback(0..(literal_text.len() - chars.as_str().len()), result);
+ let mut chars = src.chars();
+ let res = unescape_char_or_byte(&mut chars, mode == Mode::Byte);
+ callback(0..(src.len() - chars.as_str().len()), res);
}
- Mode::Str | Mode::ByteStr => unescape_str_or_byte_str(literal_text, mode, callback),
- // NOTE: Raw strings do not perform any explicit character escaping, here we
- // only translate CRLF to LF and produce errors on bare CR.
+ Mode::Str | Mode::ByteStr => unescape_str_or_byte_str(src, mode == Mode::ByteStr, callback),
Mode::RawStr | Mode::RawByteStr => {
- unescape_raw_str_or_raw_byte_str(literal_text, mode, callback)
+ unescape_raw_str_or_raw_byte_str(src, mode == Mode::RawByteStr, callback)
}
}
}
-/// Takes a contents of a byte, byte string or raw byte string (without quotes)
-/// and produces a sequence of bytes or errors.
-/// Values are returned through invoking of the provided callback.
-pub fn unescape_byte_literal<F>(literal_text: &str, mode: Mode, callback: &mut F)
-where
- F: FnMut(Range<usize>, Result<u8, EscapeError>),
-{
- debug_assert!(mode.is_bytes());
- unescape_literal(literal_text, mode, &mut |range, result| {
- callback(range, result.map(byte_from_char));
- })
-}
-
/// Takes a contents of a char literal (without quotes), and returns an
-/// unescaped char or an error
-pub fn unescape_char(literal_text: &str) -> Result<char, (usize, EscapeError)> {
- let mut chars = literal_text.chars();
- unescape_char_or_byte(&mut chars, Mode::Char)
- .map_err(|err| (literal_text.len() - chars.as_str().len(), err))
+/// unescaped char or an error.
+pub fn unescape_char(src: &str) -> Result<char, EscapeError> {
+ unescape_char_or_byte(&mut src.chars(), false)
}
/// Takes a contents of a byte literal (without quotes), and returns an
/// unescaped byte or an error.
-pub fn unescape_byte(literal_text: &str) -> Result<u8, (usize, EscapeError)> {
- let mut chars = literal_text.chars();
- unescape_char_or_byte(&mut chars, Mode::Byte)
- .map(byte_from_char)
- .map_err(|err| (literal_text.len() - chars.as_str().len(), err))
+pub fn unescape_byte(src: &str) -> Result<u8, EscapeError> {
+ unescape_char_or_byte(&mut src.chars(), true).map(byte_from_char)
}
/// What kind of literal do we parse.
@@ -147,7 +124,7 @@ impl Mode {
}
}
- pub fn is_bytes(self) -> bool {
+ pub fn is_byte(self) -> bool {
match self {
Mode::Byte | Mode::ByteStr | Mode::RawByteStr => true,
Mode::Char | Mode::Str | Mode::RawStr => false,
@@ -155,12 +132,9 @@ impl Mode {
}
}
-fn scan_escape(chars: &mut Chars<'_>, mode: Mode) -> Result<char, EscapeError> {
+fn scan_escape(chars: &mut Chars<'_>, is_byte: bool) -> Result<char, EscapeError> {
// Previous character was '\\', unescape what follows.
-
- let second_char = chars.next().ok_or(EscapeError::LoneSlash)?;
-
- let res = match second_char {
+ let res = match chars.next().ok_or(EscapeError::LoneSlash)? {
'"' => '"',
'n' => '\n',
'r' => '\r',
@@ -181,7 +155,7 @@ fn scan_escape(chars: &mut Chars<'_>, mode: Mode) -> Result<char, EscapeError> {
let value = hi * 16 + lo;
// For a non-byte literal verify that it is within ASCII range.
- if !mode.is_bytes() && !is_ascii(value) {
+ if !is_byte && !is_ascii(value) {
return Err(EscapeError::OutOfRangeHexEscape);
}
let value = value as u8;
@@ -217,7 +191,7 @@ fn scan_escape(chars: &mut Chars<'_>, mode: Mode) -> Result<char, EscapeError> {
// Incorrect syntax has higher priority for error reporting
// than unallowed value for a literal.
- if mode.is_bytes() {
+ if is_byte {
return Err(EscapeError::UnicodeEscapeInByte);
}
@@ -249,23 +223,22 @@ fn scan_escape(chars: &mut Chars<'_>, mode: Mode) -> Result<char, EscapeError> {
}
#[inline]
-fn ascii_check(first_char: char, mode: Mode) -> Result<char, EscapeError> {
- if mode.is_bytes() && !first_char.is_ascii() {
+fn ascii_check(c: char, is_byte: bool) -> Result<char, EscapeError> {
+ if is_byte && !c.is_ascii() {
// Byte literal can't be a non-ascii character.
Err(EscapeError::NonAsciiCharInByte)
} else {
- Ok(first_char)
+ Ok(c)
}
}
-fn unescape_char_or_byte(chars: &mut Chars<'_>, mode: Mode) -> Result<char, EscapeError> {
- debug_assert!(mode == Mode::Char || mode == Mode::Byte);
- let first_char = chars.next().ok_or(EscapeError::ZeroChars)?;
- let res = match first_char {
- '\\' => scan_escape(chars, mode),
+fn unescape_char_or_byte(chars: &mut Chars<'_>, is_byte: bool) -> Result<char, EscapeError> {
+ let c = chars.next().ok_or(EscapeError::ZeroChars)?;
+ let res = match c {
+ '\\' => scan_escape(chars, is_byte),
'\n' | '\t' | '\'' => Err(EscapeError::EscapeOnlyChar),
'\r' => Err(EscapeError::BareCarriageReturn),
- _ => ascii_check(first_char, mode),
+ _ => ascii_check(c, is_byte),
}?;
if chars.next().is_some() {
return Err(EscapeError::MoreThanOneChar);
@@ -275,20 +248,20 @@ fn unescape_char_or_byte(chars: &mut Chars<'_>, mode: Mode) -> Result<char, Esca
/// Takes a contents of a string literal (without quotes) and produces a
/// sequence of escaped characters or errors.
-fn unescape_str_or_byte_str<F>(src: &str, mode: Mode, callback: &mut F)
+fn unescape_str_or_byte_str<F>(src: &str, is_byte: bool, callback: &mut F)
where
F: FnMut(Range<usize>, Result<char, EscapeError>),
{
- debug_assert!(mode == Mode::Str || mode == Mode::ByteStr);
- let initial_len = src.len();
let mut chars = src.chars();
- while let Some(first_char) = chars.next() {
- let start = initial_len - chars.as_str().len() - first_char.len_utf8();
- let unescaped_char = match first_char {
+ // The `start` and `end` computation here is complicated because
+ // `skip_ascii_whitespace` makes us skip over chars without counting
+ // them in the range computation.
+ while let Some(c) = chars.next() {
+ let start = src.len() - chars.as_str().len() - c.len_utf8();
+ let res = match c {
'\\' => {
- let second_char = chars.clone().next();
- match second_char {
+ match chars.clone().next() {
Some('\n') => {
// Rust language specification requires us to skip whitespaces
// if unescaped '\' character is followed by '\n'.
@@ -297,17 +270,17 @@ where
skip_ascii_whitespace(&mut chars, start, callback);
continue;
}
- _ => scan_escape(&mut chars, mode),
+ _ => scan_escape(&mut chars, is_byte),
}
}
'\n' => Ok('\n'),
'\t' => Ok('\t'),
'"' => Err(EscapeError::EscapeOnlyChar),
'\r' => Err(EscapeError::BareCarriageReturn),
- _ => ascii_check(first_char, mode),
+ _ => ascii_check(c, is_byte),
};
- let end = initial_len - chars.as_str().len();
- callback(start..end, unescaped_char);
+ let end = src.len() - chars.as_str().len();
+ callback(start..end, res);
}
fn skip_ascii_whitespace<F>(chars: &mut Chars<'_>, start: usize, callback: &mut F)
@@ -340,30 +313,29 @@ where
/// Takes a contents of a string literal (without quotes) and produces a
/// sequence of characters or errors.
/// NOTE: Raw strings do not perform any explicit character escaping, here we
-/// only translate CRLF to LF and produce errors on bare CR.
-fn unescape_raw_str_or_raw_byte_str<F>(literal_text: &str, mode: Mode, callback: &mut F)
+/// only produce errors on bare CR.
+fn unescape_raw_str_or_raw_byte_str<F>(src: &str, is_byte: bool, callback: &mut F)
where
F: FnMut(Range<usize>, Result<char, EscapeError>),
{
- debug_assert!(mode == Mode::RawStr || mode == Mode::RawByteStr);
- let initial_len = literal_text.len();
-
- let mut chars = literal_text.chars();
- while let Some(curr) = chars.next() {
- let start = initial_len - chars.as_str().len() - curr.len_utf8();
+ let mut chars = src.chars();
- let result = match curr {
+ // The `start` and `end` computation here matches the one in
+ // `unescape_str_or_byte_str` for consistency, even though this function
+ // doesn't have to worry about skipping any chars.
+ while let Some(c) = chars.next() {
+ let start = src.len() - chars.as_str().len() - c.len_utf8();
+ let res = match c {
'\r' => Err(EscapeError::BareCarriageReturnInRawString),
- c if mode.is_bytes() && !c.is_ascii() => Err(EscapeError::NonAsciiCharInByteString),
- c => Ok(c),
+ _ => ascii_check(c, is_byte),
};
- let end = initial_len - chars.as_str().len();
-
- callback(start..end, result);
+ let end = src.len() - chars.as_str().len();
+ callback(start..end, res);
}
}
-fn byte_from_char(c: char) -> u8 {
+#[inline]
+pub fn byte_from_char(c: char) -> u8 {
let res = c as u32;
debug_assert!(res <= u8::MAX as u32, "guaranteed because of Mode::ByteStr");
res as u8
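
Under the revised API above, byte-string contents are recovered by driving `unescape_literal` with `Mode::ByteStr` and mapping each unescaped char through the now-public `byte_from_char`, since `unescape_byte_literal` no longer exists. A minimal sketch of such a caller, mirroring the updated test further below (the `rustc_lexer::unescape` import path and the helper name are illustrative assumptions, not part of this diff):

    use std::ops::Range;
    use rustc_lexer::unescape::{byte_from_char, unescape_literal, EscapeError, Mode};

    // Collect the bytes of a byte-string literal (contents without quotes),
    // stopping at the first escape error and reporting its byte range.
    fn byte_str_contents(src: &str) -> Result<Vec<u8>, (Range<usize>, EscapeError)> {
        let mut buf = Ok(Vec::with_capacity(src.len()));
        unescape_literal(src, Mode::ByteStr, &mut |range, res| {
            if let Ok(bytes) = &mut buf {
                match res {
                    // For Mode::ByteStr every successfully unescaped char fits in
                    // a byte, so byte_from_char cannot lose information here.
                    Ok(c) => bytes.push(byte_from_char(c)),
                    Err(e) => buf = Err((range, e)),
                }
            }
        });
        buf
    }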
diff --git a/compiler/rustc_lexer/src/unescape/tests.rs b/compiler/rustc_lexer/src/unescape/tests.rs
index fa61554af..1c25b03fd 100644
--- a/compiler/rustc_lexer/src/unescape/tests.rs
+++ b/compiler/rustc_lexer/src/unescape/tests.rs
@@ -3,8 +3,7 @@ use super::*;
#[test]
fn test_unescape_char_bad() {
fn check(literal_text: &str, expected_error: EscapeError) {
- let actual_result = unescape_char(literal_text).map_err(|(_offset, err)| err);
- assert_eq!(actual_result, Err(expected_error));
+ assert_eq!(unescape_char(literal_text), Err(expected_error));
}
check("", EscapeError::ZeroChars);
@@ -68,8 +67,7 @@ fn test_unescape_char_bad() {
#[test]
fn test_unescape_char_good() {
fn check(literal_text: &str, expected_char: char) {
- let actual_result = unescape_char(literal_text);
- assert_eq!(actual_result, Ok(expected_char));
+ assert_eq!(unescape_char(literal_text), Ok(expected_char));
}
check("a", 'a');
@@ -134,8 +132,7 @@ fn test_unescape_str_good() {
}
}
});
- let buf = buf.as_ref().map(|it| it.as_ref());
- assert_eq!(buf, Ok(expected))
+ assert_eq!(buf.as_deref(), Ok(expected))
}
check("foo", "foo");
@@ -149,8 +146,7 @@ fn test_unescape_str_good() {
#[test]
fn test_unescape_byte_bad() {
fn check(literal_text: &str, expected_error: EscapeError) {
- let actual_result = unescape_byte(literal_text).map_err(|(_offset, err)| err);
- assert_eq!(actual_result, Err(expected_error));
+ assert_eq!(unescape_byte(literal_text), Err(expected_error));
}
check("", EscapeError::ZeroChars);
@@ -219,8 +215,7 @@ fn test_unescape_byte_bad() {
#[test]
fn test_unescape_byte_good() {
fn check(literal_text: &str, expected_byte: u8) {
- let actual_result = unescape_byte(literal_text);
- assert_eq!(actual_result, Ok(expected_byte));
+ assert_eq!(unescape_byte(literal_text), Ok(expected_byte));
}
check("a", b'a');
@@ -246,16 +241,15 @@ fn test_unescape_byte_good() {
fn test_unescape_byte_str_good() {
fn check(literal_text: &str, expected: &[u8]) {
let mut buf = Ok(Vec::with_capacity(literal_text.len()));
- unescape_byte_literal(literal_text, Mode::ByteStr, &mut |range, c| {
+ unescape_literal(literal_text, Mode::ByteStr, &mut |range, c| {
if let Ok(b) = &mut buf {
match c {
- Ok(c) => b.push(c),
+ Ok(c) => b.push(byte_from_char(c)),
Err(e) => buf = Err((range, e)),
}
}
});
- let buf = buf.as_ref().map(|it| it.as_ref());
- assert_eq!(buf, Ok(expected))
+ assert_eq!(buf.as_deref(), Ok(expected))
}
check("foo", b"foo");
@@ -280,18 +274,13 @@ fn test_unescape_raw_str() {
#[test]
fn test_unescape_raw_byte_str() {
- fn check(literal: &str, expected: &[(Range<usize>, Result<u8, EscapeError>)]) {
+ fn check(literal: &str, expected: &[(Range<usize>, Result<char, EscapeError>)]) {
let mut unescaped = Vec::with_capacity(literal.len());
- unescape_byte_literal(literal, Mode::RawByteStr, &mut |range, res| {
- unescaped.push((range, res))
- });
+ unescape_literal(literal, Mode::RawByteStr, &mut |range, res| unescaped.push((range, res)));
assert_eq!(unescaped, expected);
}
check("\r", &[(0..1, Err(EscapeError::BareCarriageReturnInRawString))]);
- check("🦀", &[(0..4, Err(EscapeError::NonAsciiCharInByteString))]);
- check(
- "🦀a",
- &[(0..4, Err(EscapeError::NonAsciiCharInByteString)), (4..5, Ok(byte_from_char('a')))],
- );
+ check("🦀", &[(0..4, Err(EscapeError::NonAsciiCharInByte))]);
+ check("🦀a", &[(0..4, Err(EscapeError::NonAsciiCharInByte)), (4..5, Ok('a'))]);
}
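
With the offset dropped from the error type, `unescape_char` and `unescape_byte` now return `EscapeError` directly, so callers stop destructuring an `(offset, error)` pair. A small illustrative caller under that assumption (`EscapeError` derives `Debug`, as the assertions above rely on):

    use rustc_lexer::unescape::{unescape_byte, unescape_char};

    // Both arguments are literal contents without the surrounding quotes.
    fn describe_literals(char_src: &str, byte_src: &str) {
        match unescape_char(char_src) {
            Ok(c) => println!("char literal: {c:?}"),
            Err(e) => eprintln!("invalid char literal: {e:?}"),
        }
        match unescape_byte(byte_src) {
            Ok(b) => println!("byte literal: {b:#04x}"),
            Err(e) => eprintln!("invalid byte literal: {e:?}"),
        }
    }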
diff --git a/compiler/rustc_lint/src/builtin.rs b/compiler/rustc_lint/src/builtin.rs
index d425adf47..c6c7caa7c 100644
--- a/compiler/rustc_lint/src/builtin.rs
+++ b/compiler/rustc_lint/src/builtin.rs
@@ -25,6 +25,7 @@ use crate::{
types::{transparent_newtype_field, CItemKind},
EarlyContext, EarlyLintPass, LateContext, LateLintPass, LintContext,
};
+use hir::IsAsync;
use rustc_ast::attr;
use rustc_ast::tokenstream::{TokenStream, TokenTree};
use rustc_ast::visit::{FnCtxt, FnKind};
@@ -40,7 +41,10 @@ use rustc_feature::{deprecated_attributes, AttributeGate, BuiltinAttribute, Gate
use rustc_hir as hir;
use rustc_hir::def::{DefKind, Res};
use rustc_hir::def_id::{DefId, LocalDefId, LocalDefIdSet, CRATE_DEF_ID};
-use rustc_hir::{ForeignItemKind, GenericParamKind, HirId, PatKind, PredicateOrigin};
+use rustc_hir::intravisit::FnKind as HirFnKind;
+use rustc_hir::{
+ Body, FnDecl, ForeignItemKind, GenericParamKind, HirId, Node, PatKind, PredicateOrigin,
+};
use rustc_index::vec::Idx;
use rustc_middle::lint::in_external_macro;
use rustc_middle::ty::layout::{LayoutError, LayoutOf};
@@ -52,7 +56,7 @@ use rustc_span::edition::Edition;
use rustc_span::source_map::Spanned;
use rustc_span::symbol::{kw, sym, Ident, Symbol};
use rustc_span::{BytePos, InnerSpan, Span};
-use rustc_target::abi::VariantIdx;
+use rustc_target::abi::{Abi, VariantIdx};
use rustc_trait_selection::traits::{self, misc::can_type_implement_copy};
use crate::nonstandard_style::{method_context, MethodLateContext};
@@ -98,9 +102,10 @@ fn pierce_parens(mut expr: &ast::Expr) -> &ast::Expr {
impl EarlyLintPass for WhileTrue {
fn check_expr(&mut self, cx: &EarlyContext<'_>, e: &ast::Expr) {
if let ast::ExprKind::While(cond, _, label) = &e.kind
- && let ast::ExprKind::Lit(ref lit) = pierce_parens(cond).kind
- && let ast::LitKind::Bool(true) = lit.kind
- && !lit.span.from_expansion()
+ && let cond = pierce_parens(cond)
+ && let ast::ExprKind::Lit(token_lit) = cond.kind
+ && let token::Lit { kind: token::Bool, symbol: kw::True, .. } = token_lit
+ && !cond.span.from_expansion()
{
let condition_span = e.span.with_hi(cond.span.hi());
cx.struct_span_lint(
@@ -185,9 +190,8 @@ impl<'tcx> LateLintPass<'tcx> for BoxPointers {
// If it's a struct, we also have to check the fields' types
match it.kind {
hir::ItemKind::Struct(ref struct_def, _) | hir::ItemKind::Union(ref struct_def, _) => {
- for struct_field in struct_def.fields() {
- let def_id = cx.tcx.hir().local_def_id(struct_field.hir_id);
- self.check_heap_type(cx, struct_field.span, cx.tcx.type_of(def_id));
+ for field in struct_def.fields() {
+ self.check_heap_type(cx, field.span, cx.tcx.type_of(field.def_id));
}
}
_ => (),
@@ -259,7 +263,7 @@ impl<'tcx> LateLintPass<'tcx> for NonShorthandFieldPatterns {
}
if let PatKind::Binding(binding_annot, _, ident, None) = fieldpat.pat.kind {
if cx.tcx.find_field_index(ident, &variant)
- == Some(cx.tcx.field_index(fieldpat.hir_id, cx.typeck_results()))
+ == Some(cx.typeck_results().field_index(fieldpat.hir_id))
{
cx.struct_span_lint(
NON_SHORTHAND_FIELD_PATTERNS,
@@ -673,13 +677,12 @@ impl<'tcx> LateLintPass<'tcx> for MissingDoc {
fn check_field_def(&mut self, cx: &LateContext<'_>, sf: &hir::FieldDef<'_>) {
if !sf.is_positional() {
- let def_id = cx.tcx.hir().local_def_id(sf.hir_id);
- self.check_missing_docs_attrs(cx, def_id, "a", "struct field")
+ self.check_missing_docs_attrs(cx, sf.def_id, "a", "struct field")
}
}
fn check_variant(&mut self, cx: &LateContext<'_>, v: &hir::Variant<'_>) {
- self.check_missing_docs_attrs(cx, cx.tcx.hir().local_def_id(v.id), "a", "variant");
+ self.check_missing_docs_attrs(cx, v.def_id, "a", "variant");
}
}
@@ -1269,10 +1272,10 @@ declare_lint_pass!(MutableTransmutes => [MUTABLE_TRANSMUTES]);
impl<'tcx> LateLintPass<'tcx> for MutableTransmutes {
fn check_expr(&mut self, cx: &LateContext<'_>, expr: &hir::Expr<'_>) {
- if let Some((&ty::Ref(_, _, from_mt), &ty::Ref(_, _, to_mt))) =
+ if let Some((&ty::Ref(_, _, from_mutbl), &ty::Ref(_, _, to_mutbl))) =
get_transmute_from_to(cx, expr).map(|(ty1, ty2)| (ty1.kind(), ty2.kind()))
{
- if to_mt == hir::Mutability::Mut && from_mt == hir::Mutability::Not {
+ if from_mutbl < to_mutbl {
cx.struct_span_lint(
MUTABLE_TRANSMUTES,
expr.span,
@@ -1339,6 +1342,72 @@ impl<'tcx> LateLintPass<'tcx> for UnstableFeatures {
}
declare_lint! {
+ /// The `ungated_async_fn_track_caller` lint warns when the
+ /// `#[track_caller]` attribute is used on an async function, method, or
+ /// closure, without enabling the corresponding unstable feature flag.
+ ///
+ /// ### Example
+ ///
+ /// ```rust
+ /// #[track_caller]
+ /// async fn foo() {}
+ /// ```
+ ///
+ /// {{produces}}
+ ///
+ /// ### Explanation
+ ///
+ /// The attribute must be used in conjunction with the
+ /// [`closure_track_caller` feature flag]. Otherwise, the `#[track_caller]`
+ /// annotation will function as a no-op.
+ ///
+ /// [`closure_track_caller` feature flag]: https://doc.rust-lang.org/beta/unstable-book/language-features/closure-track-caller.html
+ UNGATED_ASYNC_FN_TRACK_CALLER,
+ Warn,
+ "enabling track_caller on an async fn is a no-op unless the closure_track_caller feature is enabled"
+}
+
+declare_lint_pass!(
+ /// Explains that the corresponding feature flag must be enabled for the
+ /// `#[track_caller]` attribute to do anything
+ UngatedAsyncFnTrackCaller => [UNGATED_ASYNC_FN_TRACK_CALLER]
+);
+
+impl<'tcx> LateLintPass<'tcx> for UngatedAsyncFnTrackCaller {
+ fn check_fn(
+ &mut self,
+ cx: &LateContext<'_>,
+ fn_kind: HirFnKind<'_>,
+ _: &'tcx FnDecl<'_>,
+ _: &'tcx Body<'_>,
+ span: Span,
+ hir_id: HirId,
+ ) {
+ if fn_kind.asyncness() == IsAsync::Async
+ && !cx.tcx.features().closure_track_caller
+ && let attrs = cx.tcx.hir().attrs(hir_id)
+ // Now, check if the function has the `#[track_caller]` attribute
+ && let Some(attr) = attrs.iter().find(|attr| attr.has_name(sym::track_caller))
+ {
+ cx.struct_span_lint(
+ UNGATED_ASYNC_FN_TRACK_CALLER,
+ attr.span,
+ fluent::lint_ungated_async_fn_track_caller,
+ |lint| {
+ lint.span_label(span, fluent::label);
+ rustc_session::parse::add_feature_diagnostics(
+ lint,
+ &cx.tcx.sess.parse_sess,
+ sym::closure_track_caller,
+ );
+ lint
+ },
+ );
+ }
+ }
+}
+
+declare_lint! {
/// The `unreachable_pub` lint triggers for `pub` items not reachable from
/// the crate root.
///
@@ -1423,8 +1492,11 @@ impl<'tcx> LateLintPass<'tcx> for UnreachablePub {
}
fn check_field_def(&mut self, cx: &LateContext<'_>, field: &hir::FieldDef<'_>) {
- let def_id = cx.tcx.hir().local_def_id(field.hir_id);
- self.perform_lint(cx, "field", def_id, field.vis_span, false);
+ let map = cx.tcx.hir();
+ if matches!(map.get(map.get_parent_node(field.hir_id)), Node::Variant(_)) {
+ return;
+ }
+ self.perform_lint(cx, "field", field.def_id, field.vis_span, false);
}
fn check_impl_item(&mut self, cx: &LateContext<'_>, impl_item: &hir::ImplItem<'_>) {
@@ -1636,19 +1708,20 @@ declare_lint_pass!(
impl<'tcx> LateLintPass<'tcx> for TrivialConstraints {
fn check_item(&mut self, cx: &LateContext<'tcx>, item: &'tcx hir::Item<'tcx>) {
use rustc_middle::ty::visit::TypeVisitable;
+ use rustc_middle::ty::Clause;
use rustc_middle::ty::PredicateKind::*;
if cx.tcx.features().trivial_bounds {
let predicates = cx.tcx.predicates_of(item.owner_id);
for &(predicate, span) in predicates.predicates {
let predicate_kind_name = match predicate.kind().skip_binder() {
- Trait(..) => "trait",
- TypeOutlives(..) |
- RegionOutlives(..) => "lifetime",
+ Clause(Clause::Trait(..)) => "trait",
+ Clause(Clause::TypeOutlives(..)) |
+ Clause(Clause::RegionOutlives(..)) => "lifetime",
// Ignore projections, as they can only be global
// if the trait bound is global
- Projection(..) |
+ Clause(Clause::Projection(..)) |
// Ignore bounds that a user can't type
WellFormed(..) |
ObjectSafe(..) |
@@ -1657,6 +1730,7 @@ impl<'tcx> LateLintPass<'tcx> for TrivialConstraints {
Coerce(..) |
ConstEvaluatable(..) |
ConstEquate(..) |
+ Ambiguous |
TypeWellFormedFromEnv(..) => continue,
};
if predicate.is_global() {
@@ -2028,10 +2102,10 @@ impl KeywordIdents {
impl EarlyLintPass for KeywordIdents {
fn check_mac_def(&mut self, cx: &EarlyContext<'_>, mac_def: &ast::MacroDef) {
- self.check_tokens(cx, mac_def.body.inner_tokens());
+ self.check_tokens(cx, mac_def.body.tokens.clone());
}
fn check_mac(&mut self, cx: &EarlyContext<'_>, mac: &ast::MacCall) {
- self.check_tokens(cx, mac.args.inner_tokens());
+ self.check_tokens(cx, mac.args.tokens.clone());
}
fn check_ident(&mut self, cx: &EarlyContext<'_>, ident: Ident) {
self.check_ident_token(cx, UnderMacro(false), ident);
@@ -2042,13 +2116,13 @@ declare_lint_pass!(ExplicitOutlivesRequirements => [EXPLICIT_OUTLIVES_REQUIREMEN
impl ExplicitOutlivesRequirements {
fn lifetimes_outliving_lifetime<'tcx>(
- inferred_outlives: &'tcx [(ty::Predicate<'tcx>, Span)],
+ inferred_outlives: &'tcx [(ty::Clause<'tcx>, Span)],
def_id: DefId,
) -> Vec<ty::Region<'tcx>> {
inferred_outlives
.iter()
- .filter_map(|(pred, _)| match pred.kind().skip_binder() {
- ty::PredicateKind::RegionOutlives(ty::OutlivesPredicate(a, b)) => match *a {
+ .filter_map(|(clause, _)| match *clause {
+ ty::Clause::RegionOutlives(ty::OutlivesPredicate(a, b)) => match *a {
ty::ReEarlyBound(ebr) if ebr.def_id == def_id => Some(b),
_ => None,
},
@@ -2058,13 +2132,13 @@ impl ExplicitOutlivesRequirements {
}
fn lifetimes_outliving_type<'tcx>(
- inferred_outlives: &'tcx [(ty::Predicate<'tcx>, Span)],
+ inferred_outlives: &'tcx [(ty::Clause<'tcx>, Span)],
index: u32,
) -> Vec<ty::Region<'tcx>> {
inferred_outlives
.iter()
- .filter_map(|(pred, _)| match pred.kind().skip_binder() {
- ty::PredicateKind::TypeOutlives(ty::OutlivesPredicate(a, b)) => {
+ .filter_map(|(clause, _)| match *clause {
+ ty::Clause::TypeOutlives(ty::OutlivesPredicate(a, b)) => {
a.is_param(index).then_some(b)
}
_ => None,
@@ -2405,8 +2479,34 @@ impl<'tcx> LateLintPass<'tcx> for InvalidValue {
}
/// Information about why a type cannot be initialized this way.
- /// Contains an error message and optionally a span to point at.
- type InitError = (String, Option<Span>);
+ struct InitError {
+ message: String,
+ /// Spans from struct fields and similar that can be obtained from just the type.
+ span: Option<Span>,
+ /// Used to report a trace through adts.
+ nested: Option<Box<InitError>>,
+ }
+ impl InitError {
+ fn spanned(self, span: Span) -> InitError {
+ Self { span: Some(span), ..self }
+ }
+
+ fn nested(self, nested: impl Into<Option<InitError>>) -> InitError {
+ assert!(self.nested.is_none());
+ Self { nested: nested.into().map(Box::new), ..self }
+ }
+ }
+
+ impl<'a> From<&'a str> for InitError {
+ fn from(s: &'a str) -> Self {
+ s.to_owned().into()
+ }
+ }
+ impl From<String> for InitError {
+ fn from(message: String) -> Self {
+ Self { message, span: None, nested: None }
+ }
+ }
/// Test if this constant is all-0.
fn is_zero(expr: &hir::Expr<'_>) -> bool {
@@ -2462,25 +2562,54 @@ impl<'tcx> LateLintPass<'tcx> for InvalidValue {
fn variant_find_init_error<'tcx>(
cx: &LateContext<'tcx>,
+ ty: Ty<'tcx>,
variant: &VariantDef,
substs: ty::SubstsRef<'tcx>,
descr: &str,
init: InitKind,
) -> Option<InitError> {
- variant.fields.iter().find_map(|field| {
- ty_find_init_error(cx, field.ty(cx.tcx, substs), init).map(|(mut msg, span)| {
- if span.is_none() {
- // Point to this field, should be helpful for figuring
- // out where the source of the error is.
- let span = cx.tcx.def_span(field.did);
- write!(&mut msg, " (in this {descr})").unwrap();
- (msg, Some(span))
+ let mut field_err = variant.fields.iter().find_map(|field| {
+ ty_find_init_error(cx, field.ty(cx.tcx, substs), init).map(|mut err| {
+ if !field.did.is_local() {
+ err
+ } else if err.span.is_none() {
+ err.span = Some(cx.tcx.def_span(field.did));
+ write!(&mut err.message, " (in this {descr})").unwrap();
+ err
} else {
- // Just forward.
- (msg, span)
+ InitError::from(format!("in this {descr}"))
+ .spanned(cx.tcx.def_span(field.did))
+ .nested(err)
}
})
- })
+ });
+
+ // Check if this ADT has a constrained layout (like `NonNull` and friends).
+ if let Ok(layout) = cx.tcx.layout_of(cx.param_env.and(ty)) {
+ if let Abi::Scalar(scalar) | Abi::ScalarPair(scalar, _) = &layout.abi {
+ let range = scalar.valid_range(cx);
+ let msg = if !range.contains(0) {
+ "must be non-null"
+ } else if init == InitKind::Uninit && !scalar.is_always_valid(cx) {
+ // Prefer reporting on the fields over the entire struct for uninit,
+ // as the information bubbles out and it may be unclear why the type can't
+ // be null from just its outside signature.
+
+ "must be initialized inside its custom valid range"
+ } else {
+ return field_err;
+ };
+ if let Some(field_err) = &mut field_err {
+ // Most of the time, if the field error is the same as the struct error,
+ // the struct error only happens because of the field error.
+ if field_err.message.contains(msg) {
+ field_err.message = format!("because {}", field_err.message);
+ }
+ }
+ return Some(InitError::from(format!("`{ty}` {msg}")).nested(field_err));
+ }
+ }
+ field_err
}
/// Return `Some` only if we are sure this type does *not*
@@ -2493,63 +2622,36 @@ impl<'tcx> LateLintPass<'tcx> for InvalidValue {
use rustc_type_ir::sty::TyKind::*;
match ty.kind() {
// Primitive types that don't like 0 as a value.
- Ref(..) => Some(("references must be non-null".to_string(), None)),
- Adt(..) if ty.is_box() => Some(("`Box` must be non-null".to_string(), None)),
- FnPtr(..) => Some(("function pointers must be non-null".to_string(), None)),
- Never => Some(("the `!` type has no valid value".to_string(), None)),
+ Ref(..) => Some("references must be non-null".into()),
+ Adt(..) if ty.is_box() => Some("`Box` must be non-null".into()),
+ FnPtr(..) => Some("function pointers must be non-null".into()),
+ Never => Some("the `!` type has no valid value".into()),
RawPtr(tm) if matches!(tm.ty.kind(), Dynamic(..)) =>
// raw ptr to dyn Trait
{
- Some(("the vtable of a wide raw pointer must be non-null".to_string(), None))
+ Some("the vtable of a wide raw pointer must be non-null".into())
}
// Primitive types with other constraints.
Bool if init == InitKind::Uninit => {
- Some(("booleans must be either `true` or `false`".to_string(), None))
+ Some("booleans must be either `true` or `false`".into())
}
Char if init == InitKind::Uninit => {
- Some(("characters must be a valid Unicode codepoint".to_string(), None))
+ Some("characters must be a valid Unicode codepoint".into())
}
Int(_) | Uint(_) if init == InitKind::Uninit => {
- Some(("integers must not be uninitialized".to_string(), None))
- }
- Float(_) if init == InitKind::Uninit => {
- Some(("floats must not be uninitialized".to_string(), None))
+ Some("integers must be initialized".into())
}
+ Float(_) if init == InitKind::Uninit => Some("floats must be initialized".into()),
RawPtr(_) if init == InitKind::Uninit => {
- Some(("raw pointers must not be uninitialized".to_string(), None))
+ Some("raw pointers must be initialized".into())
}
// Recurse and checks for some compound types. (but not unions)
Adt(adt_def, substs) if !adt_def.is_union() => {
- // First check if this ADT has a layout attribute (like `NonNull` and friends).
- use std::ops::Bound;
- match cx.tcx.layout_scalar_valid_range(adt_def.did()) {
- // We exploit here that `layout_scalar_valid_range` will never
- // return `Bound::Excluded`. (And we have tests checking that we
- // handle the attribute correctly.)
- // We don't add a span since users cannot declare such types anyway.
- (Bound::Included(lo), Bound::Included(hi)) if 0 < lo && lo < hi => {
- return Some((format!("`{}` must be non-null", ty), None));
- }
- (Bound::Included(lo), Bound::Unbounded) if 0 < lo => {
- return Some((format!("`{}` must be non-null", ty), None));
- }
- (Bound::Included(_), _) | (_, Bound::Included(_))
- if init == InitKind::Uninit =>
- {
- return Some((
- format!(
- "`{}` must be initialized inside its custom valid range",
- ty,
- ),
- None,
- ));
- }
- _ => {}
- }
// Handle structs.
if adt_def.is_struct() {
return variant_find_init_error(
cx,
+ ty,
adt_def.non_enum_variant(),
substs,
"struct field",
@@ -2573,13 +2675,14 @@ impl<'tcx> LateLintPass<'tcx> for InvalidValue {
Some((variant, definitely_inhabited))
});
let Some(first_variant) = potential_variants.next() else {
- return Some(("enums with no inhabited variants have no valid value".to_string(), Some(span)));
+ return Some(InitError::from("enums with no inhabited variants have no valid value").spanned(span));
};
// So we have at least one potentially inhabited variant. Might we have two?
let Some(second_variant) = potential_variants.next() else {
// There is only one potentially inhabited variant. So we can recursively check that variant!
return variant_find_init_error(
cx,
+ ty,
&first_variant.0,
substs,
"field of the only potentially inhabited enum variant",
@@ -2597,10 +2700,9 @@ impl<'tcx> LateLintPass<'tcx> for InvalidValue {
.filter(|(_variant, definitely_inhabited)| *definitely_inhabited)
.count();
if definitely_inhabited > 1 {
- return Some((
- "enums with multiple inhabited variants have to be initialized to a variant".to_string(),
- Some(span),
- ));
+ return Some(InitError::from(
+ "enums with multiple inhabited variants have to be initialized to a variant",
+ ).spanned(span));
}
}
// We couldn't find anything wrong here.
@@ -2629,8 +2731,7 @@ impl<'tcx> LateLintPass<'tcx> for InvalidValue {
// using zeroed or uninitialized memory.
// We are extremely conservative with what we warn about.
let conjured_ty = cx.typeck_results().expr_ty(expr);
- if let Some((msg, span)) =
- with_no_trimmed_paths!(ty_find_init_error(cx, conjured_ty, init))
+ if let Some(mut err) = with_no_trimmed_paths!(ty_find_init_error(cx, conjured_ty, init))
{
// FIXME(davidtwco): make translatable
cx.struct_span_lint(
@@ -2656,10 +2757,17 @@ impl<'tcx> LateLintPass<'tcx> for InvalidValue {
"help: use `MaybeUninit<T>` instead, \
and only call `assume_init` after initialization is done",
);
- if let Some(span) = span {
- lint.span_note(span, &msg);
- } else {
- lint.note(&msg);
+ loop {
+ if let Some(span) = err.span {
+ lint.span_note(span, &err.message);
+ } else {
+ lint.note(&err.message);
+ }
+ if let Some(e) = err.nested {
+ err = *e;
+ } else {
+ break;
+ }
}
lint
},
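
The nested `InitError` chain above lets the `invalid_value` lint report a trace from the conjured type down to the field that imposes the constraint, one note per link in the reporting loop shown in the last hunk. A hypothetical program that would exercise that path (exact diagnostic wording is not reproduced here):

    use std::num::NonZeroU32;

    #[allow(dead_code)]
    struct Wrapper {
        field: NonZeroU32,
    }

    fn main() {
        // Expected to warn under `invalid_value`: zero lies outside NonZeroU32's
        // valid range, so the lint notes that `Wrapper` must be non-null and then
        // traces through `field` via the nested InitError notes.
        let _w: Wrapper = unsafe { std::mem::zeroed() };
    }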
diff --git a/compiler/rustc_lint/src/context.rs b/compiler/rustc_lint/src/context.rs
index cec0003ff..e6a0d7e60 100644
--- a/compiler/rustc_lint/src/context.rs
+++ b/compiler/rustc_lint/src/context.rs
@@ -206,7 +206,7 @@ impl LintStore {
self.late_module_passes.push(Box::new(pass));
}
- // Helper method for register_early/late_pass
+ /// Helper method for register_early/late_pass
pub fn register_lints(&mut self, lints: &[&'static Lint]) {
for lint in lints {
self.lints.push(lint);
@@ -579,6 +579,7 @@ pub trait LintContext: Sized {
/// Return value of the `decorate` closure is ignored, see [`struct_lint_level`] for a detailed explanation.
///
/// [`struct_lint_level`]: rustc_middle::lint::struct_lint_level#decorate-signature
+ #[rustc_lint_diagnostics]
fn lookup_with_diagnostics(
&self,
lint: &'static Lint,
@@ -882,6 +883,7 @@ pub trait LintContext: Sized {
/// Return value of the `decorate` closure is ignored, see [`struct_lint_level`] for a detailed explanation.
///
/// [`struct_lint_level`]: rustc_middle::lint::struct_lint_level#decorate-signature
+ #[rustc_lint_diagnostics]
fn lookup<S: Into<MultiSpan>>(
&self,
lint: &'static Lint,
@@ -908,6 +910,7 @@ pub trait LintContext: Sized {
/// Return value of the `decorate` closure is ignored, see [`struct_lint_level`] for a detailed explanation.
///
/// [`struct_lint_level`]: rustc_middle::lint::struct_lint_level#decorate-signature
+ #[rustc_lint_diagnostics]
fn struct_span_lint<S: Into<MultiSpan>>(
&self,
lint: &'static Lint,
@@ -933,6 +936,7 @@ pub trait LintContext: Sized {
/// Return value of the `decorate` closure is ignored, see [`struct_lint_level`] for a detailed explanation.
///
/// [`struct_lint_level`]: rustc_middle::lint::struct_lint_level#decorate-signature
+ #[rustc_lint_diagnostics]
fn lint(
&self,
lint: &'static Lint,
@@ -1155,7 +1159,7 @@ impl<'tcx> LateContext<'tcx> {
fn print_dyn_existential(
self,
- _predicates: &'tcx ty::List<ty::Binder<'tcx, ty::ExistentialPredicate<'tcx>>>,
+ _predicates: &'tcx ty::List<ty::PolyExistentialPredicate<'tcx>>,
) -> Result<Self::DynExistential, Self::Error> {
Ok(())
}
@@ -1241,6 +1245,23 @@ impl<'tcx> LateContext<'tcx> {
AbsolutePathPrinter { tcx: self.tcx }.print_def_path(def_id, &[]).unwrap()
}
+
+ /// Returns the associated type `name` for `self_ty` as an implementation of `trait_id`.
+ /// Do not invoke without first verifying that the type implements the trait.
+ pub fn get_associated_type(
+ &self,
+ self_ty: Ty<'tcx>,
+ trait_id: DefId,
+ name: &str,
+ ) -> Option<Ty<'tcx>> {
+ let tcx = self.tcx;
+ tcx.associated_items(trait_id)
+ .find_by_name_and_kind(tcx, Ident::from_str(name), ty::AssocKind::Type, trait_id)
+ .and_then(|assoc| {
+ let proj = tcx.mk_projection(assoc.def_id, tcx.mk_substs_trait(self_ty, []));
+ tcx.try_normalize_erasing_regions(self.param_env, proj).ok()
+ })
+ }
}
impl<'tcx> abi::HasDataLayout for LateContext<'tcx> {
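
The new `get_associated_type` helper normalizes a projection such as `<T as Deref>::Target` against the current `ParamEnv`; the `deref_into_dyn_supertrait` pass added below is its first in-tree user. A hedged sketch of a standalone caller (a hypothetical free function, assuming a crate that depends on `rustc_lint` and `rustc_middle`):

    use rustc_lint::LateContext;
    use rustc_middle::ty::Ty;

    // Resolve `<self_ty as Deref>::Target`, returning None if `Deref` has no
    // lang item or the projection cannot be normalized. Per the doc comment
    // above, callers should first check that `self_ty` implements `Deref`.
    fn deref_target<'tcx>(cx: &LateContext<'tcx>, self_ty: Ty<'tcx>) -> Option<Ty<'tcx>> {
        let deref_trait = cx.tcx.lang_items().deref_trait()?;
        cx.get_associated_type(self_ty, deref_trait, "Target")
    }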
diff --git a/compiler/rustc_lint/src/deref_into_dyn_supertrait.rs b/compiler/rustc_lint/src/deref_into_dyn_supertrait.rs
new file mode 100644
index 000000000..1d29a234a
--- /dev/null
+++ b/compiler/rustc_lint/src/deref_into_dyn_supertrait.rs
@@ -0,0 +1,92 @@
+use crate::{LateContext, LateLintPass, LintContext};
+
+use rustc_errors::DelayDm;
+use rustc_hir as hir;
+use rustc_middle::{traits::util::supertraits, ty};
+use rustc_span::sym;
+
+declare_lint! {
+ /// The `deref_into_dyn_supertrait` lint is emitted whenever there is a use of a
+ /// `Deref` implementation with a `dyn SuperTrait` type as `Target`.
+ ///
+ /// These implementations will become shadowed when the `trait_upcasting` feature is stabilized.
+ /// The `deref` functions will no longer be called implicitly, so there might be a behavior change.
+ ///
+ /// ### Example
+ ///
+ /// ```rust,compile_fail
+ /// #![deny(deref_into_dyn_supertrait)]
+ /// #![allow(dead_code)]
+ ///
+ /// use core::ops::Deref;
+ ///
+ /// trait A {}
+ /// trait B: A {}
+ /// impl<'a> Deref for dyn 'a + B {
+ /// type Target = dyn A;
+ /// fn deref(&self) -> &Self::Target {
+ /// todo!()
+ /// }
+ /// }
+ ///
+ /// fn take_a(_: &dyn A) { }
+ ///
+ /// fn take_b(b: &dyn B) {
+ /// take_a(b);
+ /// }
+ /// ```
+ ///
+ /// {{produces}}
+ ///
+ /// ### Explanation
+ ///
+ /// The dyn upcasting coercion feature adds new coercion rules, taking priority
+ /// over certain other coercion rules, which will cause some behavior change.
+ pub DEREF_INTO_DYN_SUPERTRAIT,
+ Warn,
+ "`Deref` implementation usage with a supertrait trait object for output might be shadowed in the future",
+ @future_incompatible = FutureIncompatibleInfo {
+ reference: "issue #89460 <https://github.com/rust-lang/rust/issues/89460>",
+ };
+}
+
+declare_lint_pass!(DerefIntoDynSupertrait => [DEREF_INTO_DYN_SUPERTRAIT]);
+
+impl<'tcx> LateLintPass<'tcx> for DerefIntoDynSupertrait {
+ fn check_item(&mut self, cx: &LateContext<'tcx>, item: &'tcx hir::Item<'tcx>) {
+ // `Deref` is being implemented for `t`
+ if let hir::ItemKind::Impl(impl_) = item.kind
+ && let Some(trait_) = &impl_.of_trait
+ && let t = cx.tcx.type_of(item.owner_id)
+ && let opt_did @ Some(did) = trait_.trait_def_id()
+ && opt_did == cx.tcx.lang_items().deref_trait()
+ // `t` is `dyn t_principal`
+ && let ty::Dynamic(data, _, ty::Dyn) = t.kind()
+ && let Some(t_principal) = data.principal()
+ // `<T as Deref>::Target` is `dyn target_principal`
+ && let Some(target) = cx.get_associated_type(t, did, "Target")
+ && let ty::Dynamic(data, _, ty::Dyn) = target.kind()
+ && let Some(target_principal) = data.principal()
+ // `target_principal` is a supertrait of `t_principal`
+ && supertraits(cx.tcx, t_principal.with_self_ty(cx.tcx, cx.tcx.types.trait_object_dummy_self))
+ .any(|sup| sup.map_bound(|x| ty::ExistentialTraitRef::erase_self_ty(cx.tcx, x)) == target_principal)
+ {
+ cx.struct_span_lint(
+ DEREF_INTO_DYN_SUPERTRAIT,
+ cx.tcx.def_span(item.owner_id.def_id),
+ DelayDm(|| {
+ format!(
+ "`{t}` implements `Deref` with supertrait `{target_principal}` as target"
+ )
+ }),
+ |lint| {
+ if let Some(target_span) = impl_.items.iter().find_map(|i| (i.ident.name == sym::Target).then_some(i.span)) {
+ lint.span_label(target_span, "target type is set here");
+ }
+
+ lint
+ },
+ )
+ }
+ }
+}
diff --git a/compiler/rustc_lint/src/early.rs b/compiler/rustc_lint/src/early.rs
index aee870dd2..52363b0be 100644
--- a/compiler/rustc_lint/src/early.rs
+++ b/compiler/rustc_lint/src/early.rs
@@ -20,23 +20,23 @@ use rustc_ast::ptr::P;
use rustc_ast::visit::{self as ast_visit, Visitor};
use rustc_ast::{self as ast, walk_list, HasAttrs};
use rustc_middle::ty::RegisteredTools;
-use rustc_session::lint::{BufferedEarlyLint, LintBuffer, LintPass};
+use rustc_session::lint::{BufferedEarlyLint, LintBuffer};
use rustc_session::Session;
use rustc_span::symbol::Ident;
use rustc_span::Span;
-use std::slice;
-
-macro_rules! run_early_pass { ($cx:expr, $f:ident, $($args:expr),*) => ({
- $cx.pass.$f(&$cx.context, $($args),*);
+macro_rules! run_early_passes { ($cx:expr, $f:ident, $($args:expr),*) => ({
+ for pass in $cx.passes.iter_mut() {
+ pass.$f(&$cx.context, $($args),*);
+ }
}) }
-pub struct EarlyContextAndPass<'a, T: EarlyLintPass> {
+pub struct EarlyContextAndPasses<'a> {
context: EarlyContext<'a>,
- pass: T,
+ passes: Vec<EarlyLintPassObject>,
}
-impl<'a, T: EarlyLintPass> EarlyContextAndPass<'a, T> {
+impl<'a> EarlyContextAndPasses<'a> {
fn check_id(&mut self, id: ast::NodeId) {
for early_lint in self.context.buffered.take(id) {
let BufferedEarlyLint { span, msg, node_id: _, lint_id, diagnostic } = early_lint;
@@ -63,27 +63,27 @@ impl<'a, T: EarlyLintPass> EarlyContextAndPass<'a, T> {
self.check_id(id);
debug!("early context: enter_attrs({:?})", attrs);
- run_early_pass!(self, enter_lint_attrs, attrs);
+ run_early_passes!(self, enter_lint_attrs, attrs);
f(self);
debug!("early context: exit_attrs({:?})", attrs);
- run_early_pass!(self, exit_lint_attrs, attrs);
+ run_early_passes!(self, exit_lint_attrs, attrs);
self.context.builder.pop(push);
}
}
-impl<'a, T: EarlyLintPass> ast_visit::Visitor<'a> for EarlyContextAndPass<'a, T> {
+impl<'a> ast_visit::Visitor<'a> for EarlyContextAndPasses<'a> {
fn visit_param(&mut self, param: &'a ast::Param) {
self.with_lint_attrs(param.id, &param.attrs, |cx| {
- run_early_pass!(cx, check_param, param);
+ run_early_passes!(cx, check_param, param);
ast_visit::walk_param(cx, param);
});
}
fn visit_item(&mut self, it: &'a ast::Item) {
self.with_lint_attrs(it.id, &it.attrs, |cx| {
- run_early_pass!(cx, check_item, it);
+ run_early_passes!(cx, check_item, it);
ast_visit::walk_item(cx, it);
- run_early_pass!(cx, check_item_post, it);
+ run_early_passes!(cx, check_item_post, it);
})
}
@@ -94,10 +94,10 @@ impl<'a, T: EarlyLintPass> ast_visit::Visitor<'a> for EarlyContextAndPass<'a, T>
}
fn visit_pat(&mut self, p: &'a ast::Pat) {
- run_early_pass!(self, check_pat, p);
+ run_early_passes!(self, check_pat, p);
self.check_id(p.id);
ast_visit::walk_pat(self, p);
- run_early_pass!(self, check_pat_post, p);
+ run_early_passes!(self, check_pat_post, p);
}
fn visit_pat_field(&mut self, field: &'a ast::PatField) {
@@ -113,7 +113,7 @@ impl<'a, T: EarlyLintPass> ast_visit::Visitor<'a> for EarlyContextAndPass<'a, T>
fn visit_expr(&mut self, e: &'a ast::Expr) {
self.with_lint_attrs(e.id, &e.attrs, |cx| {
- run_early_pass!(cx, check_expr, e);
+ run_early_passes!(cx, check_expr, e);
ast_visit::walk_expr(cx, e);
})
}
@@ -134,7 +134,7 @@ impl<'a, T: EarlyLintPass> ast_visit::Visitor<'a> for EarlyContextAndPass<'a, T>
// Note that statements get their attributes from
// the AST struct that they wrap (e.g. an item)
self.with_lint_attrs(s.id, s.attrs(), |cx| {
- run_early_pass!(cx, check_stmt, s);
+ run_early_passes!(cx, check_stmt, s);
cx.check_id(s.id);
});
// The visitor for the AST struct wrapped
@@ -145,7 +145,7 @@ impl<'a, T: EarlyLintPass> ast_visit::Visitor<'a> for EarlyContextAndPass<'a, T>
}
fn visit_fn(&mut self, fk: ast_visit::FnKind<'a>, span: Span, id: ast::NodeId) {
- run_early_pass!(self, check_fn, fk, span, id);
+ run_early_passes!(self, check_fn, fk, span, id);
self.check_id(id);
ast_visit::walk_fn(self, fk);
@@ -159,8 +159,8 @@ impl<'a, T: EarlyLintPass> ast_visit::Visitor<'a> for EarlyContextAndPass<'a, T>
}
fn visit_variant_data(&mut self, s: &'a ast::VariantData) {
- if let Some(ctor_hir_id) = s.ctor_id() {
- self.check_id(ctor_hir_id);
+ if let Some(ctor_node_id) = s.ctor_node_id() {
+ self.check_id(ctor_node_id);
}
ast_visit::walk_struct_def(self, s);
}
@@ -173,37 +173,37 @@ impl<'a, T: EarlyLintPass> ast_visit::Visitor<'a> for EarlyContextAndPass<'a, T>
fn visit_variant(&mut self, v: &'a ast::Variant) {
self.with_lint_attrs(v.id, &v.attrs, |cx| {
- run_early_pass!(cx, check_variant, v);
+ run_early_passes!(cx, check_variant, v);
ast_visit::walk_variant(cx, v);
})
}
fn visit_ty(&mut self, t: &'a ast::Ty) {
- run_early_pass!(self, check_ty, t);
+ run_early_passes!(self, check_ty, t);
self.check_id(t.id);
ast_visit::walk_ty(self, t);
}
fn visit_ident(&mut self, ident: Ident) {
- run_early_pass!(self, check_ident, ident);
+ run_early_passes!(self, check_ident, ident);
}
fn visit_local(&mut self, l: &'a ast::Local) {
self.with_lint_attrs(l.id, &l.attrs, |cx| {
- run_early_pass!(cx, check_local, l);
+ run_early_passes!(cx, check_local, l);
ast_visit::walk_local(cx, l);
})
}
fn visit_block(&mut self, b: &'a ast::Block) {
- run_early_pass!(self, check_block, b);
+ run_early_passes!(self, check_block, b);
self.check_id(b.id);
ast_visit::walk_block(self, b);
}
fn visit_arm(&mut self, a: &'a ast::Arm) {
self.with_lint_attrs(a.id, &a.attrs, |cx| {
- run_early_pass!(cx, check_arm, a);
+ run_early_passes!(cx, check_arm, a);
ast_visit::walk_arm(cx, a);
})
}
@@ -212,26 +212,29 @@ impl<'a, T: EarlyLintPass> ast_visit::Visitor<'a> for EarlyContextAndPass<'a, T>
// Explicitly check for lints associated with 'closure_id', since
// it does not have a corresponding AST node
match e.kind {
- ast::ExprKind::Closure(_, _, ast::Async::Yes { closure_id, .. }, ..)
+ ast::ExprKind::Closure(box ast::Closure {
+ asyncness: ast::Async::Yes { closure_id, .. },
+ ..
+ })
| ast::ExprKind::Async(_, closure_id, ..) => self.check_id(closure_id),
_ => {}
}
}
fn visit_generic_arg(&mut self, arg: &'a ast::GenericArg) {
- run_early_pass!(self, check_generic_arg, arg);
+ run_early_passes!(self, check_generic_arg, arg);
ast_visit::walk_generic_arg(self, arg);
}
fn visit_generic_param(&mut self, param: &'a ast::GenericParam) {
self.with_lint_attrs(param.id, &param.attrs, |cx| {
- run_early_pass!(cx, check_generic_param, param);
+ run_early_passes!(cx, check_generic_param, param);
ast_visit::walk_generic_param(cx, param);
});
}
fn visit_generics(&mut self, g: &'a ast::Generics) {
- run_early_pass!(self, check_generics, g);
+ run_early_passes!(self, check_generics, g);
ast_visit::walk_generics(self, g);
}
@@ -240,18 +243,18 @@ impl<'a, T: EarlyLintPass> ast_visit::Visitor<'a> for EarlyContextAndPass<'a, T>
}
fn visit_poly_trait_ref(&mut self, t: &'a ast::PolyTraitRef) {
- run_early_pass!(self, check_poly_trait_ref, t);
+ run_early_passes!(self, check_poly_trait_ref, t);
ast_visit::walk_poly_trait_ref(self, t);
}
fn visit_assoc_item(&mut self, item: &'a ast::AssocItem, ctxt: ast_visit::AssocCtxt) {
self.with_lint_attrs(item.id, &item.attrs, |cx| match ctxt {
ast_visit::AssocCtxt::Trait => {
- run_early_pass!(cx, check_trait_item, item);
+ run_early_passes!(cx, check_trait_item, item);
ast_visit::walk_assoc_item(cx, item, ctxt);
}
ast_visit::AssocCtxt::Impl => {
- run_early_pass!(cx, check_impl_item, item);
+ run_early_passes!(cx, check_impl_item, item);
ast_visit::walk_assoc_item(cx, item, ctxt);
}
});
@@ -272,51 +275,20 @@ impl<'a, T: EarlyLintPass> ast_visit::Visitor<'a> for EarlyContextAndPass<'a, T>
}
fn visit_attribute(&mut self, attr: &'a ast::Attribute) {
- run_early_pass!(self, check_attribute, attr);
+ run_early_passes!(self, check_attribute, attr);
}
fn visit_mac_def(&mut self, mac: &'a ast::MacroDef, id: ast::NodeId) {
- run_early_pass!(self, check_mac_def, mac);
+ run_early_passes!(self, check_mac_def, mac);
self.check_id(id);
}
fn visit_mac_call(&mut self, mac: &'a ast::MacCall) {
- run_early_pass!(self, check_mac, mac);
+ run_early_passes!(self, check_mac, mac);
ast_visit::walk_mac(self, mac);
}
}
-struct EarlyLintPassObjects<'a> {
- lints: &'a mut [EarlyLintPassObject],
-}
-
-#[allow(rustc::lint_pass_impl_without_macro)]
-impl LintPass for EarlyLintPassObjects<'_> {
- fn name(&self) -> &'static str {
- panic!()
- }
-}
-
-macro_rules! expand_early_lint_pass_impl_methods {
- ([$($(#[$attr:meta])* fn $name:ident($($param:ident: $arg:ty),*);)*]) => (
- $(fn $name(&mut self, context: &EarlyContext<'_>, $($param: $arg),*) {
- for obj in self.lints.iter_mut() {
- obj.$name(context, $($param),*);
- }
- })*
- )
-}
-
-macro_rules! early_lint_pass_impl {
- ([], [$($methods:tt)*]) => (
- impl EarlyLintPass for EarlyLintPassObjects<'_> {
- expand_early_lint_pass_impl_methods!([$($methods)*]);
- }
- )
-}
-
-crate::early_lint_methods!(early_lint_pass_impl, []);
-
/// Early lints work on different nodes - either on the crate root, or on freshly loaded modules.
/// This trait generalizes over those nodes.
pub trait EarlyCheckNode<'a>: Copy {
@@ -324,7 +296,7 @@ pub trait EarlyCheckNode<'a>: Copy {
fn attrs<'b>(self) -> &'b [ast::Attribute]
where
'a: 'b;
- fn check<'b>(self, cx: &mut EarlyContextAndPass<'b, impl EarlyLintPass>)
+ fn check<'b>(self, cx: &mut EarlyContextAndPasses<'b>)
where
'a: 'b;
}
@@ -339,13 +311,13 @@ impl<'a> EarlyCheckNode<'a> for &'a ast::Crate {
{
&self.attrs
}
- fn check<'b>(self, cx: &mut EarlyContextAndPass<'b, impl EarlyLintPass>)
+ fn check<'b>(self, cx: &mut EarlyContextAndPasses<'b>)
where
'a: 'b,
{
- run_early_pass!(cx, check_crate, self);
+ run_early_passes!(cx, check_crate, self);
ast_visit::walk_crate(cx, self);
- run_early_pass!(cx, check_crate_post, self);
+ run_early_passes!(cx, check_crate_post, self);
}
}
@@ -359,7 +331,7 @@ impl<'a> EarlyCheckNode<'a> for (ast::NodeId, &'a [ast::Attribute], &'a [P<ast::
{
self.1
}
- fn check<'b>(self, cx: &mut EarlyContextAndPass<'b, impl EarlyLintPass>)
+ fn check<'b>(self, cx: &mut EarlyContextAndPasses<'b>)
where
'a: 'b,
{
@@ -368,87 +340,36 @@ impl<'a> EarlyCheckNode<'a> for (ast::NodeId, &'a [ast::Attribute], &'a [P<ast::
}
}
-fn early_lint_node<'a>(
- sess: &Session,
- warn_about_weird_lints: bool,
- lint_store: &LintStore,
- registered_tools: &RegisteredTools,
- buffered: LintBuffer,
- pass: impl EarlyLintPass,
- check_node: impl EarlyCheckNode<'a>,
-) -> LintBuffer {
- let mut cx = EarlyContextAndPass {
- context: EarlyContext::new(
- sess,
- warn_about_weird_lints,
- lint_store,
- registered_tools,
- buffered,
- ),
- pass,
- };
-
- cx.with_lint_attrs(check_node.id(), check_node.attrs(), |cx| check_node.check(cx));
- cx.context.buffered
-}
-
pub fn check_ast_node<'a>(
sess: &Session,
pre_expansion: bool,
lint_store: &LintStore,
registered_tools: &RegisteredTools,
lint_buffer: Option<LintBuffer>,
- builtin_lints: impl EarlyLintPass,
+ builtin_lints: impl EarlyLintPass + 'static,
check_node: impl EarlyCheckNode<'a>,
) {
let passes =
if pre_expansion { &lint_store.pre_expansion_passes } else { &lint_store.early_passes };
- let mut passes: Vec<_> = passes.iter().map(|p| (p)()).collect();
- let mut buffered = lint_buffer.unwrap_or_default();
-
- if sess.opts.unstable_opts.no_interleave_lints {
- for (i, pass) in passes.iter_mut().enumerate() {
- buffered =
- sess.prof.verbose_generic_activity_with_arg("run_lint", pass.name()).run(|| {
- early_lint_node(
- sess,
- !pre_expansion && i == 0,
- lint_store,
- registered_tools,
- buffered,
- EarlyLintPassObjects { lints: slice::from_mut(pass) },
- check_node,
- )
- });
- }
- } else {
- buffered = early_lint_node(
+ let mut passes: Vec<EarlyLintPassObject> = passes.iter().map(|p| (p)()).collect();
+ passes.push(Box::new(builtin_lints));
+
+ let mut cx = EarlyContextAndPasses {
+ context: EarlyContext::new(
sess,
!pre_expansion,
lint_store,
registered_tools,
- buffered,
- builtin_lints,
- check_node,
- );
-
- if !passes.is_empty() {
- buffered = early_lint_node(
- sess,
- false,
- lint_store,
- registered_tools,
- buffered,
- EarlyLintPassObjects { lints: &mut passes[..] },
- check_node,
- );
- }
- }
+ lint_buffer.unwrap_or_default(),
+ ),
+ passes,
+ };
+ cx.with_lint_attrs(check_node.id(), check_node.attrs(), |cx| check_node.check(cx));
// All of the buffered lints should have been emitted at this point.
// If not, that means that we somehow buffered a lint for a node id
// that was not lint-checked (perhaps it doesn't exist?). This is a bug.
- for (id, lints) in buffered.map {
+ for (id, lints) in cx.context.buffered.map {
for early_lint in lints {
sess.delay_span_bug(
early_lint.span,
diff --git a/compiler/rustc_lint/src/errors.rs b/compiler/rustc_lint/src/errors.rs
index a49d1bdac..1a769893f 100644
--- a/compiler/rustc_lint/src/errors.rs
+++ b/compiler/rustc_lint/src/errors.rs
@@ -83,7 +83,7 @@ pub struct UnknownToolInScopedLint {
pub struct BuiltinEllpisisInclusiveRangePatterns {
#[primary_span]
pub span: Span,
- #[suggestion_short(code = "{replace}", applicability = "machine-applicable")]
+ #[suggestion(style = "short", code = "{replace}", applicability = "machine-applicable")]
pub suggestion: Span,
pub replace: String,
}
diff --git a/compiler/rustc_lint/src/for_loops_over_fallibles.rs b/compiler/rustc_lint/src/for_loops_over_fallibles.rs
index ed8d424e0..418785015 100644
--- a/compiler/rustc_lint/src/for_loops_over_fallibles.rs
+++ b/compiler/rustc_lint/src/for_loops_over_fallibles.rs
@@ -3,11 +3,9 @@ use crate::{LateContext, LateLintPass, LintContext};
use hir::{Expr, Pat};
use rustc_errors::{Applicability, DelayDm};
use rustc_hir as hir;
-use rustc_infer::traits::TraitEngine;
use rustc_infer::{infer::TyCtxtInferExt, traits::ObligationCause};
use rustc_middle::ty::{self, List};
use rustc_span::{sym, Span};
-use rustc_trait_selection::traits::TraitEngineExt;
declare_lint! {
/// The `for_loops_over_fallibles` lint checks for `for` loops over `Option` or `Result` values.
@@ -160,24 +158,19 @@ fn suggest_question_mark<'tcx>(
let ty = substs.type_at(0);
let infcx = cx.tcx.infer_ctxt().build();
- let mut fulfill_cx = <dyn TraitEngine<'_>>::new(infcx.tcx);
-
let cause = ObligationCause::new(
span,
body_id.hir_id,
rustc_infer::traits::ObligationCauseCode::MiscObligation,
);
- fulfill_cx.register_bound(
+ let errors = rustc_trait_selection::traits::fully_solve_bound(
&infcx,
+ cause,
ty::ParamEnv::empty(),
// Erase any region vids from the type, which may not be resolved
infcx.tcx.erase_regions(ty),
into_iterator_did,
- cause,
);
- // Select all, including ambiguous predicates
- let errors = fulfill_cx.select_all_or_error(&infcx);
-
errors.is_empty()
}
diff --git a/compiler/rustc_lint/src/hidden_unicode_codepoints.rs b/compiler/rustc_lint/src/hidden_unicode_codepoints.rs
index 7e884e990..7106e75db 100644
--- a/compiler/rustc_lint/src/hidden_unicode_codepoints.rs
+++ b/compiler/rustc_lint/src/hidden_unicode_codepoints.rs
@@ -123,23 +123,22 @@ impl EarlyLintPass for HiddenUnicodeCodepoints {
fn check_expr(&mut self, cx: &EarlyContext<'_>, expr: &ast::Expr) {
// byte strings are already handled well enough by `EscapeError::NonAsciiCharInByteString`
- let (text, span, padding) = match &expr.kind {
- ast::ExprKind::Lit(ast::Lit { token_lit, kind, span }) => {
+ match &expr.kind {
+ ast::ExprKind::Lit(token_lit) => {
let text = token_lit.symbol;
if !contains_text_flow_control_chars(text.as_str()) {
return;
}
- let padding = match kind {
+ let padding = match token_lit.kind {
// account for `"` or `'`
- ast::LitKind::Str(_, ast::StrStyle::Cooked) | ast::LitKind::Char(_) => 1,
+ ast::token::LitKind::Str | ast::token::LitKind::Char => 1,
// account for `r###"`
- ast::LitKind::Str(_, ast::StrStyle::Raw(val)) => *val as u32 + 2,
+ ast::token::LitKind::StrRaw(n) => n as u32 + 2,
_ => return,
};
- (text, span, padding)
+ self.lint_text_direction_codepoint(cx, text, expr.span, padding, true, "literal");
}
- _ => return,
+ _ => {}
};
- self.lint_text_direction_codepoint(cx, text, *span, padding, true, "literal");
}
}
diff --git a/compiler/rustc_lint/src/internal.rs b/compiler/rustc_lint/src/internal.rs
index 11e4650cb..4f92661db 100644
--- a/compiler/rustc_lint/src/internal.rs
+++ b/compiler/rustc_lint/src/internal.rs
@@ -117,7 +117,7 @@ impl<'tcx> LateLintPass<'tcx> for TyTyKind {
fn check_path(
&mut self,
cx: &LateContext<'tcx>,
- path: &'tcx rustc_hir::Path<'tcx>,
+ path: &rustc_hir::Path<'tcx>,
_: rustc_hir::HirId,
) {
if let Some(segment) = path.segments.iter().nth_back(1)
@@ -272,11 +272,7 @@ fn gen_args(segment: &PathSegment<'_>) -> String {
.args
.iter()
.filter_map(|arg| {
- if let GenericArg::Lifetime(lt) = arg {
- Some(lt.name.ident().to_string())
- } else {
- None
- }
+ if let GenericArg::Lifetime(lt) = arg { Some(lt.ident.to_string()) } else { None }
})
.collect::<Vec<_>>();
@@ -466,8 +462,8 @@ impl LateLintPass<'_> for BadOptAccess {
let Some(attr) = cx.tcx.get_attr(field.did, sym::rustc_lint_opt_deny_field_access) &&
let Some(items) = attr.meta_item_list() &&
let Some(item) = items.first() &&
- let Some(literal) = item.literal() &&
- let ast::LitKind::Str(val, _) = literal.kind
+ let Some(lit) = item.lit() &&
+ let ast::LitKind::Str(val, _) = lit.kind
{
cx.struct_span_lint(BAD_OPT_ACCESS, expr.span, val.as_str(), |lint|
lint
diff --git a/compiler/rustc_lint/src/late.rs b/compiler/rustc_lint/src/late.rs
index 303fcb1a1..8a50cb1f1 100644
--- a/compiler/rustc_lint/src/late.rs
+++ b/compiler/rustc_lint/src/late.rs
@@ -23,12 +23,10 @@ use rustc_hir::intravisit as hir_visit;
use rustc_hir::intravisit::Visitor;
use rustc_middle::hir::nested_filter;
use rustc_middle::ty::{self, TyCtxt};
-use rustc_session::lint::LintPass;
use rustc_span::Span;
use std::any::Any;
use std::cell::Cell;
-use std::slice;
/// Extract the `LintStore` from the query context.
/// This function exists because we've erased `LintStore` as `dyn Any` in the context.
@@ -38,15 +36,17 @@ pub fn unerased_lint_store(tcx: TyCtxt<'_>) -> &LintStore {
}
macro_rules! lint_callback { ($cx:expr, $f:ident, $($args:expr),*) => ({
- $cx.pass.$f(&$cx.context, $($args),*);
+ for pass in $cx.passes.iter_mut() {
+ pass.$f(&$cx.context, $($args),*);
+ }
}) }
-struct LateContextAndPass<'tcx, T: LateLintPass<'tcx>> {
+struct LateContextAndPasses<'tcx> {
context: LateContext<'tcx>,
- pass: T,
+ passes: Vec<LateLintPassObject<'tcx>>,
}
-impl<'tcx, T: LateLintPass<'tcx>> LateContextAndPass<'tcx, T> {
+impl<'tcx> LateContextAndPasses<'tcx> {
/// Merge the lints specified by any lint attributes into the
/// current lint context, call the provided function, then reset the
/// lints in effect to their previous state.
@@ -82,7 +82,7 @@ impl<'tcx, T: LateLintPass<'tcx>> LateContextAndPass<'tcx, T> {
}
}
-impl<'tcx, T: LateLintPass<'tcx>> hir_visit::Visitor<'tcx> for LateContextAndPass<'tcx, T> {
+impl<'tcx> hir_visit::Visitor<'tcx> for LateContextAndPasses<'tcx> {
type NestedFilter = nested_filter::All;
/// Because lints are scoped lexically, we want to walk nested
@@ -205,7 +205,7 @@ impl<'tcx, T: LateLintPass<'tcx>> hir_visit::Visitor<'tcx> for LateContextAndPas
}
fn visit_variant(&mut self, v: &'tcx hir::Variant<'tcx>) {
- self.with_lint_attrs(v.id, |cx| {
+ self.with_lint_attrs(v.hir_id, |cx| {
lint_callback!(cx, check_variant, v);
hir_visit::walk_variant(cx, v);
})
@@ -292,7 +292,7 @@ impl<'tcx, T: LateLintPass<'tcx>> hir_visit::Visitor<'tcx> for LateContextAndPas
hir_visit::walk_lifetime(self, lt);
}
- fn visit_path(&mut self, p: &'tcx hir::Path<'tcx>, id: hir::HirId) {
+ fn visit_path(&mut self, p: &hir::Path<'tcx>, id: hir::HirId) {
lint_callback!(self, check_path, p, id);
hir_visit::walk_path(self, p);
}
@@ -302,57 +302,28 @@ impl<'tcx, T: LateLintPass<'tcx>> hir_visit::Visitor<'tcx> for LateContextAndPas
}
}
-struct LateLintPassObjects<'a, 'tcx> {
- lints: &'a mut [LateLintPassObject<'tcx>],
-}
-
-#[allow(rustc::lint_pass_impl_without_macro)]
-impl LintPass for LateLintPassObjects<'_, '_> {
- fn name(&self) -> &'static str {
- panic!()
- }
-}
-
-macro_rules! expand_late_lint_pass_impl_methods {
- ([$hir:tt], [$($(#[$attr:meta])* fn $name:ident($($param:ident: $arg:ty),*);)*]) => (
- $(fn $name(&mut self, context: &LateContext<$hir>, $($param: $arg),*) {
- for obj in self.lints.iter_mut() {
- obj.$name(context, $($param),*);
- }
- })*
- )
-}
-
-macro_rules! late_lint_pass_impl {
- ([], [$hir:tt], $methods:tt) => {
- impl<$hir> LateLintPass<$hir> for LateLintPassObjects<'_, $hir> {
- expand_late_lint_pass_impl_methods!([$hir], $methods);
- }
- };
-}
-
-crate::late_lint_methods!(late_lint_pass_impl, [], ['tcx]);
-
-fn late_lint_mod_pass<'tcx, T: LateLintPass<'tcx>>(
+pub(super) fn late_lint_mod<'tcx, T: LateLintPass<'tcx> + 'tcx>(
tcx: TyCtxt<'tcx>,
module_def_id: LocalDefId,
- pass: T,
+ builtin_lints: T,
) {
- let effective_visibilities = &tcx.effective_visibilities(());
-
let context = LateContext {
tcx,
enclosing_body: None,
cached_typeck_results: Cell::new(None),
param_env: ty::ParamEnv::empty(),
- effective_visibilities,
+ effective_visibilities: &tcx.effective_visibilities(()),
lint_store: unerased_lint_store(tcx),
last_node_with_lint_attrs: tcx.hir().local_def_id_to_hir_id(module_def_id),
generics: None,
only_module: true,
};
- let mut cx = LateContextAndPass { context, pass };
+ let mut passes: Vec<_> =
+ unerased_lint_store(tcx).late_module_passes.iter().map(|pass| (pass)(tcx)).collect();
+ passes.push(Box::new(builtin_lints));
+
+ let mut cx = LateContextAndPasses { context, passes };
let (module, _span, hir_id) = tcx.hir().get_module(module_def_id);
cx.process_mod(module, hir_id);
@@ -365,46 +336,28 @@ fn late_lint_mod_pass<'tcx, T: LateLintPass<'tcx>>(
}
}
-pub fn late_lint_mod<'tcx, T: LateLintPass<'tcx>>(
- tcx: TyCtxt<'tcx>,
- module_def_id: LocalDefId,
- builtin_lints: T,
-) {
- if tcx.sess.opts.unstable_opts.no_interleave_lints {
- // These passes runs in late_lint_crate with -Z no_interleave_lints
- return;
- }
-
- late_lint_mod_pass(tcx, module_def_id, builtin_lints);
-
- let mut passes: Vec<_> =
- unerased_lint_store(tcx).late_module_passes.iter().map(|pass| (pass)(tcx)).collect();
-
- if !passes.is_empty() {
- late_lint_mod_pass(tcx, module_def_id, LateLintPassObjects { lints: &mut passes[..] });
- }
-}
-
-fn late_lint_pass_crate<'tcx, T: LateLintPass<'tcx>>(tcx: TyCtxt<'tcx>, pass: T) {
- let effective_visibilities = &tcx.effective_visibilities(());
-
+fn late_lint_crate<'tcx, T: LateLintPass<'tcx> + 'tcx>(tcx: TyCtxt<'tcx>, builtin_lints: T) {
let context = LateContext {
tcx,
enclosing_body: None,
cached_typeck_results: Cell::new(None),
param_env: ty::ParamEnv::empty(),
- effective_visibilities,
+ effective_visibilities: &tcx.effective_visibilities(()),
lint_store: unerased_lint_store(tcx),
last_node_with_lint_attrs: hir::CRATE_HIR_ID,
generics: None,
only_module: false,
};
- let mut cx = LateContextAndPass { context, pass };
+ let mut passes =
+ unerased_lint_store(tcx).late_passes.iter().map(|p| (p)(tcx)).collect::<Vec<_>>();
+ passes.push(Box::new(builtin_lints));
+
+ let mut cx = LateContextAndPasses { context, passes };
// Visit the whole crate.
cx.with_lint_attrs(hir::CRATE_HIR_ID, |cx| {
- // since the root module isn't visited as an item (because it isn't an
+ // Since the root module isn't visited as an item (because it isn't an
// item), warn for it here.
lint_callback!(cx, check_crate,);
tcx.hir().walk_toplevel_module(cx);
@@ -413,41 +366,8 @@ fn late_lint_pass_crate<'tcx, T: LateLintPass<'tcx>>(tcx: TyCtxt<'tcx>, pass: T)
})
}
-fn late_lint_crate<'tcx, T: LateLintPass<'tcx>>(tcx: TyCtxt<'tcx>, builtin_lints: T) {
- let mut passes =
- unerased_lint_store(tcx).late_passes.iter().map(|p| (p)(tcx)).collect::<Vec<_>>();
-
- if !tcx.sess.opts.unstable_opts.no_interleave_lints {
- if !passes.is_empty() {
- late_lint_pass_crate(tcx, LateLintPassObjects { lints: &mut passes[..] });
- }
-
- late_lint_pass_crate(tcx, builtin_lints);
- } else {
- for pass in &mut passes {
- tcx.sess.prof.verbose_generic_activity_with_arg("run_late_lint", pass.name()).run(
- || {
- late_lint_pass_crate(tcx, LateLintPassObjects { lints: slice::from_mut(pass) });
- },
- );
- }
-
- let mut passes: Vec<_> =
- unerased_lint_store(tcx).late_module_passes.iter().map(|pass| (pass)(tcx)).collect();
-
- for pass in &mut passes {
- tcx.sess
- .prof
- .verbose_generic_activity_with_arg("run_late_module_lint", pass.name())
- .run(|| {
- late_lint_pass_crate(tcx, LateLintPassObjects { lints: slice::from_mut(pass) });
- });
- }
- }
-}
-
/// Performs lint checking on a crate.
-pub fn check_crate<'tcx, T: LateLintPass<'tcx>>(
+pub fn check_crate<'tcx, T: LateLintPass<'tcx> + 'tcx>(
tcx: TyCtxt<'tcx>,
builtin_lints: impl FnOnce() -> T + Send,
) {
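
The late.rs hunks above fold lint execution into a single driver: `LateContextAndPasses` owns every registered late pass plus the builtin combined pass as boxed trait objects and forwards each visitor callback to all of them, replacing the old one-pass-at-a-time `LateContextAndPass` and the `-Z no_interleave_lints` split. A minimal toy sketch of that fan-out pattern (illustrative names, not rustc's actual types):

```rust
// Toy sketch of the pattern the new `LateContextAndPasses` uses: one driver
// owns every pass as a boxed trait object and forwards each callback to all
// of them in order.
trait Pass {
    fn check_item(&mut self, name: &str);
}

struct PassA;
impl Pass for PassA {
    fn check_item(&mut self, name: &str) {
        println!("A saw {}", name);
    }
}

struct PassB;
impl Pass for PassB {
    fn check_item(&mut self, name: &str) {
        println!("B saw {}", name);
    }
}

struct ContextAndPasses {
    passes: Vec<Box<dyn Pass>>,
}

impl ContextAndPasses {
    // Stand-in for what `lint_callback!` expands to: every pass sees the event.
    fn check_item(&mut self, name: &str) {
        for pass in &mut self.passes {
            pass.check_item(name);
        }
    }
}

fn main() {
    // Mirrors the diff's shape: collect registered passes, then push the
    // builtin combined pass on top.
    let mut passes: Vec<Box<dyn Pass>> = Vec::new();
    passes.push(Box::new(PassA));
    passes.push(Box::new(PassB));
    let mut cx = ContextAndPasses { passes };
    cx.check_item("foo");
}
```
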
diff --git a/compiler/rustc_lint/src/let_underscore.rs b/compiler/rustc_lint/src/let_underscore.rs
index 78f355ec3..04d844d21 100644
--- a/compiler/rustc_lint/src/let_underscore.rs
+++ b/compiler/rustc_lint/src/let_underscore.rs
@@ -11,7 +11,8 @@ declare_lint! {
/// scope.
///
/// ### Example
- /// ```
+ ///
+ /// ```rust
/// struct SomeStruct;
/// impl Drop for SomeStruct {
/// fn drop(&mut self) {
@@ -56,7 +57,7 @@ declare_lint! {
/// of at end of scope, which is typically incorrect.
///
/// ### Example
- /// ```compile_fail
+ /// ```rust,compile_fail
/// use std::sync::{Arc, Mutex};
/// use std::thread;
/// let data = Arc::new(Mutex::new(0));
diff --git a/compiler/rustc_lint/src/levels.rs b/compiler/rustc_lint/src/levels.rs
index db0a3419e..847c356b8 100644
--- a/compiler/rustc_lint/src/levels.rs
+++ b/compiler/rustc_lint/src/levels.rs
@@ -163,7 +163,7 @@ fn shallow_lint_levels_on(tcx: TyCtxt<'_>, owner: hir::OwnerId) -> ShallowLintLe
// Otherwise, we need to visit the attributes in source code order, so we fetch HIR and do
// a standard visit.
// FIXME(#102522) Just iterate on attrs once that iteration order matches HIR's.
- _ => match tcx.hir().expect_owner(owner) {
+ _ => match tcx.hir().owner(owner) {
hir::OwnerNode::Item(item) => levels.visit_item(item),
hir::OwnerNode::ForeignItem(item) => levels.visit_foreign_item(item),
hir::OwnerNode::TraitItem(item) => levels.visit_trait_item(item),
@@ -320,7 +320,7 @@ impl<'tcx> Visitor<'tcx> for LintLevelsBuilder<'_, LintLevelQueryMap<'tcx>> {
}
fn visit_variant(&mut self, v: &'tcx hir::Variant<'tcx>) {
- self.add_id(v.id);
+ self.add_id(v.hir_id);
intravisit::walk_variant(self, v);
}
@@ -392,7 +392,7 @@ impl<'tcx> Visitor<'tcx> for LintLevelsBuilder<'_, QueryMapExpectationsWrapper<'
}
fn visit_variant(&mut self, v: &'tcx hir::Variant<'tcx>) {
- self.add_id(v.id);
+ self.add_id(v.hir_id);
intravisit::walk_variant(self, v);
}
@@ -1073,6 +1073,7 @@ impl<'s, P: LintLevelsProvider> LintLevelsBuilder<'s, P> {
/// Return value of the `decorate` closure is ignored, see [`struct_lint_level`] for a detailed explanation.
///
/// [`struct_lint_level`]: rustc_middle::lint::struct_lint_level#decorate-signature
+ #[rustc_lint_diagnostics]
pub(crate) fn struct_lint(
&self,
lint: &'static Lint,
diff --git a/compiler/rustc_lint/src/lib.rs b/compiler/rustc_lint/src/lib.rs
index 5288fc542..771628553 100644
--- a/compiler/rustc_lint/src/lib.rs
+++ b/compiler/rustc_lint/src/lib.rs
@@ -36,6 +36,7 @@
#![feature(let_chains)]
#![feature(min_specialization)]
#![feature(never_type)]
+#![feature(rustc_attrs)]
#![recursion_limit = "256"]
#[macro_use]
@@ -48,6 +49,7 @@ extern crate tracing;
mod array_into_iter;
pub mod builtin;
mod context;
+mod deref_into_dyn_supertrait;
mod early;
mod enum_intrinsics_non_enums;
mod errors;
@@ -86,6 +88,7 @@ use rustc_span::Span;
use array_into_iter::ArrayIntoIter;
use builtin::*;
+use deref_into_dyn_supertrait::*;
use enum_intrinsics_non_enums::EnumIntrinsicsNonEnums;
use for_loops_over_fallibles::*;
use hidden_unicode_codepoints::*;
@@ -124,131 +127,117 @@ fn lint_mod(tcx: TyCtxt<'_>, module_def_id: LocalDefId) {
late::late_lint_mod(tcx, module_def_id, BuiltinCombinedModuleLateLintPass::new());
}
-macro_rules! pre_expansion_lint_passes {
- ($macro:path, $args:tt) => {
- $macro!($args, [KeywordIdents: KeywordIdents,]);
- };
-}
-
-macro_rules! early_lint_passes {
- ($macro:path, $args:tt) => {
- $macro!(
- $args,
- [
- UnusedParens: UnusedParens,
- UnusedBraces: UnusedBraces,
- UnusedImportBraces: UnusedImportBraces,
- UnsafeCode: UnsafeCode,
- SpecialModuleName: SpecialModuleName,
- AnonymousParameters: AnonymousParameters,
- EllipsisInclusiveRangePatterns: EllipsisInclusiveRangePatterns::default(),
- NonCamelCaseTypes: NonCamelCaseTypes,
- DeprecatedAttr: DeprecatedAttr::new(),
- WhileTrue: WhileTrue,
- NonAsciiIdents: NonAsciiIdents,
- HiddenUnicodeCodepoints: HiddenUnicodeCodepoints,
- IncompleteFeatures: IncompleteFeatures,
- RedundantSemicolons: RedundantSemicolons,
- UnusedDocComment: UnusedDocComment,
- UnexpectedCfgs: UnexpectedCfgs,
- ]
- );
- };
-}
-
-macro_rules! declare_combined_early_pass {
- ([$name:ident], $passes:tt) => (
- early_lint_methods!(declare_combined_early_lint_pass, [pub $name, $passes]);
- )
-}
-
-pre_expansion_lint_passes!(declare_combined_early_pass, [BuiltinCombinedPreExpansionLintPass]);
-early_lint_passes!(declare_combined_early_pass, [BuiltinCombinedEarlyLintPass]);
-
-macro_rules! late_lint_passes {
- ($macro:path, $args:tt) => {
- $macro!(
- $args,
- [
- // Tracks state across modules
- UnnameableTestItems: UnnameableTestItems::new(),
- // Tracks attributes of parents
- MissingDoc: MissingDoc::new(),
- // Builds a global list of all impls of `Debug`.
- // FIXME: Turn the computation of types which implement Debug into a query
- // and change this to a module lint pass
- MissingDebugImplementations: MissingDebugImplementations::default(),
- // Keeps a global list of foreign declarations.
- ClashingExternDeclarations: ClashingExternDeclarations::new(),
- ]
- );
- };
-}
-
-macro_rules! late_lint_mod_passes {
- ($macro:path, $args:tt) => {
- $macro!(
- $args,
- [
- ForLoopsOverFallibles: ForLoopsOverFallibles,
- HardwiredLints: HardwiredLints,
- ImproperCTypesDeclarations: ImproperCTypesDeclarations,
- ImproperCTypesDefinitions: ImproperCTypesDefinitions,
- VariantSizeDifferences: VariantSizeDifferences,
- BoxPointers: BoxPointers,
- PathStatements: PathStatements,
- LetUnderscore: LetUnderscore,
- // Depends on referenced function signatures in expressions
- UnusedResults: UnusedResults,
- NonUpperCaseGlobals: NonUpperCaseGlobals,
- NonShorthandFieldPatterns: NonShorthandFieldPatterns,
- UnusedAllocation: UnusedAllocation,
- // Depends on types used in type definitions
- MissingCopyImplementations: MissingCopyImplementations,
- // Depends on referenced function signatures in expressions
- MutableTransmutes: MutableTransmutes,
- TypeAliasBounds: TypeAliasBounds,
- TrivialConstraints: TrivialConstraints,
- TypeLimits: TypeLimits::new(),
- NonSnakeCase: NonSnakeCase,
- InvalidNoMangleItems: InvalidNoMangleItems,
- // Depends on effective visibilities
- UnreachablePub: UnreachablePub,
- ExplicitOutlivesRequirements: ExplicitOutlivesRequirements,
- InvalidValue: InvalidValue,
- DerefNullPtr: DerefNullPtr,
- // May Depend on constants elsewhere
- UnusedBrokenConst: UnusedBrokenConst,
- UnstableFeatures: UnstableFeatures,
- ArrayIntoIter: ArrayIntoIter::default(),
- DropTraitConstraints: DropTraitConstraints,
- TemporaryCStringAsPtr: TemporaryCStringAsPtr,
- NonPanicFmt: NonPanicFmt,
- NoopMethodCall: NoopMethodCall,
- EnumIntrinsicsNonEnums: EnumIntrinsicsNonEnums,
- InvalidAtomicOrdering: InvalidAtomicOrdering,
- NamedAsmLabels: NamedAsmLabels,
- OpaqueHiddenInferredBound: OpaqueHiddenInferredBound,
- ]
- );
- };
-}
-
-macro_rules! declare_combined_late_pass {
- ([$v:vis $name:ident], $passes:tt) => (
- late_lint_methods!(declare_combined_late_lint_pass, [$v $name, $passes], ['tcx]);
- )
-}
+early_lint_methods!(
+ declare_combined_early_lint_pass,
+ [
+ pub BuiltinCombinedPreExpansionLintPass,
+ [
+ KeywordIdents: KeywordIdents,
+ ]
+ ]
+);
+
+early_lint_methods!(
+ declare_combined_early_lint_pass,
+ [
+ pub BuiltinCombinedEarlyLintPass,
+ [
+ UnusedParens: UnusedParens,
+ UnusedBraces: UnusedBraces,
+ UnusedImportBraces: UnusedImportBraces,
+ UnsafeCode: UnsafeCode,
+ SpecialModuleName: SpecialModuleName,
+ AnonymousParameters: AnonymousParameters,
+ EllipsisInclusiveRangePatterns: EllipsisInclusiveRangePatterns::default(),
+ NonCamelCaseTypes: NonCamelCaseTypes,
+ DeprecatedAttr: DeprecatedAttr::new(),
+ WhileTrue: WhileTrue,
+ NonAsciiIdents: NonAsciiIdents,
+ HiddenUnicodeCodepoints: HiddenUnicodeCodepoints,
+ IncompleteFeatures: IncompleteFeatures,
+ RedundantSemicolons: RedundantSemicolons,
+ UnusedDocComment: UnusedDocComment,
+ UnexpectedCfgs: UnexpectedCfgs,
+ ]
+ ]
+);
// FIXME: Make a separate lint type which does not require typeck tables
-late_lint_passes!(declare_combined_late_pass, [pub BuiltinCombinedLateLintPass]);
-
-late_lint_mod_passes!(declare_combined_late_pass, [BuiltinCombinedModuleLateLintPass]);
-
-pub fn new_lint_store(no_interleave_lints: bool, internal_lints: bool) -> LintStore {
+late_lint_methods!(
+ declare_combined_late_lint_pass,
+ [
+ pub BuiltinCombinedLateLintPass,
+ [
+ // Tracks state across modules
+ UnnameableTestItems: UnnameableTestItems::new(),
+ // Tracks attributes of parents
+ MissingDoc: MissingDoc::new(),
+ // Builds a global list of all impls of `Debug`.
+ // FIXME: Turn the computation of types which implement Debug into a query
+ // and change this to a module lint pass
+ MissingDebugImplementations: MissingDebugImplementations::default(),
+ // Keeps a global list of foreign declarations.
+ ClashingExternDeclarations: ClashingExternDeclarations::new(),
+ ]
+ ],
+ ['tcx]
+);
+
+late_lint_methods!(
+ declare_combined_late_lint_pass,
+ [
+ BuiltinCombinedModuleLateLintPass,
+ [
+ ForLoopsOverFallibles: ForLoopsOverFallibles,
+ DerefIntoDynSupertrait: DerefIntoDynSupertrait,
+ HardwiredLints: HardwiredLints,
+ ImproperCTypesDeclarations: ImproperCTypesDeclarations,
+ ImproperCTypesDefinitions: ImproperCTypesDefinitions,
+ VariantSizeDifferences: VariantSizeDifferences,
+ BoxPointers: BoxPointers,
+ PathStatements: PathStatements,
+ LetUnderscore: LetUnderscore,
+ // Depends on referenced function signatures in expressions
+ UnusedResults: UnusedResults,
+ NonUpperCaseGlobals: NonUpperCaseGlobals,
+ NonShorthandFieldPatterns: NonShorthandFieldPatterns,
+ UnusedAllocation: UnusedAllocation,
+ // Depends on types used in type definitions
+ MissingCopyImplementations: MissingCopyImplementations,
+ // Depends on referenced function signatures in expressions
+ MutableTransmutes: MutableTransmutes,
+ TypeAliasBounds: TypeAliasBounds,
+ TrivialConstraints: TrivialConstraints,
+ TypeLimits: TypeLimits::new(),
+ NonSnakeCase: NonSnakeCase,
+ InvalidNoMangleItems: InvalidNoMangleItems,
+ // Depends on effective visibilities
+ UnreachablePub: UnreachablePub,
+ ExplicitOutlivesRequirements: ExplicitOutlivesRequirements,
+ InvalidValue: InvalidValue,
+ DerefNullPtr: DerefNullPtr,
+ // May Depend on constants elsewhere
+ UnusedBrokenConst: UnusedBrokenConst,
+ UnstableFeatures: UnstableFeatures,
+ UngatedAsyncFnTrackCaller: UngatedAsyncFnTrackCaller,
+ ArrayIntoIter: ArrayIntoIter::default(),
+ DropTraitConstraints: DropTraitConstraints,
+ TemporaryCStringAsPtr: TemporaryCStringAsPtr,
+ NonPanicFmt: NonPanicFmt,
+ NoopMethodCall: NoopMethodCall,
+ EnumIntrinsicsNonEnums: EnumIntrinsicsNonEnums,
+ InvalidAtomicOrdering: InvalidAtomicOrdering,
+ NamedAsmLabels: NamedAsmLabels,
+ OpaqueHiddenInferredBound: OpaqueHiddenInferredBound,
+ ]
+ ],
+ ['tcx]
+);
+
+pub fn new_lint_store(internal_lints: bool) -> LintStore {
let mut lint_store = LintStore::new();
- register_builtins(&mut lint_store, no_interleave_lints);
+ register_builtins(&mut lint_store);
if internal_lints {
register_internals(&mut lint_store);
}
@@ -259,54 +248,17 @@ pub fn new_lint_store(no_interleave_lints: bool, internal_lints: bool) -> LintSt
/// Tell the `LintStore` about all the built-in lints (the ones
/// defined in this crate and the ones defined in
/// `rustc_session::lint::builtin`).
-fn register_builtins(store: &mut LintStore, no_interleave_lints: bool) {
+fn register_builtins(store: &mut LintStore) {
macro_rules! add_lint_group {
($name:expr, $($lint:ident),*) => (
store.register_group(false, $name, None, vec![$(LintId::of($lint)),*]);
)
}
- macro_rules! register_early_pass {
- ($method:ident, $ty:ident, $constructor:expr) => {
- store.register_lints(&$ty::get_lints());
- store.$method(|| Box::new($constructor));
- };
- }
-
- macro_rules! register_late_pass {
- ($method:ident, $ty:ident, $constructor:expr) => {
- store.register_lints(&$ty::get_lints());
- store.$method(|_| Box::new($constructor));
- };
- }
-
- macro_rules! register_early_passes {
- ($method:ident, [$($passes:ident: $constructor:expr,)*]) => (
- $(
- register_early_pass!($method, $passes, $constructor);
- )*
- )
- }
-
- macro_rules! register_late_passes {
- ($method:ident, [$($passes:ident: $constructor:expr,)*]) => (
- $(
- register_late_pass!($method, $passes, $constructor);
- )*
- )
- }
-
- if no_interleave_lints {
- pre_expansion_lint_passes!(register_early_passes, register_pre_expansion_pass);
- early_lint_passes!(register_early_passes, register_early_pass);
- late_lint_passes!(register_late_passes, register_late_pass);
- late_lint_mod_passes!(register_late_passes, register_late_mod_pass);
- } else {
- store.register_lints(&BuiltinCombinedPreExpansionLintPass::get_lints());
- store.register_lints(&BuiltinCombinedEarlyLintPass::get_lints());
- store.register_lints(&BuiltinCombinedModuleLateLintPass::get_lints());
- store.register_lints(&BuiltinCombinedLateLintPass::get_lints());
- }
+ store.register_lints(&BuiltinCombinedPreExpansionLintPass::get_lints());
+ store.register_lints(&BuiltinCombinedEarlyLintPass::get_lints());
+ store.register_lints(&BuiltinCombinedModuleLateLintPass::get_lints());
+ store.register_lints(&BuiltinCombinedLateLintPass::get_lints());
add_lint_group!(
"nonstandard_style",
diff --git a/compiler/rustc_lint/src/non_fmt_panic.rs b/compiler/rustc_lint/src/non_fmt_panic.rs
index 6ad2e0294..c1820ac4d 100644
--- a/compiler/rustc_lint/src/non_fmt_panic.rs
+++ b/compiler/rustc_lint/src/non_fmt_panic.rs
@@ -5,7 +5,6 @@ use rustc_hir as hir;
use rustc_infer::infer::TyCtxtInferExt;
use rustc_middle::lint::in_external_macro;
use rustc_middle::ty;
-use rustc_middle::ty::subst::InternalSubsts;
use rustc_parse_format::{ParseMode, Parser, Piece};
use rustc_session::lint::FutureIncompatibilityReason;
use rustc_span::edition::Edition;
@@ -148,22 +147,22 @@ fn check_panic<'tcx>(cx: &LateContext<'tcx>, f: &'tcx hir::Expr<'tcx>, arg: &'tc
ty::Ref(_, r, _) if *r.kind() == ty::Str,
) || matches!(
ty.ty_adt_def(),
- Some(ty_def) if cx.tcx.is_diagnostic_item(sym::String, ty_def.did()),
+ Some(ty_def) if Some(ty_def.did()) == cx.tcx.lang_items().string(),
);
let infcx = cx.tcx.infer_ctxt().build();
let suggest_display = is_str
- || cx.tcx.get_diagnostic_item(sym::Display).map(|t| {
- infcx
- .type_implements_trait(t, ty, InternalSubsts::empty(), cx.param_env)
- .may_apply()
- }) == Some(true);
+ || cx
+ .tcx
+ .get_diagnostic_item(sym::Display)
+ .map(|t| infcx.type_implements_trait(t, [ty], cx.param_env).may_apply())
+ == Some(true);
let suggest_debug = !suggest_display
- && cx.tcx.get_diagnostic_item(sym::Debug).map(|t| {
- infcx
- .type_implements_trait(t, ty, InternalSubsts::empty(), cx.param_env)
- .may_apply()
- }) == Some(true);
+ && cx
+ .tcx
+ .get_diagnostic_item(sym::Debug)
+ .map(|t| infcx.type_implements_trait(t, [ty], cx.param_env).may_apply())
+ == Some(true);
let suggest_panic_any = !is_str && panic == sym::std_panic_macro;
diff --git a/compiler/rustc_lint/src/opaque_hidden_inferred_bound.rs b/compiler/rustc_lint/src/opaque_hidden_inferred_bound.rs
index 00bf287ba..03d6f4fd9 100644
--- a/compiler/rustc_lint/src/opaque_hidden_inferred_bound.rs
+++ b/compiler/rustc_lint/src/opaque_hidden_inferred_bound.rs
@@ -26,19 +26,23 @@ declare_lint! {
///
/// ### Example
///
- /// ```
+ /// ```rust
+ /// trait Duh {}
+ ///
+ /// impl Duh for i32 {}
+ ///
/// trait Trait {
- /// type Assoc: Send;
+ /// type Assoc: Duh;
/// }
///
/// struct Struct;
///
- /// impl Trait for Struct {
- /// type Assoc = i32;
+ /// impl<F: Duh> Trait for F {
+ /// type Assoc = F;
/// }
///
/// fn test() -> impl Trait<Assoc = impl Sized> {
- /// Struct
+ /// 42
/// }
/// ```
///
@@ -70,7 +74,7 @@ impl<'tcx> LateLintPass<'tcx> for OpaqueHiddenInferredBound {
// Liberate bound regions in the predicate since we
// don't actually care about lifetimes in this check.
let predicate = cx.tcx.liberate_late_bound_regions(def_id, pred.kind());
- let ty::PredicateKind::Projection(proj) = predicate else {
+ let ty::PredicateKind::Clause(ty::Clause::Projection(proj)) = predicate else {
continue;
};
// Only check types, since those are the only things that may
@@ -104,6 +108,7 @@ impl<'tcx> LateLintPass<'tcx> for OpaqueHiddenInferredBound {
// then we must've taken advantage of the hack in `project_and_unify_types` where
// we replace opaques with inference vars. Emit a warning!
if !infcx.predicate_must_hold_modulo_regions(&traits::Obligation::new(
+ cx.tcx,
traits::ObligationCause::dummy(),
cx.param_env,
assoc_pred,
@@ -111,12 +116,13 @@ impl<'tcx> LateLintPass<'tcx> for OpaqueHiddenInferredBound {
// If it's a trait bound and an opaque that doesn't satisfy it,
// then we can emit a suggestion to add the bound.
let add_bound = match (proj_term.kind(), assoc_pred.kind().skip_binder()) {
- (ty::Opaque(def_id, _), ty::PredicateKind::Trait(trait_pred)) => {
- Some(AddBound {
- suggest_span: cx.tcx.def_span(*def_id).shrink_to_hi(),
- trait_ref: trait_pred.print_modifiers_and_trait_path(),
- })
- }
+ (
+ ty::Opaque(def_id, _),
+ ty::PredicateKind::Clause(ty::Clause::Trait(trait_pred)),
+ ) => Some(AddBound {
+ suggest_span: cx.tcx.def_span(*def_id).shrink_to_hi(),
+ trait_ref: trait_pred.print_modifiers_and_trait_path(),
+ }),
_ => None,
};
cx.emit_spanned_lint(
@@ -150,8 +156,9 @@ struct OpaqueHiddenInferredBoundLint<'tcx> {
}
#[derive(Subdiagnostic)]
-#[suggestion_verbose(
+#[suggestion(
lint_opaque_hidden_inferred_bound_sugg,
+ style = "verbose",
applicability = "machine-applicable",
code = " + {trait_ref}"
)]
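
Several hunks in this file (and in traits.rs and unused.rs below) adapt to trait and projection predicates being nested one level deeper inside `ty::Clause`. A toy sketch of the shape of that change, using illustrative stand-in enums rather than rustc's definitions:

```rust
// Toy stand-ins showing the extra wrapper layer every match now has to name.
#[allow(dead_code)]
enum Clause {
    Trait(&'static str),
    Projection(&'static str),
}

#[allow(dead_code)]
enum PredicateKind {
    Clause(Clause),
    WellFormed,
}

fn main() {
    let pred = PredicateKind::Clause(Clause::Trait("Send"));
    // Before: `let PredicateKind::Trait(name) = pred else { return };`
    let PredicateKind::Clause(Clause::Trait(name)) = pred else {
        return;
    };
    println!("trait predicate: {}", name);
}
```
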
diff --git a/compiler/rustc_lint/src/pass_by_value.rs b/compiler/rustc_lint/src/pass_by_value.rs
index 01bface71..0fa81b7e4 100644
--- a/compiler/rustc_lint/src/pass_by_value.rs
+++ b/compiler/rustc_lint/src/pass_by_value.rs
@@ -10,7 +10,7 @@ declare_tool_lint! {
/// The `rustc_pass_by_value` lint marks a type with `#[rustc_pass_by_value]` requiring it to
/// always be passed by value. This is usually used for types that are thin wrappers around
/// references, so there is no benefit to an extra layer of indirection. (Example: `Ty` which
- /// is a reference to an `Interned<TyS>`)
+ /// is a reference to an `Interned<TyKind>`)
pub rustc::PASS_BY_VALUE,
Warn,
"pass by reference of a type flagged as `#[rustc_pass_by_value]`",
@@ -78,7 +78,7 @@ fn gen_args(cx: &LateContext<'_>, segment: &PathSegment<'_>) -> String {
.args
.iter()
.map(|arg| match arg {
- GenericArg::Lifetime(lt) => lt.name.ident().to_string(),
+ GenericArg::Lifetime(lt) => lt.to_string(),
GenericArg::Type(ty) => {
cx.tcx.sess.source_map().span_to_snippet(ty.span).unwrap_or_else(|_| "_".into())
}
diff --git a/compiler/rustc_lint/src/passes.rs b/compiler/rustc_lint/src/passes.rs
index 1c6a057d1..2f5398613 100644
--- a/compiler/rustc_lint/src/passes.rs
+++ b/compiler/rustc_lint/src/passes.rs
@@ -1,7 +1,6 @@
use crate::context::{EarlyContext, LateContext};
use rustc_ast as ast;
-use rustc_data_structures::sync;
use rustc_hir as hir;
use rustc_session::lint::builtin::HardwiredLints;
use rustc_session::lint::LintPass;
@@ -44,7 +43,7 @@ macro_rules! late_lint_methods {
fn check_struct_def(a: &$hir hir::VariantData<$hir>);
fn check_field_def(a: &$hir hir::FieldDef<$hir>);
fn check_variant(a: &$hir hir::Variant<$hir>);
- fn check_path(a: &$hir hir::Path<$hir>, b: hir::HirId);
+ fn check_path(a: &hir::Path<$hir>, b: hir::HirId);
fn check_attribute(a: &$hir ast::Attribute);
/// Called when entering a syntax node that can have lint attributes such
@@ -66,16 +65,10 @@ macro_rules! late_lint_methods {
// FIXME: eliminate the duplication with `Visitor`. But this also
// contains a few lint-specific methods with no equivalent in `Visitor`.
-macro_rules! expand_lint_pass_methods {
- ($context:ty, [$($(#[$attr:meta])* fn $name:ident($($param:ident: $arg:ty),*);)*]) => (
- $(#[inline(always)] fn $name(&mut self, _: $context, $(_: $arg),*) {})*
- )
-}
-
macro_rules! declare_late_lint_pass {
- ([], [$hir:tt], [$($methods:tt)*]) => (
+ ([], [$hir:tt], [$($(#[$attr:meta])* fn $name:ident($($param:ident: $arg:ty),*);)*]) => (
pub trait LateLintPass<$hir>: LintPass {
- expand_lint_pass_methods!(&LateContext<$hir>, [$($methods)*]);
+ $(#[inline(always)] fn $name(&mut self, _: &LateContext<$hir>, $(_: $arg),*) {})*
}
)
}
@@ -175,16 +168,10 @@ macro_rules! early_lint_methods {
)
}
-macro_rules! expand_early_lint_pass_methods {
- ($context:ty, [$($(#[$attr:meta])* fn $name:ident($($param:ident: $arg:ty),*);)*]) => (
- $(#[inline(always)] fn $name(&mut self, _: $context, $(_: $arg),*) {})*
- )
-}
-
macro_rules! declare_early_lint_pass {
- ([], [$($methods:tt)*]) => (
+ ([], [$($(#[$attr:meta])* fn $name:ident($($param:ident: $arg:ty),*);)*]) => (
pub trait EarlyLintPass: LintPass {
- expand_early_lint_pass_methods!(&EarlyContext<'_>, [$($methods)*]);
+ $(#[inline(always)] fn $name(&mut self, _: &EarlyContext<'_>, $(_: $arg),*) {})*
}
)
}
@@ -243,5 +230,5 @@ macro_rules! declare_combined_early_lint_pass {
}
/// A lint pass boxed up as a trait object.
-pub type EarlyLintPassObject = Box<dyn EarlyLintPass + sync::Send + 'static>;
-pub type LateLintPassObject<'tcx> = Box<dyn LateLintPass<'tcx> + sync::Send + 'tcx>;
+pub type EarlyLintPassObject = Box<dyn EarlyLintPass + 'static>;
+pub type LateLintPassObject<'tcx> = Box<dyn LateLintPass<'tcx> + 'tcx>;
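
The passes.rs change inlines the default no-op methods directly into `declare_early_lint_pass!` / `declare_late_lint_pass!` instead of routing them through the removed `expand_*_lint_pass_methods` helpers, and drops the `sync::Send` bound from the pass object aliases. Roughly, the expansion still amounts to a trait whose hooks default to inlined no-ops, as in this illustrative sketch (not the actual macro output):

```rust
// A pass only overrides the hooks it actually needs; the rest stay no-ops.
struct EarlyContext;

trait EarlyLintPass {
    #[inline(always)]
    fn check_item(&mut self, _cx: &EarlyContext) {}
    #[inline(always)]
    fn check_expr(&mut self, _cx: &EarlyContext) {}
}

struct OnlyItems;

impl EarlyLintPass for OnlyItems {
    fn check_item(&mut self, _cx: &EarlyContext) {
        // Only this hook does real work here.
        println!("saw an item");
    }
}

fn main() {
    let mut pass = OnlyItems;
    pass.check_expr(&EarlyContext); // inherited no-op
    pass.check_item(&EarlyContext);
}
```
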
diff --git a/compiler/rustc_lint/src/traits.rs b/compiler/rustc_lint/src/traits.rs
index f22f38aa2..1b21c2dac 100644
--- a/compiler/rustc_lint/src/traits.rs
+++ b/compiler/rustc_lint/src/traits.rs
@@ -87,11 +87,12 @@ declare_lint_pass!(
impl<'tcx> LateLintPass<'tcx> for DropTraitConstraints {
fn check_item(&mut self, cx: &LateContext<'tcx>, item: &'tcx hir::Item<'tcx>) {
+ use rustc_middle::ty::Clause;
use rustc_middle::ty::PredicateKind::*;
let predicates = cx.tcx.explicit_predicates_of(item.owner_id);
for &(predicate, span) in predicates.predicates {
- let Trait(trait_predicate) = predicate.kind().skip_binder() else {
+ let Clause(Clause::Trait(trait_predicate)) = predicate.kind().skip_binder() else {
continue
};
let def_id = trait_predicate.trait_ref.def_id;
diff --git a/compiler/rustc_lint/src/types.rs b/compiler/rustc_lint/src/types.rs
index 37caab2da..297b509d4 100644
--- a/compiler/rustc_lint/src/types.rs
+++ b/compiler/rustc_lint/src/types.rs
@@ -12,7 +12,7 @@ use rustc_middle::ty::{self, AdtKind, DefIdTree, Ty, TyCtxt, TypeSuperVisitable,
use rustc_span::source_map;
use rustc_span::symbol::sym;
use rustc_span::{Span, Symbol};
-use rustc_target::abi::{Abi, WrappingRange};
+use rustc_target::abi::{Abi, Size, WrappingRange};
use rustc_target::abi::{Integer, TagEncoding, Variants};
use rustc_target::spec::abi::Abi as SpecAbi;
@@ -225,11 +225,11 @@ fn report_bin_hex_error(
cx: &LateContext<'_>,
expr: &hir::Expr<'_>,
ty: attr::IntType,
+ size: Size,
repr_str: String,
val: u128,
negative: bool,
) {
- let size = Integer::from_attr(&cx.tcx, ty).size();
cx.struct_span_lint(
OVERFLOWING_LITERALS,
expr.span,
@@ -352,6 +352,7 @@ fn lint_int_literal<'tcx>(
cx,
e,
attr::IntType::SignedInt(ty::ast_int_ty(t)),
+ Integer::from_int_ty(cx, t).size(),
repr_str,
v,
negative,
@@ -360,7 +361,7 @@ fn lint_int_literal<'tcx>(
}
if lint_overflowing_range_endpoint(cx, lit, v, max, e, t.name_str()) {
- // The overflowing literal lint was emited by `lint_overflowing_range_endpoint`.
+ // The overflowing literal lint was emitted by `lint_overflowing_range_endpoint`.
return;
}
@@ -429,7 +430,7 @@ fn lint_uint_literal<'tcx>(
}
}
if lint_overflowing_range_endpoint(cx, lit, lit_val, max, e, t.name_str()) {
- // The overflowing literal lint was emited by `lint_overflowing_range_endpoint`.
+ // The overflowing literal lint was emitted by `lint_overflowing_range_endpoint`.
return;
}
if let Some(repr_str) = get_bin_hex_repr(cx, lit) {
@@ -437,6 +438,7 @@ fn lint_uint_literal<'tcx>(
cx,
e,
attr::IntType::UnsignedInt(ty::ast_uint_ty(t)),
+ Integer::from_uint_ty(cx, t).size(),
repr_str,
lit_val,
false,
@@ -1195,35 +1197,30 @@ impl<'a, 'tcx> ImproperCTypesVisitor<'a, 'tcx> {
}
fn check_for_opaque_ty(&mut self, sp: Span, ty: Ty<'tcx>) -> bool {
- struct ProhibitOpaqueTypes<'a, 'tcx> {
- cx: &'a LateContext<'tcx>,
- }
-
- impl<'a, 'tcx> ty::visit::TypeVisitor<'tcx> for ProhibitOpaqueTypes<'a, 'tcx> {
+ struct ProhibitOpaqueTypes;
+ impl<'tcx> ty::visit::TypeVisitor<'tcx> for ProhibitOpaqueTypes {
type BreakTy = Ty<'tcx>;
fn visit_ty(&mut self, ty: Ty<'tcx>) -> ControlFlow<Self::BreakTy> {
- match ty.kind() {
- ty::Opaque(..) => ControlFlow::Break(ty),
- // Consider opaque types within projections FFI-safe if they do not normalize
- // to more opaque types.
- ty::Projection(..) => {
- let ty = self.cx.tcx.normalize_erasing_regions(self.cx.param_env, ty);
-
- // If `ty` is an opaque type directly then `super_visit_with` won't invoke
- // this function again.
- if ty.has_opaque_types() {
- self.visit_ty(ty)
- } else {
- ControlFlow::CONTINUE
- }
- }
- _ => ty.super_visit_with(self),
+ if !ty.has_opaque_types() {
+ return ControlFlow::CONTINUE;
+ }
+
+ if let ty::Opaque(..) = ty.kind() {
+ ControlFlow::Break(ty)
+ } else {
+ ty.super_visit_with(self)
}
}
}
- if let Some(ty) = ty.visit_with(&mut ProhibitOpaqueTypes { cx: self.cx }).break_value() {
+ if let Some(ty) = self
+ .cx
+ .tcx
+ .normalize_erasing_regions(self.cx.param_env, ty)
+ .visit_with(&mut ProhibitOpaqueTypes)
+ .break_value()
+ {
self.emit_ffi_unsafe_type_lint(ty, sp, fluent::lint_improper_ctypes_opaque, None);
true
} else {
@@ -1381,7 +1378,7 @@ impl<'tcx> LateLintPass<'tcx> for VariantSizeDifferences {
let (largest, slargest, largest_index) = iter::zip(enum_definition.variants, variants)
.map(|(variant, variant_layout)| {
// Subtract the size of the enum tag.
- let bytes = variant_layout.size().bytes().saturating_sub(tag_size);
+ let bytes = variant_layout.size.bytes().saturating_sub(tag_size);
debug!("- variant `{}` is {} bytes large", variant.ident, bytes);
bytes
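
The `ProhibitOpaqueTypes` rewrite above normalizes the type once up front and then simply breaks on the first opaque type the visitor reaches. A self-contained sketch of that break-on-first-match walk, with a toy `Ty` enum standing in for rustc's types and `std::ops::ControlFlow` playing the role it plays in `TypeVisitor`:

```rust
use std::ops::ControlFlow;

enum Ty {
    Opaque(&'static str),
    Ref(Box<Ty>),
    Tuple(Vec<Ty>),
    Int,
}

fn find_opaque(ty: &Ty) -> ControlFlow<&'static str> {
    match ty {
        // Break as soon as an opaque type is reached.
        Ty::Opaque(name) => ControlFlow::Break(*name),
        Ty::Ref(inner) => find_opaque(inner),
        Ty::Tuple(elems) => {
            for elem in elems {
                if let ControlFlow::Break(name) = find_opaque(elem) {
                    return ControlFlow::Break(name);
                }
            }
            ControlFlow::Continue(())
        }
        Ty::Int => ControlFlow::Continue(()),
    }
}

fn main() {
    let ty = Ty::Tuple(vec![Ty::Int, Ty::Ref(Box::new(Ty::Opaque("impl Sized")))]);
    if let ControlFlow::Break(name) = find_opaque(&ty) {
        println!("FFI-unsafe opaque type: {}", name);
    }
}
```
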
diff --git a/compiler/rustc_lint/src/unused.rs b/compiler/rustc_lint/src/unused.rs
index 46706e498..b5db94f8c 100644
--- a/compiler/rustc_lint/src/unused.rs
+++ b/compiler/rustc_lint/src/unused.rs
@@ -9,10 +9,11 @@ use rustc_hir::def::{DefKind, Res};
use rustc_hir::def_id::DefId;
use rustc_infer::traits::util::elaborate_predicates_with_span;
use rustc_middle::ty::adjustment;
-use rustc_middle::ty::{self, Ty};
+use rustc_middle::ty::{self, DefIdTree, Ty};
use rustc_span::symbol::Symbol;
use rustc_span::symbol::{kw, sym};
use rustc_span::{BytePos, Span};
+use std::iter;
declare_lint! {
/// The `unused_must_use` lint detects unused result of a type flagged as
@@ -87,40 +88,45 @@ declare_lint_pass!(UnusedResults => [UNUSED_MUST_USE, UNUSED_RESULTS]);
impl<'tcx> LateLintPass<'tcx> for UnusedResults {
fn check_stmt(&mut self, cx: &LateContext<'_>, s: &hir::Stmt<'_>) {
- let expr = match s.kind {
- hir::StmtKind::Semi(ref expr) => &**expr,
- _ => return,
- };
+ let hir::StmtKind::Semi(expr) = s.kind else { return; };
if let hir::ExprKind::Ret(..) = expr.kind {
return;
}
+ if let hir::ExprKind::Match(await_expr, _arms, hir::MatchSource::AwaitDesugar) = expr.kind
+ && let ty = cx.typeck_results().expr_ty(&await_expr)
+ && let ty::Opaque(future_def_id, _) = ty.kind()
+ && cx.tcx.ty_is_opaque_future(ty)
+ // FIXME: This also includes non-async fns that return `impl Future`.
+ && let async_fn_def_id = cx.tcx.parent(*future_def_id)
+ && check_must_use_def(
+ cx,
+ async_fn_def_id,
+ expr.span,
+ "output of future returned by ",
+ "",
+ )
+ {
+ // We have a bare `foo().await;` on an opaque type from an async function that was
+ // annotated with `#[must_use]`.
+ return;
+ }
+
let ty = cx.typeck_results().expr_ty(&expr);
- let type_permits_lack_of_use = check_must_use_ty(cx, ty, &expr, s.span, "", "", 1);
- let mut fn_warned = false;
- let mut op_warned = false;
- let maybe_def_id = match expr.kind {
- hir::ExprKind::Call(ref callee, _) => {
- match callee.kind {
- hir::ExprKind::Path(ref qpath) => {
- match cx.qpath_res(qpath, callee.hir_id) {
- Res::Def(DefKind::Fn | DefKind::AssocFn, def_id) => Some(def_id),
- // `Res::Local` if it was a closure, for which we
- // do not currently support must-use linting
- _ => None,
- }
- }
- _ => None,
- }
+ let must_use_result = is_ty_must_use(cx, ty, &expr, expr.span);
+ let type_lint_emitted_or_suppressed = match must_use_result {
+ Some(path) => {
+ emit_must_use_untranslated(cx, &path, "", "", 1);
+ true
}
- hir::ExprKind::MethodCall(..) => cx.typeck_results().type_dependent_def_id(expr.hir_id),
- _ => None,
+ None => false,
};
- if let Some(def_id) = maybe_def_id {
- fn_warned = check_must_use_def(cx, def_id, s.span, "return value of ", "");
- } else if type_permits_lack_of_use {
+
+ let fn_warned = check_fn_must_use(cx, expr);
+
+ if !fn_warned && type_lint_emitted_or_suppressed {
// We don't warn about unused unit or uninhabited types.
// (See https://github.com/rust-lang/rust/issues/43806 for details.)
return;
@@ -154,6 +160,8 @@ impl<'tcx> LateLintPass<'tcx> for UnusedResults {
_ => None,
};
+ let mut op_warned = false;
+
if let Some(must_use_op) = must_use_op {
cx.struct_span_lint(UNUSED_MUST_USE, expr.span, fluent::lint_unused_op, |lint| {
lint.set_arg("op", must_use_op)
@@ -168,110 +176,240 @@ impl<'tcx> LateLintPass<'tcx> for UnusedResults {
op_warned = true;
}
- if !(type_permits_lack_of_use || fn_warned || op_warned) {
+ if !(type_lint_emitted_or_suppressed || fn_warned || op_warned) {
cx.struct_span_lint(UNUSED_RESULTS, s.span, fluent::lint_unused_result, |lint| {
lint.set_arg("ty", ty)
});
}
- // Returns whether an error has been emitted (and thus another does not need to be later).
- fn check_must_use_ty<'tcx>(
+ fn check_fn_must_use(cx: &LateContext<'_>, expr: &hir::Expr<'_>) -> bool {
+ let maybe_def_id = match expr.kind {
+ hir::ExprKind::Call(ref callee, _) => {
+ match callee.kind {
+ hir::ExprKind::Path(ref qpath) => {
+ match cx.qpath_res(qpath, callee.hir_id) {
+ Res::Def(DefKind::Fn | DefKind::AssocFn, def_id) => Some(def_id),
+ // `Res::Local` if it was a closure, for which we
+ // do not currently support must-use linting
+ _ => None,
+ }
+ }
+ _ => None,
+ }
+ }
+ hir::ExprKind::MethodCall(..) => {
+ cx.typeck_results().type_dependent_def_id(expr.hir_id)
+ }
+ _ => None,
+ };
+ if let Some(def_id) = maybe_def_id {
+ check_must_use_def(cx, def_id, expr.span, "return value of ", "")
+ } else {
+ false
+ }
+ }
+
+ /// A path through a type to a must_use source. Contains useful info for the lint.
+ #[derive(Debug)]
+ enum MustUsePath {
+ /// Suppress must_use checking.
+ Suppressed,
+ /// The root of the normal must_use lint with an optional message.
+ Def(Span, DefId, Option<Symbol>),
+ Boxed(Box<Self>),
+ Opaque(Box<Self>),
+ TraitObject(Box<Self>),
+ TupleElement(Vec<(usize, Self)>),
+ Array(Box<Self>, u64),
+ /// The root of the unused_closures lint.
+ Closure(Span),
+ /// The root of the unused_generators lint.
+ Generator(Span),
+ }
+
+ #[instrument(skip(cx, expr), level = "debug", ret)]
+ fn is_ty_must_use<'tcx>(
cx: &LateContext<'tcx>,
ty: Ty<'tcx>,
expr: &hir::Expr<'_>,
span: Span,
- descr_pre: &str,
- descr_post: &str,
- plural_len: usize,
- ) -> bool {
+ ) -> Option<MustUsePath> {
if ty.is_unit()
- || cx.tcx.is_ty_uninhabited_from(
+ || !ty.is_inhabited_from(
+ cx.tcx,
cx.tcx.parent_module(expr.hir_id).to_def_id(),
- ty,
cx.param_env,
)
{
- return true;
+ return Some(MustUsePath::Suppressed);
}
- let plural_suffix = pluralize!(plural_len);
-
match *ty.kind() {
ty::Adt(..) if ty.is_box() => {
let boxed_ty = ty.boxed_ty();
- let descr_pre = &format!("{}boxed ", descr_pre);
- check_must_use_ty(cx, boxed_ty, expr, span, descr_pre, descr_post, plural_len)
+ is_ty_must_use(cx, boxed_ty, expr, span)
+ .map(|inner| MustUsePath::Boxed(Box::new(inner)))
}
- ty::Adt(def, _) => check_must_use_def(cx, def.did(), span, descr_pre, descr_post),
+ ty::Adt(def, _) => is_def_must_use(cx, def.did(), span),
ty::Opaque(def, _) => {
- let mut has_emitted = false;
- for obligation in elaborate_predicates_with_span(
+ elaborate_predicates_with_span(
cx.tcx,
cx.tcx.explicit_item_bounds(def).iter().cloned(),
- ) {
+ )
+ .filter_map(|obligation| {
// We only look at the `DefId`, so it is safe to skip the binder here.
- if let ty::PredicateKind::Trait(ref poly_trait_predicate) =
- obligation.predicate.kind().skip_binder()
+ if let ty::PredicateKind::Clause(ty::Clause::Trait(
+ ref poly_trait_predicate,
+ )) = obligation.predicate.kind().skip_binder()
{
let def_id = poly_trait_predicate.trait_ref.def_id;
- let descr_pre =
- &format!("{}implementer{} of ", descr_pre, plural_suffix,);
- if check_must_use_def(cx, def_id, span, descr_pre, descr_post) {
- has_emitted = true;
- break;
- }
+
+ is_def_must_use(cx, def_id, span)
+ } else {
+ None
}
- }
- has_emitted
+ })
+ .map(|inner| MustUsePath::Opaque(Box::new(inner)))
+ .next()
}
- ty::Dynamic(binder, _, _) => {
- let mut has_emitted = false;
- for predicate in binder.iter() {
+ ty::Dynamic(binders, _, _) => binders
+ .iter()
+ .filter_map(|predicate| {
if let ty::ExistentialPredicate::Trait(ref trait_ref) =
predicate.skip_binder()
{
let def_id = trait_ref.def_id;
- let descr_post =
- &format!(" trait object{}{}", plural_suffix, descr_post,);
- if check_must_use_def(cx, def_id, span, descr_pre, descr_post) {
- has_emitted = true;
- break;
- }
+ is_def_must_use(cx, def_id, span)
+ } else {
+ None
}
- }
- has_emitted
- }
- ty::Tuple(ref tys) => {
- let mut has_emitted = false;
- let comps = if let hir::ExprKind::Tup(comps) = expr.kind {
- debug_assert_eq!(comps.len(), tys.len());
- comps
+ .map(|inner| MustUsePath::TraitObject(Box::new(inner)))
+ })
+ .next(),
+ ty::Tuple(tys) => {
+ let elem_exprs = if let hir::ExprKind::Tup(elem_exprs) = expr.kind {
+ debug_assert_eq!(elem_exprs.len(), tys.len());
+ elem_exprs
} else {
&[]
};
- for (i, ty) in tys.iter().enumerate() {
- let descr_post = &format!(" in tuple element {}", i);
- let e = comps.get(i).unwrap_or(expr);
- let span = e.span;
- if check_must_use_ty(cx, ty, e, span, descr_pre, descr_post, plural_len) {
- has_emitted = true;
- }
+
+ // Default to `expr`.
+ let elem_exprs = elem_exprs.iter().chain(iter::repeat(expr));
+
+ let nested_must_use = tys
+ .iter()
+ .zip(elem_exprs)
+ .enumerate()
+ .filter_map(|(i, (ty, expr))| {
+ is_ty_must_use(cx, ty, expr, expr.span).map(|path| (i, path))
+ })
+ .collect::<Vec<_>>();
+
+ if !nested_must_use.is_empty() {
+ Some(MustUsePath::TupleElement(nested_must_use))
+ } else {
+ None
}
- has_emitted
}
ty::Array(ty, len) => match len.try_eval_usize(cx.tcx, cx.param_env) {
// If the array is empty we don't lint, to avoid false positives
- Some(0) | None => false,
+ Some(0) | None => None,
// If the array is definitely non-empty, we can do `#[must_use]` checking.
- Some(n) => {
- let descr_pre = &format!("{}array{} of ", descr_pre, plural_suffix,);
- check_must_use_ty(cx, ty, expr, span, descr_pre, descr_post, n as usize + 1)
- }
+ Some(len) => is_ty_must_use(cx, ty, expr, span)
+ .map(|inner| MustUsePath::Array(Box::new(inner), len)),
},
- ty::Closure(..) => {
+ ty::Closure(..) => Some(MustUsePath::Closure(span)),
+ ty::Generator(def_id, ..) => {
+ // async fn should be treated as "implementor of `Future`"
+ let must_use = if cx.tcx.generator_is_async(def_id) {
+ let def_id = cx.tcx.lang_items().future_trait().unwrap();
+ is_def_must_use(cx, def_id, span)
+ .map(|inner| MustUsePath::Opaque(Box::new(inner)))
+ } else {
+ None
+ };
+ must_use.or(Some(MustUsePath::Generator(span)))
+ }
+ _ => None,
+ }
+ }
+
+ fn is_def_must_use(cx: &LateContext<'_>, def_id: DefId, span: Span) -> Option<MustUsePath> {
+ if let Some(attr) = cx.tcx.get_attr(def_id, sym::must_use) {
+ // check for #[must_use = "..."]
+ let reason = attr.value_str();
+ Some(MustUsePath::Def(span, def_id, reason))
+ } else {
+ None
+ }
+ }
+
+ // Returns whether further errors should be suppressed because either a lint has been emitted or the type should be ignored.
+ fn check_must_use_def(
+ cx: &LateContext<'_>,
+ def_id: DefId,
+ span: Span,
+ descr_pre_path: &str,
+ descr_post_path: &str,
+ ) -> bool {
+ is_def_must_use(cx, def_id, span)
+ .map(|must_use_path| {
+ emit_must_use_untranslated(
+ cx,
+ &must_use_path,
+ descr_pre_path,
+ descr_post_path,
+ 1,
+ )
+ })
+ .is_some()
+ }
+
+ #[instrument(skip(cx), level = "debug")]
+ fn emit_must_use_untranslated(
+ cx: &LateContext<'_>,
+ path: &MustUsePath,
+ descr_pre: &str,
+ descr_post: &str,
+ plural_len: usize,
+ ) {
+ let plural_suffix = pluralize!(plural_len);
+
+ match path {
+ MustUsePath::Suppressed => {}
+ MustUsePath::Boxed(path) => {
+ let descr_pre = &format!("{}boxed ", descr_pre);
+ emit_must_use_untranslated(cx, path, descr_pre, descr_post, plural_len);
+ }
+ MustUsePath::Opaque(path) => {
+ let descr_pre = &format!("{}implementer{} of ", descr_pre, plural_suffix);
+ emit_must_use_untranslated(cx, path, descr_pre, descr_post, plural_len);
+ }
+ MustUsePath::TraitObject(path) => {
+ let descr_post = &format!(" trait object{}{}", plural_suffix, descr_post);
+ emit_must_use_untranslated(cx, path, descr_pre, descr_post, plural_len);
+ }
+ MustUsePath::TupleElement(elems) => {
+ for (index, path) in elems {
+ let descr_post = &format!(" in tuple element {}", index);
+ emit_must_use_untranslated(cx, path, descr_pre, descr_post, plural_len);
+ }
+ }
+ MustUsePath::Array(path, len) => {
+ let descr_pre = &format!("{}array{} of ", descr_pre, plural_suffix);
+ emit_must_use_untranslated(
+ cx,
+ path,
+ descr_pre,
+ descr_post,
+ plural_len.saturating_add(usize::try_from(*len).unwrap_or(usize::MAX)),
+ );
+ }
+ MustUsePath::Closure(span) => {
cx.struct_span_lint(
UNUSED_MUST_USE,
- span,
+ *span,
fluent::lint_unused_closure,
|lint| {
// FIXME(davidtwco): this isn't properly translatable because of the
@@ -282,12 +420,11 @@ impl<'tcx> LateLintPass<'tcx> for UnusedResults {
.note(fluent::note)
},
);
- true
}
- ty::Generator(..) => {
+ MustUsePath::Generator(span) => {
cx.struct_span_lint(
UNUSED_MUST_USE,
- span,
+ *span,
fluent::lint_unused_generator,
|lint| {
// FIXME(davidtwco): this isn't properly translatable because of the
@@ -298,40 +435,20 @@ impl<'tcx> LateLintPass<'tcx> for UnusedResults {
.note(fluent::note)
},
);
- true
}
- _ => false,
- }
- }
-
- // Returns whether an error has been emitted (and thus another does not need to be later).
- // FIXME: Args desc_{pre,post}_path could be made lazy by taking Fn() -> &str, but this
- // would make calling it a big awkward. Could also take String (so args are moved), but
- // this would still require a copy into the format string, which would only be executed
- // when needed.
- fn check_must_use_def(
- cx: &LateContext<'_>,
- def_id: DefId,
- span: Span,
- descr_pre_path: &str,
- descr_post_path: &str,
- ) -> bool {
- if let Some(attr) = cx.tcx.get_attr(def_id, sym::must_use) {
- cx.struct_span_lint(UNUSED_MUST_USE, span, fluent::lint_unused_def, |lint| {
- // FIXME(davidtwco): this isn't properly translatable because of the pre/post
- // strings
- lint.set_arg("pre", descr_pre_path);
- lint.set_arg("post", descr_post_path);
- lint.set_arg("def", cx.tcx.def_path_str(def_id));
- // check for #[must_use = "..."]
- if let Some(note) = attr.value_str() {
- lint.note(note.as_str());
- }
- lint
- });
- true
- } else {
- false
+ MustUsePath::Def(span, def_id, reason) => {
+ cx.struct_span_lint(UNUSED_MUST_USE, *span, fluent::lint_unused_def, |lint| {
+ // FIXME(davidtwco): this isn't properly translatable because of the pre/post
+ // strings
+ lint.set_arg("pre", descr_pre);
+ lint.set_arg("post", descr_post);
+ lint.set_arg("def", cx.tcx.def_path_str(*def_id));
+ if let Some(note) = reason {
+ lint.note(note.as_str());
+ }
+ lint
+ });
+ }
}
}
}
@@ -516,17 +633,36 @@ trait UnusedDelimLint {
left_pos: Option<BytePos>,
right_pos: Option<BytePos>,
) {
+ // If `value` has `ExprKind::Err`, the unused delim lint can misbehave.
+ // For example, the following code caused an ICE.
+ // This is because the `ExprKind::Call` in `value` has `ExprKind::Err` as its argument
+ // and this leads to wrong spans. #104897
+ //
+ // ```
+ // fn f(){(print!(á
+ // ```
+ use rustc_ast::visit::{walk_expr, Visitor};
+ struct ErrExprVisitor {
+ has_error: bool,
+ }
+ impl<'ast> Visitor<'ast> for ErrExprVisitor {
+ fn visit_expr(&mut self, expr: &'ast ast::Expr) {
+ if let ExprKind::Err = expr.kind {
+ self.has_error = true;
+ return;
+ }
+ walk_expr(self, expr)
+ }
+ }
+ let mut visitor = ErrExprVisitor { has_error: false };
+ visitor.visit_expr(value);
+ if visitor.has_error {
+ return;
+ }
let spans = match value.kind {
- ast::ExprKind::Block(ref block, None) if block.stmts.len() > 0 => {
- let start = block.stmts[0].span;
- let end = block.stmts[block.stmts.len() - 1].span;
- if let Some(start) = start.find_ancestor_inside(value.span)
- && let Some(end) = end.find_ancestor_inside(value.span)
- {
- Some((
- value.span.with_hi(start.lo()),
- value.span.with_lo(end.hi()),
- ))
+ ast::ExprKind::Block(ref block, None) if block.stmts.len() == 1 => {
+ if let Some(span) = block.stmts[0].span.find_ancestor_inside(value.span) {
+ Some((value.span.with_hi(span.lo()), value.span.with_lo(span.hi())))
} else {
None
}
@@ -565,10 +701,24 @@ trait UnusedDelimLint {
lint.set_arg("delim", Self::DELIM_STR);
lint.set_arg("item", msg);
if let Some((lo, hi)) = spans {
- let replacement = vec![
- (lo, if keep_space.0 { " ".into() } else { "".into() }),
- (hi, if keep_space.1 { " ".into() } else { "".into() }),
- ];
+ let sm = cx.sess().source_map();
+ let lo_replace =
+ if keep_space.0 &&
+ let Ok(snip) = sm.span_to_prev_source(lo) && !snip.ends_with(' ') {
+ " ".to_string()
+ } else {
+ "".to_string()
+ };
+
+ let hi_replace =
+ if keep_space.1 &&
+ let Ok(snip) = sm.span_to_next_source(hi) && !snip.starts_with(' ') {
+ " ".to_string()
+ } else {
+ "".to_string()
+ };
+
+ let replacement = vec![(lo, lo_replace), (hi, hi_replace)];
lint.multipart_suggestion(
fluent::suggestion,
replacement,
@@ -623,7 +773,7 @@ trait UnusedDelimLint {
ref call_or_other => {
let (args_to_check, ctx) = match *call_or_other {
Call(_, ref args) => (&args[..], UnusedDelimsCtx::FunctionArg),
- MethodCall(_, _, ref args, _) => (&args[..], UnusedDelimsCtx::MethodArg),
+ MethodCall(ref call) => (&call.args[..], UnusedDelimsCtx::MethodArg),
// actual catch-all arm
_ => {
return;
@@ -765,6 +915,7 @@ impl UnusedParens {
value: &ast::Pat,
avoid_or: bool,
avoid_mut: bool,
+ keep_space: (bool, bool),
) {
use ast::{BindingAnnotation, PatKind};
@@ -789,7 +940,7 @@ impl UnusedParens {
} else {
None
};
- self.emit_unused_delims(cx, value.span, spans, "pattern", (false, false));
+ self.emit_unused_delims(cx, value.span, spans, "pattern", keep_space);
}
}
}
@@ -798,7 +949,7 @@ impl EarlyLintPass for UnusedParens {
fn check_expr(&mut self, cx: &EarlyContext<'_>, e: &ast::Expr) {
match e.kind {
ExprKind::Let(ref pat, _, _) | ExprKind::ForLoop(ref pat, ..) => {
- self.check_unused_parens_pat(cx, pat, false, false);
+ self.check_unused_parens_pat(cx, pat, false, false, (true, true));
}
// We ignore parens in cases like `if (((let Some(0) = Some(1))))` because we already
// handle a hard error for them during AST lowering in `lower_expr_mut`, but we still
@@ -842,6 +993,7 @@ impl EarlyLintPass for UnusedParens {
fn check_pat(&mut self, cx: &EarlyContext<'_>, p: &ast::Pat) {
use ast::{Mutability, PatKind::*};
+ let keep_space = (false, false);
match &p.kind {
// Do not lint on `(..)` as that will result in the other arms being useless.
Paren(_)
@@ -849,39 +1001,40 @@ impl EarlyLintPass for UnusedParens {
| Wild | Rest | Lit(..) | MacCall(..) | Range(..) | Ident(.., None) | Path(..) => {},
// These are list-like patterns; parens can always be removed.
TupleStruct(_, _, ps) | Tuple(ps) | Slice(ps) | Or(ps) => for p in ps {
- self.check_unused_parens_pat(cx, p, false, false);
+ self.check_unused_parens_pat(cx, p, false, false, keep_space);
},
Struct(_, _, fps, _) => for f in fps {
- self.check_unused_parens_pat(cx, &f.pat, false, false);
+ self.check_unused_parens_pat(cx, &f.pat, false, false, keep_space);
},
// Avoid linting on `i @ (p0 | .. | pn)` and `box (p0 | .. | pn)`, #64106.
- Ident(.., Some(p)) | Box(p) => self.check_unused_parens_pat(cx, p, true, false),
+ Ident(.., Some(p)) | Box(p) => self.check_unused_parens_pat(cx, p, true, false, keep_space),
// Avoid linting on `&(mut x)` as `&mut x` has a different meaning, #55342.
// Also avoid linting on `& mut? (p0 | .. | pn)`, #64106.
- Ref(p, m) => self.check_unused_parens_pat(cx, p, true, *m == Mutability::Not),
+ Ref(p, m) => self.check_unused_parens_pat(cx, p, true, *m == Mutability::Not, keep_space),
}
}
fn check_stmt(&mut self, cx: &EarlyContext<'_>, s: &ast::Stmt) {
if let StmtKind::Local(ref local) = s.kind {
- self.check_unused_parens_pat(cx, &local.pat, true, false);
+ self.check_unused_parens_pat(cx, &local.pat, true, false, (false, false));
}
<Self as UnusedDelimLint>::check_stmt(self, cx, s)
}
fn check_param(&mut self, cx: &EarlyContext<'_>, param: &ast::Param) {
- self.check_unused_parens_pat(cx, &param.pat, true, false);
+ self.check_unused_parens_pat(cx, &param.pat, true, false, (false, false));
}
fn check_arm(&mut self, cx: &EarlyContext<'_>, arm: &ast::Arm) {
- self.check_unused_parens_pat(cx, &arm.pat, false, false);
+ self.check_unused_parens_pat(cx, &arm.pat, false, false, (false, false));
}
fn check_ty(&mut self, cx: &EarlyContext<'_>, ty: &ast::Ty) {
if let ast::TyKind::Paren(r) = &ty.kind {
match &r.kind {
ast::TyKind::TraitObject(..) => {}
+ ast::TyKind::BareFn(b) if b.generic_params.len() > 0 => {}
ast::TyKind::ImplTrait(_, bounds) if bounds.len() > 1 => {}
ast::TyKind::Array(_, len) => {
self.check_unused_delims_expr(
@@ -1132,7 +1285,7 @@ impl UnusedImportBraces {
// Trigger the lint if the nested item is a non-self single item
let node_name = match items[0].0.kind {
- ast::UseTreeKind::Simple(rename, ..) => {
+ ast::UseTreeKind::Simple(rename) => {
let orig_ident = items[0].0.prefix.segments.last().unwrap().ident;
if orig_ident.name == kw::SelfLower {
return;
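
The unused.rs rework above splits `unused_must_use` into two phases: `is_ty_must_use` builds a `MustUsePath` describing where the `#[must_use]` source sits inside the type, and `emit_must_use_untranslated` walks that path while accumulating the `descr_pre`/`descr_post` strings. A toy sketch of that two-phase shape (illustrative names, not the compiler's code):

```rust
// Phase 1 produces a path; phase 2 turns the path into a message prefix.
enum MustUsePath {
    Def(&'static str),       // the #[must_use] item itself
    Boxed(Box<MustUsePath>),  // reached through a Box<T>
    Array(Box<MustUsePath>),  // reached through an array [T; N]
}

fn describe(path: &MustUsePath, pre: &str) -> String {
    match path {
        MustUsePath::Def(name) => format!("unused {}`{}` that must be used", pre, name),
        MustUsePath::Boxed(inner) => describe(inner, &format!("{}boxed ", pre)),
        MustUsePath::Array(inner) => describe(inner, &format!("{}array of ", pre)),
    }
}

fn main() {
    let path = MustUsePath::Array(Box::new(MustUsePath::Boxed(Box::new(
        MustUsePath::Def("Token"),
    ))));
    // Prints: unused array of boxed `Token` that must be used
    println!("{}", describe(&path, ""));
}
```
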
diff --git a/compiler/rustc_lint_defs/src/builtin.rs b/compiler/rustc_lint_defs/src/builtin.rs
index 61ee467f5..82b837fba 100644
--- a/compiler/rustc_lint_defs/src/builtin.rs
+++ b/compiler/rustc_lint_defs/src/builtin.rs
@@ -605,7 +605,7 @@ declare_lint! {
///
/// ### Example
///
- /// ```
+ /// ```rust
/// #[warn(unused_tuple_struct_fields)]
/// struct S(i32, i32, i32);
/// let s = S(1, 2, 3);
@@ -1154,7 +1154,7 @@ declare_lint! {
///
/// ### Example
///
- /// ```compile_fail
+ /// ```rust,compile_fail
/// #[repr(packed)]
/// pub struct Foo {
/// field1: u64,
@@ -2615,7 +2615,7 @@ declare_lint! {
///
/// ### Example
///
- /// ```compile_fail
+ /// ```rust,compile_fail
/// # #![allow(unused)]
/// enum E {
/// A,
@@ -3330,7 +3330,6 @@ declare_lint_pass! {
UNUSED_TUPLE_STRUCT_FIELDS,
NON_EXHAUSTIVE_OMITTED_PATTERNS,
TEXT_DIRECTION_CODEPOINT_IN_COMMENT,
- DEREF_INTO_DYN_SUPERTRAIT,
DEPRECATED_CFG_ATTR_CRATE_TYPE_NAME,
DUPLICATE_MACRO_ATTRIBUTES,
SUSPICIOUS_AUTO_TRAIT_IMPLS,
@@ -3339,6 +3338,7 @@ declare_lint_pass! {
FFI_UNWIND_CALLS,
REPR_TRANSPARENT_EXTERNAL_PRIVATE_FIELDS,
NAMED_ARGUMENTS_USED_POSITIONALLY,
+ IMPLIED_BOUNDS_ENTAILMENT,
]
}
@@ -3833,51 +3833,6 @@ declare_lint! {
}
declare_lint! {
- /// The `deref_into_dyn_supertrait` lint is output whenever there is a use of the
- /// `Deref` implementation with a `dyn SuperTrait` type as `Output`.
- ///
- /// These implementations will become shadowed when the `trait_upcasting` feature is stabilized.
- /// The `deref` functions will no longer be called implicitly, so there might be behavior change.
- ///
- /// ### Example
- ///
- /// ```rust,compile_fail
- /// #![deny(deref_into_dyn_supertrait)]
- /// #![allow(dead_code)]
- ///
- /// use core::ops::Deref;
- ///
- /// trait A {}
- /// trait B: A {}
- /// impl<'a> Deref for dyn 'a + B {
- /// type Target = dyn A;
- /// fn deref(&self) -> &Self::Target {
- /// todo!()
- /// }
- /// }
- ///
- /// fn take_a(_: &dyn A) { }
- ///
- /// fn take_b(b: &dyn B) {
- /// take_a(b);
- /// }
- /// ```
- ///
- /// {{produces}}
- ///
- /// ### Explanation
- ///
- /// The dyn upcasting coercion feature adds new coercion rules, taking priority
- /// over certain other coercion rules, which will cause some behavior change.
- pub DEREF_INTO_DYN_SUPERTRAIT,
- Warn,
- "`Deref` implementation usage with a supertrait trait object for output might be shadowed in the future",
- @future_incompatible = FutureIncompatibleInfo {
- reference: "issue #89460 <https://github.com/rust-lang/rust/issues/89460>",
- };
-}
-
-declare_lint! {
/// The `duplicate_macro_attributes` lint detects when a `#[test]`-like built-in macro
/// attribute is duplicated on an item. This lint may trigger on `bench`, `cfg_eval`, `test`
/// and `test_case`.
@@ -3986,7 +3941,7 @@ declare_lint! {
///
/// ### Example
///
- /// ```
+ /// ```rust
/// #![allow(test_unstable_lint)]
/// ```
///
@@ -4071,3 +4026,44 @@ declare_lint! {
Warn,
"named arguments in format used positionally"
}
+
+declare_lint! {
+ /// The `implied_bounds_entailment` lint detects cases where the arguments of an impl method
+ /// have stronger implied bounds than those from the trait method it's implementing.
+ ///
+ /// ### Example
+ ///
+ /// ```rust,compile_fail
+ /// #![deny(implied_bounds_entailment)]
+ ///
+ /// trait Trait {
+ /// fn get<'s>(s: &'s str, _: &'static &'static ()) -> &'static str;
+ /// }
+ ///
+ /// impl Trait for () {
+ /// fn get<'s>(s: &'s str, _: &'static &'s ()) -> &'static str {
+ /// s
+ /// }
+ /// }
+ ///
+ /// let val = <() as Trait>::get(&String::from("blah blah blah"), &&());
+ /// println!("{}", val);
+ /// ```
+ ///
+ /// {{produces}}
+ ///
+ /// ### Explanation
+ ///
+ /// Neither the trait method, which provides no implied bounds about `'s`, nor the impl
+ /// requires the main function to prove that `'s: 'static`, but the impl method is allowed
+ /// to assume that `'s: 'static` within its own body.
+ ///
+ /// This can be used to implement an unsound API if used incorrectly.
+ pub IMPLIED_BOUNDS_ENTAILMENT,
+ Warn,
+ "impl method assumes more implied bounds than its corresponding trait method",
+ @future_incompatible = FutureIncompatibleInfo {
+ reference: "issue #105572 <https://github.com/rust-lang/rust/issues/105572>",
+ reason: FutureIncompatibilityReason::FutureReleaseError,
+ };
+}
diff --git a/compiler/rustc_llvm/build.rs b/compiler/rustc_llvm/build.rs
index 28e092c1e..0b3c05734 100644
--- a/compiler/rustc_llvm/build.rs
+++ b/compiler/rustc_llvm/build.rs
@@ -222,6 +222,7 @@ fn main() {
.file("llvm-wrapper/RustWrapper.cpp")
.file("llvm-wrapper/ArchiveWrapper.cpp")
.file("llvm-wrapper/CoverageMappingWrapper.cpp")
+ .file("llvm-wrapper/SymbolWrapper.cpp")
.file("llvm-wrapper/Linker.cpp")
.cpp(true)
.cpp_link_stdlib(None) // we handle this below
@@ -237,18 +238,20 @@ fn main() {
if !is_crossed {
cmd.arg("--system-libs");
- } else if target.contains("windows-gnu") {
- println!("cargo:rustc-link-lib=shell32");
- println!("cargo:rustc-link-lib=uuid");
- } else if target.contains("netbsd") || target.contains("haiku") || target.contains("darwin") {
- println!("cargo:rustc-link-lib=z");
- } else if target.starts_with("arm")
+ }
+
+ if (target.starts_with("arm") && !target.contains("freebsd"))
|| target.starts_with("mips-")
|| target.starts_with("mipsel-")
|| target.starts_with("powerpc-")
{
// 32-bit targets need to link libatomic.
println!("cargo:rustc-link-lib=atomic");
+ } else if target.contains("windows-gnu") {
+ println!("cargo:rustc-link-lib=shell32");
+ println!("cargo:rustc-link-lib=uuid");
+ } else if target.contains("netbsd") || target.contains("haiku") || target.contains("darwin") {
+ println!("cargo:rustc-link-lib=z");
}
cmd.args(&components);
@@ -334,7 +337,7 @@ fn main() {
"c++"
} else if target.contains("netbsd") && llvm_static_stdcpp.is_some() {
// NetBSD uses a separate library when relocation is required
- "stdc++_pic"
+ "stdc++_p"
} else if llvm_use_libcxx.is_some() {
"c++"
} else {
diff --git a/compiler/rustc_llvm/llvm-wrapper/LLVMWrapper.h b/compiler/rustc_llvm/llvm-wrapper/LLVMWrapper.h
index 015c1c52b..727cfc441 100644
--- a/compiler/rustc_llvm/llvm-wrapper/LLVMWrapper.h
+++ b/compiler/rustc_llvm/llvm-wrapper/LLVMWrapper.h
@@ -76,7 +76,6 @@ enum LLVMRustAttribute {
OptimizeNone = 24,
ReturnsTwice = 25,
ReadNone = 26,
- InaccessibleMemOnly = 27,
SanitizeHWAddress = 28,
WillReturn = 29,
StackProtectReq = 30,
diff --git a/compiler/rustc_llvm/llvm-wrapper/PassWrapper.cpp b/compiler/rustc_llvm/llvm-wrapper/PassWrapper.cpp
index 18d37d95a..1a3d458c3 100644
--- a/compiler/rustc_llvm/llvm-wrapper/PassWrapper.cpp
+++ b/compiler/rustc_llvm/llvm-wrapper/PassWrapper.cpp
@@ -205,7 +205,12 @@ enum class LLVMRustCodeModel {
None,
};
-static Optional<CodeModel::Model> fromRust(LLVMRustCodeModel Model) {
+#if LLVM_VERSION_LT(16, 0)
+static Optional<CodeModel::Model>
+#else
+static std::optional<CodeModel::Model>
+#endif
+fromRust(LLVMRustCodeModel Model) {
switch (Model) {
case LLVMRustCodeModel::Tiny:
return CodeModel::Tiny;
@@ -627,14 +632,22 @@ LLVMRustOptimize(
bool DebugPassManager = false;
PassInstrumentationCallbacks PIC;
+#if LLVM_VERSION_LT(16, 0)
StandardInstrumentations SI(DebugPassManager);
+#else
+ StandardInstrumentations SI(TheModule->getContext(), DebugPassManager);
+#endif
SI.registerCallbacks(PIC);
if (LlvmSelfProfiler){
LLVMSelfProfileInitializeCallbacks(PIC,LlvmSelfProfiler,BeforePassCallback,AfterPassCallback);
}
+#if LLVM_VERSION_LT(16, 0)
Optional<PGOOptions> PGOOpt;
+#else
+ std::optional<PGOOptions> PGOOpt;
+#endif
if (PGOGenPath) {
assert(!PGOUsePath && !PGOSampleUsePath);
PGOOpt = PGOOptions(PGOGenPath, "", "", PGOOptions::IRInstr,
@@ -800,7 +813,7 @@ LLVMRustOptimize(
auto Plugin = PassPlugin::Load(PluginPath.str());
if (!Plugin) {
LLVMRustSetLastError(("Failed to load pass plugin" + PluginPath.str()).c_str());
- continue;
+ return LLVMRustResult::Failure;
}
Plugin->registerPassBuilderCallbacks(PB);
}
diff --git a/compiler/rustc_llvm/llvm-wrapper/RustWrapper.cpp b/compiler/rustc_llvm/llvm-wrapper/RustWrapper.cpp
index 6f36281af..3a748f389 100644
--- a/compiler/rustc_llvm/llvm-wrapper/RustWrapper.cpp
+++ b/compiler/rustc_llvm/llvm-wrapper/RustWrapper.cpp
@@ -8,13 +8,18 @@
#include "llvm/IR/Intrinsics.h"
#include "llvm/IR/IntrinsicsARM.h"
#include "llvm/IR/Mangler.h"
+#if LLVM_VERSION_GE(16, 0)
+#include "llvm/Support/ModRef.h"
+#endif
#include "llvm/Object/Archive.h"
#include "llvm/Object/COFFImportFile.h"
#include "llvm/Object/ObjectFile.h"
#include "llvm/Pass.h"
#include "llvm/Bitcode/BitcodeWriter.h"
#include "llvm/Support/Signals.h"
+#if LLVM_VERSION_LT(16, 0)
#include "llvm/ADT/Optional.h"
+#endif
#include <iostream>
@@ -213,8 +218,6 @@ static Attribute::AttrKind fromRust(LLVMRustAttribute Kind) {
return Attribute::ReturnsTwice;
case ReadNone:
return Attribute::ReadNone;
- case InaccessibleMemOnly:
- return Attribute::InaccessibleMemOnly;
case SanitizeHWAddress:
return Attribute::SanitizeHWAddress;
case WillReturn:
@@ -379,6 +382,43 @@ extern "C" LLVMAttributeRef LLVMRustCreateAllocKindAttr(LLVMContextRef C, uint64
#endif
}
+// Simplified representation of `MemoryEffects` across the FFI boundary.
+//
+// Each variant corresponds to one of the static factory methods on `MemoryEffects`.
+enum class LLVMRustMemoryEffects {
+ None,
+ ReadOnly,
+ InaccessibleMemOnly,
+};
+
+extern "C" LLVMAttributeRef LLVMRustCreateMemoryEffectsAttr(LLVMContextRef C,
+ LLVMRustMemoryEffects Effects) {
+#if LLVM_VERSION_GE(16, 0)
+ switch (Effects) {
+ case LLVMRustMemoryEffects::None:
+ return wrap(Attribute::getWithMemoryEffects(*unwrap(C), MemoryEffects::none()));
+ case LLVMRustMemoryEffects::ReadOnly:
+ return wrap(Attribute::getWithMemoryEffects(*unwrap(C), MemoryEffects::readOnly()));
+ case LLVMRustMemoryEffects::InaccessibleMemOnly:
+ return wrap(Attribute::getWithMemoryEffects(*unwrap(C),
+ MemoryEffects::inaccessibleMemOnly()));
+ default:
+ report_fatal_error("bad MemoryEffects.");
+ }
+#else
+ switch (Effects) {
+ case LLVMRustMemoryEffects::None:
+ return wrap(Attribute::get(*unwrap(C), Attribute::ReadNone));
+ case LLVMRustMemoryEffects::ReadOnly:
+ return wrap(Attribute::get(*unwrap(C), Attribute::ReadOnly));
+ case LLVMRustMemoryEffects::InaccessibleMemOnly:
+ return wrap(Attribute::get(*unwrap(C), Attribute::InaccessibleMemOnly));
+ default:
+ report_fatal_error("bad MemoryEffects.");
+ }
+#endif
+}
+
// Enable a fast-math flag
//
// https://llvm.org/docs/LangRef.html#fast-math-flags
@@ -670,7 +710,11 @@ enum class LLVMRustChecksumKind {
SHA256,
};
+#if LLVM_VERSION_LT(16, 0)
static Optional<DIFile::ChecksumKind> fromRust(LLVMRustChecksumKind Kind) {
+#else
+static std::optional<DIFile::ChecksumKind> fromRust(LLVMRustChecksumKind Kind) {
+#endif
switch (Kind) {
case LLVMRustChecksumKind::None:
return None;
@@ -749,8 +793,18 @@ extern "C" LLVMMetadataRef LLVMRustDIBuilderCreateFile(
const char *Filename, size_t FilenameLen,
const char *Directory, size_t DirectoryLen, LLVMRustChecksumKind CSKind,
const char *Checksum, size_t ChecksumLen) {
+
+#if LLVM_VERSION_LT(16, 0)
Optional<DIFile::ChecksumKind> llvmCSKind = fromRust(CSKind);
+#else
+ std::optional<DIFile::ChecksumKind> llvmCSKind = fromRust(CSKind);
+#endif
+
+#if LLVM_VERSION_LT(16, 0)
Optional<DIFile::ChecksumInfo<StringRef>> CSInfo{};
+#else
+ std::optional<DIFile::ChecksumInfo<StringRef>> CSInfo{};
+#endif
if (llvmCSKind)
CSInfo.emplace(*llvmCSKind, StringRef{Checksum, ChecksumLen});
return wrap(Builder->createFile(StringRef(Filename, FilenameLen),
@@ -998,8 +1052,9 @@ extern "C" LLVMValueRef LLVMRustDIBuilderInsertDeclareAtEnd(
extern "C" LLVMMetadataRef LLVMRustDIBuilderCreateEnumerator(
LLVMRustDIBuilderRef Builder, const char *Name, size_t NameLen,
- int64_t Value, bool IsUnsigned) {
- return wrap(Builder->createEnumerator(StringRef(Name, NameLen), Value, IsUnsigned));
+ const uint64_t Value[2], unsigned SizeInBits, bool IsUnsigned) {
+ return wrap(Builder->createEnumerator(StringRef(Name, NameLen),
+ APSInt(APInt(SizeInBits, makeArrayRef(Value, 2)), IsUnsigned)));
}
extern "C" LLVMMetadataRef LLVMRustDIBuilderCreateEnumerationType(
@@ -1073,6 +1128,10 @@ extern "C" uint64_t LLVMRustDIBuilderCreateOpPlusUconst() {
return dwarf::DW_OP_plus_uconst;
}
+extern "C" int64_t LLVMRustDIBuilderCreateOpLLVMFragment() {
+ return dwarf::DW_OP_LLVM_fragment;
+}
+
extern "C" void LLVMRustWriteTypeToString(LLVMTypeRef Ty, RustStringRef Str) {
RawRustStringOstream OS(Str);
unwrap<llvm::Type>(Ty)->print(OS);
@@ -1924,3 +1983,7 @@ extern "C" int32_t LLVMRustGetElementTypeArgIndex(LLVMValueRef CallSite) {
#endif
return -1;
}
+
+extern "C" bool LLVMRustIsBitcode(char *ptr, size_t len) {
+ return identify_magic(StringRef(ptr, len)) == file_magic::bitcode;
+}
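
LLVMRustIsBitcode lets the Rust side cheaply ask whether a buffer holds LLVM bitcode before doing anything expensive with it. A rough pure-Rust restatement of the check, covering only the raw magic bytes (an assumption here; identify_magic also recognizes the bitcode wrapper header):

    // LLVM bitcode starts with the magic bytes 'B' 'C' 0xC0 0xDE.
    fn looks_like_raw_bitcode(data: &[u8]) -> bool {
        data.starts_with(b"BC\xC0\xDE")
    }

    fn main() {
        assert!(looks_like_raw_bitcode(b"BC\xC0\xDErest-of-module"));
        assert!(!looks_like_raw_bitcode(b"\x7fELF"));
    }
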
diff --git a/compiler/rustc_llvm/llvm-wrapper/SymbolWrapper.cpp b/compiler/rustc_llvm/llvm-wrapper/SymbolWrapper.cpp
new file mode 100644
index 000000000..974207e91
--- /dev/null
+++ b/compiler/rustc_llvm/llvm-wrapper/SymbolWrapper.cpp
@@ -0,0 +1,96 @@
+// Derived from code in LLVM, which is:
+// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
+// See https://llvm.org/LICENSE.txt for license information.
+// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
+
+// Derived from:
+// * https://github.com/llvm/llvm-project/blob/8ef3e895ad8ab1724e2b87cabad1dacdc7a397a3/llvm/include/llvm/Object/ArchiveWriter.h
+// * https://github.com/llvm/llvm-project/blob/8ef3e895ad8ab1724e2b87cabad1dacdc7a397a3/llvm/lib/Object/ArchiveWriter.cpp
+
+#include "llvm/IR/LLVMContext.h"
+#include "llvm/Object/ObjectFile.h"
+#include "llvm/ADT/Optional.h"
+
+using namespace llvm;
+using namespace llvm::sys;
+using namespace llvm::object;
+
+static bool isArchiveSymbol(const object::BasicSymbolRef &S) {
+ Expected<uint32_t> SymFlagsOrErr = S.getFlags();
+ if (!SymFlagsOrErr)
+ // FIXME: Actually report errors helpfully.
+ report_fatal_error(SymFlagsOrErr.takeError());
+ if (*SymFlagsOrErr & object::SymbolRef::SF_FormatSpecific)
+ return false;
+ if (!(*SymFlagsOrErr & object::SymbolRef::SF_Global))
+ return false;
+ if (*SymFlagsOrErr & object::SymbolRef::SF_Undefined)
+ return false;
+ return true;
+}
+
+typedef void *(*LLVMRustGetSymbolsCallback)(void *, const char *);
+typedef void *(*LLVMRustGetSymbolsErrorCallback)(const char *);
+
+// Note: This is implemented in C++ instead of using the C api from Rust as IRObjectFile doesn't
+// implement getSymbolName, only printSymbolName, which is inaccessible from the C api.
+extern "C" void *LLVMRustGetSymbols(
+ char *BufPtr, size_t BufLen, void *State, LLVMRustGetSymbolsCallback Callback,
+ LLVMRustGetSymbolsErrorCallback ErrorCallback) {
+ std::unique_ptr<MemoryBuffer> Buf =
+ MemoryBuffer::getMemBuffer(StringRef(BufPtr, BufLen), StringRef("LLVMRustGetSymbolsObject"),
+ false);
+ SmallString<0> SymNameBuf;
+ raw_svector_ostream SymName(SymNameBuf);
+
+ // In the scenario when LLVMContext is populated SymbolicFile will contain a
+ // reference to it, thus SymbolicFile should be destroyed first.
+ LLVMContext Context;
+ std::unique_ptr<object::SymbolicFile> Obj;
+
+ const file_magic Type = identify_magic(Buf->getBuffer());
+ if (!object::SymbolicFile::isSymbolicFile(Type, &Context)) {
+ return 0;
+ }
+
+ if (Type == file_magic::bitcode) {
+ auto ObjOrErr = object::SymbolicFile::createSymbolicFile(
+ Buf->getMemBufferRef(), file_magic::bitcode, &Context);
+ if (!ObjOrErr) {
+ Error E = ObjOrErr.takeError();
+ SmallString<0> ErrorBuf;
+ raw_svector_ostream Error(ErrorBuf);
+ Error << E << '\0';
+ return ErrorCallback(Error.str().data());
+ }
+ Obj = std::move(*ObjOrErr);
+ } else {
+ auto ObjOrErr = object::SymbolicFile::createSymbolicFile(Buf->getMemBufferRef());
+ if (!ObjOrErr) {
+ Error E = ObjOrErr.takeError();
+ SmallString<0> ErrorBuf;
+ raw_svector_ostream Error(ErrorBuf);
+ Error << E << '\0';
+ return ErrorCallback(Error.str().data());
+ }
+ Obj = std::move(*ObjOrErr);
+ }
+
+
+ for (const object::BasicSymbolRef &S : Obj->symbols()) {
+ if (!isArchiveSymbol(S))
+ continue;
+ if (Error E = S.printName(SymName)) {
+ SmallString<0> ErrorBuf;
+ raw_svector_ostream Error(ErrorBuf);
+ Error << E << '\0';
+ return ErrorCallback(Error.str().data());
+ }
+ SymName << '\0';
+ if (void *E = Callback(State, SymNameBuf.str().data())) {
+ return E;
+ }
+ SymNameBuf.clear();
+ }
+ return 0;
+}
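
The callback protocol of LLVMRustGetSymbols: the wrapper invokes Callback once per defined, non-local symbol with a NUL-terminated name, stops early if the callback returns a non-null pointer, and routes failures through ErrorCallback. A self-contained Rust simulation of that contract (the driver below stands in for the C++ loop; it is not the real binding):

    use std::ffi::c_void;

    type GetSymbolsCallback = unsafe extern "C" fn(*mut c_void, *const u8) -> *mut c_void;

    // Collects each NUL-terminated name into the Vec<String> passed via `state`.
    unsafe extern "C" fn collect(state: *mut c_void, name: *const u8) -> *mut c_void {
        let names = &mut *(state as *mut Vec<String>);
        let mut len = 0;
        while *name.add(len) != 0 {
            len += 1;
        }
        let bytes = std::slice::from_raw_parts(name, len);
        names.push(String::from_utf8_lossy(bytes).into_owned());
        std::ptr::null_mut() // null means "keep iterating"
    }

    // Stand-in for the C++ side: feeds names to the callback, honoring early exit.
    unsafe fn fake_get_symbols(symbols: &[&[u8]], state: *mut c_void, cb: GetSymbolsCallback) {
        for sym in symbols {
            if !cb(state, sym.as_ptr()).is_null() {
                return;
            }
        }
    }

    fn main() {
        let mut names: Vec<String> = Vec::new();
        let syms: [&[u8]; 2] = [b"rust_eh_personality\0", b"main\0"];
        unsafe {
            fake_get_symbols(&syms, &mut names as *mut Vec<String> as *mut c_void, collect);
        }
        assert_eq!(names, ["rust_eh_personality", "main"]);
    }
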
diff --git a/compiler/rustc_log/Cargo.toml b/compiler/rustc_log/Cargo.toml
index 1b2cde605..3c50827c1 100644
--- a/compiler/rustc_log/Cargo.toml
+++ b/compiler/rustc_log/Cargo.toml
@@ -4,7 +4,6 @@ version = "0.0.0"
edition = "2021"
[dependencies]
-atty = "0.2"
tracing = "0.1.28"
tracing-subscriber = { version = "0.3.3", default-features = false, features = ["fmt", "env-filter", "smallvec", "parking_lot", "ansi"] }
tracing-tree = "0.2.0"
diff --git a/compiler/rustc_log/src/lib.rs b/compiler/rustc_log/src/lib.rs
index 458f5e87b..ddf29c488 100644
--- a/compiler/rustc_log/src/lib.rs
+++ b/compiler/rustc_log/src/lib.rs
@@ -40,10 +40,11 @@
#![deny(rustc::untranslatable_diagnostic)]
#![deny(rustc::diagnostic_outside_of_impl)]
+#![feature(is_terminal)]
use std::env::{self, VarError};
use std::fmt::{self, Display};
-use std::io;
+use std::io::{self, IsTerminal};
use tracing_subscriber::filter::{Directive, EnvFilter, LevelFilter};
use tracing_subscriber::layer::SubscriberExt;
@@ -93,11 +94,11 @@ pub fn init_env_logger(env: &str) -> Result<(), Error> {
}
pub fn stdout_isatty() -> bool {
- atty::is(atty::Stream::Stdout)
+ io::stdout().is_terminal()
}
pub fn stderr_isatty() -> bool {
- atty::is(atty::Stream::Stderr)
+ io::stderr().is_terminal()
}
#[derive(Debug)]
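
The atty crate is dropped in favor of the standard library's IsTerminal trait (behind the #![feature(is_terminal)] gate added above at the time of this change; the trait has since been stabilized). The same check in a standalone program:

    use std::io::{self, IsTerminal};

    fn main() {
        // Mirrors stdout_isatty() / stderr_isatty() above.
        println!("stdout is a terminal: {}", io::stdout().is_terminal());
        println!("stderr is a terminal: {}", io::stderr().is_terminal());
    }
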
diff --git a/compiler/rustc_macros/src/diagnostics/diagnostic.rs b/compiler/rustc_macros/src/diagnostics/diagnostic.rs
index ef1985b96..684835d8c 100644
--- a/compiler/rustc_macros/src/diagnostics/diagnostic.rs
+++ b/compiler/rustc_macros/src/diagnostics/diagnostic.rs
@@ -5,6 +5,7 @@ use crate::diagnostics::error::{span_err, DiagnosticDeriveError};
use crate::diagnostics::utils::SetOnce;
use proc_macro2::TokenStream;
use quote::quote;
+use syn::spanned::Spanned;
use synstructure::Structure;
/// The central struct for constructing the `into_diagnostic` method from an annotated struct.
@@ -28,8 +29,8 @@ impl<'a> DiagnosticDerive<'a> {
let DiagnosticDerive { mut structure, mut builder } = self;
let implementation = builder.each_variant(&mut structure, |mut builder, variant| {
- let preamble = builder.preamble(&variant);
- let body = builder.body(&variant);
+ let preamble = builder.preamble(variant);
+ let body = builder.body(variant);
let diag = &builder.parent.diag;
let DiagnosticDeriveKind::Diagnostic { handler } = &builder.parent.kind else {
@@ -38,13 +39,24 @@ impl<'a> DiagnosticDerive<'a> {
let init = match builder.slug.value_ref() {
None => {
span_err(builder.span, "diagnostic slug not specified")
- .help(&format!(
+ .help(format!(
"specify the slug as the first argument to the `#[diag(...)]` \
attribute, such as `#[diag(hir_analysis_example_error)]`",
))
.emit();
return DiagnosticDeriveError::ErrorHandled.to_compile_error();
}
+ Some(slug) if let Some( Mismatch { slug_name, crate_name, slug_prefix }) = Mismatch::check(slug) => {
+ span_err(slug.span().unwrap(), "diagnostic slug and crate name do not match")
+ .note(format!(
+ "slug is `{slug_name}` but the crate name is `{crate_name}`"
+ ))
+ .help(format!(
+ "expected a slug starting with `{slug_prefix}_...`"
+ ))
+ .emit();
+ return DiagnosticDeriveError::ErrorHandled.to_compile_error();
+ }
Some(slug) => {
quote! {
let mut #diag = #handler.struct_diagnostic(rustc_errors::fluent::#slug);
@@ -69,6 +81,8 @@ impl<'a> DiagnosticDerive<'a> {
for @Self
where G: rustc_errors::EmissionGuarantee
{
+
+ #[track_caller]
fn into_diagnostic(
self,
#handler: &'__diagnostic_handler_sess rustc_errors::Handler
@@ -99,8 +113,8 @@ impl<'a> LintDiagnosticDerive<'a> {
let LintDiagnosticDerive { mut structure, mut builder } = self;
let implementation = builder.each_variant(&mut structure, |mut builder, variant| {
- let preamble = builder.preamble(&variant);
- let body = builder.body(&variant);
+ let preamble = builder.preamble(variant);
+ let body = builder.body(variant);
let diag = &builder.parent.diag;
let formatting_init = &builder.formatting_init;
@@ -114,25 +128,41 @@ impl<'a> LintDiagnosticDerive<'a> {
let msg = builder.each_variant(&mut structure, |mut builder, variant| {
// Collect the slug by generating the preamble.
- let _ = builder.preamble(&variant);
+ let _ = builder.preamble(variant);
match builder.slug.value_ref() {
None => {
span_err(builder.span, "diagnostic slug not specified")
- .help(&format!(
+ .help(format!(
"specify the slug as the first argument to the attribute, such as \
`#[diag(compiletest_example)]`",
))
.emit();
- return DiagnosticDeriveError::ErrorHandled.to_compile_error();
+ DiagnosticDeriveError::ErrorHandled.to_compile_error()
+ }
+ Some(slug) if let Some( Mismatch { slug_name, crate_name, slug_prefix }) = Mismatch::check(slug) => {
+ span_err(slug.span().unwrap(), "diagnostic slug and crate name do not match")
+ .note(format!(
+ "slug is `{slug_name}` but the crate name is `{crate_name}`"
+ ))
+ .help(format!(
+ "expected a slug starting with `{slug_prefix}_...`"
+ ))
+ .emit();
+ DiagnosticDeriveError::ErrorHandled.to_compile_error()
+ }
+ Some(slug) => {
+ quote! {
+ rustc_errors::fluent::#slug.into()
+ }
}
- Some(slug) => quote! { rustc_errors::fluent::#slug.into() },
}
});
let diag = &builder.diag;
structure.gen_impl(quote! {
gen impl<'__a> rustc_errors::DecorateLint<'__a, ()> for @Self {
+ #[track_caller]
fn decorate_lint<'__b>(
self,
#diag: &'__b mut rustc_errors::DiagnosticBuilder<'__a, ()>
@@ -148,3 +178,27 @@ impl<'a> LintDiagnosticDerive<'a> {
})
}
}
+
+struct Mismatch {
+ slug_name: String,
+ crate_name: String,
+ slug_prefix: String,
+}
+
+impl Mismatch {
+ /// Checks whether the slug starts with the crate name it's in.
+ fn check(slug: &syn::Path) -> Option<Mismatch> {
+ // If this is missing we're probably in a test, so bail.
+ let crate_name = std::env::var("CARGO_CRATE_NAME").ok()?;
+
+ // If we're not in a "rustc_" crate, bail.
+ let Some(("rustc", slug_prefix)) = crate_name.split_once("_") else { return None };
+
+ let slug_name = slug.segments.first()?.ident.to_string();
+ if !slug_name.starts_with(slug_prefix) {
+ Some(Mismatch { slug_name, slug_prefix: slug_prefix.to_string(), crate_name })
+ } else {
+ None
+ }
+ }
+}
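
The Mismatch check means that, inside a crate named rustc_foo, every #[diag(...)] slug must start with the foo prefix. A self-contained restatement of that rule, runnable outside the derive (the crate and slug names below are examples):

    // Does the slug's first segment start with the crate's suffix after "rustc_"?
    fn slug_matches_crate(crate_name: &str, slug: &str) -> bool {
        match crate_name.split_once('_') {
            Some(("rustc", prefix)) => slug.starts_with(prefix),
            // Outside a rustc_* crate (or when CARGO_CRATE_NAME is unset, e.g. in
            // tests) the real check bails out and accepts the slug.
            _ => true,
        }
    }

    fn main() {
        assert!(slug_matches_crate("rustc_metadata", "metadata_global_alloc_required"));
        assert!(!slug_matches_crate("rustc_metadata", "passes_unknown_slug"));
        assert!(slug_matches_crate("some_other_crate", "anything_goes"));
    }
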
diff --git a/compiler/rustc_macros/src/diagnostics/diagnostic_builder.rs b/compiler/rustc_macros/src/diagnostics/diagnostic_builder.rs
index 3ea83fd09..9f2ac5112 100644
--- a/compiler/rustc_macros/src/diagnostics/diagnostic_builder.rs
+++ b/compiler/rustc_macros/src/diagnostics/diagnostic_builder.rs
@@ -100,7 +100,7 @@ impl DiagnosticDeriveBuilder {
_ => variant.ast().ident.span().unwrap(),
};
let builder = DiagnosticDeriveVariantBuilder {
- parent: &self,
+ parent: self,
span,
field_map: build_field_mapping(variant),
formatting_init: TokenStream::new(),
@@ -211,7 +211,7 @@ impl<'a> DiagnosticDeriveVariantBuilder<'a> {
nested_iter.next();
}
Some(NestedMeta::Meta(Meta::NameValue { .. })) => {}
- Some(nested_attr) => throw_invalid_nested_attr!(attr, &nested_attr, |diag| diag
+ Some(nested_attr) => throw_invalid_nested_attr!(attr, nested_attr, |diag| diag
.help("a diagnostic slug is required as the first argument")),
None => throw_invalid_attr!(attr, &meta, |diag| diag
.help("a diagnostic slug is required as the first argument")),
@@ -227,13 +227,13 @@ impl<'a> DiagnosticDeriveVariantBuilder<'a> {
..
})) => (value, path),
NestedMeta::Meta(Meta::Path(_)) => {
- invalid_nested_attr(attr, &nested_attr)
+ invalid_nested_attr(attr, nested_attr)
.help("diagnostic slug must be the first argument")
.emit();
continue;
}
_ => {
- invalid_nested_attr(attr, &nested_attr).emit();
+ invalid_nested_attr(attr, nested_attr).emit();
continue;
}
};
@@ -251,7 +251,7 @@ impl<'a> DiagnosticDeriveVariantBuilder<'a> {
#diag.code(rustc_errors::DiagnosticId::Error(#code.to_string()));
});
}
- _ => invalid_nested_attr(attr, &nested_attr)
+ _ => invalid_nested_attr(attr, nested_attr)
.help("only `code` is a valid nested attributes following the slug")
.emit(),
}
@@ -427,9 +427,9 @@ impl<'a> DiagnosticDeriveVariantBuilder<'a> {
Ok(self.add_spanned_subdiagnostic(binding, &fn_ident, slug))
}
SubdiagnosticKind::Note | SubdiagnosticKind::Help | SubdiagnosticKind::Warn => {
- if type_matches_path(&info.ty, &["rustc_span", "Span"]) {
+ if type_matches_path(info.ty, &["rustc_span", "Span"]) {
Ok(self.add_spanned_subdiagnostic(binding, &fn_ident, slug))
- } else if type_is_unit(&info.ty) {
+ } else if type_is_unit(info.ty) {
Ok(self.add_subdiagnostic(&fn_ident, slug))
} else {
report_type_error(attr, "`Span` or `()`")?
diff --git a/compiler/rustc_macros/src/diagnostics/error.rs b/compiler/rustc_macros/src/diagnostics/error.rs
index 0b1ededa7..4612f54e4 100644
--- a/compiler/rustc_macros/src/diagnostics/error.rs
+++ b/compiler/rustc_macros/src/diagnostics/error.rs
@@ -84,7 +84,7 @@ pub(crate) fn invalid_attr(attr: &Attribute, meta: &Meta) -> Diagnostic {
}
}
-/// Emit a error diagnostic for an invalid attribute (optionally performing additional decoration
+/// Emit an error diagnostic for an invalid attribute (optionally performing additional decoration
/// using the `FnOnce` passed in `diag`) and return `Err(ErrorHandled)`.
///
/// For methods that return a `Result<_, DiagnosticDeriveError>`:
@@ -126,7 +126,7 @@ pub(crate) fn invalid_nested_attr(attr: &Attribute, nested: &NestedMeta) -> Diag
}
}
-/// Emit a error diagnostic for an invalid nested attribute (optionally performing additional
+/// Emit an error diagnostic for an invalid nested attribute (optionally performing additional
/// decoration using the `FnOnce` passed in `diag`) and return `Err(ErrorHandled)`.
///
/// For methods that return a `Result<_, DiagnosticDeriveError>`:
diff --git a/compiler/rustc_macros/src/diagnostics/mod.rs b/compiler/rustc_macros/src/diagnostics/mod.rs
index 860340b43..78df0cd1d 100644
--- a/compiler/rustc_macros/src/diagnostics/mod.rs
+++ b/compiler/rustc_macros/src/diagnostics/mod.rs
@@ -129,7 +129,7 @@ pub fn lint_diagnostic_derive(s: Structure<'_>) -> TokenStream {
/// }
///
/// #[derive(Subdiagnostic)]
-/// #[suggestion_verbose(parser::raw_identifier)]
+/// #[suggestion(style = "verbose",parser::raw_identifier)]
/// pub struct RawIdentifierSuggestion<'tcx> {
/// #[primary_span]
/// span: Span,
diff --git a/compiler/rustc_macros/src/diagnostics/subdiagnostic.rs b/compiler/rustc_macros/src/diagnostics/subdiagnostic.rs
index fa0ca5a52..446aebe4f 100644
--- a/compiler/rustc_macros/src/diagnostics/subdiagnostic.rs
+++ b/compiler/rustc_macros/src/diagnostics/subdiagnostic.rs
@@ -409,7 +409,7 @@ impl<'parent, 'a> SubdiagnosticDeriveVariantBuilder<'parent, 'a> {
let mut code = None;
for nested_attr in list.nested.iter() {
let NestedMeta::Meta(ref meta) = nested_attr else {
- throw_invalid_nested_attr!(attr, &nested_attr);
+ throw_invalid_nested_attr!(attr, nested_attr);
};
let span = meta.span().unwrap();
@@ -427,7 +427,7 @@ impl<'parent, 'a> SubdiagnosticDeriveVariantBuilder<'parent, 'a> {
);
code.set_once((code_field, formatting_init), span);
}
- _ => throw_invalid_nested_attr!(attr, &nested_attr, |diag| {
+ _ => throw_invalid_nested_attr!(attr, nested_attr, |diag| {
diag.help("`code` is the only valid nested attribute")
}),
}
diff --git a/compiler/rustc_macros/src/diagnostics/utils.rs b/compiler/rustc_macros/src/diagnostics/utils.rs
index 374c795d0..da9023352 100644
--- a/compiler/rustc_macros/src/diagnostics/utils.rs
+++ b/compiler/rustc_macros/src/diagnostics/utils.rs
@@ -12,7 +12,7 @@ use syn::{spanned::Spanned, Attribute, Field, Meta, Type, TypeTuple};
use syn::{MetaList, MetaNameValue, NestedMeta, Path};
use synstructure::{BindingInfo, VariantInfo};
-use super::error::invalid_nested_attr;
+use super::error::{invalid_attr, invalid_nested_attr};
thread_local! {
pub static CODE_IDENT_COUNT: RefCell<u32> = RefCell::new(0);
@@ -80,7 +80,7 @@ fn report_error_if_not_applied_to_ty(
path: &[&str],
ty_name: &str,
) -> Result<(), DiagnosticDeriveError> {
- if !type_matches_path(&info.ty, path) {
+ if !type_matches_path(info.ty, path) {
report_type_error(attr, ty_name)?;
}
@@ -105,8 +105,8 @@ pub(crate) fn report_error_if_not_applied_to_span(
attr: &Attribute,
info: &FieldInfo<'_>,
) -> Result<(), DiagnosticDeriveError> {
- if !type_matches_path(&info.ty, &["rustc_span", "Span"])
- && !type_matches_path(&info.ty, &["rustc_errors", "MultiSpan"])
+ if !type_matches_path(info.ty, &["rustc_span", "Span"])
+ && !type_matches_path(info.ty, &["rustc_errors", "MultiSpan"])
{
report_type_error(attr, "`Span` or `MultiSpan`")?;
}
@@ -472,16 +472,13 @@ pub(super) fn build_suggestion_code(
}
/// Possible styles for suggestion subdiagnostics.
-#[derive(Clone, Copy)]
+#[derive(Clone, Copy, PartialEq)]
pub(super) enum SuggestionKind {
- /// `#[suggestion]`
Normal,
- /// `#[suggestion_short]`
Short,
- /// `#[suggestion_hidden]`
Hidden,
- /// `#[suggestion_verbose]`
Verbose,
+ ToolOnly,
}
impl FromStr for SuggestionKind {
@@ -489,15 +486,28 @@ impl FromStr for SuggestionKind {
fn from_str(s: &str) -> Result<Self, Self::Err> {
match s {
- "" => Ok(SuggestionKind::Normal),
- "_short" => Ok(SuggestionKind::Short),
- "_hidden" => Ok(SuggestionKind::Hidden),
- "_verbose" => Ok(SuggestionKind::Verbose),
+ "normal" => Ok(SuggestionKind::Normal),
+ "short" => Ok(SuggestionKind::Short),
+ "hidden" => Ok(SuggestionKind::Hidden),
+ "verbose" => Ok(SuggestionKind::Verbose),
+ "tool-only" => Ok(SuggestionKind::ToolOnly),
_ => Err(()),
}
}
}
+impl fmt::Display for SuggestionKind {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ match self {
+ SuggestionKind::Normal => write!(f, "normal"),
+ SuggestionKind::Short => write!(f, "short"),
+ SuggestionKind::Hidden => write!(f, "hidden"),
+ SuggestionKind::Verbose => write!(f, "verbose"),
+ SuggestionKind::ToolOnly => write!(f, "tool-only"),
+ }
+ }
+}
+
impl SuggestionKind {
pub fn to_suggestion_style(&self) -> TokenStream {
match self {
@@ -513,6 +523,19 @@ impl SuggestionKind {
SuggestionKind::Verbose => {
quote! { rustc_errors::SuggestionStyle::ShowAlways }
}
+ SuggestionKind::ToolOnly => {
+ quote! { rustc_errors::SuggestionStyle::CompletelyHidden }
+ }
+ }
+ }
+
+ fn from_suffix(s: &str) -> Option<Self> {
+ match s {
+ "" => Some(SuggestionKind::Normal),
+ "_short" => Some(SuggestionKind::Short),
+ "_hidden" => Some(SuggestionKind::Hidden),
+ "_verbose" => Some(SuggestionKind::Verbose),
+ _ => None,
}
}
}
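
Suggestion styles move from attribute-name suffixes (#[suggestion_verbose]) to an explicit style = "..." string, with the old suffixes only recovered in order to emit a migration error. A runnable sketch of the two parsing paths (the enum is a local copy for illustration):

    #[derive(Debug, PartialEq, Clone, Copy)]
    enum SuggestionKind { Normal, Short, Hidden, Verbose, ToolOnly }

    // New path: the value of `style = "..."`.
    fn from_style(s: &str) -> Option<SuggestionKind> {
        match s {
            "normal" => Some(SuggestionKind::Normal),
            "short" => Some(SuggestionKind::Short),
            "hidden" => Some(SuggestionKind::Hidden),
            "verbose" => Some(SuggestionKind::Verbose),
            "tool-only" => Some(SuggestionKind::ToolOnly),
            _ => None,
        }
    }

    // Old path: the deprecated `suggestion_*` attribute-name suffix.
    fn from_suffix(s: &str) -> Option<SuggestionKind> {
        match s {
            "" => Some(SuggestionKind::Normal),
            "_short" => Some(SuggestionKind::Short),
            "_hidden" => Some(SuggestionKind::Hidden),
            "_verbose" => Some(SuggestionKind::Verbose),
            _ => None,
        }
    }

    fn main() {
        assert_eq!(from_style("tool-only"), Some(SuggestionKind::ToolOnly));
        assert_eq!(
            "suggestion_hidden".strip_prefix("suggestion").and_then(from_suffix),
            Some(SuggestionKind::Hidden)
        );
    }
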
@@ -565,25 +588,49 @@ impl SubdiagnosticKind {
let name = name.as_str();
let meta = attr.parse_meta()?;
+
let mut kind = match name {
"label" => SubdiagnosticKind::Label,
"note" => SubdiagnosticKind::Note,
"help" => SubdiagnosticKind::Help,
"warning" => SubdiagnosticKind::Warn,
_ => {
+ // Recover old `#[(multipart_)suggestion_*]` syntaxes
+ // FIXME(#100717): remove
if let Some(suggestion_kind) =
- name.strip_prefix("suggestion").and_then(|s| s.parse().ok())
+ name.strip_prefix("suggestion").and_then(SuggestionKind::from_suffix)
{
+ if suggestion_kind != SuggestionKind::Normal {
+ invalid_attr(attr, &meta)
+ .help(format!(
+ r#"Use `#[suggestion(..., style = "{}")]` instead"#,
+ suggestion_kind
+ ))
+ .emit();
+ }
+
SubdiagnosticKind::Suggestion {
- suggestion_kind,
+ suggestion_kind: SuggestionKind::Normal,
applicability: None,
code_field: new_code_ident(),
code_init: TokenStream::new(),
}
} else if let Some(suggestion_kind) =
- name.strip_prefix("multipart_suggestion").and_then(|s| s.parse().ok())
+ name.strip_prefix("multipart_suggestion").and_then(SuggestionKind::from_suffix)
{
- SubdiagnosticKind::MultipartSuggestion { suggestion_kind, applicability: None }
+ if suggestion_kind != SuggestionKind::Normal {
+ invalid_attr(attr, &meta)
+ .help(format!(
+ r#"Use `#[multipart_suggestion(..., style = "{}")]` instead"#,
+ suggestion_kind
+ ))
+ .emit();
+ }
+
+ SubdiagnosticKind::MultipartSuggestion {
+ suggestion_kind: SuggestionKind::Normal,
+ applicability: None,
+ }
} else {
throw_invalid_attr!(attr, &meta);
}
@@ -621,6 +668,7 @@ impl SubdiagnosticKind {
};
let mut code = None;
+ let mut suggestion_kind = None;
let mut nested_iter = nested.into_iter().peekable();
@@ -638,7 +686,7 @@ impl SubdiagnosticKind {
let meta = match nested_attr {
NestedMeta::Meta(ref meta) => meta,
NestedMeta::Lit(_) => {
- invalid_nested_attr(attr, &nested_attr).emit();
+ invalid_nested_attr(attr, nested_attr).emit();
continue;
}
};
@@ -650,7 +698,7 @@ impl SubdiagnosticKind {
let string_value = match meta {
Meta::NameValue(MetaNameValue { lit: syn::Lit::Str(value), .. }) => Some(value),
- Meta::Path(_) => throw_invalid_nested_attr!(attr, &nested_attr, |diag| {
+ Meta::Path(_) => throw_invalid_nested_attr!(attr, nested_attr, |diag| {
diag.help("a diagnostic slug must be the first argument to the attribute")
}),
_ => None,
@@ -672,7 +720,7 @@ impl SubdiagnosticKind {
| SubdiagnosticKind::MultipartSuggestion { ref mut applicability, .. },
) => {
let Some(value) = string_value else {
- invalid_nested_attr(attr, &nested_attr).emit();
+ invalid_nested_attr(attr, nested_attr).emit();
continue;
};
@@ -682,26 +730,56 @@ impl SubdiagnosticKind {
});
applicability.set_once(value, span);
}
+ (
+ "style",
+ SubdiagnosticKind::Suggestion { .. }
+ | SubdiagnosticKind::MultipartSuggestion { .. },
+ ) => {
+ let Some(value) = string_value else {
+ invalid_nested_attr(attr, nested_attr).emit();
+ continue;
+ };
+
+ let value = value.value().parse().unwrap_or_else(|()| {
+ span_err(value.span().unwrap(), "invalid suggestion style")
+ .help("valid styles are `normal`, `short`, `hidden`, `verbose` and `tool-only`")
+ .emit();
+ SuggestionKind::Normal
+ });
+
+ suggestion_kind.set_once(value, span);
+ }
// Invalid nested attribute
(_, SubdiagnosticKind::Suggestion { .. }) => {
- invalid_nested_attr(attr, &nested_attr)
- .help("only `code` and `applicability` are valid nested attributes")
+ invalid_nested_attr(attr, nested_attr)
+ .help(
+ "only `style`, `code` and `applicability` are valid nested attributes",
+ )
.emit();
}
(_, SubdiagnosticKind::MultipartSuggestion { .. }) => {
- invalid_nested_attr(attr, &nested_attr)
- .help("only `applicability` is a valid nested attributes")
+ invalid_nested_attr(attr, nested_attr)
+ .help("only `style` and `applicability` are valid nested attributes")
.emit()
}
_ => {
- invalid_nested_attr(attr, &nested_attr).emit();
+ invalid_nested_attr(attr, nested_attr).emit();
}
}
}
match kind {
- SubdiagnosticKind::Suggestion { ref code_field, ref mut code_init, .. } => {
+ SubdiagnosticKind::Suggestion {
+ ref code_field,
+ ref mut code_init,
+ suggestion_kind: ref mut kind_field,
+ ..
+ } => {
+ if let Some(kind) = suggestion_kind.value() {
+ *kind_field = kind;
+ }
+
*code_init = if let Some(init) = code.value() {
init
} else {
@@ -709,11 +787,17 @@ impl SubdiagnosticKind {
quote! { let #code_field = std::iter::empty(); }
};
}
+ SubdiagnosticKind::MultipartSuggestion {
+ suggestion_kind: ref mut kind_field, ..
+ } => {
+ if let Some(kind) = suggestion_kind.value() {
+ *kind_field = kind;
+ }
+ }
SubdiagnosticKind::Label
| SubdiagnosticKind::Note
| SubdiagnosticKind::Help
- | SubdiagnosticKind::Warn
- | SubdiagnosticKind::MultipartSuggestion { .. } => {}
+ | SubdiagnosticKind::Warn => {}
}
Ok(Some((kind, slug)))
@@ -746,5 +830,5 @@ pub(super) fn should_generate_set_arg(field: &Field) -> bool {
}
pub(super) fn is_doc_comment(attr: &Attribute) -> bool {
- attr.path.segments.last().unwrap().ident.to_string() == "doc"
+ attr.path.segments.last().unwrap().ident == "doc"
}
diff --git a/compiler/rustc_macros/src/lib.rs b/compiler/rustc_macros/src/lib.rs
index 36bda3e0f..ac916bb60 100644
--- a/compiler/rustc_macros/src/lib.rs
+++ b/compiler/rustc_macros/src/lib.rs
@@ -1,4 +1,5 @@
#![feature(allow_internal_unstable)]
+#![feature(if_let_guard)]
#![feature(never_type)]
#![feature(proc_macro_diagnostic)]
#![feature(proc_macro_span)]
@@ -47,7 +48,7 @@ pub fn symbols(input: TokenStream) -> TokenStream {
/// `u32::MAX`. You can also customize things like the `Debug` impl,
/// what traits are derived, and so forth via the macro.
#[proc_macro]
-#[allow_internal_unstable(step_trait, rustc_attrs, trusted_step)]
+#[allow_internal_unstable(step_trait, rustc_attrs, trusted_step, spec_option_partial_eq)]
pub fn newtype_index(input: TokenStream) -> TokenStream {
newtype::newtype(input)
}
diff --git a/compiler/rustc_macros/src/newtype.rs b/compiler/rustc_macros/src/newtype.rs
index 0a77b734c..fd3f52251 100644
--- a/compiler/rustc_macros/src/newtype.rs
+++ b/compiler/rustc_macros/src/newtype.rs
@@ -192,6 +192,30 @@ impl Parse for Newtype {
}
}
};
+ let spec_partial_eq_impl = if let Lit::Int(max) = &max {
+ if let Ok(max_val) = max.base10_parse::<u32>() {
+ quote! {
+ impl core::option::SpecOptionPartialEq for #name {
+ #[inline]
+ fn eq(l: &Option<Self>, r: &Option<Self>) -> bool {
+ if #max_val < u32::MAX {
+ l.map(|i| i.private).unwrap_or(#max_val+1) == r.map(|i| i.private).unwrap_or(#max_val+1)
+ } else {
+ match (l, r) {
+ (Some(l), Some(r)) => r == l,
+ (None, None) => true,
+ _ => false
+ }
+ }
+ }
+ }
+ }
+ } else {
+ quote! {}
+ }
+ } else {
+ quote! {}
+ };
Ok(Self(quote! {
#(#attrs)*
@@ -293,6 +317,8 @@ impl Parse for Newtype {
#step
+ #spec_partial_eq_impl
+
impl From<#name> for u32 {
#[inline]
fn from(v: #name) -> u32 {
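
The generated SpecOptionPartialEq impl speeds up Option<Idx> == Option<Idx> whenever the index type's declared MAX leaves headroom below u32::MAX: None is encoded as MAX + 1 and the comparison becomes a single u32 equality. A standalone illustration of that encoding (the newtype and MAX value are made up for the example):

    #[derive(Clone, Copy, PartialEq)]
    struct MyIdx { private: u32 } // invariant: private <= MAX

    const MAX: u32 = 0xFFFF_FF00;

    fn opt_eq(l: Option<MyIdx>, r: Option<MyIdx>) -> bool {
        // None maps to MAX + 1, which no valid index can equal.
        l.map(|i| i.private).unwrap_or(MAX + 1) == r.map(|i| i.private).unwrap_or(MAX + 1)
    }

    fn main() {
        let a = Some(MyIdx { private: 7 });
        assert!(opt_eq(a, a));
        assert!(!opt_eq(a, None));
        assert!(opt_eq(None, None));
    }
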
diff --git a/compiler/rustc_macros/src/query.rs b/compiler/rustc_macros/src/query.rs
index 7cefafef9..789d83a0d 100644
--- a/compiler/rustc_macros/src/query.rs
+++ b/compiler/rustc_macros/src/query.rs
@@ -114,6 +114,9 @@ struct QueryModifiers {
/// Always remap the ParamEnv's constness before hashing.
remap_env_constness: Option<Ident>,
+
+ /// Generate a `feed` method to set the query's value from another query.
+ feedable: Option<Ident>,
}
fn parse_query_modifiers(input: ParseStream<'_>) -> Result<QueryModifiers> {
@@ -128,6 +131,7 @@ fn parse_query_modifiers(input: ParseStream<'_>) -> Result<QueryModifiers> {
let mut depth_limit = None;
let mut separate_provide_extern = None;
let mut remap_env_constness = None;
+ let mut feedable = None;
while !input.is_empty() {
let modifier: Ident = input.parse()?;
@@ -187,6 +191,8 @@ fn parse_query_modifiers(input: ParseStream<'_>) -> Result<QueryModifiers> {
try_insert!(separate_provide_extern = modifier);
} else if modifier == "remap_env_constness" {
try_insert!(remap_env_constness = modifier);
+ } else if modifier == "feedable" {
+ try_insert!(feedable = modifier);
} else {
return Err(Error::new(modifier.span(), "unknown query modifier"));
}
@@ -206,6 +212,7 @@ fn parse_query_modifiers(input: ParseStream<'_>) -> Result<QueryModifiers> {
depth_limit,
separate_provide_extern,
remap_env_constness,
+ feedable,
})
}
@@ -296,6 +303,7 @@ pub fn rustc_queries(input: TokenStream) -> TokenStream {
let mut query_stream = quote! {};
let mut query_description_stream = quote! {};
let mut query_cached_stream = quote! {};
+ let mut feedable_queries = quote! {};
for query in queries.0 {
let Query { name, arg, modifiers, .. } = &query;
@@ -350,6 +358,18 @@ pub fn rustc_queries(input: TokenStream) -> TokenStream {
[#attribute_stream] fn #name(#arg) #result,
});
+ if modifiers.feedable.is_some() {
+ assert!(modifiers.anon.is_none(), "Query {name} cannot be both `feedable` and `anon`.");
+ assert!(
+ modifiers.eval_always.is_none(),
+ "Query {name} cannot be both `feedable` and `eval_always`."
+ );
+ feedable_queries.extend(quote! {
+ #(#doc_comments)*
+ [#attribute_stream] fn #name(#arg) #result,
+ });
+ }
+
add_query_desc_cached_impl(&query, &mut query_description_stream, &mut query_cached_stream);
}
@@ -363,7 +383,11 @@ pub fn rustc_queries(input: TokenStream) -> TokenStream {
}
}
}
-
+ macro_rules! rustc_feedable_queries {
+ ( $macro:ident! ) => {
+ $macro!(#feedable_queries);
+ }
+ }
pub mod descs {
use super::*;
#query_description_stream
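
rustc_feedable_queries! follows a callback-macro shape: it hands the collected feedable query declarations to whatever macro the caller names. A toy, self-contained version of that pattern (the query names are arbitrary placeholders):

    macro_rules! feedable_queries {
        ( $macro:ident! ) => {
            // In rustc this token list is the generated #feedable_queries stream.
            $macro!(type_of, generics_of);
        };
    }

    macro_rules! list_queries {
        ( $( $name:ident ),* $(,)? ) => {
            fn query_names() -> Vec<&'static str> {
                vec![$( stringify!($name) ),*]
            }
        };
    }

    feedable_queries!(list_queries!);

    fn main() {
        assert_eq!(query_names(), ["type_of", "generics_of"]);
    }
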
diff --git a/compiler/rustc_metadata/src/creader.rs b/compiler/rustc_metadata/src/creader.rs
index cfcceecbe..efeaac8fe 100644
--- a/compiler/rustc_metadata/src/creader.rs
+++ b/compiler/rustc_metadata/src/creader.rs
@@ -1,8 +1,10 @@
//! Validates all used crates and extern libraries and loads their metadata
use crate::errors::{
- ConflictingGlobalAlloc, CrateNotPanicRuntime, GlobalAllocRequired, NoMultipleGlobalAlloc,
- NoPanicStrategy, NoTransitiveNeedsDep, NotProfilerRuntime, ProfilerBuiltinsNeedsCore,
+ AllocFuncRequired, ConflictingAllocErrorHandler, ConflictingGlobalAlloc, CrateNotPanicRuntime,
+ GlobalAllocRequired, MissingAllocErrorHandler, NoMultipleAllocErrorHandler,
+ NoMultipleGlobalAlloc, NoPanicStrategy, NoTransitiveNeedsDep, NotProfilerRuntime,
+ ProfilerBuiltinsNeedsCore,
};
use crate::locator::{CrateError, CrateLocator, CratePaths};
use crate::rmeta::{CrateDep, CrateMetadata, CrateNumMap, CrateRoot, MetadataBlob};
@@ -41,8 +43,13 @@ pub struct CStore {
/// This crate needs an allocator and either provides it itself, or finds it in a dependency.
/// If the above is true, then this field denotes the kind of the found allocator.
allocator_kind: Option<AllocatorKind>,
+ /// This crate needs an allocation error handler and either provides it itself, or finds it in a dependency.
+ /// If the above is true, then this field denotes the kind of the found allocator.
+ /// If the above is true, then this field denotes the kind of the found alloc error handler.
+ alloc_error_handler_kind: Option<AllocatorKind>,
/// This crate has a `#[global_allocator]` item.
has_global_allocator: bool,
+ /// This crate has a `#[alloc_error_handler]` item.
+ has_alloc_error_handler: bool,
/// This map is used to verify we get no hash conflicts between
/// `StableCrateId` values.
@@ -155,7 +162,7 @@ impl CStore {
pub(crate) fn iter_crate_data(&self) -> impl Iterator<Item = (CrateNum, &CrateMetadata)> {
self.metas
.iter_enumerated()
- .filter_map(|(cnum, data)| data.as_ref().map(|data| (cnum, &**data)))
+ .filter_map(|(cnum, data)| data.as_deref().map(|data| (cnum, data)))
}
fn push_dependencies_in_postorder(&self, deps: &mut Vec<CrateNum>, cnum: CrateNum) {
@@ -197,10 +204,18 @@ impl CStore {
self.allocator_kind
}
+ pub(crate) fn alloc_error_handler_kind(&self) -> Option<AllocatorKind> {
+ self.alloc_error_handler_kind
+ }
+
pub(crate) fn has_global_allocator(&self) -> bool {
self.has_global_allocator
}
+ pub(crate) fn has_alloc_error_handler(&self) -> bool {
+ self.has_alloc_error_handler
+ }
+
pub fn report_unused_deps(&self, tcx: TyCtxt<'_>) {
let json_unused_externs = tcx.sess.opts.json_unused_externs;
@@ -230,7 +245,7 @@ impl<'a> CrateLoader<'a> {
pub fn new(
sess: &'a Session,
metadata_loader: Box<MetadataLoaderDyn>,
- local_crate_name: &str,
+ local_crate_name: Symbol,
) -> Self {
let mut stable_crate_ids = FxHashMap::default();
stable_crate_ids.insert(sess.local_stable_crate_id(), LOCAL_CRATE);
@@ -238,7 +253,7 @@ impl<'a> CrateLoader<'a> {
CrateLoader {
sess,
metadata_loader,
- local_crate_name: Symbol::intern(local_crate_name),
+ local_crate_name,
cstore: CStore {
// We add an empty entry for LOCAL_CRATE (which maps to zero) in
// order to make array indices in `metas` match with the
@@ -247,7 +262,9 @@ impl<'a> CrateLoader<'a> {
metas: IndexVec::from_elem_n(None, 1),
injected_panic_runtime: None,
allocator_kind: None,
+ alloc_error_handler_kind: None,
has_global_allocator: false,
+ has_alloc_error_handler: false,
stable_crate_ids,
unused_externs: Vec::new(),
},
@@ -792,6 +809,13 @@ impl<'a> CrateLoader<'a> {
}
spans => !spans.is_empty(),
};
+ self.cstore.has_alloc_error_handler = match &*alloc_error_handler_spans(&self.sess, krate) {
+ [span1, span2, ..] => {
+ self.sess.emit_err(NoMultipleAllocErrorHandler { span2: *span2, span1: *span1 });
+ true
+ }
+ spans => !spans.is_empty(),
+ };
// Check to see if we actually need an allocator. This desire comes
// about through the `#![needs_allocator]` attribute and is typically
@@ -832,22 +856,48 @@ impl<'a> CrateLoader<'a> {
}
}
}
+ let mut alloc_error_handler =
+ self.cstore.has_alloc_error_handler.then(|| Symbol::intern("this crate"));
+ for (_, data) in self.cstore.iter_crate_data() {
+ if data.has_alloc_error_handler() {
+ match alloc_error_handler {
+ Some(other_crate) => {
+ self.sess.emit_err(ConflictingAllocErrorHandler {
+ crate_name: data.name(),
+ other_crate_name: other_crate,
+ });
+ }
+ None => alloc_error_handler = Some(data.name()),
+ }
+ }
+ }
if global_allocator.is_some() {
self.cstore.allocator_kind = Some(AllocatorKind::Global);
- return;
+ } else {
+ // Ok we haven't found a global allocator but we still need an
+ // allocator. At this point our allocator request is typically fulfilled
+ // by the standard library, denoted by the `#![default_lib_allocator]`
+ // attribute.
+ if !self.sess.contains_name(&krate.attrs, sym::default_lib_allocator)
+ && !self.cstore.iter_crate_data().any(|(_, data)| data.has_default_lib_allocator())
+ {
+ self.sess.emit_err(GlobalAllocRequired);
+ }
+ self.cstore.allocator_kind = Some(AllocatorKind::Default);
}
- // Ok we haven't found a global allocator but we still need an
- // allocator. At this point our allocator request is typically fulfilled
- // by the standard library, denoted by the `#![default_lib_allocator]`
- // attribute.
- if !self.sess.contains_name(&krate.attrs, sym::default_lib_allocator)
- && !self.cstore.iter_crate_data().any(|(_, data)| data.has_default_lib_allocator())
- {
- self.sess.emit_err(GlobalAllocRequired);
+ if alloc_error_handler.is_some() {
+ self.cstore.alloc_error_handler_kind = Some(AllocatorKind::Global);
+ } else {
+ // The alloc crate provides a default allocation error handler if
+ // one isn't specified.
+ if !self.sess.features_untracked().default_alloc_error_handler {
+ self.sess.emit_err(AllocFuncRequired);
+ self.sess.emit_note(MissingAllocErrorHandler);
+ }
+ self.cstore.alloc_error_handler_kind = Some(AllocatorKind::Default);
}
- self.cstore.allocator_kind = Some(AllocatorKind::Default);
}
fn inject_dependency_if(
@@ -950,7 +1000,7 @@ impl<'a> CrateLoader<'a> {
);
let name = match orig_name {
Some(orig_name) => {
- validate_crate_name(self.sess, orig_name.as_str(), Some(item.span));
+ validate_crate_name(self.sess, orig_name, Some(item.span));
orig_name
}
None => item.ident.name,
@@ -1023,3 +1073,26 @@ fn global_allocator_spans(sess: &Session, krate: &ast::Crate) -> Vec<Span> {
visit::walk_crate(&mut f, krate);
f.spans
}
+
+fn alloc_error_handler_spans(sess: &Session, krate: &ast::Crate) -> Vec<Span> {
+ struct Finder<'a> {
+ sess: &'a Session,
+ name: Symbol,
+ spans: Vec<Span>,
+ }
+ impl<'ast, 'a> visit::Visitor<'ast> for Finder<'a> {
+ fn visit_item(&mut self, item: &'ast ast::Item) {
+ if item.ident.name == self.name
+ && self.sess.contains_name(&item.attrs, sym::rustc_std_internal_symbol)
+ {
+ self.spans.push(item.span);
+ }
+ visit::walk_item(self, item)
+ }
+ }
+
+ let name = Symbol::intern(&AllocatorKind::Global.fn_name(sym::oom));
+ let mut f = Finder { sess, name, spans: Vec::new() };
+ visit::walk_crate(&mut f, krate);
+ f.spans
+}
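
alloc_error_handler_spans mirrors global_allocator_spans: it collects the spans of the expanded handler items, and the caller treats exactly one span as "this crate has a handler" and two or more as a duplicate-definition error. The same shape as a standalone check (usize stands in for Span):

    fn has_single_handler(spans: &[usize]) -> Result<bool, (usize, usize)> {
        match spans {
            // Two or more definitions: report the first pair as conflicting.
            [first, second, ..] => Err((*first, *second)),
            rest => Ok(!rest.is_empty()),
        }
    }

    fn main() {
        assert_eq!(has_single_handler(&[]), Ok(false));
        assert_eq!(has_single_handler(&[10]), Ok(true));
        assert_eq!(has_single_handler(&[10, 20, 30]), Err((10, 20)));
    }
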
diff --git a/compiler/rustc_metadata/src/errors.rs b/compiler/rustc_metadata/src/errors.rs
index 7c387b9a9..6f7e6e09c 100644
--- a/compiler/rustc_metadata/src/errors.rs
+++ b/compiler/rustc_metadata/src/errors.rs
@@ -344,6 +344,16 @@ pub struct NoMultipleGlobalAlloc {
}
#[derive(Diagnostic)]
+#[diag(metadata_no_multiple_alloc_error_handler)]
+pub struct NoMultipleAllocErrorHandler {
+ #[primary_span]
+ #[label]
+ pub span2: Span,
+ #[label(metadata_prev_alloc_error_handler)]
+ pub span1: Span,
+}
+
+#[derive(Diagnostic)]
#[diag(metadata_conflicting_global_alloc)]
pub struct ConflictingGlobalAlloc {
pub crate_name: Symbol,
@@ -351,10 +361,25 @@ pub struct ConflictingGlobalAlloc {
}
#[derive(Diagnostic)]
+#[diag(metadata_conflicting_alloc_error_handler)]
+pub struct ConflictingAllocErrorHandler {
+ pub crate_name: Symbol,
+ pub other_crate_name: Symbol,
+}
+
+#[derive(Diagnostic)]
#[diag(metadata_global_alloc_required)]
pub struct GlobalAllocRequired;
#[derive(Diagnostic)]
+#[diag(metadata_alloc_func_required)]
+pub struct AllocFuncRequired;
+
+#[derive(Diagnostic)]
+#[diag(metadata_missing_alloc_error_handler)]
+pub struct MissingAllocErrorHandler;
+
+#[derive(Diagnostic)]
#[diag(metadata_no_transitive_needs_dep)]
pub struct NoTransitiveNeedsDep<'a> {
pub crate_name: Symbol,
@@ -578,6 +603,7 @@ pub struct InvalidMetadataFiles {
}
impl IntoDiagnostic<'_> for InvalidMetadataFiles {
+ #[track_caller]
fn into_diagnostic(
self,
handler: &'_ rustc_errors::Handler,
@@ -606,6 +632,7 @@ pub struct CannotFindCrate {
}
impl IntoDiagnostic<'_> for CannotFindCrate {
+ #[track_caller]
fn into_diagnostic(
self,
handler: &'_ rustc_errors::Handler,
@@ -665,6 +692,7 @@ pub struct CrateLocationUnknownType<'a> {
#[primary_span]
pub span: Span,
pub path: &'a Path,
+ pub crate_name: Symbol,
}
#[derive(Diagnostic)]
diff --git a/compiler/rustc_metadata/src/fs.rs b/compiler/rustc_metadata/src/fs.rs
index f360a5864..7601f6bd3 100644
--- a/compiler/rustc_metadata/src/fs.rs
+++ b/compiler/rustc_metadata/src/fs.rs
@@ -6,7 +6,7 @@ use crate::{encode_metadata, EncodedMetadata};
use rustc_data_structures::temp_dir::MaybeTempDir;
use rustc_hir::def_id::LOCAL_CRATE;
use rustc_middle::ty::TyCtxt;
-use rustc_session::config::{CrateType, OutputFilenames, OutputType};
+use rustc_session::config::{CrateType, OutputType};
use rustc_session::output::filename_for_metadata;
use rustc_session::Session;
use tempfile::Builder as TempFileBuilder;
@@ -22,9 +22,14 @@ pub const METADATA_FILENAME: &str = "lib.rmeta";
/// building an `.rlib` (stomping over one another), or writing an `.rmeta` into a
/// directory being searched for `extern crate` (observing an incomplete file).
/// The returned path is the temporary file containing the complete metadata.
-pub fn emit_metadata(sess: &Session, metadata: &[u8], tmpdir: &MaybeTempDir) -> PathBuf {
- let out_filename = tmpdir.as_ref().join(METADATA_FILENAME);
- let result = fs::write(&out_filename, metadata);
+pub fn emit_wrapper_file(
+ sess: &Session,
+ data: &[u8],
+ tmpdir: &MaybeTempDir,
+ name: &str,
+) -> PathBuf {
+ let out_filename = tmpdir.as_ref().join(name);
+ let result = fs::write(&out_filename, data);
if let Err(err) = result {
sess.emit_fatal(FailedWriteError { filename: out_filename, err });
@@ -33,10 +38,7 @@ pub fn emit_metadata(sess: &Session, metadata: &[u8], tmpdir: &MaybeTempDir) ->
out_filename
}
-pub fn encode_and_write_metadata(
- tcx: TyCtxt<'_>,
- outputs: &OutputFilenames,
-) -> (EncodedMetadata, bool) {
+pub fn encode_and_write_metadata(tcx: TyCtxt<'_>) -> (EncodedMetadata, bool) {
#[derive(PartialEq, Eq, PartialOrd, Ord)]
enum MetadataKind {
None,
@@ -59,7 +61,7 @@ pub fn encode_and_write_metadata(
.unwrap_or(MetadataKind::None);
let crate_name = tcx.crate_name(LOCAL_CRATE);
- let out_filename = filename_for_metadata(tcx.sess, crate_name.as_str(), outputs);
+ let out_filename = filename_for_metadata(tcx.sess, crate_name, tcx.output_filenames(()));
// To avoid races with another rustc process scanning the output directory,
// we need to write the file somewhere else and atomically move it to its
// final destination, with an `fs::rename` call. In order for the rename to
diff --git a/compiler/rustc_metadata/src/lib.rs b/compiler/rustc_metadata/src/lib.rs
index 98cf6fef5..1987f88e6 100644
--- a/compiler/rustc_metadata/src/lib.rs
+++ b/compiler/rustc_metadata/src/lib.rs
@@ -41,6 +41,6 @@ pub mod errors;
pub mod fs;
pub mod locator;
-pub use fs::{emit_metadata, METADATA_FILENAME};
+pub use fs::{emit_wrapper_file, METADATA_FILENAME};
pub use native_libs::find_native_static_library;
pub use rmeta::{encode_metadata, EncodedMetadata, METADATA_HEADER};
diff --git a/compiler/rustc_metadata/src/locator.rs b/compiler/rustc_metadata/src/locator.rs
index 35f9ef92a..15546092e 100644
--- a/compiler/rustc_metadata/src/locator.rs
+++ b/compiler/rustc_metadata/src/locator.rs
@@ -707,6 +707,12 @@ impl<'a> CrateLocator<'a> {
loc.original().clone(),
));
}
+ if !loc.original().is_file() {
+ return Err(CrateError::ExternLocationNotFile(
+ self.crate_name,
+ loc.original().clone(),
+ ));
+ }
let Some(file) = loc.original().file_name().and_then(|s| s.to_str()) else {
return Err(CrateError::ExternLocationNotFile(
self.crate_name,
@@ -1020,11 +1026,10 @@ impl CrateError {
None => String::new(),
Some(r) => format!(" which `{}` depends on", r.name),
};
- // FIXME: There are no tests for CrateLocationUnknownType or LibFilenameForm
if !locator.crate_rejections.via_filename.is_empty() {
let mismatches = locator.crate_rejections.via_filename.iter();
for CrateMismatch { path, .. } in mismatches {
- sess.emit_err(CrateLocationUnknownType { span, path: &path });
+ sess.emit_err(CrateLocationUnknownType { span, path: &path, crate_name });
sess.emit_err(LibFilenameForm {
span,
dll_prefix: &locator.dll_prefix,
diff --git a/compiler/rustc_metadata/src/native_libs.rs b/compiler/rustc_metadata/src/native_libs.rs
index 20a2e7829..1fd35adf1 100644
--- a/compiler/rustc_metadata/src/native_libs.rs
+++ b/compiler/rustc_metadata/src/native_libs.rs
@@ -29,11 +29,11 @@ use std::path::PathBuf;
pub fn find_native_static_library(
name: &str,
- verbatim: Option<bool>,
+ verbatim: bool,
search_paths: &[PathBuf],
sess: &Session,
) -> PathBuf {
- let formats = if verbatim.unwrap_or(false) {
+ let formats = if verbatim {
vec![("".into(), "".into())]
} else {
let os = (sess.target.staticlib_prefix.clone(), sess.target.staticlib_suffix.clone());
@@ -52,7 +52,7 @@ pub fn find_native_static_library(
}
}
- sess.emit_fatal(MissingNativeLibrary::new(name, verbatim.unwrap_or(false)));
+ sess.emit_fatal(MissingNativeLibrary::new(name, verbatim));
}
fn find_bundled_library(
@@ -66,7 +66,7 @@ fn find_bundled_library(
let NativeLibKind::Static { bundle: Some(true) | None, .. } = kind {
find_native_static_library(
name.unwrap().as_str(),
- verbatim,
+ verbatim.unwrap_or(false),
&sess.target_filesearch(PathKind::Native).search_path_dirs(),
sess,
).file_name().and_then(|s| s.to_str()).map(Symbol::intern)
@@ -311,10 +311,7 @@ impl<'tcx> Collector<'tcx> {
sess.emit_err(BundleNeedsStatic { span });
}
- ("verbatim", _) => {
- report_unstable_modifier!(native_link_modifiers_verbatim);
- assign_modifier(&mut verbatim)
- }
+ ("verbatim", _) => assign_modifier(&mut verbatim),
("whole-archive", Some(NativeLibKind::Static { whole_archive, .. })) => {
assign_modifier(whole_archive)
diff --git a/compiler/rustc_metadata/src/rmeta/decoder.rs b/compiler/rustc_metadata/src/rmeta/decoder.rs
index 691e3d0f8..af7b0793a 100644
--- a/compiler/rustc_metadata/src/rmeta/decoder.rs
+++ b/compiler/rustc_metadata/src/rmeta/decoder.rs
@@ -11,11 +11,10 @@ use rustc_data_structures::sync::{Lock, LockGuard, Lrc, OnceCell};
use rustc_data_structures::unhash::UnhashMap;
use rustc_expand::base::{SyntaxExtension, SyntaxExtensionKind};
use rustc_expand::proc_macro::{AttrProcMacro, BangProcMacro, DeriveProcMacro};
-use rustc_hir::def::{CtorKind, CtorOf, DefKind, Res};
+use rustc_hir::def::{CtorKind, DefKind, Res};
use rustc_hir::def_id::{CrateNum, DefId, DefIndex, CRATE_DEF_INDEX, LOCAL_CRATE};
use rustc_hir::definitions::{DefKey, DefPath, DefPathData, DefPathHash};
use rustc_hir::diagnostic_items::DiagnosticItems;
-use rustc_hir::lang_items;
use rustc_index::vec::{Idx, IndexVec};
use rustc_middle::metadata::ModChild;
use rustc_middle::middle::exported_symbols::{ExportedSymbol, SymbolExportInfo};
@@ -30,17 +29,16 @@ use rustc_session::cstore::{
CrateSource, ExternCrate, ForeignModule, LinkagePreference, NativeLib,
};
use rustc_session::Session;
-use rustc_span::hygiene::{ExpnIndex, MacroKind};
+use rustc_span::hygiene::ExpnIndex;
use rustc_span::source_map::{respan, Spanned};
-use rustc_span::symbol::{kw, sym, Ident, Symbol};
+use rustc_span::symbol::{kw, Ident, Symbol};
use rustc_span::{self, BytePos, ExpnId, Pos, Span, SyntaxContext, DUMMY_SP};
use proc_macro::bridge::client::ProcMacro;
-use std::io;
use std::iter::TrustedLen;
-use std::mem;
use std::num::NonZeroUsize;
use std::path::Path;
+use std::{io, iter, mem};
pub(super) use cstore_impl::provide;
pub use cstore_impl::provide_extern;
@@ -69,10 +67,10 @@ impl std::ops::Deref for MetadataBlob {
}
}
-// A map from external crate numbers (as decoded from some crate file) to
-// local crate numbers (as generated during this session). Each external
-// crate may refer to types in other external crates, and each has their
-// own crate numbers.
+/// A map from external crate numbers (as decoded from some crate file) to
+/// local crate numbers (as generated during this session). Each external
+/// crate may refer to types in other external crates, and each has their
+/// own crate numbers.
pub(crate) type CrateNumMap = IndexVec<CrateNum, CrateNum>;
pub(crate) struct CrateMetadata {
@@ -645,12 +643,6 @@ impl<'a, 'tcx> Decodable<DecodeContext<'a, 'tcx>> for Symbol {
}
}
-impl<'a, 'tcx> Decodable<DecodeContext<'a, 'tcx>> for &'tcx [ty::abstract_const::Node<'tcx>] {
- fn decode(d: &mut DecodeContext<'a, 'tcx>) -> Self {
- ty::codec::RefDecodable::decode(d)
- }
-}
-
impl<'a, 'tcx> Decodable<DecodeContext<'a, 'tcx>> for &'tcx [(ty::Predicate<'tcx>, Span)] {
fn decode(d: &mut DecodeContext<'a, 'tcx>) -> Self {
ty::codec::RefDecodable::decode(d)
@@ -867,12 +859,12 @@ impl<'a, 'tcx> CrateMetadataRef<'a> {
let variant_did =
if adt_kind == ty::AdtKind::Enum { Some(self.local_def_id(index)) } else { None };
- let ctor_did = data.ctor.map(|index| self.local_def_id(index));
+ let ctor = data.ctor.map(|(kind, index)| (kind, self.local_def_id(index)));
ty::VariantDef::new(
self.item_name(index),
variant_did,
- ctor_did,
+ ctor,
data.discr,
self.root
.tables
@@ -886,7 +878,6 @@ impl<'a, 'tcx> CrateMetadataRef<'a> {
vis: self.get_visibility(index),
})
.collect(),
- data.ctor_kind,
adt_kind,
parent_did,
false,
@@ -967,7 +958,7 @@ impl<'a, 'tcx> CrateMetadataRef<'a> {
}
/// Iterates over the language items in the given crate.
- fn get_lang_items(self, tcx: TyCtxt<'tcx>) -> &'tcx [(DefId, usize)] {
+ fn get_lang_items(self, tcx: TyCtxt<'tcx>) -> &'tcx [(DefId, LangItem)] {
tcx.arena.alloc_from_iter(
self.root
.lang_items
@@ -992,87 +983,52 @@ impl<'a, 'tcx> CrateMetadataRef<'a> {
DiagnosticItems { id_to_name, name_to_id }
}
+ fn get_mod_child(self, id: DefIndex, sess: &Session) -> ModChild {
+ let ident = self.item_ident(id, sess);
+ let kind = self.def_kind(id);
+ let def_id = self.local_def_id(id);
+ let res = Res::Def(kind, def_id);
+ let vis = self.get_visibility(id);
+ let span = self.get_span(id, sess);
+ let macro_rules = match kind {
+ DefKind::Macro(..) => self.root.tables.macro_rules.get(self, id).is_some(),
+ _ => false,
+ };
+
+ ModChild { ident, res, vis, span, macro_rules }
+ }
+
/// Iterates over all named children of the given module,
/// including both proper items and reexports.
/// Module here is understood in name resolution sense - it can be a `mod` item,
/// or a crate root, or an enum, or a trait.
- fn for_each_module_child(
+ fn get_module_children(
self,
id: DefIndex,
- mut callback: impl FnMut(ModChild),
- sess: &Session,
- ) {
- if let Some(data) = &self.root.proc_macro_data {
- // If we are loading as a proc macro, we want to return
- // the view of this crate as a proc macro crate.
- if id == CRATE_DEF_INDEX {
- for def_index in data.macros.decode(self) {
- let raw_macro = self.raw_proc_macro(def_index);
- let res = Res::Def(
- DefKind::Macro(macro_kind(raw_macro)),
- self.local_def_id(def_index),
- );
- let ident = self.item_ident(def_index, sess);
- callback(ModChild {
- ident,
- res,
- vis: ty::Visibility::Public,
- span: ident.span,
- macro_rules: false,
- });
- }
- }
- return;
- }
-
- // Iterate over all children.
- if let Some(children) = self.root.tables.children.get(self, id) {
- for child_index in children.decode((self, sess)) {
- let ident = self.item_ident(child_index, sess);
- let kind = self.def_kind(child_index);
- let def_id = self.local_def_id(child_index);
- let res = Res::Def(kind, def_id);
- let vis = self.get_visibility(child_index);
- let span = self.get_span(child_index, sess);
- let macro_rules = match kind {
- DefKind::Macro(..) => {
- self.root.tables.macro_rules.get(self, child_index).is_some()
+ sess: &'a Session,
+ ) -> impl Iterator<Item = ModChild> + 'a {
+ iter::from_generator(move || {
+ if let Some(data) = &self.root.proc_macro_data {
+ // If we are loading as a proc macro, we want to return
+ // the view of this crate as a proc macro crate.
+ if id == CRATE_DEF_INDEX {
+ for child_index in data.macros.decode(self) {
+ yield self.get_mod_child(child_index, sess);
}
- _ => false,
- };
-
- callback(ModChild { ident, res, vis, span, macro_rules });
+ }
+ } else {
+ // Iterate over all children.
+ for child_index in self.root.tables.children.get(self, id).unwrap().decode(self) {
+ yield self.get_mod_child(child_index, sess);
+ }
- // For non-reexport variants add their fictive constructors to children.
- // Braced variants, unlike structs, generate unusable names in value namespace,
- // they are reserved for possible future use. It's ok to use the variant's id as
- // a ctor id since an error will be reported on any use of such resolution anyway.
- // Reexport lists automatically contain such constructors when necessary.
- if kind == DefKind::Variant && self.get_ctor_def_id_and_kind(child_index).is_none()
- {
- let ctor_res =
- Res::Def(DefKind::Ctor(CtorOf::Variant, CtorKind::Fictive), def_id);
- let mut vis = vis;
- if vis.is_public() {
- // For non-exhaustive variants lower the constructor visibility to
- // within the crate. We only need this for fictive constructors,
- // for other constructors correct visibilities
- // were already encoded in metadata.
- let mut attrs = self.get_item_attrs(def_id.index, sess);
- if attrs.any(|item| item.has_name(sym::non_exhaustive)) {
- vis = ty::Visibility::Restricted(self.local_def_id(CRATE_DEF_INDEX));
- }
+ if let Some(reexports) = self.root.tables.module_reexports.get(self, id) {
+ for reexport in reexports.decode((self, sess)) {
+ yield reexport;
}
- callback(ModChild { ident, res: ctor_res, vis, span, macro_rules: false });
}
}
- }
-
- if let Some(exports) = self.root.tables.module_reexports.get(self, id) {
- for exp in exports.decode((self, sess)) {
- callback(exp);
- }
- }
+ })
}
fn is_ctfe_mir_available(self, id: DefIndex) -> bool {
@@ -1137,11 +1093,11 @@ impl<'a, 'tcx> CrateMetadataRef<'a> {
}
}
- fn get_ctor_def_id_and_kind(self, node_id: DefIndex) -> Option<(DefId, CtorKind)> {
+ fn get_ctor(self, node_id: DefIndex) -> Option<(CtorKind, DefId)> {
match self.def_kind(node_id) {
DefKind::Struct | DefKind::Variant => {
let vdata = self.root.tables.variant_data.get(self, node_id).unwrap().decode(self);
- vdata.ctor.map(|index| (self.local_def_id(index), vdata.ctor_kind))
+ vdata.ctor.map(|(kind, index)| (kind, self.local_def_id(index)))
}
_ => None,
}
@@ -1319,7 +1275,7 @@ impl<'a, 'tcx> CrateMetadataRef<'a> {
)
}
- fn get_missing_lang_items(self, tcx: TyCtxt<'tcx>) -> &'tcx [lang_items::LangItem] {
+ fn get_missing_lang_items(self, tcx: TyCtxt<'tcx>) -> &'tcx [LangItem] {
tcx.arena.alloc_from_iter(self.root.lang_items_missing.decode(self))
}
@@ -1765,6 +1721,10 @@ impl CrateMetadata {
self.root.has_global_allocator
}
+ pub(crate) fn has_alloc_error_handler(&self) -> bool {
+ self.root.has_alloc_error_handler
+ }
+
pub(crate) fn has_default_lib_allocator(&self) -> bool {
self.root.has_default_lib_allocator
}
@@ -1805,13 +1765,3 @@ impl CrateMetadata {
None
}
}
-
-// Cannot be implemented on 'ProcMacro', as libproc_macro
-// does not depend on librustc_ast
-fn macro_kind(raw: &ProcMacro) -> MacroKind {
- match raw {
- ProcMacro::CustomDerive { .. } => MacroKind::Derive,
- ProcMacro::Attr { .. } => MacroKind::Attr,
- ProcMacro::Bang { .. } => MacroKind::Bang,
- }
-}
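
The decoder change replaces the callback-driven for_each_module_child with get_module_children, a lazy iterator written with the nightly iter::from_generator. A rough stable-Rust approximation of the same shape, using boxed iterators instead of a generator (the slices are placeholders for the metadata tables, not the real types):

    fn module_children<'a>(
        proc_macro_ids: Option<&'a [u32]>,
        children: &'a [u32],
        reexports: &'a [u32],
    ) -> Box<dyn Iterator<Item = u32> + 'a> {
        match proc_macro_ids {
            // Proc-macro crates only expose their macros.
            Some(ids) => Box::new(ids.iter().copied()),
            // Otherwise: all children, then the module reexports.
            None => Box::new(children.iter().chain(reexports).copied()),
        }
    }

    fn main() {
        let all: Vec<u32> = module_children(None, &[1, 2], &[3]).collect();
        assert_eq!(all, [1, 2, 3]);
    }
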
diff --git a/compiler/rustc_metadata/src/rmeta/decoder/cstore_impl.rs b/compiler/rustc_metadata/src/rmeta/decoder/cstore_impl.rs
index a0a085525..c5d4da079 100644
--- a/compiler/rustc_metadata/src/rmeta/decoder/cstore_impl.rs
+++ b/compiler/rustc_metadata/src/rmeta/decoder/cstore_impl.rs
@@ -21,7 +21,6 @@ use rustc_span::source_map::{Span, Spanned};
use rustc_span::symbol::{kw, Symbol};
use rustc_data_structures::sync::Lrc;
-use smallvec::SmallVec;
use std::any::Any;
use super::{Decodable, DecodeContext, DecodeIterator};
@@ -224,6 +223,15 @@ provide! { tcx, def_id, other, cdata,
generator_kind => { table }
trait_def => { table }
deduced_param_attrs => { table }
+ is_type_alias_impl_trait => {
+ debug_assert_eq!(tcx.def_kind(def_id), DefKind::OpaqueTy);
+ cdata
+ .root
+ .tables
+ .is_type_alias_impl_trait
+ .get(cdata, def_id.index)
+ .is_some()
+ }
collect_trait_impl_trait_tys => {
Ok(cdata
.root
@@ -255,6 +263,7 @@ provide! { tcx, def_id, other, cdata,
is_panic_runtime => { cdata.root.panic_runtime }
is_compiler_builtins => { cdata.root.compiler_builtins }
has_global_allocator => { cdata.root.has_global_allocator }
+ has_alloc_error_handler => { cdata.root.has_alloc_error_handler }
has_panic_handler => { cdata.root.has_panic_handler }
is_profiler_runtime => { cdata.root.profiler_runtime }
required_panic_strategy => { cdata.root.required_panic_strategy }
@@ -297,9 +306,7 @@ provide! { tcx, def_id, other, cdata,
r
}
module_children => {
- let mut result = SmallVec::<[_; 8]>::new();
- cdata.for_each_module_child(def_id.index, |child| result.push(child), tcx.sess);
- tcx.arena.alloc_slice(&result)
+ tcx.arena.alloc_from_iter(cdata.get_module_children(def_id.index, tcx.sess))
}
defined_lib_features => { cdata.get_lib_features(tcx) }
stability_implications => {
@@ -339,6 +346,7 @@ pub(in crate::rmeta) fn provide(providers: &mut Providers) {
// resolve! Does this work? Unsure! That's what the issue is about
*providers = Providers {
allocator_kind: |tcx, ()| CStore::from_tcx(tcx).allocator_kind(),
+ alloc_error_handler_kind: |tcx, ()| CStore::from_tcx(tcx).alloc_error_handler_kind(),
is_private_dep: |_tcx, cnum| {
assert_eq!(cnum, LOCAL_CRATE);
false
@@ -464,6 +472,10 @@ pub(in crate::rmeta) fn provide(providers: &mut Providers) {
assert_eq!(cnum, LOCAL_CRATE);
CStore::from_tcx(tcx).has_global_allocator()
},
+ has_alloc_error_handler: |tcx, cnum| {
+ assert_eq!(cnum, LOCAL_CRATE);
+ CStore::from_tcx(tcx).has_alloc_error_handler()
+ },
postorder_cnums: |tcx, ()| {
tcx.arena
.alloc_slice(&CStore::from_tcx(tcx).crate_dependencies_in_postorder(LOCAL_CRATE))
@@ -489,22 +501,20 @@ impl CStore {
self.get_crate_data(def.krate).get_struct_field_visibilities(def.index)
}
- pub fn ctor_def_id_and_kind_untracked(&self, def: DefId) -> Option<(DefId, CtorKind)> {
- self.get_crate_data(def.krate).get_ctor_def_id_and_kind(def.index)
+ pub fn ctor_untracked(&self, def: DefId) -> Option<(CtorKind, DefId)> {
+ self.get_crate_data(def.krate).get_ctor(def.index)
}
pub fn visibility_untracked(&self, def: DefId) -> Visibility<DefId> {
self.get_crate_data(def.krate).get_visibility(def.index)
}
- pub fn module_children_untracked(&self, def_id: DefId, sess: &Session) -> Vec<ModChild> {
- let mut result = vec![];
- self.get_crate_data(def_id.krate).for_each_module_child(
- def_id.index,
- |child| result.push(child),
- sess,
- );
- result
+ pub fn module_children_untracked<'a>(
+ &'a self,
+ def_id: DefId,
+ sess: &'a Session,
+ ) -> impl Iterator<Item = ModChild> + 'a {
+ self.get_crate_data(def_id.krate).get_module_children(def_id.index, sess)
}
pub fn load_macro_untracked(&self, id: DefId, sess: &Session) -> LoadedMacro {
diff --git a/compiler/rustc_metadata/src/rmeta/encoder.rs b/compiler/rustc_metadata/src/rmeta/encoder.rs
index 049514ec7..7304c891e 100644
--- a/compiler/rustc_metadata/src/rmeta/encoder.rs
+++ b/compiler/rustc_metadata/src/rmeta/encoder.rs
@@ -17,7 +17,7 @@ use rustc_hir::def_id::{
};
use rustc_hir::definitions::DefPathData;
use rustc_hir::intravisit::{self, Visitor};
-use rustc_hir::lang_items;
+use rustc_hir::lang_items::LangItem;
use rustc_middle::hir::nested_filter;
use rustc_middle::middle::dependency_format::Linkage;
use rustc_middle::middle::exported_symbols::{
@@ -670,6 +670,7 @@ impl<'a, 'tcx> EncodeContext<'a, 'tcx> {
panic_in_drop_strategy: tcx.sess.opts.unstable_opts.panic_in_drop,
edition: tcx.sess.edition(),
has_global_allocator: tcx.has_global_allocator(LOCAL_CRATE),
+ has_alloc_error_handler: tcx.has_alloc_error_handler(LOCAL_CRATE),
has_panic_handler: tcx.has_panic_handler(LOCAL_CRATE),
has_default_lib_allocator: tcx
.sess
@@ -787,8 +788,7 @@ fn should_encode_attr(
} else if attr.doc_str().is_some() {
// We keep all public doc comments because they might be "imported" into downstream crates
// if they use `#[doc(inline)]` to copy an item's documentation into their own.
- *is_def_id_public
- .get_or_insert_with(|| tcx.effective_visibilities(()).effective_vis(def_id).is_some())
+ *is_def_id_public.get_or_insert_with(|| tcx.effective_visibilities(()).is_exported(def_id))
} else if attr.has_name(sym::doc) {
// If this is a `doc` attribute, and it's marked `inline` (as in `#[doc(inline)]`), we can
// remove it. It won't be inlinable in downstream crates.
@@ -928,6 +928,8 @@ fn should_encode_variances(def_kind: DefKind) -> bool {
| DefKind::Union
| DefKind::Enum
| DefKind::Variant
+ | DefKind::OpaqueTy
+ | DefKind::ImplTraitPlaceholder
| DefKind::Fn
| DefKind::Ctor(..)
| DefKind::AssocFn => true,
@@ -941,8 +943,6 @@ fn should_encode_variances(def_kind: DefKind) -> bool {
| DefKind::Const
| DefKind::ForeignMod
| DefKind::TyAlias
- | DefKind::OpaqueTy
- | DefKind::ImplTraitPlaceholder
| DefKind::Impl
| DefKind::Trait
| DefKind::TraitAlias
@@ -1221,9 +1221,8 @@ impl<'a, 'tcx> EncodeContext<'a, 'tcx> {
debug!("EncodeContext::encode_enum_variant_info({:?})", def_id);
let data = VariantData {
- ctor_kind: variant.ctor_kind,
discr: variant.discr,
- ctor: variant.ctor_def_id.map(|did| did.index),
+ ctor: variant.ctor.map(|(kind, def_id)| (kind, def_id.index)),
is_non_exhaustive: variant.is_field_list_non_exhaustive(),
};
@@ -1233,32 +1232,28 @@ impl<'a, 'tcx> EncodeContext<'a, 'tcx> {
assert!(f.did.is_local());
f.did.index
}));
- if variant.ctor_kind == CtorKind::Fn {
+ if let Some((CtorKind::Fn, ctor_def_id)) = variant.ctor {
// FIXME(eddyb) encode signature only in `encode_enum_variant_ctor`.
- if let Some(ctor_def_id) = variant.ctor_def_id {
- record!(self.tables.fn_sig[def_id] <- tcx.fn_sig(ctor_def_id));
- }
+ record!(self.tables.fn_sig[def_id] <- tcx.fn_sig(ctor_def_id));
}
}
fn encode_enum_variant_ctor(&mut self, def: ty::AdtDef<'tcx>, index: VariantIdx) {
- let tcx = self.tcx;
let variant = &def.variant(index);
- let def_id = variant.ctor_def_id.unwrap();
+ let Some((ctor_kind, def_id)) = variant.ctor else { return };
debug!("EncodeContext::encode_enum_variant_ctor({:?})", def_id);
// FIXME(eddyb) encode only the `CtorKind` for constructors.
let data = VariantData {
- ctor_kind: variant.ctor_kind,
discr: variant.discr,
- ctor: Some(def_id.index),
+ ctor: Some((ctor_kind, def_id.index)),
is_non_exhaustive: variant.is_field_list_non_exhaustive(),
};
record!(self.tables.variant_data[def_id] <- data);
self.tables.constness.set(def_id.index, hir::Constness::Const);
- if variant.ctor_kind == CtorKind::Fn {
- record!(self.tables.fn_sig[def_id] <- tcx.fn_sig(def_id));
+ if ctor_kind == CtorKind::Fn {
+ record!(self.tables.fn_sig[def_id] <- self.tcx.fn_sig(def_id));
}
}
@@ -1272,13 +1267,6 @@ impl<'a, 'tcx> EncodeContext<'a, 'tcx> {
// the crate root for consistency with other crates (some of the resolver
// code uses it). However, we skip encoding anything relating to child
// items - we encode information about proc-macros later on.
- let reexports = if !self.is_proc_macro {
- tcx.module_reexports(local_def_id).unwrap_or(&[])
- } else {
- &[]
- };
-
- record_array!(self.tables.module_reexports[def_id] <- reexports);
if self.is_proc_macro {
// Encode this here because we don't do it in encode_def_ids.
record!(self.tables.expn_that_defined[def_id] <- tcx.expn_that_defined(local_def_id));
@@ -1310,26 +1298,30 @@ impl<'a, 'tcx> EncodeContext<'a, 'tcx> {
}
}
}));
+
+ if let Some(reexports) = tcx.module_reexports(local_def_id) {
+ assert!(!reexports.is_empty());
+ record_array!(self.tables.module_reexports[def_id] <- reexports);
+ }
}
}
- fn encode_struct_ctor(&mut self, adt_def: ty::AdtDef<'tcx>, def_id: DefId) {
- debug!("EncodeContext::encode_struct_ctor({:?})", def_id);
- let tcx = self.tcx;
+ fn encode_struct_ctor(&mut self, adt_def: ty::AdtDef<'tcx>) {
let variant = adt_def.non_enum_variant();
+ let Some((ctor_kind, def_id)) = variant.ctor else { return };
+ debug!("EncodeContext::encode_struct_ctor({:?})", def_id);
let data = VariantData {
- ctor_kind: variant.ctor_kind,
discr: variant.discr,
- ctor: Some(def_id.index),
+ ctor: Some((ctor_kind, def_id.index)),
is_non_exhaustive: variant.is_field_list_non_exhaustive(),
};
record!(self.tables.repr_options[def_id] <- adt_def.repr());
record!(self.tables.variant_data[def_id] <- data);
self.tables.constness.set(def_id.index, hir::Constness::Const);
- if variant.ctor_kind == CtorKind::Fn {
- record!(self.tables.fn_sig[def_id] <- tcx.fn_sig(def_id));
+ if ctor_kind == CtorKind::Fn {
+ record!(self.tables.fn_sig[def_id] <- self.tcx.fn_sig(def_id));
}
}
@@ -1543,30 +1535,25 @@ impl<'a, 'tcx> EncodeContext<'a, 'tcx> {
hir::ItemKind::Mod(ref m) => {
return self.encode_info_for_mod(item.owner_id.def_id, m);
}
- hir::ItemKind::OpaqueTy(..) => {
+ hir::ItemKind::OpaqueTy(ref opaque) => {
self.encode_explicit_item_bounds(def_id);
+ if matches!(opaque.origin, hir::OpaqueTyOrigin::TyAlias) {
+ self.tables.is_type_alias_impl_trait.set(def_id.index, ());
+ }
}
hir::ItemKind::Enum(..) => {
let adt_def = self.tcx.adt_def(def_id);
record!(self.tables.repr_options[def_id] <- adt_def.repr());
}
- hir::ItemKind::Struct(ref struct_def, _) => {
+ hir::ItemKind::Struct(..) => {
let adt_def = self.tcx.adt_def(def_id);
record!(self.tables.repr_options[def_id] <- adt_def.repr());
self.tables.constness.set(def_id.index, hir::Constness::Const);
- // Encode def_ids for each field and method
- // for methods, write all the stuff get_trait_method
- // needs to know
- let ctor = struct_def
- .ctor_hir_id()
- .map(|ctor_hir_id| self.tcx.hir().local_def_id(ctor_hir_id).local_def_index);
-
let variant = adt_def.non_enum_variant();
record!(self.tables.variant_data[def_id] <- VariantData {
- ctor_kind: variant.ctor_kind,
discr: variant.discr,
- ctor,
+ ctor: variant.ctor.map(|(kind, def_id)| (kind, def_id.index)),
is_non_exhaustive: variant.is_field_list_non_exhaustive(),
});
}
@@ -1576,9 +1563,8 @@ impl<'a, 'tcx> EncodeContext<'a, 'tcx> {
let variant = adt_def.non_enum_variant();
record!(self.tables.variant_data[def_id] <- VariantData {
- ctor_kind: variant.ctor_kind,
discr: variant.discr,
- ctor: None,
+ ctor: variant.ctor.map(|(kind, def_id)| (kind, def_id.index)),
is_non_exhaustive: variant.is_field_list_non_exhaustive(),
});
}
@@ -1631,7 +1617,7 @@ impl<'a, 'tcx> EncodeContext<'a, 'tcx> {
for variant in tcx.adt_def(def_id).variants() {
yield variant.def_id.index;
// Encode constructors which take a separate slot in value namespace.
- if let Some(ctor_def_id) = variant.ctor_def_id {
+ if let Some(ctor_def_id) = variant.ctor_def_id() {
yield ctor_def_id.index;
}
}
@@ -1674,21 +1660,14 @@ impl<'a, 'tcx> EncodeContext<'a, 'tcx> {
match item.kind {
hir::ItemKind::Enum(..) => {
let def = self.tcx.adt_def(item.owner_id.to_def_id());
- for (i, variant) in def.variants().iter_enumerated() {
+ for (i, _) in def.variants().iter_enumerated() {
self.encode_enum_variant_info(def, i);
-
- if let Some(_ctor_def_id) = variant.ctor_def_id {
- self.encode_enum_variant_ctor(def, i);
- }
+ self.encode_enum_variant_ctor(def, i);
}
}
- hir::ItemKind::Struct(ref struct_def, _) => {
+ hir::ItemKind::Struct(..) => {
let def = self.tcx.adt_def(item.owner_id.to_def_id());
- // If the struct has a constructor, encode it.
- if let Some(ctor_hir_id) = struct_def.ctor_hir_id() {
- let ctor_def_id = self.tcx.hir().local_def_id(ctor_hir_id);
- self.encode_struct_ctor(def, ctor_def_id.to_def_id());
- }
+ self.encode_struct_ctor(def);
}
hir::ItemKind::Impl { .. } => {
for &trait_item_def_id in
@@ -1708,12 +1687,12 @@ impl<'a, 'tcx> EncodeContext<'a, 'tcx> {
}
}
- fn encode_info_for_closure(&mut self, hir_id: hir::HirId) {
- let def_id = self.tcx.hir().local_def_id(hir_id);
- debug!("EncodeContext::encode_info_for_closure({:?})", def_id);
+ #[instrument(level = "debug", skip(self))]
+ fn encode_info_for_closure(&mut self, def_id: LocalDefId) {
// NOTE(eddyb) `tcx.type_of(def_id)` isn't used because it's fully generic,
// including on the signature, which is inferred in `typeck`.
let typeck_result: &'tcx ty::TypeckResults<'tcx> = self.tcx.typeck(def_id);
+ let hir_id = self.tcx.hir().local_def_id_to_hir_id(def_id);
let ty = typeck_result.node_type(hir_id);
match ty.kind() {
ty::Generator(..) => {
@@ -1905,22 +1884,15 @@ impl<'a, 'tcx> EncodeContext<'a, 'tcx> {
self.lazy_array(diagnostic_items.iter().map(|(&name, def_id)| (name, def_id.index)))
}
- fn encode_lang_items(&mut self) -> LazyArray<(DefIndex, usize)> {
+ fn encode_lang_items(&mut self) -> LazyArray<(DefIndex, LangItem)> {
empty_proc_macro!(self);
- let tcx = self.tcx;
- let lang_items = tcx.lang_items();
- let lang_items = lang_items.items().iter();
- self.lazy_array(lang_items.enumerate().filter_map(|(i, &opt_def_id)| {
- if let Some(def_id) = opt_def_id {
- if def_id.is_local() {
- return Some((def_id.index, i));
- }
- }
- None
+ let lang_items = self.tcx.lang_items().iter();
+ self.lazy_array(lang_items.filter_map(|(lang_item, def_id)| {
+ def_id.as_local().map(|id| (id.local_def_index, lang_item))
}))
}
- fn encode_lang_items_missing(&mut self) -> LazyArray<lang_items::LangItem> {
+ fn encode_lang_items_missing(&mut self) -> LazyArray<LangItem> {
empty_proc_macro!(self);
let tcx = self.tcx;
self.lazy_array(&tcx.lang_items().missing)
@@ -2108,11 +2080,10 @@ impl<'a, 'tcx> Visitor<'tcx> for EncodeContext<'a, 'tcx> {
impl<'a, 'tcx> EncodeContext<'a, 'tcx> {
fn encode_info_for_generics(&mut self, generics: &hir::Generics<'tcx>) {
for param in generics.params {
- let def_id = self.tcx.hir().local_def_id(param.hir_id);
match param.kind {
hir::GenericParamKind::Lifetime { .. } | hir::GenericParamKind::Type { .. } => {}
hir::GenericParamKind::Const { ref default, .. } => {
- let def_id = def_id.to_def_id();
+ let def_id = param.def_id.to_def_id();
if default.is_some() {
record!(self.tables.const_param_default[def_id] <- self.tcx.const_param_default(def_id))
}
@@ -2122,8 +2093,8 @@ impl<'a, 'tcx> EncodeContext<'a, 'tcx> {
}
fn encode_info_for_expr(&mut self, expr: &hir::Expr<'_>) {
- if let hir::ExprKind::Closure { .. } = expr.kind {
- self.encode_info_for_closure(expr.hir_id);
+ if let hir::ExprKind::Closure(closure) = expr.kind {
+ self.encode_info_for_closure(closure.def_id);
}
}
}
@@ -2196,7 +2167,7 @@ impl EncodedMetadata {
#[inline]
pub fn raw_data(&self) -> &[u8] {
- self.mmap.as_ref().map(|mmap| mmap.as_ref()).unwrap_or_default()
+ self.mmap.as_deref().unwrap_or_default()
}
}
diff --git a/compiler/rustc_metadata/src/rmeta/mod.rs b/compiler/rustc_metadata/src/rmeta/mod.rs
index 27dc8ff16..571804644 100644
--- a/compiler/rustc_metadata/src/rmeta/mod.rs
+++ b/compiler/rustc_metadata/src/rmeta/mod.rs
@@ -12,7 +12,7 @@ use rustc_hir as hir;
use rustc_hir::def::{CtorKind, DefKind};
use rustc_hir::def_id::{CrateNum, DefId, DefIndex, DefPathHash, StableCrateId};
use rustc_hir::definitions::DefKey;
-use rustc_hir::lang_items;
+use rustc_hir::lang_items::LangItem;
use rustc_index::bit_set::{BitSet, FiniteBitSet};
use rustc_index::vec::IndexVec;
use rustc_middle::metadata::ModChild;
@@ -223,6 +223,7 @@ pub(crate) struct CrateRoot {
panic_in_drop_strategy: PanicStrategy,
edition: Edition,
has_global_allocator: bool,
+ has_alloc_error_handler: bool,
has_panic_handler: bool,
has_default_lib_allocator: bool,
@@ -230,8 +231,8 @@ pub(crate) struct CrateRoot {
dylib_dependency_formats: LazyArray<Option<LinkagePreference>>,
lib_features: LazyArray<(Symbol, Option<Symbol>)>,
stability_implications: LazyArray<(Symbol, Symbol)>,
- lang_items: LazyArray<(DefIndex, usize)>,
- lang_items_missing: LazyArray<lang_items::LangItem>,
+ lang_items: LazyArray<(DefIndex, LangItem)>,
+ lang_items_missing: LazyArray<LangItem>,
diagnostic_items: LazyArray<(Symbol, DefIndex)>,
native_libraries: LazyArray<NativeLib>,
foreign_modules: LazyArray<ForeignModule>,
@@ -352,7 +353,7 @@ define_tables! {
explicit_predicates_of: Table<DefIndex, LazyValue<ty::GenericPredicates<'static>>>,
generics_of: Table<DefIndex, LazyValue<ty::Generics>>,
// As an optimization, a missing entry indicates an empty `&[]`.
- inferred_outlives_of: Table<DefIndex, LazyArray<(ty::Predicate<'static>, Span)>>,
+ inferred_outlives_of: Table<DefIndex, LazyArray<(ty::Clause<'static>, Span)>>,
super_predicates_of: Table<DefIndex, LazyValue<ty::GenericPredicates<'static>>>,
type_of: Table<DefIndex, LazyValue<Ty<'static>>>,
variances_of: Table<DefIndex, LazyArray<ty::Variance>>,
@@ -365,7 +366,7 @@ define_tables! {
mir_for_ctfe: Table<DefIndex, LazyValue<mir::Body<'static>>>,
promoted_mir: Table<DefIndex, LazyValue<IndexVec<mir::Promoted, mir::Body<'static>>>>,
// FIXME(compiler-errors): Why isn't this a LazyArray?
- thir_abstract_const: Table<DefIndex, LazyValue<&'static [ty::abstract_const::Node<'static>]>>,
+ thir_abstract_const: Table<DefIndex, LazyValue<ty::Const<'static>>>,
impl_parent: Table<DefIndex, RawDefId>,
impl_polarity: Table<DefIndex, ty::ImplPolarity>,
constness: Table<DefIndex, hir::Constness>,
@@ -399,20 +400,21 @@ define_tables! {
assoc_container: Table<DefIndex, ty::AssocItemContainer>,
// Slot is full when macro is macro_rules.
macro_rules: Table<DefIndex, ()>,
- macro_definition: Table<DefIndex, LazyValue<ast::MacArgs>>,
+ macro_definition: Table<DefIndex, LazyValue<ast::DelimArgs>>,
proc_macro: Table<DefIndex, MacroKind>,
module_reexports: Table<DefIndex, LazyArray<ModChild>>,
deduced_param_attrs: Table<DefIndex, LazyArray<DeducedParamAttrs>>,
+ // Slot is full when opaque is TAIT.
+ is_type_alias_impl_trait: Table<DefIndex, ()>,
trait_impl_trait_tys: Table<DefIndex, LazyValue<FxHashMap<DefId, Ty<'static>>>>,
}
#[derive(TyEncodable, TyDecodable)]
struct VariantData {
- ctor_kind: CtorKind,
discr: ty::VariantDiscr,
/// If this is unit or tuple-variant/struct, then this is the index of the ctor id.
- ctor: Option<DefIndex>,
+ ctor: Option<(CtorKind, DefIndex)>,
is_non_exhaustive: bool,
}
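
As context for the `VariantData` change above: a minimal, self-contained sketch (stand-in types only, not the rustc definitions) of why folding the separate `ctor_kind` field into `ctor: Option<(CtorKind, DefIndex)>` helps. The kind and the constructor's def-index now travel together, so the previously representable state "has a ctor kind but no ctor id" disappears and callers destructure both at once, as the encoder hunks above do.

// Minimal sketch with stand-in types modelled loosely after the rustc ones.
#[derive(Clone, Copy, PartialEq, Debug)]
enum CtorKind { Fn, Const }

#[derive(Clone, Copy, Debug)]
struct DefIndex(u32);

struct VariantData {
    ctor: Option<(CtorKind, DefIndex)>,
}

impl VariantData {
    // Accessors mirror the pattern used throughout the patch:
    // both pieces come from the same Option, so they cannot disagree.
    fn ctor_kind(&self) -> Option<CtorKind> {
        self.ctor.map(|(kind, _)| kind)
    }
    fn ctor_def_index(&self) -> Option<DefIndex> {
        self.ctor.map(|(_, idx)| idx)
    }
}

fn main() {
    let tuple_struct = VariantData { ctor: Some((CtorKind::Fn, DefIndex(7))) };
    let braced_struct = VariantData { ctor: None };
    assert_eq!(tuple_struct.ctor_kind(), Some(CtorKind::Fn));
    assert!(braced_struct.ctor_def_index().is_none());
}
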
diff --git a/compiler/rustc_metadata/src/rmeta/table.rs b/compiler/rustc_metadata/src/rmeta/table.rs
index e7c1abd12..29fe61107 100644
--- a/compiler/rustc_metadata/src/rmeta/table.rs
+++ b/compiler/rustc_metadata/src/rmeta/table.rs
@@ -101,10 +101,8 @@ fixed_size_enum! {
( Static(ast::Mutability::Mut) )
( Ctor(CtorOf::Struct, CtorKind::Fn) )
( Ctor(CtorOf::Struct, CtorKind::Const) )
- ( Ctor(CtorOf::Struct, CtorKind::Fictive) )
( Ctor(CtorOf::Variant, CtorKind::Fn) )
( Ctor(CtorOf::Variant, CtorKind::Const) )
- ( Ctor(CtorOf::Variant, CtorKind::Fictive) )
( Macro(MacroKind::Bang) )
( Macro(MacroKind::Attr) )
( Macro(MacroKind::Derive) )
diff --git a/compiler/rustc_middle/Cargo.toml b/compiler/rustc_middle/Cargo.toml
index de916ea8c..cf1ab47de 100644
--- a/compiler/rustc_middle/Cargo.toml
+++ b/compiler/rustc_middle/Cargo.toml
@@ -4,11 +4,11 @@ version = "0.0.0"
edition = "2021"
[lib]
-doctest = false
[dependencies]
bitflags = "1.2.1"
-chalk-ir = "0.80.0"
+chalk-ir = "0.87.0"
+derive_more = "0.99.17"
either = "1.5.0"
gsgdt = "0.1.2"
polonius-engine = "0.13.0"
@@ -32,7 +32,7 @@ rustc_span = { path = "../rustc_span" }
rustc_target = { path = "../rustc_target" }
rustc_type_ir = { path = "../rustc_type_ir" }
smallvec = { version = "1.8.1", features = ["union", "may_dangle"] }
-thin-vec = "0.2.8"
+thin-vec = "0.2.9"
tracing = "0.1"
[features]
diff --git a/compiler/rustc_middle/src/arena.rs b/compiler/rustc_middle/src/arena.rs
index f8aae86fe..6de68841f 100644
--- a/compiler/rustc_middle/src/arena.rs
+++ b/compiler/rustc_middle/src/arena.rs
@@ -1,3 +1,5 @@
+#![allow(rustc::usage_of_ty_tykind)]
+
/// This higher-order macro declares a list of types which can be allocated by `Arena`.
///
/// Specifying the `decode` modifier will add decode impls for `&T` and `&[T]` where `T` is the type
@@ -6,7 +8,7 @@
macro_rules! arena_types {
($macro:path) => (
$macro!([
- [] layout: rustc_target::abi::LayoutS<'tcx>,
+ [] layout: rustc_target::abi::LayoutS<rustc_target::abi::VariantIdx>,
[] fn_abi: rustc_target::abi::call::FnAbi<'tcx, rustc_middle::ty::Ty<'tcx>>,
// AdtDef are interned and compared by address
[decode] adt_def: rustc_middle::ty::AdtDefData,
@@ -28,6 +30,7 @@ macro_rules! arena_types {
[decode] typeck_results: rustc_middle::ty::TypeckResults<'tcx>,
[decode] borrowck_result:
rustc_middle::mir::BorrowCheckResult<'tcx>,
+ [] resolver: rustc_data_structures::steal::Steal<rustc_middle::ty::ResolverAstLowering>,
[decode] unsafety_check_result: rustc_middle::mir::UnsafetyCheckResult,
[decode] code_region: rustc_middle::mir::coverage::CodeRegion,
[] const_allocs: rustc_middle::mir::interpret::Allocation,
@@ -88,8 +91,8 @@ macro_rules! arena_types {
[] hir_id_set: rustc_hir::HirIdSet,
// Interned types
- [] tys: rustc_data_structures::intern::WithStableHash<rustc_middle::ty::TyS<'tcx>>,
- [] predicates: rustc_middle::ty::PredicateS<'tcx>,
+ [] tys: rustc_type_ir::WithCachedTypeInfo<rustc_middle::ty::TyKind<'tcx>>,
+ [] predicates: rustc_type_ir::WithCachedTypeInfo<rustc_middle::ty::PredicateKind<'tcx>>,
[] consts: rustc_middle::ty::ConstS<'tcx>,
// Note that this deliberately duplicates items in the `rustc_hir::arena`,
diff --git a/compiler/rustc_middle/src/error.rs b/compiler/rustc_middle/src/error.rs
index a7a7ac059..5e94da8cb 100644
--- a/compiler/rustc_middle/src/error.rs
+++ b/compiler/rustc_middle/src/error.rs
@@ -55,3 +55,20 @@ pub struct ConstEvalNonIntError {
#[primary_span]
pub span: Span,
}
+
+#[derive(Diagnostic)]
+#[diag(middle_strict_coherence_needs_negative_coherence)]
+pub(crate) struct StrictCoherenceNeedsNegativeCoherence {
+ #[primary_span]
+ pub span: Span,
+ #[label]
+ pub attr_span: Option<Span>,
+}
+
+#[derive(Diagnostic)]
+#[diag(middle_const_not_used_in_type_alias)]
+pub(super) struct ConstNotUsedTraitAlias {
+ pub ct: String,
+ #[primary_span]
+ pub span: Span,
+}
diff --git a/compiler/rustc_middle/src/hir/map/mod.rs b/compiler/rustc_middle/src/hir/map/mod.rs
index 83a4d16d7..1bd8f9535 100644
--- a/compiler/rustc_middle/src/hir/map/mod.rs
+++ b/compiler/rustc_middle/src/hir/map/mod.rs
@@ -245,15 +245,15 @@ impl<'hir> Map<'hir> {
},
Node::Variant(_) => DefKind::Variant,
Node::Ctor(variant_data) => {
- // FIXME(eddyb) is this even possible, if we have a `Node::Ctor`?
- assert_ne!(variant_data.ctor_hir_id(), None);
-
let ctor_of = match self.find(self.get_parent_node(hir_id)) {
Some(Node::Item(..)) => def::CtorOf::Struct,
Some(Node::Variant(..)) => def::CtorOf::Variant,
_ => unreachable!(),
};
- DefKind::Ctor(ctor_of, def::CtorKind::from_hir(variant_data))
+ match variant_data.ctor_kind() {
+ Some(kind) => DefKind::Ctor(ctor_of, kind),
+ None => bug!("constructor node without a constructor"),
+ }
}
Node::AnonConst(_) => {
let inline = match self.find(self.get_parent_node(hir_id)) {
@@ -353,6 +353,10 @@ impl<'hir> Map<'hir> {
node.node.generics()
}
+ pub fn owner(self, id: OwnerId) -> OwnerNode<'hir> {
+ self.tcx.hir_owner(id).unwrap_or_else(|| bug!("expected owner for {:?}", id)).node
+ }
+
pub fn item(self, id: ItemId) -> &'hir Item<'hir> {
self.tcx.hir_owner(id.owner_id).unwrap().node.expect_item()
}
@@ -822,8 +826,11 @@ impl<'hir> Map<'hir> {
)
}
- pub fn expect_owner(self, id: OwnerId) -> OwnerNode<'hir> {
- self.tcx.hir_owner(id).unwrap_or_else(|| bug!("expected owner for {:?}", id)).node
+ pub fn expect_owner(self, def_id: LocalDefId) -> OwnerNode<'hir> {
+ self.tcx
+ .hir_owner(OwnerId { def_id })
+ .unwrap_or_else(|| bug!("expected owner for {:?}", def_id))
+ .node
}
pub fn expect_item(self, id: LocalDefId) -> &'hir Item<'hir> {
@@ -1015,7 +1022,7 @@ impl<'hir> Map<'hir> {
..
}) => {
// Ensure that the returned span has the item's SyntaxContext.
- fn_decl_span.find_ancestor_in_same_ctxt(*span).unwrap_or(*span)
+ fn_decl_span.find_ancestor_inside(*span).unwrap_or(*span)
}
_ => self.span_with_body(hir_id),
};
@@ -1051,7 +1058,7 @@ impl<'hir> Map<'hir> {
Node::Arm(arm) => arm.span,
Node::Block(block) => block.span,
Node::Ctor(..) => self.span_with_body(self.get_parent_node(hir_id)),
- Node::Lifetime(lifetime) => lifetime.span,
+ Node::Lifetime(lifetime) => lifetime.ident.span,
Node::GenericParam(param) => param.span,
Node::Infer(i) => i.span,
Node::Local(local) => local.span,
@@ -1079,10 +1086,10 @@ impl<'hir> Map<'hir> {
/// Returns the HirId of `N` in `struct Foo<const N: usize = { ... }>` when
/// called with the HirId for the `{ ... }` anon const
- pub fn opt_const_param_default_param_hir_id(self, anon_const: HirId) -> Option<HirId> {
+ pub fn opt_const_param_default_param_def_id(self, anon_const: HirId) -> Option<LocalDefId> {
match self.get(self.get_parent_node(anon_const)) {
Node::GenericParam(GenericParam {
- hir_id: param_id,
+ def_id: param_id,
kind: GenericParamKind::Const { .. },
..
}) => Some(*param_id),
@@ -1191,20 +1198,7 @@ fn upstream_crates(tcx: TyCtxt<'_>) -> Vec<(StableCrateId, Svh)> {
fn hir_id_to_string(map: Map<'_>, id: HirId) -> String {
let id_str = format!(" (hir_id={})", id);
- let path_str = || {
- // This functionality is used for debugging, try to use `TyCtxt` to get
- // the user-friendly path, otherwise fall back to stringifying `DefPath`.
- crate::ty::tls::with_opt(|tcx| {
- if let Some(tcx) = tcx {
- let def_id = map.local_def_id(id);
- tcx.def_path_str(def_id.to_def_id())
- } else if let Some(path) = map.def_path_from_hir_id(id) {
- path.data.into_iter().map(|elem| elem.to_string()).collect::<Vec<_>>().join("::")
- } else {
- String::from("<missing path>")
- }
- })
- };
+ let path_str = |def_id: LocalDefId| map.tcx.def_path_str(def_id.to_def_id());
let span_str = || map.tcx.sess.source_map().span_to_snippet(map.span(id)).unwrap_or_default();
let node_str = |prefix| format!("{} {}{}", prefix, span_str(), id_str);
@@ -1236,18 +1230,19 @@ fn hir_id_to_string(map: Map<'_>, id: HirId) -> String {
ItemKind::TraitAlias(..) => "trait alias",
ItemKind::Impl { .. } => "impl",
};
- format!("{} {}{}", item_str, path_str(), id_str)
+ format!("{} {}{}", item_str, path_str(item.owner_id.def_id), id_str)
+ }
+ Some(Node::ForeignItem(item)) => {
+ format!("foreign item {}{}", path_str(item.owner_id.def_id), id_str)
+ }
+ Some(Node::ImplItem(ii)) => {
+ let kind = match ii.kind {
+ ImplItemKind::Const(..) => "assoc const",
+ ImplItemKind::Fn(..) => "method",
+ ImplItemKind::Type(_) => "assoc type",
+ };
+ format!("{} {} in {}{}", kind, ii.ident, path_str(ii.owner_id.def_id), id_str)
}
- Some(Node::ForeignItem(_)) => format!("foreign item {}{}", path_str(), id_str),
- Some(Node::ImplItem(ii)) => match ii.kind {
- ImplItemKind::Const(..) => {
- format!("assoc const {} in {}{}", ii.ident, path_str(), id_str)
- }
- ImplItemKind::Fn(..) => format!("method {} in {}{}", ii.ident, path_str(), id_str),
- ImplItemKind::Type(_) => {
- format!("assoc type {} in {}{}", ii.ident, path_str(), id_str)
- }
- },
Some(Node::TraitItem(ti)) => {
let kind = match ti.kind {
TraitItemKind::Const(..) => "assoc constant",
@@ -1255,13 +1250,13 @@ fn hir_id_to_string(map: Map<'_>, id: HirId) -> String {
TraitItemKind::Type(..) => "assoc type",
};
- format!("{} {} in {}{}", kind, ti.ident, path_str(), id_str)
+ format!("{} {} in {}{}", kind, ti.ident, path_str(ti.owner_id.def_id), id_str)
}
Some(Node::Variant(ref variant)) => {
- format!("variant {} in {}{}", variant.ident, path_str(), id_str)
+ format!("variant {} in {}{}", variant.ident, path_str(variant.def_id), id_str)
}
Some(Node::Field(ref field)) => {
- format!("field {} in {}{}", field.ident, path_str(), id_str)
+ format!("field {} in {}{}", field.ident, path_str(field.def_id), id_str)
}
Some(Node::AnonConst(_)) => node_str("const"),
Some(Node::Expr(_)) => node_str("expr"),
@@ -1278,9 +1273,15 @@ fn hir_id_to_string(map: Map<'_>, id: HirId) -> String {
Some(Node::Block(_)) => node_str("block"),
Some(Node::Infer(_)) => node_str("infer"),
Some(Node::Local(_)) => node_str("local"),
- Some(Node::Ctor(..)) => format!("ctor {}{}", path_str(), id_str),
+ Some(Node::Ctor(ctor)) => format!(
+ "ctor {}{}",
+ ctor.ctor_def_id().map_or("<missing path>".into(), |def_id| path_str(def_id)),
+ id_str
+ ),
Some(Node::Lifetime(_)) => node_str("lifetime"),
- Some(Node::GenericParam(ref param)) => format!("generic_param {:?}{}", param, id_str),
+ Some(Node::GenericParam(ref param)) => {
+ format!("generic_param {}{}", path_str(param.def_id), id_str)
+ }
Some(Node::Crate(..)) => String::from("root_crate"),
None => format!("unknown node{}", id_str),
}
@@ -1400,13 +1401,13 @@ impl<'hir> Visitor<'hir> for ItemCollector<'hir> {
}
fn visit_anon_const(&mut self, c: &'hir AnonConst) {
- self.body_owners.push(self.tcx.hir().local_def_id(c.hir_id));
+ self.body_owners.push(c.def_id);
intravisit::walk_anon_const(self, c)
}
fn visit_expr(&mut self, ex: &'hir Expr<'hir>) {
- if matches!(ex.kind, ExprKind::Closure { .. }) {
- self.body_owners.push(self.tcx.hir().local_def_id(ex.hir_id));
+ if let ExprKind::Closure(closure) = ex.kind {
+ self.body_owners.push(closure.def_id);
}
intravisit::walk_expr(self, ex)
}
diff --git a/compiler/rustc_middle/src/hir/mod.rs b/compiler/rustc_middle/src/hir/mod.rs
index 1c6264ad0..02fd03c02 100644
--- a/compiler/rustc_middle/src/hir/mod.rs
+++ b/compiler/rustc_middle/src/hir/mod.rs
@@ -133,13 +133,8 @@ pub fn provide(providers: &mut Providers) {
// Accessing the local_parent is ok since its value is hashed as part of `id`'s DefPathHash.
tcx.opt_local_parent(id.def_id).map_or(CRATE_HIR_ID, |parent| {
let mut parent_hir_id = tcx.hir().local_def_id_to_hir_id(parent);
- if let Some(local_id) = tcx.hir_crate(()).owners[parent_hir_id.owner.def_id]
- .unwrap()
- .parenting
- .get(&id.def_id)
- {
- parent_hir_id.local_id = *local_id;
- }
+ parent_hir_id.local_id =
+ tcx.hir_crate(()).owners[parent_hir_id.owner.def_id].unwrap().parenting[&id.def_id];
parent_hir_id
})
};
diff --git a/compiler/rustc_middle/src/infer/canonical.rs b/compiler/rustc_middle/src/infer/canonical.rs
index d3cf519b6..0331d764b 100644
--- a/compiler/rustc_middle/src/infer/canonical.rs
+++ b/compiler/rustc_middle/src/infer/canonical.rs
@@ -336,15 +336,17 @@ impl<'tcx> CanonicalVarValues<'tcx> {
tcx.mk_ty(ty::Bound(ty::INNERMOST, ty::BoundVar::from_u32(i).into())).into()
}
GenericArgKind::Lifetime(..) => {
- let br =
- ty::BoundRegion { var: ty::BoundVar::from_u32(i), kind: ty::BrAnon(i) };
+ let br = ty::BoundRegion {
+ var: ty::BoundVar::from_u32(i),
+ kind: ty::BrAnon(i, None),
+ };
tcx.mk_region(ty::ReLateBound(ty::INNERMOST, br)).into()
}
GenericArgKind::Const(ct) => tcx
- .mk_const(ty::ConstS {
- ty: ct.ty(),
- kind: ty::ConstKind::Bound(ty::INNERMOST, ty::BoundVar::from_u32(i)),
- })
+ .mk_const(
+ ty::ConstKind::Bound(ty::INNERMOST, ty::BoundVar::from_u32(i)),
+ ct.ty(),
+ )
.into(),
})
.collect(),
diff --git a/compiler/rustc_middle/src/lib.rs b/compiler/rustc_middle/src/lib.rs
index a58cbc376..7e4063c2f 100644
--- a/compiler/rustc_middle/src/lib.rs
+++ b/compiler/rustc_middle/src/lib.rs
@@ -30,8 +30,10 @@
#![feature(core_intrinsics)]
#![feature(discriminant_kind)]
#![feature(exhaustive_patterns)]
+#![feature(generators)]
#![feature(get_mut_unchecked)]
#![feature(if_let_guard)]
+#![feature(iter_from_generator)]
#![feature(negative_impls)]
#![feature(never_type)]
#![feature(extern_types)]
@@ -43,7 +45,6 @@
#![feature(type_alias_impl_trait)]
#![feature(associated_type_bounds)]
#![feature(rustc_attrs)]
-#![cfg_attr(bootstrap, feature(half_open_range_patterns))]
#![feature(control_flow_enum)]
#![feature(associated_type_defaults)]
#![feature(trusted_step)]
diff --git a/compiler/rustc_middle/src/lint.rs b/compiler/rustc_middle/src/lint.rs
index 79522bd0b..51df42f6d 100644
--- a/compiler/rustc_middle/src/lint.rs
+++ b/compiler/rustc_middle/src/lint.rs
@@ -276,7 +276,7 @@ pub fn explain_lint_level_source(
/// The innermost function for emitting lints.
///
-/// If you are loocking to implement a lint, look for higher level functions,
+/// If you are looking to implement a lint, look for higher level functions,
/// for example:
/// - [`TyCtxt::emit_spanned_lint`]
/// - [`TyCtxt::struct_span_lint_hir`]
diff --git a/compiler/rustc_middle/src/middle/codegen_fn_attrs.rs b/compiler/rustc_middle/src/middle/codegen_fn_attrs.rs
index 45d33a165..bea884c85 100644
--- a/compiler/rustc_middle/src/middle/codegen_fn_attrs.rs
+++ b/compiler/rustc_middle/src/middle/codegen_fn_attrs.rs
@@ -26,8 +26,10 @@ pub struct CodegenFnAttrs {
/// The `#[target_feature(enable = "...")]` attribute and the enabled
/// features (only enabled features are supported right now).
pub target_features: Vec<Symbol>,
- /// The `#[linkage = "..."]` attribute and the value we found.
+ /// The `#[linkage = "..."]` attribute on Rust-defined items and the value we found.
pub linkage: Option<Linkage>,
+ /// The `#[linkage = "..."]` attribute on foreign items and the value we found.
+ pub import_linkage: Option<Linkage>,
/// The `#[link_section = "..."]` attribute, or what executable section this
/// should be placed in.
pub link_section: Option<Symbol>,
@@ -113,6 +115,7 @@ impl CodegenFnAttrs {
link_ordinal: None,
target_features: vec![],
linkage: None,
+ import_linkage: None,
link_section: None,
no_sanitize: SanitizerSet::empty(),
instruction_set: None,
diff --git a/compiler/rustc_middle/src/middle/lang_items.rs b/compiler/rustc_middle/src/middle/lang_items.rs
index 31c20fa14..343ea1f00 100644
--- a/compiler/rustc_middle/src/middle/lang_items.rs
+++ b/compiler/rustc_middle/src/middle/lang_items.rs
@@ -27,7 +27,10 @@ impl<'tcx> TyCtxt<'tcx> {
})
}
- pub fn fn_trait_kind_from_lang_item(self, id: DefId) -> Option<ty::ClosureKind> {
+    /// Given the [`DefId`] of one of the [`Fn`], [`FnMut`] or [`FnOnce`] traits,
+    /// returns the corresponding [`ty::ClosureKind`].
+    /// For any other [`DefId`], returns `None`.
+ pub fn fn_trait_kind_from_def_id(self, id: DefId) -> Option<ty::ClosureKind> {
let items = self.lang_items();
match Some(id) {
x if x == items.fn_trait() => Some(ty::ClosureKind::Fn),
@@ -37,8 +40,9 @@ impl<'tcx> TyCtxt<'tcx> {
}
}
- pub fn is_weak_lang_item(self, item_def_id: DefId) -> bool {
- self.lang_items().is_weak_lang_item(item_def_id)
+    /// Returns `true` if `id` is the `DefId` of one of the [`Fn`], [`FnMut`] or [`FnOnce`] traits.
+ pub fn is_fn_trait(self, id: DefId) -> bool {
+ self.fn_trait_kind_from_def_id(id).is_some()
}
}
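
To make the contract of the renamed helpers concrete, here is a small, self-contained model of the mapping they promise: the `Fn`/`FnMut`/`FnOnce` trait ids map to a `ClosureKind`, any other id maps to `None`, and `is_fn_trait` simply asks whether that mapping succeeds. The `DefId`, `ClosureKind` and `LangItems` types below are stand-ins for illustration, not the rustc definitions.

#[derive(Clone, Copy, PartialEq, Debug)]
enum ClosureKind { Fn, FnMut, FnOnce }

#[derive(Clone, Copy, PartialEq, Debug)]
struct DefId(u32);

// Stand-in for the lang-item table: which DefId, if any, each Fn* trait resolved to.
struct LangItems {
    fn_trait: Option<DefId>,
    fn_mut_trait: Option<DefId>,
    fn_once_trait: Option<DefId>,
}

fn fn_trait_kind_from_def_id(items: &LangItems, id: DefId) -> Option<ClosureKind> {
    // Same shape as the match in the patch: compare against each lang item in turn.
    match Some(id) {
        x if x == items.fn_trait => Some(ClosureKind::Fn),
        x if x == items.fn_mut_trait => Some(ClosureKind::FnMut),
        x if x == items.fn_once_trait => Some(ClosureKind::FnOnce),
        _ => None,
    }
}

fn is_fn_trait(items: &LangItems, id: DefId) -> bool {
    fn_trait_kind_from_def_id(items, id).is_some()
}

fn main() {
    let items = LangItems {
        fn_trait: Some(DefId(1)),
        fn_mut_trait: Some(DefId(2)),
        fn_once_trait: Some(DefId(3)),
    };
    assert_eq!(fn_trait_kind_from_def_id(&items, DefId(2)), Some(ClosureKind::FnMut));
    assert!(!is_fn_trait(&items, DefId(42)));
}
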
diff --git a/compiler/rustc_middle/src/middle/privacy.rs b/compiler/rustc_middle/src/middle/privacy.rs
index 9c68c7504..fc08d58cc 100644
--- a/compiler/rustc_middle/src/middle/privacy.rs
+++ b/compiler/rustc_middle/src/middle/privacy.rs
@@ -1,12 +1,12 @@
//! A pass that checks to make sure private fields and methods aren't used
//! outside their scopes. This pass will also generate a set of exported items
//! which are available for use externally when compiled as a library.
-use crate::ty::{DefIdTree, Visibility};
+use crate::ty::{DefIdTree, TyCtxt, Visibility};
use rustc_data_structures::fx::FxHashMap;
use rustc_data_structures::stable_hasher::{HashStable, StableHasher};
use rustc_macros::HashStable;
use rustc_query_system::ich::StableHashingContext;
-use rustc_span::def_id::{DefId, LocalDefId};
+use rustc_span::def_id::LocalDefId;
use std::hash::Hash;
/// Represents the levels of effective visibility an item can have.
@@ -75,33 +75,33 @@ impl EffectiveVisibility {
}
/// Holds a map of effective visibilities for reachable HIR nodes.
-#[derive(Debug, Clone)]
+#[derive(Clone, Debug)]
pub struct EffectiveVisibilities<Id = LocalDefId> {
map: FxHashMap<Id, EffectiveVisibility>,
}
-impl<Id: Hash + Eq + Copy> EffectiveVisibilities<Id> {
- pub fn is_public_at_level(&self, id: Id, level: Level) -> bool {
+impl EffectiveVisibilities {
+ pub fn is_public_at_level(&self, id: LocalDefId, level: Level) -> bool {
self.effective_vis(id)
.map_or(false, |effective_vis| effective_vis.is_public_at_level(level))
}
/// See `Level::Reachable`.
- pub fn is_reachable(&self, id: Id) -> bool {
+ pub fn is_reachable(&self, id: LocalDefId) -> bool {
self.is_public_at_level(id, Level::Reachable)
}
/// See `Level::Reexported`.
- pub fn is_exported(&self, id: Id) -> bool {
+ pub fn is_exported(&self, id: LocalDefId) -> bool {
self.is_public_at_level(id, Level::Reexported)
}
/// See `Level::Direct`.
- pub fn is_directly_public(&self, id: Id) -> bool {
+ pub fn is_directly_public(&self, id: LocalDefId) -> bool {
self.is_public_at_level(id, Level::Direct)
}
- pub fn public_at_level(&self, id: Id) -> Option<Level> {
+ pub fn public_at_level(&self, id: LocalDefId) -> Option<Level> {
self.effective_vis(id).and_then(|effective_vis| {
for level in Level::all_levels() {
if effective_vis.is_public_at_level(level) {
@@ -112,31 +112,42 @@ impl<Id: Hash + Eq + Copy> EffectiveVisibilities<Id> {
})
}
- pub fn effective_vis(&self, id: Id) -> Option<&EffectiveVisibility> {
- self.map.get(&id)
- }
-
- pub fn iter(&self) -> impl Iterator<Item = (&Id, &EffectiveVisibility)> {
- self.map.iter()
- }
-
- pub fn map_id<OutId: Hash + Eq + Copy>(
- &self,
- f: impl Fn(Id) -> OutId,
- ) -> EffectiveVisibilities<OutId> {
- EffectiveVisibilities { map: self.map.iter().map(|(k, v)| (f(*k), *v)).collect() }
+ // FIXME: Share code with `fn update`.
+ pub fn update_eff_vis(
+ &mut self,
+ def_id: LocalDefId,
+ eff_vis: &EffectiveVisibility,
+ tree: impl DefIdTree,
+ ) {
+ use std::collections::hash_map::Entry;
+ match self.map.entry(def_id) {
+ Entry::Occupied(mut occupied) => {
+ let old_eff_vis = occupied.get_mut();
+ for l in Level::all_levels() {
+ let vis_at_level = eff_vis.at_level(l);
+ let old_vis_at_level = old_eff_vis.at_level_mut(l);
+ if vis_at_level != old_vis_at_level
+ && vis_at_level.is_at_least(*old_vis_at_level, tree)
+ {
+ *old_vis_at_level = *vis_at_level
+ }
+ }
+ old_eff_vis
+ }
+ Entry::Vacant(vacant) => vacant.insert(*eff_vis),
+ };
}
pub fn set_public_at_level(
&mut self,
- id: Id,
- default_vis: impl FnOnce() -> Visibility,
+ id: LocalDefId,
+ lazy_private_vis: impl FnOnce() -> Visibility,
level: Level,
) {
let mut effective_vis = self
.effective_vis(id)
.copied()
- .unwrap_or_else(|| EffectiveVisibility::from_vis(default_vis()));
+ .unwrap_or_else(|| EffectiveVisibility::from_vis(lazy_private_vis()));
for l in Level::all_levels() {
if l <= level {
*effective_vis.at_level_mut(l) = Visibility::Public;
@@ -144,61 +155,123 @@ impl<Id: Hash + Eq + Copy> EffectiveVisibilities<Id> {
}
self.map.insert(id, effective_vis);
}
+
+ pub fn check_invariants(&self, tcx: TyCtxt<'_>, early: bool) {
+ if !cfg!(debug_assertions) {
+ return;
+ }
+ for (&def_id, ev) in &self.map {
+            // More direct visibility levels can never go farther than less direct ones,
+            // no effective visibility can go farther than the nominal visibility,
+            // and every effective visibility is at least as wide as the private visibility.
+ let private_vis = Visibility::Restricted(tcx.parent_module_from_def_id(def_id));
+ let span = tcx.def_span(def_id.to_def_id());
+ if !ev.direct.is_at_least(private_vis, tcx) {
+ span_bug!(span, "private {:?} > direct {:?}", private_vis, ev.direct);
+ }
+ if !ev.reexported.is_at_least(ev.direct, tcx) {
+ span_bug!(span, "direct {:?} > reexported {:?}", ev.direct, ev.reexported);
+ }
+ if !ev.reachable.is_at_least(ev.reexported, tcx) {
+ span_bug!(span, "reexported {:?} > reachable {:?}", ev.reexported, ev.reachable);
+ }
+ if !ev.reachable_through_impl_trait.is_at_least(ev.reachable, tcx) {
+ span_bug!(
+ span,
+ "reachable {:?} > reachable_through_impl_trait {:?}",
+ ev.reachable,
+ ev.reachable_through_impl_trait
+ );
+ }
+ let nominal_vis = tcx.visibility(def_id);
+ // FIXME: `rustc_privacy` is not yet updated for the new logic and can set
+ // effective visibilities that are larger than the nominal one.
+ if !nominal_vis.is_at_least(ev.reachable_through_impl_trait, tcx) && early {
+ span_bug!(
+ span,
+ "{:?}: reachable_through_impl_trait {:?} > nominal {:?}",
+ def_id,
+ ev.reachable_through_impl_trait,
+ nominal_vis
+ );
+ }
+ }
+ }
+}
+
+pub trait IntoDefIdTree {
+ type Tree: DefIdTree;
+ fn tree(self) -> Self::Tree;
}
-impl<Id: Hash + Eq + Copy + Into<DefId>> EffectiveVisibilities<Id> {
- // `parent_id` is not necessarily a parent in source code tree,
- // it is the node from which the maximum effective visibility is inherited.
- pub fn update(
+impl<Id: Eq + Hash> EffectiveVisibilities<Id> {
+ pub fn iter(&self) -> impl Iterator<Item = (&Id, &EffectiveVisibility)> {
+ self.map.iter()
+ }
+
+ pub fn effective_vis(&self, id: Id) -> Option<&EffectiveVisibility> {
+ self.map.get(&id)
+ }
+
+ // FIXME: Share code with `fn update`.
+ pub fn effective_vis_or_private(
+ &mut self,
+ id: Id,
+ lazy_private_vis: impl FnOnce() -> Visibility,
+ ) -> &EffectiveVisibility {
+ self.map.entry(id).or_insert_with(|| EffectiveVisibility::from_vis(lazy_private_vis()))
+ }
+
+ pub fn update<T: IntoDefIdTree>(
&mut self,
id: Id,
nominal_vis: Visibility,
- default_vis: impl FnOnce() -> Visibility,
- parent_id: Id,
+ lazy_private_vis: impl FnOnce(T) -> (Visibility, T),
+ inherited_effective_vis: EffectiveVisibility,
level: Level,
- tree: impl DefIdTree,
+ mut into_tree: T,
) -> bool {
let mut changed = false;
- let mut current_effective_vis = self.effective_vis(id).copied().unwrap_or_else(|| {
- if id.into().is_crate_root() {
- EffectiveVisibility::from_vis(Visibility::Public)
- } else {
- EffectiveVisibility::from_vis(default_vis())
+ let mut current_effective_vis = match self.map.get(&id).copied() {
+ Some(eff_vis) => eff_vis,
+ None => {
+ let private_vis;
+ (private_vis, into_tree) = lazy_private_vis(into_tree);
+ EffectiveVisibility::from_vis(private_vis)
}
- });
- if let Some(inherited_effective_vis) = self.effective_vis(parent_id) {
- let mut inherited_effective_vis_at_prev_level =
- *inherited_effective_vis.at_level(level);
- let mut calculated_effective_vis = inherited_effective_vis_at_prev_level;
- for l in Level::all_levels() {
- if level >= l {
- let inherited_effective_vis_at_level = *inherited_effective_vis.at_level(l);
- let current_effective_vis_at_level = current_effective_vis.at_level_mut(l);
- // effective visibility for id shouldn't be recalculated if
- // inherited from parent_id effective visibility isn't changed at next level
- if !(inherited_effective_vis_at_prev_level == inherited_effective_vis_at_level
- && level != l)
- {
- calculated_effective_vis =
- if nominal_vis.is_at_least(inherited_effective_vis_at_level, tree) {
- inherited_effective_vis_at_level
- } else {
- nominal_vis
- };
- }
- // effective visibility can't be decreased at next update call for the
- // same id
- if *current_effective_vis_at_level != calculated_effective_vis
- && calculated_effective_vis
- .is_at_least(*current_effective_vis_at_level, tree)
- {
- changed = true;
- *current_effective_vis_at_level = calculated_effective_vis;
- }
- inherited_effective_vis_at_prev_level = inherited_effective_vis_at_level;
+ };
+ let tree = into_tree.tree();
+
+ let mut inherited_effective_vis_at_prev_level = *inherited_effective_vis.at_level(level);
+ let mut calculated_effective_vis = inherited_effective_vis_at_prev_level;
+ for l in Level::all_levels() {
+ if level >= l {
+ let inherited_effective_vis_at_level = *inherited_effective_vis.at_level(l);
+ let current_effective_vis_at_level = current_effective_vis.at_level_mut(l);
+                // The effective visibility for `id` shouldn't be recalculated if the effective
+                // visibility inherited from `parent_id` hasn't changed at the next level.
+ if !(inherited_effective_vis_at_prev_level == inherited_effective_vis_at_level
+ && level != l)
+ {
+ calculated_effective_vis =
+ if nominal_vis.is_at_least(inherited_effective_vis_at_level, tree) {
+ inherited_effective_vis_at_level
+ } else {
+ nominal_vis
+ };
+ }
+                // The effective visibility can't be decreased by a later `update` call
+                // for the same id.
+ if *current_effective_vis_at_level != calculated_effective_vis
+ && calculated_effective_vis.is_at_least(*current_effective_vis_at_level, tree)
+ {
+ changed = true;
+ *current_effective_vis_at_level = calculated_effective_vis;
}
+ inherited_effective_vis_at_prev_level = inherited_effective_vis_at_level;
}
}
+
self.map.insert(id, current_effective_vis);
changed
}
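
The core of both `update` and `update_eff_vis` is a per-level, only-widening merge. The toy model below shows that invariant under a simplifying assumption: `Vis` here is totally ordered, whereas rustc's `Visibility::is_at_least` compares restricted scopes through the def tree and is only a partial order. The `Vis` and `EffVis` types are stand-ins, not rustc code; the point is that a stored visibility at any level is never lowered by later updates.

// Simplified model: visibilities form a total order here, unlike rustc's
// `Visibility`, whose `is_at_least` consults the module tree.
#[derive(Clone, Copy, PartialEq, PartialOrd, Debug)]
enum Vis { Private, Crate, Public }

// One slot per level (Direct, Reexported, Reachable, ReachableThroughImplTrait).
#[derive(Clone, Copy, PartialEq, Debug)]
struct EffVis([Vis; 4]);

impl EffVis {
    // Mirror of the widening loop: each level may only grow, never shrink.
    fn merge(&mut self, other: &EffVis) -> bool {
        let mut changed = false;
        for (old, new) in self.0.iter_mut().zip(other.0.iter()) {
            if *new > *old {
                *old = *new;
                changed = true;
            }
        }
        changed
    }
}

fn main() {
    let mut ev = EffVis([Vis::Private; 4]);
    let update = EffVis([Vis::Private, Vis::Crate, Vis::Public, Vis::Public]);
    assert!(ev.merge(&update));
    // A second, weaker update changes nothing: the merge is monotone.
    assert!(!ev.merge(&EffVis([Vis::Private; 4])));
    assert_eq!(ev, EffVis([Vis::Private, Vis::Crate, Vis::Public, Vis::Public]));
}
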
diff --git a/compiler/rustc_middle/src/mir/generic_graphviz.rs b/compiler/rustc_middle/src/mir/generic_graphviz.rs
index 11ac45943..ccae7e159 100644
--- a/compiler/rustc_middle/src/mir/generic_graphviz.rs
+++ b/compiler/rustc_middle/src/mir/generic_graphviz.rs
@@ -126,7 +126,7 @@ impl<
write!(
w,
r#"<tr><td align="left" balign="left">{}</td></tr>"#,
- dot::escape_html(&section).replace('\n', "<br/>")
+ dot::escape_html(&section)
)?;
}
@@ -147,7 +147,7 @@ impl<
let src = self.node(source);
let trg = self.node(target);
let escaped_edge_label = if let Some(edge_label) = edge_labels.get(index) {
- dot::escape_html(edge_label).replace('\n', r#"<br align="left"/>"#)
+ dot::escape_html(edge_label)
} else {
"".to_owned()
};
@@ -162,8 +162,7 @@ impl<
where
W: Write,
{
- let lines = label.split('\n').map(|s| dot::escape_html(s)).collect::<Vec<_>>();
- let escaped_label = lines.join(r#"<br align="left"/>"#);
+ let escaped_label = dot::escape_html(label);
writeln!(w, r#" label=<<br/><br/>{}<br align="left"/><br/><br/><br/>>;"#, escaped_label)
}
diff --git a/compiler/rustc_middle/src/mir/interpret/allocation.rs b/compiler/rustc_middle/src/mir/interpret/allocation.rs
index 37ec04b07..221105ac4 100644
--- a/compiler/rustc_middle/src/mir/interpret/allocation.rs
+++ b/compiler/rustc_middle/src/mir/interpret/allocation.rs
@@ -1,16 +1,20 @@
//! The virtual memory representation of the MIR interpreter.
+mod init_mask;
+mod provenance_map;
+#[cfg(test)]
+mod tests;
+
use std::borrow::Cow;
-use std::convert::{TryFrom, TryInto};
use std::fmt;
use std::hash;
-use std::iter;
-use std::ops::{Deref, Range};
+use std::ops::Range;
use std::ptr;
+use either::{Left, Right};
+
use rustc_ast::Mutability;
use rustc_data_structures::intern::Interned;
-use rustc_data_structures::sorted_map::SortedMap;
use rustc_span::DUMMY_SP;
use rustc_target::abi::{Align, HasDataLayout, Size};
@@ -20,6 +24,10 @@ use super::{
UnsupportedOpInfo,
};
use crate::ty;
+use init_mask::*;
+use provenance_map::*;
+
+pub use init_mask::{InitChunk, InitChunkIter};
/// This type represents an Allocation in the Miri/CTFE core engine.
///
@@ -28,9 +36,9 @@ use crate::ty;
/// module provides higher-level access.
// Note: for performance reasons when interning, some of the `Allocation` fields can be partially
// hashed. (see the `Hash` impl below for more details), so the impl is not derived.
-#[derive(Clone, Debug, Eq, PartialEq, PartialOrd, Ord, TyEncodable, TyDecodable)]
+#[derive(Clone, Eq, PartialEq, TyEncodable, TyDecodable)]
#[derive(HashStable)]
-pub struct Allocation<Prov = AllocId, Extra = ()> {
+pub struct Allocation<Prov: Provenance = AllocId, Extra = ()> {
/// The actual bytes of the allocation.
/// Note that the bytes of a pointer represent the offset of the pointer.
bytes: Box<[u8]>,
@@ -95,27 +103,25 @@ impl hash::Hash for Allocation {
/// Interned types generally have an `Outer` type and an `Inner` type, where
/// `Outer` is a newtype around `Interned<Inner>`, and all the operations are
/// done on `Outer`, because all occurrences are interned. E.g. `Ty` is an
-/// outer type and `TyS` is its inner type.
+/// outer type and `TyKind` is its inner type.
///
/// Here things are different because only const allocations are interned. This
/// means that both the inner type (`Allocation`) and the outer type
/// (`ConstAllocation`) are used quite a bit.
-#[derive(Copy, Clone, PartialEq, Eq, PartialOrd, Ord, Hash, HashStable)]
+#[derive(Copy, Clone, PartialEq, Eq, Hash, HashStable)]
#[rustc_pass_by_value]
-pub struct ConstAllocation<'tcx, Prov = AllocId, Extra = ()>(
- pub Interned<'tcx, Allocation<Prov, Extra>>,
-);
+pub struct ConstAllocation<'tcx>(pub Interned<'tcx, Allocation>);
impl<'tcx> fmt::Debug for ConstAllocation<'tcx> {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
- // This matches how `Allocation` is printed. We print it like this to
- // avoid having to update expected output in a lot of tests.
- write!(f, "{:?}", self.inner())
+ // The debug representation of this is very verbose and basically useless,
+ // so don't print it.
+ write!(f, "ConstAllocation {{ .. }}")
}
}
-impl<'tcx, Prov, Extra> ConstAllocation<'tcx, Prov, Extra> {
- pub fn inner(self) -> &'tcx Allocation<Prov, Extra> {
+impl<'tcx> ConstAllocation<'tcx> {
+ pub fn inner(self) -> &'tcx Allocation {
self.0.0
}
}
@@ -183,12 +189,21 @@ pub fn alloc_range(start: Size, size: Size) -> AllocRange {
AllocRange { start, size }
}
-impl AllocRange {
+impl From<Range<Size>> for AllocRange {
#[inline]
- pub fn from(r: Range<Size>) -> Self {
+ fn from(r: Range<Size>) -> Self {
alloc_range(r.start, r.end - r.start) // `Size` subtraction (overflow-checked)
}
+}
+impl From<Range<usize>> for AllocRange {
+ #[inline]
+ fn from(r: Range<usize>) -> Self {
+ AllocRange::from(Size::from_bytes(r.start)..Size::from_bytes(r.end))
+ }
+}
+
+impl AllocRange {
#[inline(always)]
pub fn end(self) -> Size {
self.start + self.size // This does overflow checking.
@@ -205,7 +220,7 @@ impl AllocRange {
}
// The constructors are all without extra; the extra gets added by a machine hook later.
-impl<Prov> Allocation<Prov> {
+impl<Prov: Provenance> Allocation<Prov> {
/// Creates an allocation initialized by the given bytes
pub fn from_bytes<'a>(
slice: impl Into<Cow<'a, [u8]>>,
@@ -263,7 +278,7 @@ impl<Prov> Allocation<Prov> {
impl Allocation {
/// Adjust allocation from the ones in tcx to a custom Machine instance
/// with a different Provenance and Extra type.
- pub fn adjust_from_tcx<Prov, Extra, Err>(
+ pub fn adjust_from_tcx<Prov: Provenance, Extra, Err>(
self,
cx: &impl HasDataLayout,
extra: Extra,
@@ -271,10 +286,10 @@ impl Allocation {
) -> Result<Allocation<Prov, Extra>, Err> {
// Compute new pointer provenance, which also adjusts the bytes.
let mut bytes = self.bytes;
- let mut new_provenance = Vec::with_capacity(self.provenance.0.len());
+ let mut new_provenance = Vec::with_capacity(self.provenance.ptrs().len());
let ptr_size = cx.data_layout().pointer_size.bytes_usize();
let endian = cx.data_layout().endian;
- for &(offset, alloc_id) in self.provenance.iter() {
+ for &(offset, alloc_id) in self.provenance.ptrs().iter() {
let idx = offset.bytes_usize();
let ptr_bytes = &mut bytes[idx..idx + ptr_size];
let bits = read_target_uint(endian, ptr_bytes).unwrap();
@@ -286,7 +301,7 @@ impl Allocation {
// Create allocation.
Ok(Allocation {
bytes,
- provenance: ProvenanceMap::from_presorted(new_provenance),
+ provenance: ProvenanceMap::from_presorted_ptrs(new_provenance),
init_mask: self.init_mask,
align: self.align,
mutability: self.mutability,
@@ -296,7 +311,7 @@ impl Allocation {
}
/// Raw accessors. Provide access to otherwise private bytes.
-impl<Prov, Extra> Allocation<Prov, Extra> {
+impl<Prov: Provenance, Extra> Allocation<Prov, Extra> {
pub fn len(&self) -> usize {
self.bytes.len()
}
@@ -349,9 +364,14 @@ impl<Prov: Provenance, Extra> Allocation<Prov, Extra> {
cx: &impl HasDataLayout,
range: AllocRange,
) -> AllocResult<&[u8]> {
- self.check_init(range)?;
+ self.init_mask.is_range_initialized(range).map_err(|uninit_range| {
+ AllocError::InvalidUninitBytes(Some(UninitBytesAccess {
+ access: range,
+ uninit: uninit_range,
+ }))
+ })?;
if !Prov::OFFSET_IS_ADDR {
- if self.range_has_provenance(cx, range) {
+ if !self.provenance.range_empty(range, cx) {
return Err(AllocError::ReadPointerAsBytes);
}
}
@@ -370,7 +390,7 @@ impl<Prov: Provenance, Extra> Allocation<Prov, Extra> {
range: AllocRange,
) -> AllocResult<&mut [u8]> {
self.mark_init(range, true);
- self.clear_provenance(cx, range)?;
+ self.provenance.clear(range, cx)?;
Ok(&mut self.bytes[range.start.bytes_usize()..range.end().bytes_usize()])
}
@@ -382,7 +402,7 @@ impl<Prov: Provenance, Extra> Allocation<Prov, Extra> {
range: AllocRange,
) -> AllocResult<*mut [u8]> {
self.mark_init(range, true);
- self.clear_provenance(cx, range)?;
+ self.provenance.clear(range, cx)?;
assert!(range.end().bytes_usize() <= self.bytes.len()); // need to do our own bounds-check
let begin_ptr = self.bytes.as_mut_ptr().wrapping_add(range.start.bytes_usize());
@@ -393,6 +413,15 @@ impl<Prov: Provenance, Extra> Allocation<Prov, Extra> {
/// Reading and writing.
impl<Prov: Provenance, Extra> Allocation<Prov, Extra> {
+ /// Sets the init bit for the given range.
+ fn mark_init(&mut self, range: AllocRange, is_init: bool) {
+ if range.size.bytes() == 0 {
+ return;
+ }
+ assert!(self.mutability == Mutability::Mut);
+ self.init_mask.set_range(range, is_init);
+ }
+
/// Reads a *non-ZST* scalar.
///
/// If `read_provenance` is `true`, this will also read provenance; otherwise (if the machine
@@ -410,7 +439,7 @@ impl<Prov: Provenance, Extra> Allocation<Prov, Extra> {
read_provenance: bool,
) -> AllocResult<Scalar<Prov>> {
// First and foremost, if anything is uninit, bail.
- if self.is_init(range).is_err() {
+ if self.init_mask.is_range_initialized(range).is_err() {
return Err(AllocError::InvalidUninitBytes(None));
}
@@ -423,7 +452,7 @@ impl<Prov: Provenance, Extra> Allocation<Prov, Extra> {
// When reading data with provenance, the easy case is finding provenance exactly where we
// are reading, then we can put data and provenance back together and return that.
- if let Some(&prov) = self.provenance.get(&range.start) {
+ if let Some(prov) = self.provenance.get_ptr(range.start) {
// Now we can return the bits, with their appropriate provenance.
let ptr = Pointer::new(prov, Size::from_bytes(bits));
return Ok(Scalar::from_pointer(ptr, cx));
@@ -431,10 +460,9 @@ impl<Prov: Provenance, Extra> Allocation<Prov, Extra> {
// If we can work on pointers byte-wise, join the byte-wise provenances.
if Prov::OFFSET_IS_ADDR {
- let mut prov = self.offset_get_provenance(cx, range.start);
- for offset in 1..range.size.bytes() {
- let this_prov =
- self.offset_get_provenance(cx, range.start + Size::from_bytes(offset));
+ let mut prov = self.provenance.get(range.start, cx);
+ for offset in Size::from_bytes(1)..range.size {
+ let this_prov = self.provenance.get(range.start + offset, cx);
prov = Prov::join(prov, this_prov);
}
// Now use this provenance.
@@ -452,7 +480,7 @@ impl<Prov: Provenance, Extra> Allocation<Prov, Extra> {
// Fallback path for when we cannot treat provenance bytewise or ignore it.
assert!(!Prov::OFFSET_IS_ADDR);
- if self.range_has_provenance(cx, range) {
+ if !self.provenance.range_empty(range, cx) {
return Err(AllocError::ReadPointerAsBytes);
}
// There is no provenance, we can just return the bits.
@@ -466,7 +494,6 @@ impl<Prov: Provenance, Extra> Allocation<Prov, Extra> {
///
/// It is the caller's responsibility to check bounds and alignment beforehand.
/// Most likely, you want to call `InterpCx::write_scalar` instead of this method.
- #[instrument(skip(self, cx), level = "debug")]
pub fn write_scalar(
&mut self,
cx: &impl HasDataLayout,
@@ -478,11 +505,11 @@ impl<Prov: Provenance, Extra> Allocation<Prov, Extra> {
// `to_bits_or_ptr_internal` is the right method because we just want to store this data
// as-is into memory.
let (bytes, provenance) = match val.to_bits_or_ptr_internal(range.size)? {
- Err(val) => {
- let (provenance, offset) = val.into_parts();
+ Right(ptr) => {
+ let (provenance, offset) = ptr.into_parts();
(u128::from(offset.bytes()), Some(provenance))
}
- Ok(data) => (data, None),
+ Left(data) => (data, None),
};
let endian = cx.data_layout().endian;
@@ -491,7 +518,8 @@ impl<Prov: Provenance, Extra> Allocation<Prov, Extra> {
// See if we have to also store some provenance.
if let Some(provenance) = provenance {
- self.provenance.0.insert(range.start, provenance);
+ assert_eq!(range.size, cx.data_layout().pointer_size);
+ self.provenance.insert_ptr(range.start, provenance, cx);
}
Ok(())
@@ -500,750 +528,25 @@ impl<Prov: Provenance, Extra> Allocation<Prov, Extra> {
/// Write "uninit" to the given memory range.
pub fn write_uninit(&mut self, cx: &impl HasDataLayout, range: AllocRange) -> AllocResult {
self.mark_init(range, false);
- self.clear_provenance(cx, range)?;
+ self.provenance.clear(range, cx)?;
return Ok(());
}
-}
-
-/// Provenance.
-impl<Prov: Copy, Extra> Allocation<Prov, Extra> {
- /// Returns all provenance overlapping with the given pointer-offset pair.
- fn range_get_provenance(&self, cx: &impl HasDataLayout, range: AllocRange) -> &[(Size, Prov)] {
- // We have to go back `pointer_size - 1` bytes, as that one would still overlap with
- // the beginning of this range.
- let start = range.start.bytes().saturating_sub(cx.data_layout().pointer_size.bytes() - 1);
- self.provenance.range(Size::from_bytes(start)..range.end())
- }
-
- /// Get the provenance of a single byte.
- fn offset_get_provenance(&self, cx: &impl HasDataLayout, offset: Size) -> Option<Prov> {
- let prov = self.range_get_provenance(cx, alloc_range(offset, Size::from_bytes(1)));
- assert!(prov.len() <= 1);
- prov.first().map(|(_offset, prov)| *prov)
- }
-
- /// Returns whether this allocation has progrnance overlapping with the given range.
- ///
- /// Note: this function exists to allow `range_get_provenance` to be private, in order to somewhat
- /// limit access to provenance outside of the `Allocation` abstraction.
- ///
- pub fn range_has_provenance(&self, cx: &impl HasDataLayout, range: AllocRange) -> bool {
- !self.range_get_provenance(cx, range).is_empty()
- }
-
- /// Removes all provenance inside the given range.
- /// If there is provenance overlapping with the edges, it
- /// are removed as well *and* the bytes they cover are marked as
- /// uninitialized. This is a somewhat odd "spooky action at a distance",
- /// but it allows strictly more code to run than if we would just error
- /// immediately in that case.
- fn clear_provenance(&mut self, cx: &impl HasDataLayout, range: AllocRange) -> AllocResult
- where
- Prov: Provenance,
- {
- // Find the start and end of the given range and its outermost provenance.
- let (first, last) = {
- // Find all provenance overlapping the given range.
- let provenance = self.range_get_provenance(cx, range);
- if provenance.is_empty() {
- return Ok(());
- }
-
- (
- provenance.first().unwrap().0,
- provenance.last().unwrap().0 + cx.data_layout().pointer_size,
- )
- };
- let start = range.start;
- let end = range.end();
-
- // We need to handle clearing the provenance from parts of a pointer.
- // FIXME: Miri should preserve partial provenance; see
- // https://github.com/rust-lang/miri/issues/2181.
- if first < start {
- if Prov::ERR_ON_PARTIAL_PTR_OVERWRITE {
- return Err(AllocError::PartialPointerOverwrite(first));
- }
- warn!(
- "Partial pointer overwrite! De-initializing memory at offsets {first:?}..{start:?}."
- );
- self.init_mask.set_range(first, start, false);
- }
- if last > end {
- if Prov::ERR_ON_PARTIAL_PTR_OVERWRITE {
- return Err(AllocError::PartialPointerOverwrite(
- last - cx.data_layout().pointer_size,
- ));
- }
- warn!(
- "Partial pointer overwrite! De-initializing memory at offsets {end:?}..{last:?}."
- );
- self.init_mask.set_range(end, last, false);
- }
-
- // Forget all the provenance.
- // Since provenance do not overlap, we know that removing until `last` (exclusive) is fine,
- // i.e., this will not remove any other provenance just after the ones we care about.
- self.provenance.0.remove_range(first..last);
-
- Ok(())
- }
-}
-
-/// Stores the provenance information of pointers stored in memory.
-#[derive(Clone, PartialEq, Eq, PartialOrd, Ord, Hash, Debug, TyEncodable, TyDecodable)]
-pub struct ProvenanceMap<Prov = AllocId>(SortedMap<Size, Prov>);
-
-impl<Prov> ProvenanceMap<Prov> {
- pub fn new() -> Self {
- ProvenanceMap(SortedMap::new())
- }
-
- // The caller must guarantee that the given provenance list is already sorted
- // by address and contain no duplicates.
- pub fn from_presorted(r: Vec<(Size, Prov)>) -> Self {
- ProvenanceMap(SortedMap::from_presorted_elements(r))
- }
-}
-
-impl<Prov> Deref for ProvenanceMap<Prov> {
- type Target = SortedMap<Size, Prov>;
-
- fn deref(&self) -> &Self::Target {
- &self.0
- }
-}
-
-/// A partial, owned list of provenance to transfer into another allocation.
-///
-/// Offsets are already adjusted to the destination allocation.
-pub struct AllocationProvenance<Prov> {
- dest_provenance: Vec<(Size, Prov)>,
-}
-
-impl<Prov: Copy, Extra> Allocation<Prov, Extra> {
- pub fn prepare_provenance_copy(
- &self,
- cx: &impl HasDataLayout,
- src: AllocRange,
- dest: Size,
- count: u64,
- ) -> AllocationProvenance<Prov> {
- let provenance = self.range_get_provenance(cx, src);
- if provenance.is_empty() {
- return AllocationProvenance { dest_provenance: Vec::new() };
- }
-
- let size = src.size;
- let mut new_provenance = Vec::with_capacity(provenance.len() * (count as usize));
-
- // If `count` is large, this is rather wasteful -- we are allocating a big array here, which
- // is mostly filled with redundant information since it's just N copies of the same `Prov`s
- // at slightly adjusted offsets. The reason we do this is so that in `mark_provenance_range`
- // we can use `insert_presorted`. That wouldn't work with an `Iterator` that just produces
- // the right sequence of provenance for all N copies.
- for i in 0..count {
- new_provenance.extend(provenance.iter().map(|&(offset, reloc)| {
- // compute offset for current repetition
- let dest_offset = dest + size * i; // `Size` operations
- (
- // shift offsets from source allocation to destination allocation
- (offset + dest_offset) - src.start, // `Size` operations
- reloc,
- )
- }));
- }
-
- AllocationProvenance { dest_provenance: new_provenance }
- }
- /// Applies a provenance copy.
- /// The affected range, as defined in the parameters to `prepare_provenance_copy` is expected
+ /// Applies a previously prepared provenance copy.
+ /// The affected range, as defined in the parameters to `provenance().prepare_copy`, is expected
/// to be clear of provenance.
///
/// This is dangerous to use as it can violate internal `Allocation` invariants!
/// It only exists to support an efficient implementation of `mem_copy_repeatedly`.
- pub fn mark_provenance_range(&mut self, provenance: AllocationProvenance<Prov>) {
- self.provenance.0.insert_presorted(provenance.dest_provenance);
- }
-}
-
-////////////////////////////////////////////////////////////////////////////////
-// Uninitialized byte tracking
-////////////////////////////////////////////////////////////////////////////////
-
-type Block = u64;
-
-/// A bitmask where each bit refers to the byte with the same index. If the bit is `true`, the byte
-/// is initialized. If it is `false` the byte is uninitialized.
-// Note: for performance reasons when interning, some of the `InitMask` fields can be partially
-// hashed. (see the `Hash` impl below for more details), so the impl is not derived.
-#[derive(Clone, Debug, Eq, PartialEq, PartialOrd, Ord, TyEncodable, TyDecodable)]
-#[derive(HashStable)]
-pub struct InitMask {
- blocks: Vec<Block>,
- len: Size,
-}
-
-// Const allocations are only hashed for interning. However, they can be large, making the hashing
-// expensive especially since it uses `FxHash`: it's better suited to short keys, not potentially
-// big buffers like the allocation's init mask. We can partially hash some fields when they're
-// large.
-impl hash::Hash for InitMask {
- fn hash<H: hash::Hasher>(&self, state: &mut H) {
- const MAX_BLOCKS_TO_HASH: usize = MAX_BYTES_TO_HASH / std::mem::size_of::<Block>();
- const MAX_BLOCKS_LEN: usize = MAX_HASHED_BUFFER_LEN / std::mem::size_of::<Block>();
-
- // Partially hash the `blocks` buffer when it is large. To limit collisions with common
- // prefixes and suffixes, we hash the length and some slices of the buffer.
- let block_count = self.blocks.len();
- if block_count > MAX_BLOCKS_LEN {
- // Hash the buffer's length.
- block_count.hash(state);
-
- // And its head and tail.
- self.blocks[..MAX_BLOCKS_TO_HASH].hash(state);
- self.blocks[block_count - MAX_BLOCKS_TO_HASH..].hash(state);
- } else {
- self.blocks.hash(state);
- }
-
- // Hash the other fields as usual.
- self.len.hash(state);
- }
-}
-
-impl InitMask {
- pub const BLOCK_SIZE: u64 = 64;
-
- #[inline]
- fn bit_index(bits: Size) -> (usize, usize) {
- // BLOCK_SIZE is the number of bits that can fit in a `Block`.
- // Each bit in a `Block` represents the initialization state of one byte of an allocation,
- // so we use `.bytes()` here.
- let bits = bits.bytes();
- let a = bits / InitMask::BLOCK_SIZE;
- let b = bits % InitMask::BLOCK_SIZE;
- (usize::try_from(a).unwrap(), usize::try_from(b).unwrap())
- }
-
- #[inline]
- fn size_from_bit_index(block: impl TryInto<u64>, bit: impl TryInto<u64>) -> Size {
- let block = block.try_into().ok().unwrap();
- let bit = bit.try_into().ok().unwrap();
- Size::from_bytes(block * InitMask::BLOCK_SIZE + bit)
- }
-
- pub fn new(size: Size, state: bool) -> Self {
- let mut m = InitMask { blocks: vec![], len: Size::ZERO };
- m.grow(size, state);
- m
- }
-
- pub fn set_range(&mut self, start: Size, end: Size, new_state: bool) {
- let len = self.len;
- if end > len {
- self.grow(end - len, new_state);
- }
- self.set_range_inbounds(start, end, new_state);
- }
-
- pub fn set_range_inbounds(&mut self, start: Size, end: Size, new_state: bool) {
- let (blocka, bita) = Self::bit_index(start);
- let (blockb, bitb) = Self::bit_index(end);
- if blocka == blockb {
- // First set all bits except the first `bita`,
- // then unset the last `64 - bitb` bits.
- let range = if bitb == 0 {
- u64::MAX << bita
- } else {
- (u64::MAX << bita) & (u64::MAX >> (64 - bitb))
- };
- if new_state {
- self.blocks[blocka] |= range;
- } else {
- self.blocks[blocka] &= !range;
- }
- return;
- }
- // across block boundaries
- if new_state {
- // Set `bita..64` to `1`.
- self.blocks[blocka] |= u64::MAX << bita;
- // Set `0..bitb` to `1`.
- if bitb != 0 {
- self.blocks[blockb] |= u64::MAX >> (64 - bitb);
- }
- // Fill in all the other blocks (much faster than one bit at a time).
- for block in (blocka + 1)..blockb {
- self.blocks[block] = u64::MAX;
- }
- } else {
- // Set `bita..64` to `0`.
- self.blocks[blocka] &= !(u64::MAX << bita);
- // Set `0..bitb` to `0`.
- if bitb != 0 {
- self.blocks[blockb] &= !(u64::MAX >> (64 - bitb));
- }
- // Fill in all the other blocks (much faster than one bit at a time).
- for block in (blocka + 1)..blockb {
- self.blocks[block] = 0;
- }
- }
- }
-
- #[inline]
- pub fn get(&self, i: Size) -> bool {
- let (block, bit) = Self::bit_index(i);
- (self.blocks[block] & (1 << bit)) != 0
+ pub fn provenance_apply_copy(&mut self, copy: ProvenanceCopy<Prov>) {
+ self.provenance.apply_copy(copy)
}
- #[inline]
- pub fn set(&mut self, i: Size, new_state: bool) {
- let (block, bit) = Self::bit_index(i);
- self.set_bit(block, bit, new_state);
- }
-
- #[inline]
- fn set_bit(&mut self, block: usize, bit: usize, new_state: bool) {
- if new_state {
- self.blocks[block] |= 1 << bit;
- } else {
- self.blocks[block] &= !(1 << bit);
- }
- }
-
- pub fn grow(&mut self, amount: Size, new_state: bool) {
- if amount.bytes() == 0 {
- return;
- }
- let unused_trailing_bits =
- u64::try_from(self.blocks.len()).unwrap() * Self::BLOCK_SIZE - self.len.bytes();
- if amount.bytes() > unused_trailing_bits {
- let additional_blocks = amount.bytes() / Self::BLOCK_SIZE + 1;
- self.blocks.extend(
- // FIXME(oli-obk): optimize this by repeating `new_state as Block`.
- iter::repeat(0).take(usize::try_from(additional_blocks).unwrap()),
- );
- }
- let start = self.len;
- self.len += amount;
- self.set_range_inbounds(start, start + amount, new_state); // `Size` operation
- }
-
- /// Returns the index of the first bit in `start..end` (end-exclusive) that is equal to is_init.
- fn find_bit(&self, start: Size, end: Size, is_init: bool) -> Option<Size> {
- /// A fast implementation of `find_bit`,
- /// which skips over an entire block at a time if it's all 0s (resp. 1s),
- /// and finds the first 1 (resp. 0) bit inside a block using `trailing_zeros` instead of a loop.
- ///
- /// Note that all examples below are written with 8 (instead of 64) bit blocks for simplicity,
- /// and with the least significant bit (and lowest block) first:
- /// ```text
- /// 00000000|00000000
- /// ^ ^ ^ ^
- /// index: 0 7 8 15
- /// ```
- /// Also, if not stated, assume that `is_init = true`, that is, we are searching for the first 1 bit.
- fn find_bit_fast(
- init_mask: &InitMask,
- start: Size,
- end: Size,
- is_init: bool,
- ) -> Option<Size> {
- /// Search one block, returning the index of the first bit equal to `is_init`.
- fn search_block(
- bits: Block,
- block: usize,
- start_bit: usize,
- is_init: bool,
- ) -> Option<Size> {
- // For the following examples, assume this function was called with:
- // bits = 0b00111011
- // start_bit = 3
- // is_init = false
- // Note that, for the examples in this function, the most significant bit is written first,
- // which is backwards compared to the comments in `find_bit`/`find_bit_fast`.
-
- // Invert bits so we're always looking for the first set bit.
- // ! 0b00111011
- // bits = 0b11000100
- let bits = if is_init { bits } else { !bits };
- // Mask off unused start bits.
- // 0b11000100
- // & 0b11111000
- // bits = 0b11000000
- let bits = bits & (!0 << start_bit);
- // Find set bit, if any.
- // bit = trailing_zeros(0b11000000)
- // bit = 6
- if bits == 0 {
- None
- } else {
- let bit = bits.trailing_zeros();
- Some(InitMask::size_from_bit_index(block, bit))
- }
- }
-
- if start >= end {
- return None;
- }
-
- // Convert `start` and `end` to block indexes and bit indexes within each block.
- // We must convert `end` to an inclusive bound to handle block boundaries correctly.
- //
- // For example:
- //
- // (a) 00000000|00000000 (b) 00000000|
- // ^~~~~~~~~~~^ ^~~~~~~~~^
- // start end start end
- //
- // In both cases, the block index of `end` is 1.
- // But we do want to search block 1 in (a), and we don't in (b).
- //
- // We subtract 1 from both end positions to make them inclusive:
- //
- // (a) 00000000|00000000 (b) 00000000|
- // ^~~~~~~~~~^ ^~~~~~~^
- // start end_inclusive start end_inclusive
- //
- // For (a), the block index of `end_inclusive` is 1, and for (b), it's 0.
- // This provides the desired behavior of searching blocks 0 and 1 for (a),
- // and searching only block 0 for (b).
- // There is no concern of overflows since we checked for `start >= end` above.
- let (start_block, start_bit) = InitMask::bit_index(start);
- let end_inclusive = Size::from_bytes(end.bytes() - 1);
- let (end_block_inclusive, _) = InitMask::bit_index(end_inclusive);
-
- // Handle first block: need to skip `start_bit` bits.
- //
- // We need to handle the first block separately,
- // because there may be bits earlier in the block that should be ignored,
- // such as the bit marked (1) in this example:
- //
- // (1)
- // -|------
- // (c) 01000000|00000000|00000001
- // ^~~~~~~~~~~~~~~~~~^
- // start end
- if let Some(i) =
- search_block(init_mask.blocks[start_block], start_block, start_bit, is_init)
- {
- // If the range is less than a block, we may find a matching bit after `end`.
- //
- // For example, we shouldn't successfully find bit (2), because it's after `end`:
- //
- // (2)
- // -------|
- // (d) 00000001|00000000|00000001
- // ^~~~~^
- // start end
- //
- // An alternative would be to mask off end bits in the same way as we do for start bits,
- // but performing this check afterwards is faster and simpler to implement.
- if i < end {
- return Some(i);
- } else {
- return None;
- }
- }
-
- // Handle remaining blocks.
- //
- // We can skip over an entire block at once if it's all 0s (resp. 1s).
- // The block marked (3) in this example is the first block that will be handled by this loop,
- // and it will be skipped for that reason:
- //
- // (3)
- // --------
- // (e) 01000000|00000000|00000001
- // ^~~~~~~~~~~~~~~~~~^
- // start end
- if start_block < end_block_inclusive {
- // This loop is written in a specific way for performance.
- // Notably: `..end_block_inclusive + 1` is used for an inclusive range instead of `..=end_block_inclusive`,
- // and `.zip(start_block + 1..)` is used to track the index instead of `.enumerate().skip().take()`,
- // because both alternatives result in significantly worse codegen.
- // `end_block_inclusive + 1` is guaranteed not to wrap, because `end_block_inclusive <= end / BLOCK_SIZE`,
- // and `BLOCK_SIZE` (the number of bits per block) will always be at least 8 (1 byte).
- for (&bits, block) in init_mask.blocks[start_block + 1..end_block_inclusive + 1]
- .iter()
- .zip(start_block + 1..)
- {
- if let Some(i) = search_block(bits, block, 0, is_init) {
- // If this is the last block, we may find a matching bit after `end`.
- //
- // For example, we shouldn't successfully find bit (4), because it's after `end`:
- //
- // (4)
- // -------|
- // (f) 00000001|00000000|00000001
- // ^~~~~~~~~~~~~~~~~~^
- // start end
- //
- // As above with example (d), we could handle the end block separately and mask off end bits,
- // but unconditionally searching an entire block at once and performing this check afterwards
- // is faster and much simpler to implement.
- if i < end {
- return Some(i);
- } else {
- return None;
- }
- }
- }
- }
-
- None
- }
-
- #[cfg_attr(not(debug_assertions), allow(dead_code))]
- fn find_bit_slow(
- init_mask: &InitMask,
- start: Size,
- end: Size,
- is_init: bool,
- ) -> Option<Size> {
- (start..end).find(|&i| init_mask.get(i) == is_init)
- }
-
- let result = find_bit_fast(self, start, end, is_init);
-
- debug_assert_eq!(
- result,
- find_bit_slow(self, start, end, is_init),
- "optimized implementation of find_bit is wrong for start={:?} end={:?} is_init={} init_mask={:#?}",
- start,
- end,
- is_init,
- self
- );
-
- result
- }
-}
-
-/// A contiguous chunk of initialized or uninitialized memory.
-pub enum InitChunk {
- Init(Range<Size>),
- Uninit(Range<Size>),
-}
-
-impl InitChunk {
- #[inline]
- pub fn is_init(&self) -> bool {
- match self {
- Self::Init(_) => true,
- Self::Uninit(_) => false,
- }
- }
-
- #[inline]
- pub fn range(&self) -> Range<Size> {
- match self {
- Self::Init(r) => r.clone(),
- Self::Uninit(r) => r.clone(),
- }
- }
-}
-
-impl InitMask {
- /// Checks whether the range `start..end` (end-exclusive) is entirely initialized.
- ///
- /// Returns `Ok(())` if it's initialized. Otherwise returns a range of byte
- /// indexes for the first contiguous span of the uninitialized access.
- #[inline]
- pub fn is_range_initialized(&self, start: Size, end: Size) -> Result<(), AllocRange> {
- if end > self.len {
- return Err(AllocRange::from(self.len..end));
- }
-
- let uninit_start = self.find_bit(start, end, false);
-
- match uninit_start {
- Some(uninit_start) => {
- let uninit_end = self.find_bit(uninit_start, end, true).unwrap_or(end);
- Err(AllocRange::from(uninit_start..uninit_end))
- }
- None => Ok(()),
- }
- }
-
- /// Returns an iterator, yielding a range of byte indexes for each contiguous region
- /// of initialized or uninitialized bytes inside the range `start..end` (end-exclusive).
- ///
- /// The iterator guarantees the following:
- /// - Chunks are nonempty.
- /// - Chunks are adjacent (each range's start is equal to the previous range's end).
- /// - Chunks span exactly `start..end` (the first starts at `start`, the last ends at `end`).
- /// - Chunks alternate between [`InitChunk::Init`] and [`InitChunk::Uninit`].
- #[inline]
- pub fn range_as_init_chunks(&self, start: Size, end: Size) -> InitChunkIter<'_> {
- assert!(end <= self.len);
-
- let is_init = if start < end {
- self.get(start)
- } else {
- // `start..end` is empty: there are no chunks, so use some arbitrary value
- false
- };
-
- InitChunkIter { init_mask: self, is_init, start, end }
- }
-}
-
-/// Yields [`InitChunk`]s. See [`InitMask::range_as_init_chunks`].
-#[derive(Clone)]
-pub struct InitChunkIter<'a> {
- init_mask: &'a InitMask,
- /// Whether the next chunk we will return is initialized.
- /// If there are no more chunks, contains some arbitrary value.
- is_init: bool,
- /// The current byte index into `init_mask`.
- start: Size,
- /// The end byte index into `init_mask`.
- end: Size,
-}
-
-impl<'a> Iterator for InitChunkIter<'a> {
- type Item = InitChunk;
-
- #[inline]
- fn next(&mut self) -> Option<Self::Item> {
- if self.start >= self.end {
- return None;
- }
-
- let end_of_chunk =
- self.init_mask.find_bit(self.start, self.end, !self.is_init).unwrap_or(self.end);
- let range = self.start..end_of_chunk;
-
- let ret =
- Some(if self.is_init { InitChunk::Init(range) } else { InitChunk::Uninit(range) });
-
- self.is_init = !self.is_init;
- self.start = end_of_chunk;
-
- ret
- }
-}
-
-/// Uninitialized bytes.
-impl<Prov: Copy, Extra> Allocation<Prov, Extra> {
- /// Checks whether the given range is entirely initialized.
- ///
- /// Returns `Ok(())` if it's initialized. Otherwise returns the range of byte
- /// indexes of the first contiguous uninitialized access.
- fn is_init(&self, range: AllocRange) -> Result<(), AllocRange> {
- self.init_mask.is_range_initialized(range.start, range.end()) // `Size` addition
- }
-
- /// Checks that a range of bytes is initialized. If not, returns the `InvalidUninitBytes`
- /// error which will report the first range of bytes which is uninitialized.
- fn check_init(&self, range: AllocRange) -> AllocResult {
- self.is_init(range).map_err(|uninit_range| {
- AllocError::InvalidUninitBytes(Some(UninitBytesAccess {
- access: range,
- uninit: uninit_range,
- }))
- })
- }
-
- fn mark_init(&mut self, range: AllocRange, is_init: bool) {
- if range.size.bytes() == 0 {
- return;
- }
- assert!(self.mutability == Mutability::Mut);
- self.init_mask.set_range(range.start, range.end(), is_init);
- }
-}
-
-/// Run-length encoding of the uninit mask.
-/// Used to copy parts of a mask multiple times to another allocation.
-pub struct InitMaskCompressed {
- /// Whether the first range is initialized.
- initial: bool,
- /// The lengths of ranges that are run-length encoded.
- /// The initialization state of the ranges alternate starting with `initial`.
- ranges: smallvec::SmallVec<[u64; 1]>,
-}
-
-impl InitMaskCompressed {
- pub fn no_bytes_init(&self) -> bool {
- // The `ranges` are run-length encoded and of alternating initialization state.
- // So if `ranges.len() > 1` then the second block is an initialized range.
- !self.initial && self.ranges.len() == 1
- }
-}
-
-/// Transferring the initialization mask to other allocations.
-impl<Prov, Extra> Allocation<Prov, Extra> {
- /// Creates a run-length encoding of the initialization mask; panics if range is empty.
- ///
- /// This is essentially a more space-efficient version of
- /// `InitMask::range_as_init_chunks(...).collect::<Vec<_>>()`.
- pub fn compress_uninit_range(&self, range: AllocRange) -> InitMaskCompressed {
- // Since we are copying `size` bytes from `src` to `dest + i * size` (`for i in 0..repeat`),
- // a naive initialization mask copying algorithm would repeatedly have to read the initialization mask from
- // the source and write it to the destination. Even if we optimized the memory accesses,
- // we'd be doing all of this `repeat` times.
- // Therefore we precompute a compressed version of the initialization mask of the source value and
- // then write it back `repeat` times without computing any more information from the source.
-
- // A precomputed cache for ranges of initialized / uninitialized bits
- // 0000010010001110 will become
- // `[5, 1, 2, 1, 3, 3, 1]`,
- // where each element toggles the state.
-
- let mut ranges = smallvec::SmallVec::<[u64; 1]>::new();
-
- let mut chunks = self.init_mask.range_as_init_chunks(range.start, range.end()).peekable();
-
- let initial = chunks.peek().expect("range should be nonempty").is_init();
-
- // Here we rely on `range_as_init_chunks` to yield alternating init/uninit chunks.
- for chunk in chunks {
- let len = chunk.range().end.bytes() - chunk.range().start.bytes();
- ranges.push(len);
- }
-
- InitMaskCompressed { ranges, initial }
- }
-
- /// Applies multiple instances of the run-length encoding to the initialization mask.
+ /// Applies a previously prepared copy of the init mask.
///
/// This is dangerous to use as it can violate internal `Allocation` invariants!
/// It only exists to support an efficient implementation of `mem_copy_repeatedly`.
- pub fn mark_compressed_init_range(
- &mut self,
- defined: &InitMaskCompressed,
- range: AllocRange,
- repeat: u64,
- ) {
- // An optimization where we can just overwrite an entire range of initialization
- // bits if they are going to be uniformly `1` or `0`.
- if defined.ranges.len() <= 1 {
- self.init_mask.set_range_inbounds(
- range.start,
- range.start + range.size * repeat, // `Size` operations
- defined.initial,
- );
- return;
- }
-
- for mut j in 0..repeat {
- j *= range.size.bytes();
- j += range.start.bytes();
- let mut cur = defined.initial;
- for range in &defined.ranges {
- let old_j = j;
- j += range;
- self.init_mask.set_range_inbounds(
- Size::from_bytes(old_j),
- Size::from_bytes(j),
- cur,
- );
- cur = !cur;
- }
- }
+ pub fn init_mask_apply_copy(&mut self, copy: InitCopy, range: AllocRange, repeat: u64) {
+ self.init_mask.apply_copy(copy, range, repeat)
}
}
diff --git a/compiler/rustc_middle/src/mir/interpret/allocation/init_mask.rs b/compiler/rustc_middle/src/mir/interpret/allocation/init_mask.rs
new file mode 100644
index 000000000..82e9a961a
--- /dev/null
+++ b/compiler/rustc_middle/src/mir/interpret/allocation/init_mask.rs
@@ -0,0 +1,530 @@
+use std::hash;
+use std::iter;
+use std::ops::Range;
+
+use rustc_target::abi::Size;
+
+use super::AllocRange;
+
+type Block = u64;
+
+/// A bitmask where each bit refers to the byte with the same index. If the bit is `true`, the byte
+/// is initialized. If it is `false` the byte is uninitialized.
+// Note: for performance reasons when interning, some of the `InitMask` fields can be partially
+// hashed. (see the `Hash` impl below for more details), so the impl is not derived.
+#[derive(Clone, Debug, Eq, PartialEq, TyEncodable, TyDecodable)]
+#[derive(HashStable)]
+pub struct InitMask {
+ blocks: Vec<Block>,
+ len: Size,
+}
+
+// Const allocations are only hashed for interning. However, they can be large, making the hashing
+// expensive especially since it uses `FxHash`: it's better suited to short keys, not potentially
+// big buffers like the allocation's init mask. We can partially hash some fields when they're
+// large.
+impl hash::Hash for InitMask {
+ fn hash<H: hash::Hasher>(&self, state: &mut H) {
+ const MAX_BLOCKS_TO_HASH: usize = super::MAX_BYTES_TO_HASH / std::mem::size_of::<Block>();
+ const MAX_BLOCKS_LEN: usize = super::MAX_HASHED_BUFFER_LEN / std::mem::size_of::<Block>();
+
+ // Partially hash the `blocks` buffer when it is large. To limit collisions with common
+ // prefixes and suffixes, we hash the length and some slices of the buffer.
+ let block_count = self.blocks.len();
+ if block_count > MAX_BLOCKS_LEN {
+ // Hash the buffer's length.
+ block_count.hash(state);
+
+ // And its head and tail.
+ self.blocks[..MAX_BLOCKS_TO_HASH].hash(state);
+ self.blocks[block_count - MAX_BLOCKS_TO_HASH..].hash(state);
+ } else {
+ self.blocks.hash(state);
+ }
+
+ // Hash the other fields as usual.
+ self.len.hash(state);
+ }
+}
+
+impl InitMask {
+ pub const BLOCK_SIZE: u64 = 64;
+
+ pub fn new(size: Size, state: bool) -> Self {
+ let mut m = InitMask { blocks: vec![], len: Size::ZERO };
+ m.grow(size, state);
+ m
+ }
+
+ #[inline]
+ fn bit_index(bits: Size) -> (usize, usize) {
+ // BLOCK_SIZE is the number of bits that can fit in a `Block`.
+ // Each bit in a `Block` represents the initialization state of one byte of an allocation,
+ // so we use `.bytes()` here.
+ let bits = bits.bytes();
+ let a = bits / InitMask::BLOCK_SIZE;
+ let b = bits % InitMask::BLOCK_SIZE;
+ (usize::try_from(a).unwrap(), usize::try_from(b).unwrap())
+ }
+
+ #[inline]
+ fn size_from_bit_index(block: impl TryInto<u64>, bit: impl TryInto<u64>) -> Size {
+ let block = block.try_into().ok().unwrap();
+ let bit = bit.try_into().ok().unwrap();
+ Size::from_bytes(block * InitMask::BLOCK_SIZE + bit)
+ }
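
For orientation, the block/bit decomposition above is plain division and remainder by the 64-bit block width. A standalone sketch of the same arithmetic (illustrative only; it uses bare u64 values instead of `Size`):

// Standalone sketch of the bit_index / size_from_bit_index arithmetic (bare u64, not `Size`).
const BLOCK_SIZE: u64 = 64;

fn bit_index(byte: u64) -> (usize, usize) {
    ((byte / BLOCK_SIZE) as usize, (byte % BLOCK_SIZE) as usize)
}

fn byte_from_bit_index(block: usize, bit: usize) -> u64 {
    block as u64 * BLOCK_SIZE + bit as u64
}

fn main() {
    // Byte 130 lives in block 2, bit 2 (130 = 2 * 64 + 2), and the mapping round-trips.
    assert_eq!(bit_index(130), (2, 2));
    assert_eq!(byte_from_bit_index(2, 2), 130);
}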
+
+ /// Checks whether the `range` is entirely initialized.
+ ///
+ /// Returns `Ok(())` if it's initialized. Otherwise returns a range of byte
+ /// indexes for the first contiguous span of the uninitialized access.
+ #[inline]
+ pub fn is_range_initialized(&self, range: AllocRange) -> Result<(), AllocRange> {
+ let end = range.end();
+ if end > self.len {
+ return Err(AllocRange::from(self.len..end));
+ }
+
+ let uninit_start = self.find_bit(range.start, end, false);
+
+ match uninit_start {
+ Some(uninit_start) => {
+ let uninit_end = self.find_bit(uninit_start, end, true).unwrap_or(end);
+ Err(AllocRange::from(uninit_start..uninit_end))
+ }
+ None => Ok(()),
+ }
+ }
+
+ pub fn set_range(&mut self, range: AllocRange, new_state: bool) {
+ let end = range.end();
+ let len = self.len;
+ if end > len {
+ self.grow(end - len, new_state);
+ }
+ self.set_range_inbounds(range.start, end, new_state);
+ }
+
+ fn set_range_inbounds(&mut self, start: Size, end: Size, new_state: bool) {
+ let (blocka, bita) = Self::bit_index(start);
+ let (blockb, bitb) = Self::bit_index(end);
+ if blocka == blockb {
+ // First set all bits except the first `bita`,
+ // then unset the last `64 - bitb` bits.
+ let range = if bitb == 0 {
+ u64::MAX << bita
+ } else {
+ (u64::MAX << bita) & (u64::MAX >> (64 - bitb))
+ };
+ if new_state {
+ self.blocks[blocka] |= range;
+ } else {
+ self.blocks[blocka] &= !range;
+ }
+ return;
+ }
+ // across block boundaries
+ if new_state {
+ // Set `bita..64` to `1`.
+ self.blocks[blocka] |= u64::MAX << bita;
+ // Set `0..bitb` to `1`.
+ if bitb != 0 {
+ self.blocks[blockb] |= u64::MAX >> (64 - bitb);
+ }
+ // Fill in all the other blocks (much faster than one bit at a time).
+ for block in (blocka + 1)..blockb {
+ self.blocks[block] = u64::MAX;
+ }
+ } else {
+ // Set `bita..64` to `0`.
+ self.blocks[blocka] &= !(u64::MAX << bita);
+ // Set `0..bitb` to `0`.
+ if bitb != 0 {
+ self.blocks[blockb] &= !(u64::MAX >> (64 - bitb));
+ }
+ // Fill in all the other blocks (much faster than one bit at a time).
+ for block in (blocka + 1)..blockb {
+ self.blocks[block] = 0;
+ }
+ }
+ }
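
In the single-block case above, the two shifts combine into one mask that selects exactly the bits `bita..bitb` (or `bita..64` when `bitb == 0`). A standalone check of that mask (illustrative only):

// Mask selecting bits 3..6 of a single 64-bit block, as built in set_range_inbounds.
fn main() {
    let (bita, bitb) = (3u32, 6u32);
    let range = (u64::MAX << bita) & (u64::MAX >> (64 - bitb));
    assert_eq!(range, 0b0011_1000); // bits 3, 4 and 5 are set, nothing else
}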
+
+ #[inline]
+ pub fn get(&self, i: Size) -> bool {
+ let (block, bit) = Self::bit_index(i);
+ (self.blocks[block] & (1 << bit)) != 0
+ }
+
+ fn grow(&mut self, amount: Size, new_state: bool) {
+ if amount.bytes() == 0 {
+ return;
+ }
+ let unused_trailing_bits =
+ u64::try_from(self.blocks.len()).unwrap() * Self::BLOCK_SIZE - self.len.bytes();
+ if amount.bytes() > unused_trailing_bits {
+ let additional_blocks = amount.bytes() / Self::BLOCK_SIZE + 1;
+ self.blocks.extend(
+ // FIXME(oli-obk): optimize this by repeating `new_state as Block`.
+ iter::repeat(0).take(usize::try_from(additional_blocks).unwrap()),
+ );
+ }
+ let start = self.len;
+ self.len += amount;
+ self.set_range_inbounds(start, start + amount, new_state); // `Size` operation
+ }
+
+ /// Returns the index of the first bit in `start..end` (end-exclusive) that is equal to is_init.
+ fn find_bit(&self, start: Size, end: Size, is_init: bool) -> Option<Size> {
+ /// A fast implementation of `find_bit`,
+ /// which skips over an entire block at a time if it's all 0s (resp. 1s),
+ /// and finds the first 1 (resp. 0) bit inside a block using `trailing_zeros` instead of a loop.
+ ///
+ /// Note that all examples below are written with 8 (instead of 64) bit blocks for simplicity,
+ /// and with the least significant bit (and lowest block) first:
+ /// ```text
+ /// 00000000|00000000
+ /// ^ ^ ^ ^
+ /// index: 0 7 8 15
+ /// ```
+ /// Also, if not stated, assume that `is_init = true`, that is, we are searching for the first 1 bit.
+ fn find_bit_fast(
+ init_mask: &InitMask,
+ start: Size,
+ end: Size,
+ is_init: bool,
+ ) -> Option<Size> {
+ /// Search one block, returning the index of the first bit equal to `is_init`.
+ fn search_block(
+ bits: Block,
+ block: usize,
+ start_bit: usize,
+ is_init: bool,
+ ) -> Option<Size> {
+ // For the following examples, assume this function was called with:
+ // bits = 0b00111011
+ // start_bit = 3
+ // is_init = false
+ // Note that, for the examples in this function, the most significant bit is written first,
+ // which is backwards compared to the comments in `find_bit`/`find_bit_fast`.
+
+ // Invert bits so we're always looking for the first set bit.
+ // ! 0b00111011
+ // bits = 0b11000100
+ let bits = if is_init { bits } else { !bits };
+ // Mask off unused start bits.
+ // 0b11000100
+ // & 0b11111000
+ // bits = 0b11000000
+ let bits = bits & (!0 << start_bit);
+ // Find set bit, if any.
+ // bit = trailing_zeros(0b11000000)
+ // bit = 6
+ if bits == 0 {
+ None
+ } else {
+ let bit = bits.trailing_zeros();
+ Some(InitMask::size_from_bit_index(block, bit))
+ }
+ }
+
+ if start >= end {
+ return None;
+ }
+
+ // Convert `start` and `end` to block indexes and bit indexes within each block.
+ // We must convert `end` to an inclusive bound to handle block boundaries correctly.
+ //
+ // For example:
+ //
+ // (a) 00000000|00000000 (b) 00000000|
+ // ^~~~~~~~~~~^ ^~~~~~~~~^
+ // start end start end
+ //
+ // In both cases, the block index of `end` is 1.
+ // But we do want to search block 1 in (a), and we don't in (b).
+ //
+ // We subtract 1 from both end positions to make them inclusive:
+ //
+ // (a) 00000000|00000000 (b) 00000000|
+ // ^~~~~~~~~~^ ^~~~~~~^
+ // start end_inclusive start end_inclusive
+ //
+ // For (a), the block index of `end_inclusive` is 1, and for (b), it's 0.
+ // This provides the desired behavior of searching blocks 0 and 1 for (a),
+ // and searching only block 0 for (b).
+ // There is no concern of overflows since we checked for `start >= end` above.
+ let (start_block, start_bit) = InitMask::bit_index(start);
+ let end_inclusive = Size::from_bytes(end.bytes() - 1);
+ let (end_block_inclusive, _) = InitMask::bit_index(end_inclusive);
+
+ // Handle first block: need to skip `start_bit` bits.
+ //
+ // We need to handle the first block separately,
+ // because there may be bits earlier in the block that should be ignored,
+ // such as the bit marked (1) in this example:
+ //
+ // (1)
+ // -|------
+ // (c) 01000000|00000000|00000001
+ // ^~~~~~~~~~~~~~~~~~^
+ // start end
+ if let Some(i) =
+ search_block(init_mask.blocks[start_block], start_block, start_bit, is_init)
+ {
+ // If the range is less than a block, we may find a matching bit after `end`.
+ //
+ // For example, we shouldn't successfully find bit (2), because it's after `end`:
+ //
+ // (2)
+ // -------|
+ // (d) 00000001|00000000|00000001
+ // ^~~~~^
+ // start end
+ //
+ // An alternative would be to mask off end bits in the same way as we do for start bits,
+ // but performing this check afterwards is faster and simpler to implement.
+ if i < end {
+ return Some(i);
+ } else {
+ return None;
+ }
+ }
+
+ // Handle remaining blocks.
+ //
+ // We can skip over an entire block at once if it's all 0s (resp. 1s).
+ // The block marked (3) in this example is the first block that will be handled by this loop,
+ // and it will be skipped for that reason:
+ //
+ // (3)
+ // --------
+ // (e) 01000000|00000000|00000001
+ // ^~~~~~~~~~~~~~~~~~^
+ // start end
+ if start_block < end_block_inclusive {
+ // This loop is written in a specific way for performance.
+ // Notably: `..end_block_inclusive + 1` is used for an inclusive range instead of `..=end_block_inclusive`,
+ // and `.zip(start_block + 1..)` is used to track the index instead of `.enumerate().skip().take()`,
+ // because both alternatives result in significantly worse codegen.
+ // `end_block_inclusive + 1` is guaranteed not to wrap, because `end_block_inclusive <= end / BLOCK_SIZE`,
+ // and `BLOCK_SIZE` (the number of bits per block) will always be at least 8 (1 byte).
+ for (&bits, block) in init_mask.blocks[start_block + 1..end_block_inclusive + 1]
+ .iter()
+ .zip(start_block + 1..)
+ {
+ if let Some(i) = search_block(bits, block, 0, is_init) {
+ // If this is the last block, we may find a matching bit after `end`.
+ //
+ // For example, we shouldn't successfully find bit (4), because it's after `end`:
+ //
+ // (4)
+ // -------|
+ // (f) 00000001|00000000|00000001
+ // ^~~~~~~~~~~~~~~~~~^
+ // start end
+ //
+ // As above with example (d), we could handle the end block separately and mask off end bits,
+ // but unconditionally searching an entire block at once and performing this check afterwards
+ // is faster and much simpler to implement.
+ if i < end {
+ return Some(i);
+ } else {
+ return None;
+ }
+ }
+ }
+ }
+
+ None
+ }
+
+ #[cfg_attr(not(debug_assertions), allow(dead_code))]
+ fn find_bit_slow(
+ init_mask: &InitMask,
+ start: Size,
+ end: Size,
+ is_init: bool,
+ ) -> Option<Size> {
+ (start..end).find(|&i| init_mask.get(i) == is_init)
+ }
+
+ let result = find_bit_fast(self, start, end, is_init);
+
+ debug_assert_eq!(
+ result,
+ find_bit_slow(self, start, end, is_init),
+ "optimized implementation of find_bit is wrong for start={:?} end={:?} is_init={} init_mask={:#?}",
+ start,
+ end,
+ is_init,
+ self
+ );
+
+ result
+ }
+}
+
+/// A contiguous chunk of initialized or uninitialized memory.
+pub enum InitChunk {
+ Init(Range<Size>),
+ Uninit(Range<Size>),
+}
+
+impl InitChunk {
+ #[inline]
+ pub fn is_init(&self) -> bool {
+ match self {
+ Self::Init(_) => true,
+ Self::Uninit(_) => false,
+ }
+ }
+
+ #[inline]
+ pub fn range(&self) -> Range<Size> {
+ match self {
+ Self::Init(r) => r.clone(),
+ Self::Uninit(r) => r.clone(),
+ }
+ }
+}
+
+impl InitMask {
+ /// Returns an iterator, yielding a range of byte indexes for each contiguous region
+ /// of initialized or uninitialized bytes inside the range `start..end` (end-exclusive).
+ ///
+ /// The iterator guarantees the following:
+ /// - Chunks are nonempty.
+ /// - Chunks are adjacent (each range's start is equal to the previous range's end).
+ /// - Chunks span exactly `start..end` (the first starts at `start`, the last ends at `end`).
+ /// - Chunks alternate between [`InitChunk::Init`] and [`InitChunk::Uninit`].
+ #[inline]
+ pub fn range_as_init_chunks(&self, range: AllocRange) -> InitChunkIter<'_> {
+ let start = range.start;
+ let end = range.end();
+ assert!(end <= self.len);
+
+ let is_init = if start < end {
+ self.get(start)
+ } else {
+ // `start..end` is empty: there are no chunks, so use some arbitrary value
+ false
+ };
+
+ InitChunkIter { init_mask: self, is_init, start, end }
+ }
+}
+
+/// Yields [`InitChunk`]s. See [`InitMask::range_as_init_chunks`].
+#[derive(Clone)]
+pub struct InitChunkIter<'a> {
+ init_mask: &'a InitMask,
+ /// Whether the next chunk we will return is initialized.
+ /// If there are no more chunks, contains some arbitrary value.
+ is_init: bool,
+ /// The current byte index into `init_mask`.
+ start: Size,
+ /// The end byte index into `init_mask`.
+ end: Size,
+}
+
+impl<'a> Iterator for InitChunkIter<'a> {
+ type Item = InitChunk;
+
+ #[inline]
+ fn next(&mut self) -> Option<Self::Item> {
+ if self.start >= self.end {
+ return None;
+ }
+
+ let end_of_chunk =
+ self.init_mask.find_bit(self.start, self.end, !self.is_init).unwrap_or(self.end);
+ let range = self.start..end_of_chunk;
+
+ let ret =
+ Some(if self.is_init { InitChunk::Init(range) } else { InitChunk::Uninit(range) });
+
+ self.is_init = !self.is_init;
+ self.start = end_of_chunk;
+
+ ret
+ }
+}
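
A rough standalone model of the iterator's contract (adjacent, nonempty runs of equal bits, alternating state, spanning the whole range); it works on a plain `&[bool]` rather than an `InitMask` and is only meant to illustrate the invariants listed above:

use std::ops::Range;

// Toy chunker: adjacent, nonempty, alternating runs that exactly cover 0..bits.len().
fn chunks(bits: &[bool]) -> Vec<(bool, Range<usize>)> {
    let mut out = Vec::new();
    let mut start = 0;
    while start < bits.len() {
        let state = bits[start];
        let end = bits[start..]
            .iter()
            .position(|&b| b != state)
            .map_or(bits.len(), |i| start + i);
        out.push((state, start..end));
        start = end;
    }
    out
}

fn main() {
    let mask = [false, false, true, true, true, false];
    assert_eq!(chunks(&mask), vec![(false, 0..2), (true, 2..5), (false, 5..6)]);
}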
+
+/// Run-length encoding of the uninit mask.
+/// Used to copy parts of a mask multiple times to another allocation.
+pub struct InitCopy {
+ /// Whether the first range is initialized.
+ initial: bool,
+ /// The lengths of ranges that are run-length encoded.
+ /// The initialization state of the ranges alternate starting with `initial`.
+ ranges: smallvec::SmallVec<[u64; 1]>,
+}
+
+impl InitCopy {
+ pub fn no_bytes_init(&self) -> bool {
+ // The `ranges` are run-length encoded and of alternating initialization state.
+ // So if `ranges.len() > 1` then the second block is an initialized range.
+ !self.initial && self.ranges.len() == 1
+ }
+}
+
+/// Transferring the initialization mask to other allocations.
+impl InitMask {
+ /// Creates a run-length encoding of the initialization mask; panics if range is empty.
+ ///
+ /// This is essentially a more space-efficient version of
+ /// `InitMask::range_as_init_chunks(...).collect::<Vec<_>>()`.
+ pub fn prepare_copy(&self, range: AllocRange) -> InitCopy {
+ // Since we are copying `size` bytes from `src` to `dest + i * size` (`for i in 0..repeat`),
+ // a naive initialization mask copying algorithm would repeatedly have to read the initialization mask from
+ // the source and write it to the destination. Even if we optimized the memory accesses,
+ // we'd be doing all of this `repeat` times.
+ // Therefore we precompute a compressed version of the initialization mask of the source value and
+ // then write it back `repeat` times without computing any more information from the source.
+
+ // A precomputed cache for ranges of initialized / uninitialized bits
+ // 0000010010001110 will become
+ // `[5, 1, 2, 1, 3, 3, 1]`,
+ // where each element toggles the state.
+
+ let mut ranges = smallvec::SmallVec::<[u64; 1]>::new();
+
+ let mut chunks = self.range_as_init_chunks(range).peekable();
+
+ let initial = chunks.peek().expect("range should be nonempty").is_init();
+
+ // Here we rely on `range_as_init_chunks` to yield alternating init/uninit chunks.
+ for chunk in chunks {
+ let len = chunk.range().end.bytes() - chunk.range().start.bytes();
+ ranges.push(len);
+ }
+
+ InitCopy { ranges, initial }
+ }
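
The run-length encoding sketched in the comment above can be reproduced in a few standalone lines (illustrative only; the real code derives the runs from `InitChunk`s instead of raw bits):

// Run-length encode a bit string; 0000010010001110 compresses to (false, [5, 1, 2, 1, 3, 3, 1]).
fn run_lengths(bits: &[u8]) -> (bool, Vec<u64>) {
    let initial = bits[0] == 1;
    let (mut ranges, mut cur, mut len) = (Vec::new(), bits[0], 0u64);
    for &b in bits {
        if b == cur {
            len += 1;
        } else {
            ranges.push(len);
            cur = b;
            len = 1;
        }
    }
    ranges.push(len);
    (initial, ranges)
}

fn main() {
    let bits = [0u8, 0, 0, 0, 0, 1, 0, 0, 1, 0, 0, 0, 1, 1, 1, 0];
    assert_eq!(run_lengths(&bits), (false, vec![5, 1, 2, 1, 3, 3, 1]));
}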
+
+ /// Applies multiple instances of the run-length encoding to the initialization mask.
+ pub fn apply_copy(&mut self, defined: InitCopy, range: AllocRange, repeat: u64) {
+ // An optimization where we can just overwrite an entire range of initialization
+ // bits if they are going to be uniformly `1` or `0`.
+ if defined.ranges.len() <= 1 {
+ self.set_range_inbounds(
+ range.start,
+ range.start + range.size * repeat, // `Size` operations
+ defined.initial,
+ );
+ return;
+ }
+
+ for mut j in 0..repeat {
+ j *= range.size.bytes();
+ j += range.start.bytes();
+ let mut cur = defined.initial;
+ for range in &defined.ranges {
+ let old_j = j;
+ j += range;
+ self.set_range_inbounds(Size::from_bytes(old_j), Size::from_bytes(j), cur);
+ cur = !cur;
+ }
+ }
+ }
+}
diff --git a/compiler/rustc_middle/src/mir/interpret/allocation/provenance_map.rs b/compiler/rustc_middle/src/mir/interpret/allocation/provenance_map.rs
new file mode 100644
index 000000000..ddd3f3943
--- /dev/null
+++ b/compiler/rustc_middle/src/mir/interpret/allocation/provenance_map.rs
@@ -0,0 +1,321 @@
+//! Store the provenance for each byte in the range, with a more efficient
+//! representation for the common case where PTR_SIZE consecutive bytes have the same provenance.
+
+use std::cmp;
+
+use rustc_data_structures::sorted_map::SortedMap;
+use rustc_target::abi::{HasDataLayout, Size};
+
+use super::{alloc_range, AllocError, AllocId, AllocRange, AllocResult, Provenance};
+use rustc_serialize::{Decodable, Decoder, Encodable, Encoder};
+
+/// Stores the provenance information of pointers stored in memory.
+#[derive(Clone, PartialEq, Eq, Hash, Debug)]
+#[derive(HashStable)]
+pub struct ProvenanceMap<Prov = AllocId> {
+ /// Provenance in this map applies from the given offset for an entire pointer-size worth of
+ /// bytes. Two entries in this map are always at least a pointer size apart.
+ ptrs: SortedMap<Size, Prov>,
+ /// Provenance in this map only applies to the given single byte.
+ /// This map is disjoint from the previous. It will always be empty when
+ /// `Prov::OFFSET_IS_ADDR` is false.
+ bytes: Option<Box<SortedMap<Size, Prov>>>,
+}
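
A rough standalone model of this two-map split, using `BTreeMap` in place of `SortedMap` and small integers for provenance (all names here are illustrative, not part of the patch):

use std::collections::BTreeMap;

const PTR_SIZE: u64 = 8;

// Toy model: whole-pointer provenance keyed by the pointer's start offset,
// plus a per-byte map for fragments that no longer cover a full pointer.
struct ToyProvenanceMap {
    ptrs: BTreeMap<u64, u32>,  // start offset -> provenance id, covers PTR_SIZE bytes
    bytes: BTreeMap<u64, u32>, // offset -> provenance id, covers exactly one byte
}

impl ToyProvenanceMap {
    // Provenance of a single byte: any pointer-sized entry overlapping it wins,
    // otherwise fall back to the per-byte map.
    fn get(&self, offset: u64) -> Option<u32> {
        let lo = offset.saturating_sub(PTR_SIZE - 1);
        self.ptrs
            .range(lo..=offset)
            .next()
            .map(|(_, &p)| p)
            .or_else(|| self.bytes.get(&offset).copied())
    }
}

fn main() {
    let mut m = ToyProvenanceMap { ptrs: BTreeMap::new(), bytes: BTreeMap::new() };
    m.ptrs.insert(16, 1); // a pointer with provenance 1 stored at bytes 16..24
    m.bytes.insert(3, 2); // a lone byte carrying provenance 2
    assert_eq!(m.get(20), Some(1));
    assert_eq!(m.get(3), Some(2));
    assert_eq!(m.get(8), None);
}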
+
+impl<D: Decoder, Prov: Provenance + Decodable<D>> Decodable<D> for ProvenanceMap<Prov> {
+ fn decode(d: &mut D) -> Self {
+ assert!(!Prov::OFFSET_IS_ADDR); // only `AllocId` is ever serialized
+ Self { ptrs: Decodable::decode(d), bytes: None }
+ }
+}
+
+impl<S: Encoder, Prov: Provenance + Encodable<S>> Encodable<S> for ProvenanceMap<Prov> {
+ fn encode(&self, s: &mut S) {
+ let Self { ptrs, bytes } = self;
+ assert!(!Prov::OFFSET_IS_ADDR); // only `AllocId` is ever serialized
+ debug_assert!(bytes.is_none());
+ ptrs.encode(s)
+ }
+}
+
+impl<Prov> ProvenanceMap<Prov> {
+ pub fn new() -> Self {
+ ProvenanceMap { ptrs: SortedMap::new(), bytes: None }
+ }
+
+ /// The caller must guarantee that the given provenance list is already sorted
+ /// by address and contains no duplicates.
+ pub fn from_presorted_ptrs(r: Vec<(Size, Prov)>) -> Self {
+ ProvenanceMap { ptrs: SortedMap::from_presorted_elements(r), bytes: None }
+ }
+}
+
+impl ProvenanceMap {
+ /// Gives access to the ptr-sized provenances (which can also be thought of as relocations, and
+ /// indeed that is how codegen treats them).
+ ///
+ /// Only exposed with `AllocId` provenance, since it panics if there is bytewise provenance.
+ #[inline]
+ pub fn ptrs(&self) -> &SortedMap<Size, AllocId> {
+ debug_assert!(self.bytes.is_none()); // `AllocId::OFFSET_IS_ADDR` is false so this cannot fail
+ &self.ptrs
+ }
+}
+
+impl<Prov: Provenance> ProvenanceMap<Prov> {
+ /// Returns all ptr-sized provenance in the given range.
+ /// If the range has length 0, returns provenance that crosses the edge between `start-1` and
+ /// `start`.
+ fn range_get_ptrs(&self, range: AllocRange, cx: &impl HasDataLayout) -> &[(Size, Prov)] {
+ // We have to go back `pointer_size - 1` bytes, as that one would still overlap with
+ // the beginning of this range.
+ let adjusted_start = Size::from_bytes(
+ range.start.bytes().saturating_sub(cx.data_layout().pointer_size.bytes() - 1),
+ );
+ self.ptrs.range(adjusted_start..range.end())
+ }
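
The adjusted start above is plain saturating arithmetic: with 8-byte pointers, a query beginning at byte 10 must also see a pointer whose entry starts as early as byte 3 (10 - (8 - 1)), because that pointer covers bytes 3..11 and still overlaps the query. A standalone check (illustrative only):

fn main() {
    let (ptr_size, query_start): (u64, u64) = (8, 10);
    let adjusted_start = query_start.saturating_sub(ptr_size - 1);
    assert_eq!(adjusted_start, 3);
}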
+
+ /// Returns all byte-wise provenance in the given range.
+ fn range_get_bytes(&self, range: AllocRange) -> &[(Size, Prov)] {
+ if let Some(bytes) = self.bytes.as_ref() {
+ bytes.range(range.start..range.end())
+ } else {
+ &[]
+ }
+ }
+
+ /// Get the provenance of a single byte.
+ pub fn get(&self, offset: Size, cx: &impl HasDataLayout) -> Option<Prov> {
+ let prov = self.range_get_ptrs(alloc_range(offset, Size::from_bytes(1)), cx);
+ debug_assert!(prov.len() <= 1);
+ if let Some(entry) = prov.first() {
+ // If it overlaps with this byte, it is on this byte.
+ debug_assert!(self.bytes.as_ref().map_or(true, |b| b.get(&offset).is_none()));
+ Some(entry.1)
+ } else {
+ // Look up per-byte provenance.
+ self.bytes.as_ref().and_then(|b| b.get(&offset).copied())
+ }
+ }
+
+ /// Check if there is ptr-sized provenance at the given index.
+ /// Does not mean anything for bytewise provenance! But can be useful as an optimization.
+ pub fn get_ptr(&self, offset: Size) -> Option<Prov> {
+ self.ptrs.get(&offset).copied()
+ }
+
+ /// Returns whether this allocation has no provenance overlapping with the given range.
+ ///
+ /// Note: this function exists to allow `range_get_ptrs` to be private, in order to somewhat
+ /// limit access to provenance outside of the `Allocation` abstraction.
+ pub fn range_empty(&self, range: AllocRange, cx: &impl HasDataLayout) -> bool {
+ self.range_get_ptrs(range, cx).is_empty() && self.range_get_bytes(range).is_empty()
+ }
+
+ /// Yields all the provenances stored in this map.
+ pub fn provenances(&self) -> impl Iterator<Item = Prov> + '_ {
+ let bytes = self.bytes.iter().flat_map(|b| b.values());
+ self.ptrs.values().chain(bytes).copied()
+ }
+
+ pub fn insert_ptr(&mut self, offset: Size, prov: Prov, cx: &impl HasDataLayout) {
+ debug_assert!(self.range_empty(alloc_range(offset, cx.data_layout().pointer_size), cx));
+ self.ptrs.insert(offset, prov);
+ }
+
+ /// Removes all provenance inside the given range.
+ /// If there is provenance overlapping with the edges, might result in an error.
+ pub fn clear(&mut self, range: AllocRange, cx: &impl HasDataLayout) -> AllocResult {
+ let start = range.start;
+ let end = range.end();
+ // Clear the bytewise part -- this is easy.
+ if Prov::OFFSET_IS_ADDR {
+ if let Some(bytes) = self.bytes.as_mut() {
+ bytes.remove_range(start..end);
+ }
+ } else {
+ debug_assert!(self.bytes.is_none());
+ }
+
+ // For the ptr-sized part, find the first (inclusive) and last (exclusive) byte of
+ // provenance that overlaps with the given range.
+ let (first, last) = {
+ // Find all provenance overlapping the given range.
+ let provenance = self.range_get_ptrs(range, cx);
+ if provenance.is_empty() {
+ // No provenance in this range, we are done.
+ return Ok(());
+ }
+
+ (
+ provenance.first().unwrap().0,
+ provenance.last().unwrap().0 + cx.data_layout().pointer_size,
+ )
+ };
+
+ // We need to handle clearing the provenance from parts of a pointer.
+ if first < start {
+ if !Prov::OFFSET_IS_ADDR {
+ // We can't split up the provenance into less than a pointer.
+ return Err(AllocError::PartialPointerOverwrite(first));
+ }
+ // Insert the remaining part in the bytewise provenance.
+ let prov = self.ptrs[&first];
+ let bytes = self.bytes.get_or_insert_with(Box::default);
+ for offset in first..start {
+ bytes.insert(offset, prov);
+ }
+ }
+ if last > end {
+ let begin_of_last = last - cx.data_layout().pointer_size;
+ if !Prov::OFFSET_IS_ADDR {
+ // We can't split up the provenance into less than a pointer.
+ return Err(AllocError::PartialPointerOverwrite(begin_of_last));
+ }
+ // Insert the remaining part in the bytewise provenance.
+ let prov = self.ptrs[&begin_of_last];
+ let bytes = self.bytes.get_or_insert_with(Box::default);
+ for offset in end..last {
+ bytes.insert(offset, prov);
+ }
+ }
+
+ // Forget all the provenance.
+ // Since provenance entries do not overlap, we know that removing until `last` (exclusive) is fine,
+ // i.e., this will not remove any other provenance just after the ones we care about.
+ self.ptrs.remove_range(first..last);
+
+ Ok(())
+ }
+}
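
Illustration of the partial-overwrite split performed in `clear` (when `OFFSET_IS_ADDR` holds): clearing the middle of a stored pointer keeps its outer bytes alive as per-byte provenance. A standalone check of the byte bookkeeping (illustrative only):

// A pointer stored at bytes 0..8 whose middle (2..6) is cleared leaves bytes 0..2 and 6..8
// behind as per-byte ("bytewise") provenance entries.
fn main() {
    let (first, start, end, last): (u64, u64, u64, u64) = (0, 2, 6, 8);
    let head: Vec<u64> = (first..start).collect(); // survives in the bytewise map
    let tail: Vec<u64> = (end..last).collect();    // survives in the bytewise map
    assert_eq!(head, vec![0, 1]);
    assert_eq!(tail, vec![6, 7]);
}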
+
+/// A partial, owned list of provenance to transfer into another allocation.
+///
+/// Offsets are already adjusted to the destination allocation.
+pub struct ProvenanceCopy<Prov> {
+ dest_ptrs: Option<Box<[(Size, Prov)]>>,
+ dest_bytes: Option<Box<[(Size, Prov)]>>,
+}
+
+impl<Prov: Provenance> ProvenanceMap<Prov> {
+ pub fn prepare_copy(
+ &self,
+ src: AllocRange,
+ dest: Size,
+ count: u64,
+ cx: &impl HasDataLayout,
+ ) -> AllocResult<ProvenanceCopy<Prov>> {
+ let shift_offset = move |idx, offset| {
+ // compute offset for current repetition
+ let dest_offset = dest + src.size * idx; // `Size` operations
+ // shift offsets from source allocation to destination allocation
+ (offset - src.start) + dest_offset // `Size` operations
+ };
+ let ptr_size = cx.data_layout().pointer_size;
+
+ // # Pointer-sized provenances
+ // Get the provenances that are entirely within this range.
+ // (Different from `range_get_ptrs` which asks if they overlap the range.)
+ // Only makes sense if we are copying at least one pointer worth of bytes.
+ let mut dest_ptrs_box = None;
+ if src.size >= ptr_size {
+ let adjusted_end = Size::from_bytes(src.end().bytes() - (ptr_size.bytes() - 1));
+ let ptrs = self.ptrs.range(src.start..adjusted_end);
+ // If `count` is large, this is rather wasteful -- we are allocating a big array here, which
+ // is mostly filled with redundant information since it's just N copies of the same `Prov`s
+ // at slightly adjusted offsets. The reason we do this is so that in `mark_provenance_range`
+ // we can use `insert_presorted`. That wouldn't work with an `Iterator` that just produces
+ // the right sequence of provenance for all N copies.
+ // Basically, this large array would have to be created anyway in the target allocation.
+ let mut dest_ptrs = Vec::with_capacity(ptrs.len() * (count as usize));
+ for i in 0..count {
+ dest_ptrs
+ .extend(ptrs.iter().map(|&(offset, reloc)| (shift_offset(i, offset), reloc)));
+ }
+ debug_assert_eq!(dest_ptrs.len(), dest_ptrs.capacity());
+ dest_ptrs_box = Some(dest_ptrs.into_boxed_slice());
+ };
+
+ // # Byte-sized provenances
+ // This includes the existing bytewise provenance in the range, and ptr provenance
+ // that overlaps with the begin/end of the range.
+ let mut dest_bytes_box = None;
+ let begin_overlap = self.range_get_ptrs(alloc_range(src.start, Size::ZERO), cx).first();
+ let end_overlap = self.range_get_ptrs(alloc_range(src.end(), Size::ZERO), cx).first();
+ if !Prov::OFFSET_IS_ADDR {
+ // There can't be any bytewise provenance, and we cannot split up the begin/end overlap.
+ if let Some(entry) = begin_overlap {
+ return Err(AllocError::PartialPointerCopy(entry.0));
+ }
+ if let Some(entry) = end_overlap {
+ return Err(AllocError::PartialPointerCopy(entry.0));
+ }
+ debug_assert!(self.bytes.is_none());
+ } else {
+ let mut bytes = Vec::new();
+ // First, if there is a part of a pointer at the start, add that.
+ if let Some(entry) = begin_overlap {
+ trace!("start overlapping entry: {entry:?}");
+ // For really small copies, make sure we don't run off the end of the `src` range.
+ let entry_end = cmp::min(entry.0 + ptr_size, src.end());
+ for offset in src.start..entry_end {
+ bytes.push((offset, entry.1));
+ }
+ } else {
+ trace!("no start overlapping entry");
+ }
+ // Then the main part, bytewise provenance from `self.bytes`.
+ if let Some(all_bytes) = self.bytes.as_ref() {
+ bytes.extend(all_bytes.range(src.start..src.end()));
+ }
+ // And finally possibly parts of a pointer at the end.
+ if let Some(entry) = end_overlap {
+ trace!("end overlapping entry: {entry:?}");
+ // For really small copies, make sure we don't start before `src` does.
+ let entry_start = cmp::max(entry.0, src.start);
+ for offset in entry_start..src.end() {
+ if bytes.last().map_or(true, |bytes_entry| bytes_entry.0 < offset) {
+ // The last entry, if it exists, has a lower offset than us.
+ bytes.push((offset, entry.1));
+ } else {
+ // There already is an entry for this offset in there! This can happen when the
+ // start and end range checks actually end up hitting the same pointer, so we
+ // already added this in the "pointer at the start" part above.
+ assert!(entry.0 <= src.start);
+ }
+ }
+ } else {
+ trace!("no end overlapping entry");
+ }
+ trace!("byte provenances: {bytes:?}");
+
+ // And again a buffer for the new list on the target side.
+ let mut dest_bytes = Vec::with_capacity(bytes.len() * (count as usize));
+ for i in 0..count {
+ dest_bytes
+ .extend(bytes.iter().map(|&(offset, reloc)| (shift_offset(i, offset), reloc)));
+ }
+ debug_assert_eq!(dest_bytes.len(), dest_bytes.capacity());
+ dest_bytes_box = Some(dest_bytes.into_boxed_slice());
+ }
+
+ Ok(ProvenanceCopy { dest_ptrs: dest_ptrs_box, dest_bytes: dest_bytes_box })
+ }
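
The `shift_offset` closure used in `prepare_copy` is plain offset arithmetic: for a 16-byte source range starting at offset 10 copied to destination offset 100, repetition `i` maps source offset 12 to 100 + 16 * i + 2. A standalone check (illustrative only):

fn main() {
    let (src_start, src_size, dest): (u64, u64, u64) = (10, 16, 100);
    // Mirrors shift_offset: translate into the destination, then add one stride per repetition.
    let shift = |i: u64, offset: u64| (offset - src_start) + dest + src_size * i;
    assert_eq!(shift(0, 12), 102);
    assert_eq!(shift(2, 12), 134);
}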
+
+ /// Applies a provenance copy.
+ /// The affected range, as defined in the parameters to `prepare_copy`, is expected
+ /// to be clear of provenance.
+ pub fn apply_copy(&mut self, copy: ProvenanceCopy<Prov>) {
+ if let Some(dest_ptrs) = copy.dest_ptrs {
+ self.ptrs.insert_presorted(dest_ptrs.into());
+ }
+ if Prov::OFFSET_IS_ADDR {
+ if let Some(dest_bytes) = copy.dest_bytes && !dest_bytes.is_empty() {
+ self.bytes.get_or_insert_with(Box::default).insert_presorted(dest_bytes.into());
+ }
+ } else {
+ debug_assert!(copy.dest_bytes.is_none());
+ }
+ }
+}
diff --git a/compiler/rustc_middle/src/mir/interpret/allocation/tests.rs b/compiler/rustc_middle/src/mir/interpret/allocation/tests.rs
new file mode 100644
index 000000000..c9c3c50c5
--- /dev/null
+++ b/compiler/rustc_middle/src/mir/interpret/allocation/tests.rs
@@ -0,0 +1,19 @@
+use super::*;
+
+#[test]
+fn uninit_mask() {
+ let mut mask = InitMask::new(Size::from_bytes(500), false);
+ assert!(!mask.get(Size::from_bytes(499)));
+ mask.set_range(alloc_range(Size::from_bytes(499), Size::from_bytes(1)), true);
+ assert!(mask.get(Size::from_bytes(499)));
+ mask.set_range((100..256).into(), true);
+ for i in 0..100 {
+ assert!(!mask.get(Size::from_bytes(i)), "{i} should not be set");
+ }
+ for i in 100..256 {
+ assert!(mask.get(Size::from_bytes(i)), "{i} should be set");
+ }
+ for i in 256..499 {
+ assert!(!mask.get(Size::from_bytes(i)), "{i} should not be set");
+ }
+}
diff --git a/compiler/rustc_middle/src/mir/interpret/error.rs b/compiler/rustc_middle/src/mir/interpret/error.rs
index b5a50cc15..bd9cd53e1 100644
--- a/compiler/rustc_middle/src/mir/interpret/error.rs
+++ b/compiler/rustc_middle/src/mir/interpret/error.rs
@@ -16,8 +16,6 @@ pub enum ErrorHandled {
/// Already reported an error for this evaluation, and the compilation is
/// *guaranteed* to fail. Warnings/lints *must not* produce `Reported`.
Reported(ErrorGuaranteed),
- /// Already emitted a lint for this evaluation.
- Linted,
/// Don't emit an error, the evaluation failed because the MIR was generic
/// and the substs didn't fully monomorphize it.
TooGeneric,
@@ -89,18 +87,6 @@ fn print_backtrace(backtrace: &Backtrace) {
eprintln!("\n\nAn error occurred in miri:\n{}", backtrace);
}
-impl From<ErrorHandled> for InterpErrorInfo<'_> {
- fn from(err: ErrorHandled) -> Self {
- match err {
- ErrorHandled::Reported(ErrorGuaranteed { .. }) | ErrorHandled::Linted => {
- err_inval!(ReferencedConstant)
- }
- ErrorHandled::TooGeneric => err_inval!(TooGeneric),
- }
- .into()
- }
-}
-
impl From<ErrorGuaranteed> for InterpErrorInfo<'_> {
fn from(err: ErrorGuaranteed) -> Self {
InterpError::InvalidProgram(InvalidProgramInfo::AlreadyReported(err)).into()
@@ -138,9 +124,6 @@ impl<'tcx> From<InterpError<'tcx>> for InterpErrorInfo<'tcx> {
pub enum InvalidProgramInfo<'tcx> {
/// Resolution can fail if we are in a too generic context.
TooGeneric,
- /// Cannot compute this constant because it depends on another one
- /// which already produced an error.
- ReferencedConstant,
/// Abort in case errors are already reported.
AlreadyReported(ErrorGuaranteed),
/// An error occurred during layout computation.
@@ -158,9 +141,11 @@ impl fmt::Display for InvalidProgramInfo<'_> {
use InvalidProgramInfo::*;
match self {
TooGeneric => write!(f, "encountered overly generic constant"),
- ReferencedConstant => write!(f, "referenced constant has errors"),
AlreadyReported(ErrorGuaranteed { .. }) => {
- write!(f, "encountered constants with type errors, stopping evaluation")
+ write!(
+ f,
+ "an error has already been reported elsewhere (this should not usually be printed)"
+ )
}
Layout(ref err) => write!(f, "{err}"),
FnAbiAdjustForForeignAbi(ref err) => write!(f, "{err}"),
@@ -401,16 +386,15 @@ impl fmt::Display for UndefinedBehaviorInfo {
pub enum UnsupportedOpInfo {
/// Free-form case. Only for errors that are never caught!
Unsupported(String),
- /// Overwriting parts of a pointer; the resulting state cannot be represented in our
- /// `Allocation` data structure. See <https://github.com/rust-lang/miri/issues/2181>.
- PartialPointerOverwrite(Pointer<AllocId>),
- /// Attempting to `copy` parts of a pointer to somewhere else; the resulting state cannot be
- /// represented in our `Allocation` data structure. See
- /// <https://github.com/rust-lang/miri/issues/2181>.
- PartialPointerCopy(Pointer<AllocId>),
//
// The variants below are only reachable from CTFE/const prop, miri will never emit them.
//
+ /// Overwriting parts of a pointer; without knowing absolute addresses, the resulting state
+ /// cannot be represented by the CTFE interpreter.
+ PartialPointerOverwrite(Pointer<AllocId>),
+ /// Attempting to `copy` parts of a pointer to somewhere else; without knowing absolute
+ /// addresses, the resulting state cannot be represented by the CTFE interpreter.
+ PartialPointerCopy(Pointer<AllocId>),
/// Encountered a pointer where we needed raw bytes.
ReadPointerAsBytes,
/// Accessing thread local statics
diff --git a/compiler/rustc_middle/src/mir/interpret/mod.rs b/compiler/rustc_middle/src/mir/interpret/mod.rs
index 5e3dfcbcc..d79cd8b7a 100644
--- a/compiler/rustc_middle/src/mir/interpret/mod.rs
+++ b/compiler/rustc_middle/src/mir/interpret/mod.rs
@@ -106,6 +106,7 @@ use rustc_ast::LitKind;
use rustc_data_structures::fx::FxHashMap;
use rustc_data_structures::sync::{HashMapExt, Lock};
use rustc_data_structures::tiny_list::TinyList;
+use rustc_errors::ErrorGuaranteed;
use rustc_hir::def_id::DefId;
use rustc_macros::HashStable;
use rustc_middle::ty::print::with_no_trimmed_paths;
@@ -127,8 +128,8 @@ pub use self::error::{
pub use self::value::{get_slice_bytes, ConstAlloc, ConstValue, Scalar};
pub use self::allocation::{
- alloc_range, AllocRange, Allocation, ConstAllocation, InitChunk, InitChunkIter, InitMask,
- ProvenanceMap,
+ alloc_range, AllocError, AllocRange, AllocResult, Allocation, ConstAllocation, InitChunk,
+ InitChunkIter,
};
pub use self::pointer::{Pointer, PointerArithmetic, Provenance};
@@ -176,7 +177,7 @@ pub enum LitToConstError {
/// This is used for graceful error handling (`delay_span_bug`) in
/// type checking (`Const::from_anon_const`).
TypeError,
- Reported,
+ Reported(ErrorGuaranteed),
}
#[derive(Copy, Clone, Eq, Hash, Ord, PartialEq, PartialOrd)]
diff --git a/compiler/rustc_middle/src/mir/interpret/pointer.rs b/compiler/rustc_middle/src/mir/interpret/pointer.rs
index 23c2ce647..9c270ba1e 100644
--- a/compiler/rustc_middle/src/mir/interpret/pointer.rs
+++ b/compiler/rustc_middle/src/mir/interpret/pointer.rs
@@ -103,8 +103,7 @@ impl<T: HasDataLayout> PointerArithmetic for T {}
/// This trait abstracts over the kind of provenance that is associated with a `Pointer`. It is
/// mostly opaque; the `Machine` trait extends it with some more operations that also have access to
/// some global state.
-/// We don't actually care about this `Debug` bound (we use `Provenance::fmt` to format the entire
-/// pointer), but `derive` adds some unnecessary bounds.
+/// The `Debug` rendering is used to display bare provenance, and for the default impl of `fmt`.
pub trait Provenance: Copy + fmt::Debug {
/// Says whether the `offset` field of `Pointer`s with this provenance is the actual physical address.
/// - If `false`, the offset *must* be relative. This means the bytes representing a pointer are
@@ -115,14 +114,23 @@ pub trait Provenance: Copy + fmt::Debug {
/// pointer, and implement ptr-to-int transmutation by stripping provenance.
const OFFSET_IS_ADDR: bool;
- /// We also use this trait to control whether to abort execution when a pointer is being partially overwritten
- /// (this avoids a separate trait in `allocation.rs` just for this purpose).
- const ERR_ON_PARTIAL_PTR_OVERWRITE: bool;
-
/// Determines how a pointer should be printed.
+ ///
+ /// Default impl is only good for when `OFFSET_IS_ADDR == true`.
fn fmt(ptr: &Pointer<Self>, f: &mut fmt::Formatter<'_>) -> fmt::Result
where
- Self: Sized;
+ Self: Sized,
+ {
+ assert!(Self::OFFSET_IS_ADDR);
+ let (prov, addr) = ptr.into_parts(); // address is absolute
+ write!(f, "{:#x}", addr.bytes())?;
+ if f.alternate() {
+ write!(f, "{prov:#?}")?;
+ } else {
+ write!(f, "{prov:?}")?;
+ }
+ Ok(())
+ }
/// If `OFFSET_IS_ADDR == false`, provenance must always be able to
/// identify the allocation this ptr points to (i.e., this must return `Some`).
@@ -139,9 +147,6 @@ impl Provenance for AllocId {
// so ptr-to-int casts are not possible (since we do not know the global physical offset).
const OFFSET_IS_ADDR: bool = false;
- // For now, do not allow this, so that we keep our options open.
- const ERR_ON_PARTIAL_PTR_OVERWRITE: bool = true;
-
fn fmt(ptr: &Pointer<Self>, f: &mut fmt::Formatter<'_>) -> fmt::Result {
// Forward `alternate` flag to `alloc_id` printing.
if f.alternate() {
@@ -168,7 +173,7 @@ impl Provenance for AllocId {
/// Represents a pointer in the Miri engine.
///
/// Pointers are "tagged" with provenance information; typically the `AllocId` they belong to.
-#[derive(Copy, Clone, Eq, PartialEq, Ord, PartialOrd, TyEncodable, TyDecodable, Hash)]
+#[derive(Copy, Clone, Eq, PartialEq, TyEncodable, TyDecodable, Hash)]
#[derive(HashStable)]
pub struct Pointer<Prov = AllocId> {
pub(super) offset: Size, // kept private to avoid accidental misinterpretation (meaning depends on `Prov` type)
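
The default `fmt` body added above is only sound when `OFFSET_IS_ADDR` is true, which the `assert!` enforces. A self-contained sketch of the same pattern, using illustrative names rather than rustc's actual interpreter types:

use std::fmt;

trait Provenance: Copy + fmt::Debug {
    const OFFSET_IS_ADDR: bool;

    // Default rendering: absolute address followed by the provenance's `Debug` form,
    // mirroring the default impl added in the hunk above.
    fn fmt_ptr(addr: u64, prov: &Self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        assert!(Self::OFFSET_IS_ADDR, "default rendering requires absolute addresses");
        write!(f, "{addr:#x}{prov:?}")
    }
}

// Hypothetical provenance whose offsets are absolute addresses; it inherits `fmt_ptr`.
#[derive(Copy, Clone, Debug)]
struct AbsoluteTag(u32);

impl Provenance for AbsoluteTag {
    const OFFSET_IS_ADDR: bool = true;
}

struct Render(u64, AbsoluteTag);

impl fmt::Display for Render {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        AbsoluteTag::fmt_ptr(self.0, &self.1, f)
    }
}

fn main() {
    // Prints `0x2aAbsoluteTag(7)`.
    println!("{}", Render(0x2a, AbsoluteTag(7)));
}
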
diff --git a/compiler/rustc_middle/src/mir/interpret/queries.rs b/compiler/rustc_middle/src/mir/interpret/queries.rs
index 473894ac1..b6c6e9d55 100644
--- a/compiler/rustc_middle/src/mir/interpret/queries.rs
+++ b/compiler/rustc_middle/src/mir/interpret/queries.rs
@@ -175,6 +175,8 @@ impl<'tcx> TyCtxt<'tcx> {
impl<'tcx> TyCtxtAt<'tcx> {
/// Evaluate a static's initializer, returning the allocation of the initializer's memory.
+ ///
+ /// The span is entirely ignored here, but still helpful for better query cycle errors.
pub fn eval_static_initializer(
self,
def_id: DefId,
@@ -187,6 +189,8 @@ impl<'tcx> TyCtxtAt<'tcx> {
}
/// Evaluate anything constant-like, returning the allocation of the final memory.
+ ///
+ /// The span is entirely ignored here, but still helpful for better query cycle errors.
fn eval_to_allocation(
self,
gid: GlobalId<'tcx>,
diff --git a/compiler/rustc_middle/src/mir/interpret/value.rs b/compiler/rustc_middle/src/mir/interpret/value.rs
index ac5fddb7a..e6636e50e 100644
--- a/compiler/rustc_middle/src/mir/interpret/value.rs
+++ b/compiler/rustc_middle/src/mir/interpret/value.rs
@@ -1,6 +1,8 @@
use std::convert::{TryFrom, TryInto};
use std::fmt;
+use either::{Either, Left, Right};
+
use rustc_apfloat::{
ieee::{Double, Single},
Float,
@@ -18,15 +20,15 @@ use super::{
/// Represents the result of const evaluation via the `eval_to_allocation` query.
#[derive(Copy, Clone, HashStable, TyEncodable, TyDecodable, Debug, Hash, Eq, PartialEq)]
pub struct ConstAlloc<'tcx> {
- // the value lives here, at offset 0, and that allocation definitely is an `AllocKind::Memory`
- // (so you can use `AllocMap::unwrap_memory`).
+ /// The value lives here, at offset 0, and that allocation definitely is an `AllocKind::Memory`
+ /// (so you can use `AllocMap::unwrap_memory`).
pub alloc_id: AllocId,
pub ty: Ty<'tcx>,
}
/// Represents a constant value in Rust. `Scalar` and `Slice` are optimizations for
/// array length computations, enum discriminants and the pattern matching logic.
-#[derive(Copy, Clone, Debug, Eq, PartialEq, PartialOrd, Ord, TyEncodable, TyDecodable, Hash)]
+#[derive(Copy, Clone, Debug, Eq, PartialEq, TyEncodable, TyDecodable, Hash)]
#[derive(HashStable, Lift)]
pub enum ConstValue<'tcx> {
/// Used only for types with `layout::abi::Scalar` ABI.
@@ -108,7 +110,7 @@ impl<'tcx> ConstValue<'tcx> {
///
/// These variants would be private if there was a convenient way to achieve that in Rust.
/// Do *not* match on a `Scalar`! Use the various `to_*` methods instead.
-#[derive(Clone, Copy, Eq, PartialEq, Ord, PartialOrd, TyEncodable, TyDecodable, Hash)]
+#[derive(Clone, Copy, Eq, PartialEq, TyEncodable, TyDecodable, Hash)]
#[derive(HashStable)]
pub enum Scalar<Prov = AllocId> {
/// The raw bytes of a simple value.
@@ -293,10 +295,10 @@ impl<Prov> Scalar<Prov> {
pub fn to_bits_or_ptr_internal(
self,
target_size: Size,
- ) -> Result<Result<u128, Pointer<Prov>>, ScalarSizeMismatch> {
+ ) -> Result<Either<u128, Pointer<Prov>>, ScalarSizeMismatch> {
assert_ne!(target_size.bytes(), 0, "you should never look at the bits of a ZST");
Ok(match self {
- Scalar::Int(int) => Ok(int.to_bits(target_size).map_err(|size| {
+ Scalar::Int(int) => Left(int.to_bits(target_size).map_err(|size| {
ScalarSizeMismatch { target_size: target_size.bytes(), data_size: size.bytes() }
})?),
Scalar::Ptr(ptr, sz) => {
@@ -306,7 +308,7 @@ impl<Prov> Scalar<Prov> {
data_size: sz.into(),
});
}
- Err(ptr)
+ Right(ptr)
}
})
}
@@ -318,8 +320,8 @@ impl<'tcx, Prov: Provenance> Scalar<Prov> {
.to_bits_or_ptr_internal(cx.pointer_size())
.map_err(|s| err_ub!(ScalarSizeMismatch(s)))?
{
- Err(ptr) => Ok(ptr.into()),
- Ok(bits) => {
+ Right(ptr) => Ok(ptr.into()),
+ Left(bits) => {
let addr = u64::try_from(bits).unwrap();
Ok(Pointer::from_addr(addr))
}
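
Call sites now distinguish the two success cases with `either::Either` instead of a nested `Result`. A simplified, self-contained sketch of the matching pattern (the types below are stand-ins, not the interpreter's; `either` is the same dependency imported at the top of this file):

use either::{Either, Left, Right};

// Stand-in for the interpreter's pointer type.
struct Ptr { addr: u64 }

// Either plain bits or a pointer, as in the new `to_bits_or_ptr_internal` signature.
fn to_bits_or_ptr(raw: u128, is_ptr: bool) -> Either<u128, Ptr> {
    if is_ptr { Right(Ptr { addr: raw as u64 }) } else { Left(raw) }
}

fn main() {
    match to_bits_or_ptr(42, false) {
        Left(bits) => println!("plain bits: {bits}"),
        Right(ptr) => println!("pointer to {:#x}", ptr.addr),
    }
}
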
diff --git a/compiler/rustc_middle/src/mir/mod.rs b/compiler/rustc_middle/src/mir/mod.rs
index 79db35a76..a513444e1 100644
--- a/compiler/rustc_middle/src/mir/mod.rs
+++ b/compiler/rustc_middle/src/mir/mod.rs
@@ -10,7 +10,7 @@ use crate::ty::codec::{TyDecoder, TyEncoder};
use crate::ty::fold::{FallibleTypeFolder, TypeFoldable};
use crate::ty::print::{FmtPrinter, Printer};
use crate::ty::visit::{TypeVisitable, TypeVisitor};
-use crate::ty::{self, List, Ty, TyCtxt};
+use crate::ty::{self, DefIdTree, List, Ty, TyCtxt};
use crate::ty::{AdtDef, InstanceDef, ScalarInt, UserTypeAnnotationIndex};
use crate::ty::{GenericArg, InternalSubsts, SubstsRef};
@@ -100,13 +100,9 @@ impl<'tcx> HasLocalDecls<'tcx> for Body<'tcx> {
/// pass will be named after the type, and it will consist of a main
/// loop that goes over each available MIR and applies `run_pass`.
pub trait MirPass<'tcx> {
- fn name(&self) -> Cow<'_, str> {
+ fn name(&self) -> &str {
let name = std::any::type_name::<Self>();
- if let Some(tail) = name.rfind(':') {
- Cow::from(&name[tail + 1..])
- } else {
- Cow::from(name)
- }
+ if let Some((_, tail)) = name.rsplit_once(':') { tail } else { name }
}
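
The new default body borrows from the type name instead of allocating a `Cow`, using `rsplit_once` to strip the module path. A minimal standalone demonstration of that split (the pass path is illustrative):

// Same logic as the `MirPass::name` default above, extracted for illustration.
fn short_name(full: &str) -> &str {
    if let Some((_, tail)) = full.rsplit_once(':') { tail } else { full }
}

fn main() {
    assert_eq!(short_name("rustc_mir_transform::simplify::SimplifyCfg"), "SimplifyCfg");
    assert_eq!(short_name("UnqualifiedPass"), "UnqualifiedPass");
}
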
/// Returns `true` if this pass is enabled with the current combination of compiler flags.
@@ -138,33 +134,46 @@ impl MirPhase {
}
}
}
-}
-impl Display for MirPhase {
- fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {
- match self {
- MirPhase::Built => write!(f, "built"),
- MirPhase::Analysis(p) => write!(f, "analysis-{}", p),
- MirPhase::Runtime(p) => write!(f, "runtime-{}", p),
+ /// Parses a `MirPhase` from a pair of strings. Panics if this isn't possible for any reason.
+ pub fn parse(dialect: String, phase: Option<String>) -> Self {
+ match &*dialect.to_ascii_lowercase() {
+ "built" => {
+ assert!(phase.is_none(), "Cannot specify a phase for `Built` MIR");
+ MirPhase::Built
+ }
+ "analysis" => Self::Analysis(AnalysisPhase::parse(phase)),
+ "runtime" => Self::Runtime(RuntimePhase::parse(phase)),
+ _ => panic!("Unknown MIR dialect {}", dialect),
}
}
}
-impl Display for AnalysisPhase {
- fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {
- match self {
- AnalysisPhase::Initial => write!(f, "initial"),
- AnalysisPhase::PostCleanup => write!(f, "post_cleanup"),
+impl AnalysisPhase {
+ pub fn parse(phase: Option<String>) -> Self {
+ let Some(phase) = phase else {
+ return Self::Initial;
+ };
+
+ match &*phase.to_ascii_lowercase() {
+ "initial" => Self::Initial,
+ "post_cleanup" | "post-cleanup" | "postcleanup" => Self::PostCleanup,
+ _ => panic!("Unknown analysis phase {}", phase),
}
}
}
-impl Display for RuntimePhase {
- fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {
- match self {
- RuntimePhase::Initial => write!(f, "initial"),
- RuntimePhase::PostCleanup => write!(f, "post_cleanup"),
- RuntimePhase::Optimized => write!(f, "optimized"),
+impl RuntimePhase {
+ pub fn parse(phase: Option<String>) -> Self {
+ let Some(phase) = phase else {
+ return Self::Initial;
+ };
+
+ match &*phase.to_ascii_lowercase() {
+ "initial" => Self::Initial,
+ "post_cleanup" | "post-cleanup" | "postcleanup" => Self::PostCleanup,
+ "optimized" => Self::Optimized,
+ _ => panic!("Unknown runtime phase {}", phase),
}
}
}
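
A usage sketch for the new parsers, assuming `MirPhase`, `AnalysisPhase` and `RuntimePhase` are in scope from `rustc_middle::mir` (illustrative only, not a test added by this change):

// Both spellings of the cleanup phase are accepted.
let phase = MirPhase::parse("analysis".to_string(), Some("post-cleanup".to_string()));
assert!(matches!(phase, MirPhase::Analysis(AnalysisPhase::PostCleanup)));

// Omitting the phase falls back to `Initial`.
let phase = MirPhase::parse("runtime".to_string(), None);
assert!(matches!(phase, MirPhase::Runtime(RuntimePhase::Initial)));
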
@@ -293,6 +302,13 @@ pub struct Body<'tcx> {
/// potentially allow things like `[u8; std::mem::size_of::<T>() * 0]` due to this.
pub is_polymorphic: bool,
+ /// The phase at which this MIR should be "injected" into the compilation process.
+ ///
+ /// Everything that comes before this `MirPhase` should be skipped.
+ ///
+ /// This is only `Some` if the function that this body comes from was annotated with `rustc_custom_mir`.
+ pub injection_phase: Option<MirPhase>,
+
pub tainted_by_errors: Option<ErrorGuaranteed>,
}
@@ -319,7 +335,7 @@ impl<'tcx> Body<'tcx> {
let mut body = Body {
phase: MirPhase::Built,
- pass_count: 1,
+ pass_count: 0,
source,
basic_blocks: BasicBlocks::new(basic_blocks),
source_scopes,
@@ -339,6 +355,7 @@ impl<'tcx> Body<'tcx> {
span,
required_consts: Vec::new(),
is_polymorphic: false,
+ injection_phase: None,
tainted_by_errors,
};
body.is_polymorphic = body.has_non_region_param();
@@ -353,7 +370,7 @@ impl<'tcx> Body<'tcx> {
pub fn new_cfg_only(basic_blocks: IndexVec<BasicBlock, BasicBlockData<'tcx>>) -> Self {
let mut body = Body {
phase: MirPhase::Built,
- pass_count: 1,
+ pass_count: 0,
source: MirSource::item(CRATE_DEF_ID.to_def_id()),
basic_blocks: BasicBlocks::new(basic_blocks),
source_scopes: IndexVec::new(),
@@ -366,6 +383,7 @@ impl<'tcx> Body<'tcx> {
required_consts: Vec::new(),
var_debug_info: Vec::new(),
is_polymorphic: false,
+ injection_phase: None,
tainted_by_errors: None,
};
body.is_polymorphic = body.has_non_region_param();
@@ -508,6 +526,14 @@ impl<'tcx> Body<'tcx> {
pub fn generator_kind(&self) -> Option<GeneratorKind> {
self.generator.as_ref().map(|generator| generator.generator_kind)
}
+
+ #[inline]
+ pub fn should_skip(&self) -> bool {
+ let Some(injection_phase) = self.injection_phase else {
+ return false;
+ };
+ injection_phase > self.phase
+ }
}
#[derive(Copy, Clone, PartialEq, Eq, Debug, TyEncodable, TyDecodable, HashStable)]
@@ -1012,6 +1038,18 @@ pub enum VarDebugInfoContents<'tcx> {
/// based on a `Local`, not a `Static`, and contains no indexing.
Place(Place<'tcx>),
Const(Constant<'tcx>),
+ /// The user variable's data is split across several fragments,
+ /// each described by a `VarDebugInfoFragment`.
+ /// See DWARF 5's "2.6.1.2 Composite Location Descriptions"
+ /// and LLVM's `DW_OP_LLVM_fragment` for more details on
+ /// the underlying debuginfo feature this relies on.
+ Composite {
+ /// Type of the original user variable.
+ ty: Ty<'tcx>,
+ /// All the parts of the original user variable, which ended
+ /// up in disjoint places, due to optimizations.
+ fragments: Vec<VarDebugInfoFragment<'tcx>>,
+ },
}
impl<'tcx> Debug for VarDebugInfoContents<'tcx> {
@@ -1019,7 +1057,48 @@ impl<'tcx> Debug for VarDebugInfoContents<'tcx> {
match self {
VarDebugInfoContents::Const(c) => write!(fmt, "{}", c),
VarDebugInfoContents::Place(p) => write!(fmt, "{:?}", p),
+ VarDebugInfoContents::Composite { ty, fragments } => {
+ write!(fmt, "{:?}{{ ", ty)?;
+ for f in fragments.iter() {
+ write!(fmt, "{:?}, ", f)?;
+ }
+ write!(fmt, "}}")
+ }
+ }
+ }
+}
+
+#[derive(Clone, TyEncodable, TyDecodable, HashStable, TypeFoldable, TypeVisitable)]
+pub struct VarDebugInfoFragment<'tcx> {
+ /// Where in the composite user variable this fragment is,
+ /// represented as a "projection" into the composite variable.
+ /// At lower levels, this corresponds to a byte/bit range.
+ // NOTE(eddyb) there's an unenforced invariant that this contains
+ // only `Field`s, and not into `enum` variants or `union`s.
+ // FIXME(eddyb) support this for `enum`s by either using DWARF's
+ // more advanced control-flow features (unsupported by LLVM?)
+ // to match on the discriminant, or by using custom type debuginfo
+ // with non-overlapping variants for the composite variable.
+ pub projection: Vec<PlaceElem<'tcx>>,
+
+ /// Where the data for this fragment can be found.
+ // NOTE(eddyb) There's an unenforced invariant that this `Place`
+ // contains no indexing (with a non-constant index).
+ pub contents: Place<'tcx>,
+}
+
+impl Debug for VarDebugInfoFragment<'_> {
+ fn fmt(&self, fmt: &mut Formatter<'_>) -> fmt::Result {
+ for elem in self.projection.iter() {
+ match elem {
+ ProjectionElem::Field(field, _) => {
+ write!(fmt, ".{:?}", field.index())?;
+ }
+ _ => bug!("unsupported fragment projection `{:?}`", elem),
+ }
}
+
+ write!(fmt, " => {:?}", self.contents)
}
}
@@ -1186,6 +1265,11 @@ impl<'tcx> BasicBlockData<'tcx> {
pub fn visitable(&self, index: usize) -> &dyn MirVisitable<'tcx> {
if index < self.statements.len() { &self.statements[index] } else { &self.terminator }
}
+
+ /// Does the block have no statements and an unreachable terminator?
+ pub fn is_empty_unreachable(&self) -> bool {
+ self.statements.is_empty() && matches!(self.terminator().kind, TerminatorKind::Unreachable)
+ }
}
impl<O> AssertKind<O> {
@@ -1477,7 +1561,7 @@ impl<'tcx> Place<'tcx> {
/// If MirPhase >= Derefered and if projection contains Deref,
/// It's guaranteed to be in the first place
pub fn has_deref(&self) -> bool {
- // To make sure this is not accidently used in wrong mir phase
+ // To make sure this is not accidentally used in wrong mir phase
debug_assert!(
self.projection.is_empty() || !self.projection[1..].contains(&PlaceElem::Deref)
);
@@ -1767,7 +1851,7 @@ impl<'tcx> Operand<'tcx> {
substs: SubstsRef<'tcx>,
span: Span,
) -> Self {
- let ty = tcx.bound_type_of(def_id).subst(tcx, substs);
+ let ty = tcx.mk_fn_def(def_id, substs);
Operand::Constant(Box::new(Constant {
span,
user_ty: None,
@@ -1893,6 +1977,7 @@ impl BorrowKind {
}
}
+ // FIXME: won't be used after diagnostic migration
pub fn describe_mutability(&self) -> &str {
match *self {
BorrowKind::Shared | BorrowKind::Shallow | BorrowKind::Unique => "immutable",
@@ -1997,10 +2082,10 @@ impl<'tcx> Debug for Rvalue<'tcx> {
.print_def_path(variant_def.def_id, substs)?
.into_buffer();
- match variant_def.ctor_kind {
- CtorKind::Const => fmt.write_str(&name),
- CtorKind::Fn => fmt_tuple(fmt, &name),
- CtorKind::Fictive => {
+ match variant_def.ctor_kind() {
+ Some(CtorKind::Const) => fmt.write_str(&name),
+ Some(CtorKind::Fn) => fmt_tuple(fmt, &name),
+ None => {
let mut struct_fmt = fmt.debug_struct(&name);
for (field, place) in iter::zip(&variant_def.fields, places) {
struct_fmt.field(field.name.as_str(), place);
@@ -2186,8 +2271,10 @@ impl<'tcx> ConstantKind<'tcx> {
// FIXME: We might want to have a `try_eval`-like function on `Unevaluated`
match tcx.const_eval_resolve(param_env, uneval, None) {
Ok(val) => Self::Val(val, ty),
- Err(ErrorHandled::TooGeneric | ErrorHandled::Linted) => self,
- Err(_) => Self::Ty(tcx.const_error(ty)),
+ Err(ErrorHandled::TooGeneric) => self,
+ Err(ErrorHandled::Reported(guar)) => {
+ Self::Ty(tcx.const_error_with_guaranteed(ty, guar))
+ }
}
}
}
@@ -2403,16 +2490,11 @@ impl<'tcx> ConstantKind<'tcx> {
ExprKind::Path(QPath::Resolved(_, &Path { res: Res::Def(ConstParam, def_id), .. })) => {
// Find the name and index of the const parameter by indexing the generics of
// the parent item and construct a `ParamConst`.
- let hir_id = tcx.hir().local_def_id_to_hir_id(def_id.expect_local());
- let item_id = tcx.hir().get_parent_node(hir_id);
- let item_def_id = tcx.hir().local_def_id(item_id);
- let generics = tcx.generics_of(item_def_id.to_def_id());
+ let item_def_id = tcx.parent(def_id);
+ let generics = tcx.generics_of(item_def_id);
let index = generics.param_def_id_to_index[&def_id];
- let name = tcx.hir().name(hir_id);
- let ty_const = tcx.mk_const(ty::ConstS {
- kind: ty::ConstKind::Param(ty::ParamConst::new(index, name)),
- ty,
- });
+ let name = tcx.item_name(def_id);
+ let ty_const = tcx.mk_const(ty::ParamConst::new(index, name), ty);
debug!(?ty_const);
return Self::Ty(ty_const);
@@ -2839,14 +2921,14 @@ fn pretty_print_const_value<'tcx>(
let cx = cx.print_value_path(variant_def.def_id, substs)?;
fmt.write_str(&cx.into_buffer())?;
- match variant_def.ctor_kind {
- CtorKind::Const => {}
- CtorKind::Fn => {
+ match variant_def.ctor_kind() {
+ Some(CtorKind::Const) => {}
+ Some(CtorKind::Fn) => {
fmt.write_str("(")?;
comma_sep(fmt, fields)?;
fmt.write_str(")")?;
}
- CtorKind::Fictive => {
+ None => {
fmt.write_str(" {{ ")?;
let mut first = true;
for (field_def, field) in iter::zip(&variant_def.fields, fields)
diff --git a/compiler/rustc_middle/src/mir/pretty.rs b/compiler/rustc_middle/src/mir/pretty.rs
index 05dcfba77..2a4ff4b88 100644
--- a/compiler/rustc_middle/src/mir/pretty.rs
+++ b/compiler/rustc_middle/src/mir/pretty.rs
@@ -12,11 +12,10 @@ use rustc_data_structures::fx::FxHashMap;
use rustc_hir::def_id::DefId;
use rustc_index::vec::Idx;
use rustc_middle::mir::interpret::{
- read_target_uint, AllocId, Allocation, ConstAllocation, ConstValue, GlobalAlloc, Pointer,
- Provenance,
+ alloc_range, read_target_uint, AllocId, Allocation, ConstAllocation, ConstValue, GlobalAlloc,
+ Pointer, Provenance,
};
use rustc_middle::mir::visit::Visitor;
-use rustc_middle::mir::MirSource;
use rustc_middle::mir::*;
use rustc_middle::ty::{self, TyCtxt};
use rustc_target::abi::Size;
@@ -74,7 +73,7 @@ pub enum PassWhere {
#[inline]
pub fn dump_mir<'tcx, F>(
tcx: TyCtxt<'tcx>,
- pass_num: Option<&dyn Display>,
+ pass_num: bool,
pass_name: &str,
disambiguator: &dyn Display,
body: &Body<'tcx>,
@@ -111,7 +110,7 @@ pub fn dump_enabled<'tcx>(tcx: TyCtxt<'tcx>, pass_name: &str, def_id: DefId) ->
fn dump_matched_mir_node<'tcx, F>(
tcx: TyCtxt<'tcx>,
- pass_num: Option<&dyn Display>,
+ pass_num: bool,
pass_name: &str,
disambiguator: &dyn Display,
body: &Body<'tcx>,
@@ -120,8 +119,7 @@ fn dump_matched_mir_node<'tcx, F>(
F: FnMut(PassWhere, &mut dyn Write) -> io::Result<()>,
{
let _: io::Result<()> = try {
- let mut file =
- create_dump_file(tcx, "mir", pass_num, pass_name, disambiguator, body.source)?;
+ let mut file = create_dump_file(tcx, "mir", pass_num, pass_name, disambiguator, body)?;
// see notes on #41697 above
let def_path =
ty::print::with_forced_impl_filename_line!(tcx.def_path_str(body.source.def_id()));
@@ -143,16 +141,14 @@ fn dump_matched_mir_node<'tcx, F>(
if tcx.sess.opts.unstable_opts.dump_mir_graphviz {
let _: io::Result<()> = try {
- let mut file =
- create_dump_file(tcx, "dot", pass_num, pass_name, disambiguator, body.source)?;
+ let mut file = create_dump_file(tcx, "dot", pass_num, pass_name, disambiguator, body)?;
write_mir_fn_graphviz(tcx, body, false, &mut file)?;
};
}
if let Some(spanview) = tcx.sess.opts.unstable_opts.dump_mir_spanview {
let _: io::Result<()> = try {
- let file_basename =
- dump_file_basename(tcx, pass_num, pass_name, disambiguator, body.source);
+ let file_basename = dump_file_basename(tcx, pass_num, pass_name, disambiguator, body);
let mut file = create_dump_file_with_basename(tcx, &file_basename, "html")?;
if body.source.def_id().is_local() {
write_mir_fn_spanview(tcx, body, spanview, &file_basename, &mut file)?;
@@ -165,11 +161,12 @@ fn dump_matched_mir_node<'tcx, F>(
/// where we should dump a MIR representation output files.
fn dump_file_basename<'tcx>(
tcx: TyCtxt<'tcx>,
- pass_num: Option<&dyn Display>,
+ pass_num: bool,
pass_name: &str,
disambiguator: &dyn Display,
- source: MirSource<'tcx>,
+ body: &Body<'tcx>,
) -> String {
+ let source = body.source;
let promotion_id = match source.promoted {
Some(id) => format!("-{:?}", id),
None => String::new(),
@@ -178,9 +175,10 @@ fn dump_file_basename<'tcx>(
let pass_num = if tcx.sess.opts.unstable_opts.dump_mir_exclude_pass_number {
String::new()
} else {
- match pass_num {
- None => ".-------".to_string(),
- Some(pass_num) => format!(".{}", pass_num),
+ if pass_num {
+ format!(".{:03}-{:03}", body.phase.phase_index(), body.pass_count)
+ } else {
+ ".-------".to_string()
}
};
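
With `pass_num` now a `bool`, the numeric part of the basename comes from the body itself: the zero-padded phase index and pass count. For example:

fn main() {
    // Phase index 3, pass count 7 yield the basename fragment ".003-007".
    assert_eq!(format!(".{:03}-{:03}", 3, 7), ".003-007");
}
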
@@ -250,14 +248,14 @@ fn create_dump_file_with_basename(
pub fn create_dump_file<'tcx>(
tcx: TyCtxt<'tcx>,
extension: &str,
- pass_num: Option<&dyn Display>,
+ pass_num: bool,
pass_name: &str,
disambiguator: &dyn Display,
- source: MirSource<'tcx>,
+ body: &Body<'tcx>,
) -> io::Result<io::BufWriter<fs::File>> {
create_dump_file_with_basename(
tcx,
- &dump_file_basename(tcx, pass_num, pass_name, disambiguator, source),
+ &dump_file_basename(tcx, pass_num, pass_name, disambiguator, body),
extension,
)
}
@@ -476,6 +474,7 @@ impl<'tcx> Visitor<'tcx> for ExtraComments<'tcx> {
// These variants shouldn't exist in the MIR.
ty::ConstKind::Placeholder(_)
| ty::ConstKind::Infer(_)
+ | ty::ConstKind::Expr(_)
| ty::ConstKind::Bound(..) => bug!("unexpected MIR constant: {:?}", literal),
},
ConstantKind::Unevaluated(uv, _) => {
@@ -685,7 +684,7 @@ pub fn write_allocations<'tcx>(
fn alloc_ids_from_alloc(
alloc: ConstAllocation<'_>,
) -> impl DoubleEndedIterator<Item = AllocId> + '_ {
- alloc.inner().provenance().values().map(|id| *id)
+ alloc.inner().provenance().ptrs().values().map(|id| *id)
}
fn alloc_ids_from_const_val(val: ConstValue<'_>) -> impl Iterator<Item = AllocId> + '_ {
@@ -788,7 +787,7 @@ pub fn write_allocations<'tcx>(
/// After the hex dump, an ascii dump follows, replacing all unprintable characters (control
/// characters or characters whose value is larger than 127) with a `.`
/// This also prints provenance adequately.
-pub fn display_allocation<'a, 'tcx, Prov, Extra>(
+pub fn display_allocation<'a, 'tcx, Prov: Provenance, Extra>(
tcx: TyCtxt<'tcx>,
alloc: &'a Allocation<Prov, Extra>,
) -> RenderAllocation<'a, 'tcx, Prov, Extra> {
@@ -796,7 +795,7 @@ pub fn display_allocation<'a, 'tcx, Prov, Extra>(
}
#[doc(hidden)]
-pub struct RenderAllocation<'a, 'tcx, Prov, Extra> {
+pub struct RenderAllocation<'a, 'tcx, Prov: Provenance, Extra> {
tcx: TyCtxt<'tcx>,
alloc: &'a Allocation<Prov, Extra>,
}
@@ -882,9 +881,9 @@ fn write_allocation_bytes<'tcx, Prov: Provenance, Extra>(
if i != line_start {
write!(w, " ")?;
}
- if let Some(&prov) = alloc.provenance().get(&i) {
+ if let Some(prov) = alloc.provenance().get_ptr(i) {
// Memory with provenance must be defined
- assert!(alloc.init_mask().is_range_initialized(i, i + ptr_size).is_ok());
+ assert!(alloc.init_mask().is_range_initialized(alloc_range(i, ptr_size)).is_ok());
let j = i.bytes_usize();
let offset = alloc
.inspect_with_uninit_and_ptr_outside_interpreter(j..j + ptr_size.bytes_usize());
@@ -904,9 +903,9 @@ fn write_allocation_bytes<'tcx, Prov: Provenance, Extra>(
let overflow = ptr_size - remainder;
let remainder_width = provenance_width(remainder.bytes_usize()) - 2;
let overflow_width = provenance_width(overflow.bytes_usize() - 1) + 1;
- ascii.push('╾');
- for _ in 0..remainder.bytes() - 1 {
- ascii.push('─');
+ ascii.push('╾'); // HEAVY LEFT AND LIGHT RIGHT
+ for _ in 1..remainder.bytes() {
+ ascii.push('─'); // LIGHT HORIZONTAL
}
if overflow_width > remainder_width && overflow_width >= target.len() {
// The case where the provenance fits into the part in the next line
@@ -926,7 +925,7 @@ fn write_allocation_bytes<'tcx, Prov: Provenance, Extra>(
for _ in 0..overflow.bytes() - 1 {
ascii.push('─');
}
- ascii.push('╼');
+ ascii.push('╼'); // LIGHT LEFT AND HEAVY RIGHT
i += ptr_size;
continue;
} else {
@@ -941,7 +940,23 @@ fn write_allocation_bytes<'tcx, Prov: Provenance, Extra>(
ascii.push('╼');
i += ptr_size;
}
- } else if alloc.init_mask().is_range_initialized(i, i + Size::from_bytes(1)).is_ok() {
+ } else if let Some(prov) = alloc.provenance().get(i, &tcx) {
+ // Memory with provenance must be defined
+ assert!(
+ alloc.init_mask().is_range_initialized(alloc_range(i, Size::from_bytes(1))).is_ok()
+ );
+ ascii.push('━'); // HEAVY HORIZONTAL
+ // We have two characters to display this, which is obviously not enough.
+ // Format is similar to "oversized" above.
+ let j = i.bytes_usize();
+ let c = alloc.inspect_with_uninit_and_ptr_outside_interpreter(j..j + 1)[0];
+ write!(w, "╾{:02x}{:#?} (1 ptr byte)╼", c, prov)?;
+ i += Size::from_bytes(1);
+ } else if alloc
+ .init_mask()
+ .is_range_initialized(alloc_range(i, Size::from_bytes(1)))
+ .is_ok()
+ {
let j = i.bytes_usize();
// Checked definedness (and thus range) and provenance. This access also doesn't
diff --git a/compiler/rustc_middle/src/mir/syntax.rs b/compiler/rustc_middle/src/mir/syntax.rs
index 85ef51f12..5ba053820 100644
--- a/compiler/rustc_middle/src/mir/syntax.rs
+++ b/compiler/rustc_middle/src/mir/syntax.rs
@@ -6,6 +6,7 @@
use super::{BasicBlock, Constant, Field, Local, SwitchTargets, UserTypeProjection};
use crate::mir::coverage::{CodeRegion, CoverageKind};
+use crate::traits::Reveal;
use crate::ty::adjustment::PointerCast;
use crate::ty::subst::SubstsRef;
use crate::ty::{self, List, Ty};
@@ -85,10 +86,30 @@ pub enum MirPhase {
///
/// Also note that the lint pass which reports eg `200_u8 + 200_u8` as an error is run as a part
/// of analysis to runtime MIR lowering. To ensure lints are reported reliably, this means that
- /// transformations which may supress such errors should not run on analysis MIR.
+ /// transformations which may suppress such errors should not run on analysis MIR.
Runtime(RuntimePhase),
}
+impl MirPhase {
+ pub fn name(&self) -> &'static str {
+ match *self {
+ MirPhase::Built => "built",
+ MirPhase::Analysis(AnalysisPhase::Initial) => "analysis",
+ MirPhase::Analysis(AnalysisPhase::PostCleanup) => "analysis-post-cleanup",
+ MirPhase::Runtime(RuntimePhase::Initial) => "runtime",
+ MirPhase::Runtime(RuntimePhase::PostCleanup) => "runtime-post-cleanup",
+ MirPhase::Runtime(RuntimePhase::Optimized) => "runtime-optimized",
+ }
+ }
+
+ pub fn reveal(&self) -> Reveal {
+ match *self {
+ MirPhase::Built | MirPhase::Analysis(_) => Reveal::UserFacing,
+ MirPhase::Runtime(_) => Reveal::All,
+ }
+ }
+}
+
/// See [`MirPhase::Analysis`].
#[derive(Copy, Clone, TyEncodable, TyDecodable, Debug, PartialEq, Eq, PartialOrd, Ord)]
#[derive(HashStable)]
@@ -387,7 +408,7 @@ impl std::fmt::Display for NonDivergingIntrinsic<'_> {
#[derive(Copy, Clone, TyEncodable, TyDecodable, Debug, PartialEq, Eq, Hash, HashStable)]
#[rustc_pass_by_value]
pub enum RetagKind {
- /// The initial retag when entering a function.
+ /// The initial retag of arguments when entering a function.
FnEntry,
/// Retag preparing for a two-phase borrow.
TwoPhase,
@@ -1185,7 +1206,8 @@ pub enum NullOp {
AlignOf,
}
-#[derive(Copy, Clone, Debug, PartialEq, Eq, TyEncodable, TyDecodable, Hash, HashStable)]
+#[derive(Copy, Clone, Debug, PartialEq, Eq, PartialOrd, Ord, Hash)]
+#[derive(HashStable, TyEncodable, TyDecodable, TypeFoldable, TypeVisitable)]
pub enum UnOp {
/// The `!` operator for logical inversion
Not,
@@ -1193,7 +1215,8 @@ pub enum UnOp {
Neg,
}
-#[derive(Copy, Clone, Debug, PartialEq, PartialOrd, Eq, TyEncodable, TyDecodable, Hash, HashStable)]
+#[derive(Copy, Clone, Debug, PartialEq, PartialOrd, Ord, Eq, Hash)]
+#[derive(TyEncodable, TyDecodable, HashStable, TypeFoldable, TypeVisitable)]
pub enum BinOp {
/// The `+` operator (addition)
Add,
diff --git a/compiler/rustc_middle/src/mir/type_foldable.rs b/compiler/rustc_middle/src/mir/type_foldable.rs
index 4c0974f86..0705b4cff 100644
--- a/compiler/rustc_middle/src/mir/type_foldable.rs
+++ b/compiler/rustc_middle/src/mir/type_foldable.rs
@@ -16,9 +16,7 @@ TrivialTypeTraversalAndLiftImpls! {
UserTypeAnnotationIndex,
BorrowKind,
CastKind,
- BinOp,
NullOp,
- UnOp,
hir::Movability,
BasicBlock,
SwitchTargets,
diff --git a/compiler/rustc_middle/src/mir/visit.rs b/compiler/rustc_middle/src/mir/visit.rs
index ddcf3711b..b21f50ae5 100644
--- a/compiler/rustc_middle/src/mir/visit.rs
+++ b/compiler/rustc_middle/src/mir/visit.rs
@@ -847,6 +847,17 @@ macro_rules! make_mir_visitor {
PlaceContext::NonUse(NonUseContext::VarDebugInfo),
location
),
+ VarDebugInfoContents::Composite { ty, fragments } => {
+ // FIXME(eddyb) use a better `TyContext` here.
+ self.visit_ty($(& $mutability)? *ty, TyContext::Location(location));
+ for VarDebugInfoFragment { projection: _, contents } in fragments {
+ self.visit_place(
+ contents,
+ PlaceContext::NonUse(NonUseContext::VarDebugInfo),
+ location,
+ );
+ }
+ }
}
}
@@ -1320,6 +1331,15 @@ impl PlaceContext {
)
}
+ /// Returns `true` if this place context represents an address-of.
+ pub fn is_address_of(&self) -> bool {
+ matches!(
+ self,
+ PlaceContext::NonMutatingUse(NonMutatingUseContext::AddressOf)
+ | PlaceContext::MutatingUse(MutatingUseContext::AddressOf)
+ )
+ }
+
/// Returns `true` if this place context represents a storage live or storage dead marker.
#[inline]
pub fn is_storage_marker(&self) -> bool {
diff --git a/compiler/rustc_query_impl/src/keys.rs b/compiler/rustc_middle/src/query/keys.rs
index 8be2e2be8..880632561 100644
--- a/compiler/rustc_query_impl/src/keys.rs
+++ b/compiler/rustc_middle/src/query/keys.rs
@@ -1,19 +1,22 @@
//! Defines the set of legal keys that can be used in queries.
+use crate::infer::canonical::Canonical;
+use crate::mir;
+use crate::traits;
+use crate::ty::fast_reject::SimplifiedType;
+use crate::ty::subst::{GenericArg, SubstsRef};
+use crate::ty::{self, layout::TyAndLayout, Ty, TyCtxt};
use rustc_hir::def_id::{CrateNum, DefId, LocalDefId, LOCAL_CRATE};
use rustc_hir::hir_id::{HirId, OwnerId};
-use rustc_middle::infer::canonical::Canonical;
-use rustc_middle::mir;
-use rustc_middle::traits;
-use rustc_middle::ty::fast_reject::SimplifiedType;
-use rustc_middle::ty::subst::{GenericArg, SubstsRef};
-use rustc_middle::ty::{self, layout::TyAndLayout, Ty, TyCtxt};
+use rustc_query_system::query::{DefaultCacheSelector, VecCacheSelector};
use rustc_span::symbol::{Ident, Symbol};
use rustc_span::{Span, DUMMY_SP};
/// The `Key` trait controls what types can legally be used as the key
/// for a query.
-pub trait Key {
+pub trait Key: Sized {
+ type CacheSelector = DefaultCacheSelector<Self>;
+
/// Given an instance of this key, what crate is it referring to?
/// This is used to find the provider.
fn query_crate_is_local(&self) -> bool;
@@ -100,6 +103,8 @@ impl<'tcx> Key for mir::interpret::LitToConstInput<'tcx> {
}
impl Key for CrateNum {
+ type CacheSelector = VecCacheSelector<Self>;
+
#[inline(always)]
fn query_crate_is_local(&self) -> bool {
*self == LOCAL_CRATE
@@ -110,6 +115,8 @@ impl Key for CrateNum {
}
impl Key for OwnerId {
+ type CacheSelector = VecCacheSelector<Self>;
+
#[inline(always)]
fn query_crate_is_local(&self) -> bool {
true
@@ -123,6 +130,8 @@ impl Key for OwnerId {
}
impl Key for LocalDefId {
+ type CacheSelector = VecCacheSelector<Self>;
+
#[inline(always)]
fn query_crate_is_local(&self) -> bool {
true
diff --git a/compiler/rustc_middle/src/query/mod.rs b/compiler/rustc_middle/src/query/mod.rs
index 3d720f09b..34415e2a1 100644
--- a/compiler/rustc_middle/src/query/mod.rs
+++ b/compiler/rustc_middle/src/query/mod.rs
@@ -7,6 +7,9 @@
use crate::ty::{self, print::describe_as_module, TyCtxt};
use rustc_span::def_id::LOCAL_CRATE;
+mod keys;
+pub use keys::Key;
+
// Each of these queries corresponds to a function pointer field in the
// `Providers` struct for requesting a value of that type, and a method
// on `tcx: TyCtxt` (and `tcx.at(span)`) for doing that request in a way
@@ -30,7 +33,7 @@ rustc_queries! {
}
query resolver_for_lowering(_: ()) -> &'tcx Steal<ty::ResolverAstLowering> {
- eval_always
+ feedable
no_hash
desc { "getting the resolver for lowering" }
}
@@ -172,6 +175,12 @@ rustc_queries! {
separate_provide_extern
}
+ query is_type_alias_impl_trait(key: DefId) -> bool
+ {
+ desc { "determine whether the opaque is a type-alias impl trait" }
+ separate_provide_extern
+ }
+
query analysis(key: ()) -> Result<(), ErrorGuaranteed> {
eval_always
desc { "running analysis passes on this crate" }
@@ -271,6 +280,10 @@ rustc_queries! {
desc { |tcx| "elaborating item bounds for `{}`", tcx.def_path_str(key) }
}
+ /// Look up all native libraries this crate depends on.
+ /// These are assembled from the following places:
+ /// - `extern` blocks (depending on their `link` attributes)
+ /// - the `libs` (`-l`) option
query native_libraries(_: CrateNum) -> Vec<NativeLib> {
arena_cache
desc { "looking up the native libraries of a linked crate" }
@@ -393,7 +406,7 @@ rustc_queries! {
/// Try to build an abstract representation of the given constant.
query thir_abstract_const(
key: DefId
- ) -> Result<Option<&'tcx [ty::abstract_const::Node<'tcx>]>, ErrorGuaranteed> {
+ ) -> Result<Option<ty::Const<'tcx>>, ErrorGuaranteed> {
desc {
|tcx| "building an abstract representation for `{}`", tcx.def_path_str(key),
}
@@ -402,7 +415,7 @@ rustc_queries! {
/// Try to build an abstract representation of the given constant.
query thir_abstract_const_of_const_arg(
key: (LocalDefId, DefId)
- ) -> Result<Option<&'tcx [ty::abstract_const::Node<'tcx>]>, ErrorGuaranteed> {
+ ) -> Result<Option<ty::Const<'tcx>>, ErrorGuaranteed> {
desc {
|tcx|
"building an abstract representation for the const argument `{}`",
@@ -410,15 +423,6 @@ rustc_queries! {
}
}
- query try_unify_abstract_consts(key:
- ty::ParamEnvAnd<'tcx, (ty::UnevaluatedConst<'tcx>, ty::UnevaluatedConst<'tcx>
- )>) -> bool {
- desc {
- |tcx| "trying to unify the generic constants `{}` and `{}`",
- tcx.def_path_str(key.value.0.def.did), tcx.def_path_str(key.value.1.def.did)
- }
- }
-
query mir_drops_elaborated_and_const_checked(
key: ty::WithOptConstParam<LocalDefId>
) -> &'tcx Steal<mir::Body<'tcx>> {
@@ -564,7 +568,7 @@ rustc_queries! {
/// Returns the inferred outlives predicates (e.g., for `struct
/// Foo<'a, T> { x: &'a T }`, this would return `T: 'a`).
- query inferred_outlives_of(key: DefId) -> &'tcx [(ty::Predicate<'tcx>, Span)] {
+ query inferred_outlives_of(key: DefId) -> &'tcx [(ty::Clause<'tcx>, Span)] {
desc { |tcx| "computing inferred outlives predicates of `{}`", tcx.def_path_str(key) }
cache_on_disk_if { key.is_local() }
separate_provide_extern
@@ -1111,6 +1115,7 @@ rustc_queries! {
desc { |tcx| "looking up span for `{}`", tcx.def_path_str(def_id) }
cache_on_disk_if { def_id.is_local() }
separate_provide_extern
+ feedable
}
/// Gets the span for the identifier of the definition.
@@ -1391,6 +1396,13 @@ rustc_queries! {
desc { "checking if the crate has_global_allocator" }
separate_provide_extern
}
+ query has_alloc_error_handler(_: CrateNum) -> bool {
+ // This query depends on untracked global state in CStore
+ eval_always
+ fatal_cycle
+ desc { "checking if the crate has_alloc_error_handler" }
+ separate_provide_extern
+ }
query has_panic_handler(_: CrateNum) -> bool {
fatal_cycle
desc { "checking if the crate has_panic_handler" }
@@ -1532,6 +1544,7 @@ rustc_queries! {
desc { "available upstream drop-glue for `{:?}`", substs }
}
+ /// Returns a list of all `extern` blocks of a crate.
query foreign_modules(_: CrateNum) -> FxHashMap<DefId, ForeignModule> {
arena_cache
desc { "looking up the foreign modules of a linked crate" }
@@ -1543,9 +1556,12 @@ rustc_queries! {
query entry_fn(_: ()) -> Option<(DefId, EntryFnType)> {
desc { "looking up the entry function of a crate" }
}
+
+ /// Finds the `rustc_proc_macro_decls` item of a crate.
query proc_macro_decls_static(_: ()) -> Option<LocalDefId> {
- desc { "looking up the derive registrar for a crate" }
+ desc { "looking up the proc macro declarations for a crate" }
}
+
// The macro which defines `rustc_metadata::provide_extern` depends on this query's name.
// Changing the name should cause a compiler error, but in case that changes, be aware.
query crate_hash(_: CrateNum) -> Svh {
@@ -1553,17 +1569,24 @@ rustc_queries! {
desc { "looking up the hash a crate" }
separate_provide_extern
}
+
+ /// Gets the hash for the host proc macro. Used to support -Z dual-proc-macro.
query crate_host_hash(_: CrateNum) -> Option<Svh> {
eval_always
desc { "looking up the hash of a host version of a crate" }
separate_provide_extern
}
+
+ /// Gets the extra data to put in each output filename for a crate.
+ /// For example, compiling the `foo` crate with `extra-filename=-a` creates a `libfoo-a.rlib` file.
query extra_filename(_: CrateNum) -> String {
arena_cache
eval_always
desc { "looking up the extra filename for a crate" }
separate_provide_extern
}
+
+ /// Gets the paths where the crate came from in the file system.
query crate_extern_paths(_: CrateNum) -> Vec<PathBuf> {
arena_cache
eval_always
@@ -1587,23 +1610,15 @@ rustc_queries! {
separate_provide_extern
}
+ /// Get the corresponding native library from the `native_libraries` query
query native_library(def_id: DefId) -> Option<&'tcx NativeLib> {
desc { |tcx| "getting the native library for `{}`", tcx.def_path_str(def_id) }
}
- /// Does lifetime resolution, but does not descend into trait items. This
- /// should only be used for resolving lifetimes of on trait definitions,
- /// and is used to avoid cycles. Importantly, `resolve_lifetimes` still visits
- /// the same lifetimes and is responsible for diagnostics.
- /// See `rustc_resolve::late::lifetimes for details.
- query resolve_lifetimes_trait_definition(_: LocalDefId) -> ResolveLifetimes {
- arena_cache
- desc { "resolving lifetimes for a trait definition" }
- }
/// Does lifetime resolution on items. Importantly, we can't resolve
/// lifetimes directly on things like trait methods, because of trait params.
/// See `rustc_resolve::late::lifetimes for details.
- query resolve_lifetimes(_: LocalDefId) -> ResolveLifetimes {
+ query resolve_lifetimes(_: hir::OwnerId) -> ResolveLifetimes {
arena_cache
desc { "resolving lifetimes" }
}
@@ -1634,6 +1649,8 @@ rustc_queries! {
/// a generic type parameter will panic if you call this method on it:
///
/// ```
+ /// use std::fmt::Debug;
+ ///
/// pub trait Foo<T: Debug> {}
/// ```
///
@@ -1705,7 +1722,7 @@ rustc_queries! {
}
/// Returns the lang items defined in another crate by loading it from metadata.
- query defined_lang_items(_: CrateNum) -> &'tcx [(DefId, usize)] {
+ query defined_lang_items(_: CrateNum) -> &'tcx [(DefId, LangItem)] {
desc { "calculating the lang items defined in a crate" }
separate_provide_extern
}
@@ -1761,6 +1778,10 @@ rustc_queries! {
eval_always
desc { "getting the allocator kind for the current crate" }
}
+ query alloc_error_handler_kind(_: ()) -> Option<AllocatorKind> {
+ eval_always
+ desc { "alloc error handler kind for the current crate" }
+ }
query upvars_mentioned(def_id: DefId) -> Option<&'tcx FxIndexMap<hir::HirId, hir::Upvar>> {
desc { |tcx| "collecting upvars mentioned in `{}`", tcx.def_path_str(def_id) }
@@ -2054,19 +2075,8 @@ rustc_queries! {
remap_env_constness
}
- query normalize_opaque_types(key: &'tcx ty::List<ty::Predicate<'tcx>>) -> &'tcx ty::List<ty::Predicate<'tcx>> {
- desc { "normalizing opaque types in `{:?}`", key }
- }
-
- /// Checks whether a type is definitely uninhabited. This is
- /// conservative: for some types that are uninhabited we return `false`,
- /// but we only return `true` for types that are definitely uninhabited.
- /// `ty.conservative_is_privately_uninhabited` implies that any value of type `ty`
- /// will be `Abi::Uninhabited`. (Note that uninhabited types may have nonzero
- /// size, to account for partial initialisation. See #49298 for details.)
- query conservative_is_privately_uninhabited(key: ty::ParamEnvAnd<'tcx, Ty<'tcx>>) -> bool {
- desc { "conservatively checking if `{}` is privately uninhabited", key.value }
- remap_env_constness
+ query reveal_opaque_types_in_bounds(key: &'tcx ty::List<ty::Predicate<'tcx>>) -> &'tcx ty::List<ty::Predicate<'tcx>> {
+ desc { "revealing opaque types in `{:?}`", key }
}
query limits(key: ()) -> Limits {
diff --git a/compiler/rustc_middle/src/thir.rs b/compiler/rustc_middle/src/thir.rs
index ea7a507d7..8bef9dfe0 100644
--- a/compiler/rustc_middle/src/thir.rs
+++ b/compiler/rustc_middle/src/thir.rs
@@ -10,7 +10,6 @@
use rustc_ast::{InlineAsmOptions, InlineAsmTemplatePiece};
use rustc_hir as hir;
-use rustc_hir::def::CtorKind;
use rustc_hir::def_id::DefId;
use rustc_hir::RangeEnd;
use rustc_index::newtype_index;
@@ -751,7 +750,7 @@ impl<'tcx> fmt::Display for Pat<'tcx> {
// Only for Adt we can have `S {...}`,
// which we handle separately here.
- if variant.ctor_kind == CtorKind::Fictive {
+ if variant.ctor.is_none() {
write!(f, " {{ ")?;
let mut printed = 0;
diff --git a/compiler/rustc_middle/src/traits/mod.rs b/compiler/rustc_middle/src/traits/mod.rs
index e73d44bbb..143435cb2 100644
--- a/compiler/rustc_middle/src/traits/mod.rs
+++ b/compiler/rustc_middle/src/traits/mod.rs
@@ -203,13 +203,20 @@ pub struct UnifyReceiverContext<'tcx> {
pub substs: SubstsRef<'tcx>,
}
-#[derive(Clone, Debug, PartialEq, Eq, Hash, Lift, Default)]
+#[derive(Clone, PartialEq, Eq, Hash, Lift, Default)]
pub struct InternedObligationCauseCode<'tcx> {
/// `None` for `ObligationCauseCode::MiscObligation` (a common case, occurs ~60% of
/// the time). `Some` otherwise.
code: Option<Lrc<ObligationCauseCode<'tcx>>>,
}
+impl<'tcx> std::fmt::Debug for InternedObligationCauseCode<'tcx> {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ let cause: &ObligationCauseCode<'_> = self;
+ cause.fmt(f)
+ }
+}
+
impl<'tcx> ObligationCauseCode<'tcx> {
#[inline(always)]
fn into(self) -> InternedObligationCauseCode<'tcx> {
@@ -431,6 +438,8 @@ pub enum ObligationCauseCode<'tcx> {
},
AscribeUserTypeProvePredicate(Span),
+
+ RustCall,
}
/// The 'location' at which we try to perform HIR-based wf checking.
@@ -462,7 +471,7 @@ pub struct ImplDerivedObligationCause<'tcx> {
}
impl<'tcx> ObligationCauseCode<'tcx> {
- // Return the base obligation, ignoring derived obligations.
+ /// Returns the base obligation, ignoring derived obligations.
pub fn peel_derives(&self) -> &Self {
let mut base_cause = self;
while let Some((parent_code, _)) = base_cause.parent() {
@@ -567,9 +576,6 @@ pub enum SelectionError<'tcx> {
/// Signaling that an error has already been emitted, to avoid
/// multiple errors being shown.
ErrorReporting,
- /// Multiple applicable `impl`s where found. The `DefId`s correspond to
- /// all the `impl`s' Items.
- Ambiguous(Vec<DefId>),
}
/// When performing resolution, it is typically the case that there
@@ -645,15 +651,12 @@ pub enum ImplSource<'tcx, N> {
/// Same as above, but for a function pointer type with the given signature.
FnPointer(ImplSourceFnPointerData<'tcx, N>),
- /// ImplSource for a builtin `DeterminantKind` trait implementation.
- DiscriminantKind(ImplSourceDiscriminantKindData),
-
- /// ImplSource for a builtin `Pointee` trait implementation.
- Pointee(ImplSourcePointeeData),
-
/// ImplSource automatically generated for a generator.
Generator(ImplSourceGeneratorData<'tcx, N>),
+ /// ImplSource automatically generated for a generator backing an async future.
+ Future(ImplSourceFutureData<'tcx, N>),
+
/// ImplSource for a trait alias.
TraitAlias(ImplSourceTraitAliasData<'tcx, N>),
@@ -670,10 +673,9 @@ impl<'tcx, N> ImplSource<'tcx, N> {
ImplSource::AutoImpl(d) => d.nested,
ImplSource::Closure(c) => c.nested,
ImplSource::Generator(c) => c.nested,
+ ImplSource::Future(c) => c.nested,
ImplSource::Object(d) => d.nested,
ImplSource::FnPointer(d) => d.nested,
- ImplSource::DiscriminantKind(ImplSourceDiscriminantKindData)
- | ImplSource::Pointee(ImplSourcePointeeData) => vec![],
ImplSource::TraitAlias(d) => d.nested,
ImplSource::TraitUpcasting(d) => d.nested,
ImplSource::ConstDestruct(i) => i.nested,
@@ -688,10 +690,9 @@ impl<'tcx, N> ImplSource<'tcx, N> {
ImplSource::AutoImpl(d) => &d.nested,
ImplSource::Closure(c) => &c.nested,
ImplSource::Generator(c) => &c.nested,
+ ImplSource::Future(c) => &c.nested,
ImplSource::Object(d) => &d.nested,
ImplSource::FnPointer(d) => &d.nested,
- ImplSource::DiscriminantKind(ImplSourceDiscriminantKindData)
- | ImplSource::Pointee(ImplSourcePointeeData) => &[],
ImplSource::TraitAlias(d) => &d.nested,
ImplSource::TraitUpcasting(d) => &d.nested,
ImplSource::ConstDestruct(i) => &i.nested,
@@ -731,16 +732,15 @@ impl<'tcx, N> ImplSource<'tcx, N> {
substs: c.substs,
nested: c.nested.into_iter().map(f).collect(),
}),
+ ImplSource::Future(c) => ImplSource::Future(ImplSourceFutureData {
+ generator_def_id: c.generator_def_id,
+ substs: c.substs,
+ nested: c.nested.into_iter().map(f).collect(),
+ }),
ImplSource::FnPointer(p) => ImplSource::FnPointer(ImplSourceFnPointerData {
fn_ty: p.fn_ty,
nested: p.nested.into_iter().map(f).collect(),
}),
- ImplSource::DiscriminantKind(ImplSourceDiscriminantKindData) => {
- ImplSource::DiscriminantKind(ImplSourceDiscriminantKindData)
- }
- ImplSource::Pointee(ImplSourcePointeeData) => {
- ImplSource::Pointee(ImplSourcePointeeData)
- }
ImplSource::TraitAlias(d) => ImplSource::TraitAlias(ImplSourceTraitAliasData {
alias_def_id: d.alias_def_id,
substs: d.substs,
@@ -792,6 +792,16 @@ pub struct ImplSourceGeneratorData<'tcx, N> {
#[derive(Clone, PartialEq, Eq, TyEncodable, TyDecodable, HashStable, Lift)]
#[derive(TypeFoldable, TypeVisitable)]
+pub struct ImplSourceFutureData<'tcx, N> {
+ pub generator_def_id: DefId,
+ pub substs: SubstsRef<'tcx>,
+ /// Nested obligations. This can be non-empty if the generator
+ /// signature contains associated types.
+ pub nested: Vec<N>,
+}
+
+#[derive(Clone, PartialEq, Eq, TyEncodable, TyDecodable, HashStable, Lift)]
+#[derive(TypeFoldable, TypeVisitable)]
pub struct ImplSourceClosureData<'tcx, N> {
pub closure_def_id: DefId,
pub substs: SubstsRef<'tcx>,
@@ -850,13 +860,6 @@ pub struct ImplSourceFnPointerData<'tcx, N> {
pub nested: Vec<N>,
}
-// FIXME(@lcnr): This should be refactored and merged with other builtin vtables.
-#[derive(Clone, Debug, PartialEq, Eq, TyEncodable, TyDecodable, HashStable)]
-pub struct ImplSourceDiscriminantKindData;
-
-#[derive(Clone, Debug, PartialEq, Eq, TyEncodable, TyDecodable, HashStable)]
-pub struct ImplSourcePointeeData;
-
#[derive(Clone, PartialEq, Eq, TyEncodable, TyDecodable, HashStable, Lift)]
#[derive(TypeFoldable, TypeVisitable)]
pub struct ImplSourceConstDestructData<N> {
@@ -918,10 +921,13 @@ impl ObjectSafetyViolation {
}
ObjectSafetyViolation::Method(
name,
- MethodViolationCode::ReferencesImplTraitInTrait,
+ MethodViolationCode::ReferencesImplTraitInTrait(_),
_,
) => format!("method `{}` references an `impl Trait` type in its return type", name)
.into(),
+ ObjectSafetyViolation::Method(name, MethodViolationCode::AsyncFn, _) => {
+ format!("method `{}` is `async`", name).into()
+ }
ObjectSafetyViolation::Method(
name,
MethodViolationCode::WhereClauseReferencesSelf,
@@ -1029,7 +1035,10 @@ pub enum MethodViolationCode {
ReferencesSelfOutput,
/// e.g., `fn foo(&self) -> impl Sized`
- ReferencesImplTraitInTrait,
+ ReferencesImplTraitInTrait(Span),
+
+ /// e.g., `async fn foo(&self)`
+ AsyncFn,
/// e.g., `fn foo(&self) where Self: Clone`
WhereClauseReferencesSelf,
diff --git a/compiler/rustc_middle/src/traits/select.rs b/compiler/rustc_middle/src/traits/select.rs
index 85ead3171..ec69864c9 100644
--- a/compiler/rustc_middle/src/traits/select.rs
+++ b/compiler/rustc_middle/src/traits/select.rs
@@ -105,6 +105,12 @@ pub type EvaluationCache<'tcx> = Cache<
/// parameter environment.
#[derive(PartialEq, Eq, Debug, Clone, TypeFoldable, TypeVisitable)]
pub enum SelectionCandidate<'tcx> {
+ /// A builtin implementation for some specific traits, used in cases
+ /// where we cannot rely on ordinary library implementations.
+ ///
+ /// The most notable examples are `Sized`, `Copy` and `Clone`. This is also
+ /// used for the `DiscriminantKind` and `Pointee` traits, both of which have
+ /// an associated type.
BuiltinCandidate {
/// `false` if there are no *further* obligations.
has_nested: bool,
@@ -131,18 +137,16 @@ pub enum SelectionCandidate<'tcx> {
/// generated for a generator.
GeneratorCandidate,
+ /// Implementation of a `Future` trait by one of the generator types
+ /// generated for an async construct.
+ FutureCandidate,
+
/// Implementation of a `Fn`-family trait by one of the anonymous
/// types generated for a fn pointer type (e.g., `fn(int) -> int`)
FnPointerCandidate {
is_const: bool,
},
- /// Builtin implementation of `DiscriminantKind`.
- DiscriminantKindCandidate,
-
- /// Builtin implementation of `Pointee`.
- PointeeCandidate,
-
TraitAliasCandidate,
/// Matching `dyn Trait` with a supertrait of `Trait`. The index is the
diff --git a/compiler/rustc_middle/src/traits/specialization_graph.rs b/compiler/rustc_middle/src/traits/specialization_graph.rs
index 0a2819fee..cccedc9ec 100644
--- a/compiler/rustc_middle/src/traits/specialization_graph.rs
+++ b/compiler/rustc_middle/src/traits/specialization_graph.rs
@@ -1,3 +1,4 @@
+use crate::error::StrictCoherenceNeedsNegativeCoherence;
use crate::ty::fast_reject::SimplifiedType;
use crate::ty::visit::TypeVisitable;
use crate::ty::{self, TyCtxt};
@@ -65,9 +66,21 @@ impl OverlapMode {
if with_negative_coherence {
if strict_coherence { OverlapMode::Strict } else { OverlapMode::WithNegative }
- } else if strict_coherence {
- bug!("To use strict_coherence you need to set with_negative_coherence feature flag");
} else {
+ if strict_coherence {
+ let attr_span = trait_id
+ .as_local()
+ .into_iter()
+ .flat_map(|local_def_id| {
+ tcx.hir().attrs(tcx.hir().local_def_id_to_hir_id(local_def_id))
+ })
+ .find(|attr| attr.has_name(sym::rustc_strict_coherence))
+ .map(|attr| attr.span);
+ tcx.sess.emit_err(StrictCoherenceNeedsNegativeCoherence {
+ span: tcx.def_span(trait_id),
+ attr_span,
+ });
+ }
OverlapMode::Stable
}
}
@@ -249,7 +262,7 @@ pub fn ancestors<'tcx>(
if let Some(reported) = specialization_graph.has_errored {
Err(reported)
- } else if let Some(reported) = tcx.type_of(start_from_impl).error_reported() {
+ } else if let Err(reported) = tcx.type_of(start_from_impl).error_reported() {
Err(reported)
} else {
Ok(Ancestors {
diff --git a/compiler/rustc_middle/src/traits/structural_impls.rs b/compiler/rustc_middle/src/traits/structural_impls.rs
index 7fbd57ac7..6acb7745d 100644
--- a/compiler/rustc_middle/src/traits/structural_impls.rs
+++ b/compiler/rustc_middle/src/traits/structural_impls.rs
@@ -15,11 +15,9 @@ impl<'tcx, N: fmt::Debug> fmt::Debug for traits::ImplSource<'tcx, N> {
super::ImplSource::Generator(ref d) => write!(f, "{:?}", d),
- super::ImplSource::FnPointer(ref d) => write!(f, "({:?})", d),
-
- super::ImplSource::DiscriminantKind(ref d) => write!(f, "{:?}", d),
+ super::ImplSource::Future(ref d) => write!(f, "{:?}", d),
- super::ImplSource::Pointee(ref d) => write!(f, "{:?}", d),
+ super::ImplSource::FnPointer(ref d) => write!(f, "({:?})", d),
super::ImplSource::Object(ref d) => write!(f, "{:?}", d),
@@ -58,6 +56,16 @@ impl<'tcx, N: fmt::Debug> fmt::Debug for traits::ImplSourceGeneratorData<'tcx, N
}
}
+impl<'tcx, N: fmt::Debug> fmt::Debug for traits::ImplSourceFutureData<'tcx, N> {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ write!(
+ f,
+ "ImplSourceFutureData(generator_def_id={:?}, substs={:?}, nested={:?})",
+ self.generator_def_id, self.substs, self.nested
+ )
+ }
+}
+
impl<'tcx, N: fmt::Debug> fmt::Debug for traits::ImplSourceClosureData<'tcx, N> {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
write!(
@@ -125,11 +133,3 @@ impl<N: fmt::Debug> fmt::Debug for traits::ImplSourceConstDestructData<N> {
write!(f, "ImplSourceConstDestructData(nested={:?})", self.nested)
}
}
-
-///////////////////////////////////////////////////////////////////////////
-// Lift implementations
-
-TrivialTypeTraversalAndLiftImpls! {
- super::ImplSourceDiscriminantKindData,
- super::ImplSourcePointeeData,
-}
diff --git a/compiler/rustc_middle/src/ty/_match.rs b/compiler/rustc_middle/src/ty/_match.rs
index e6aab30a1..cd147d7e5 100644
--- a/compiler/rustc_middle/src/ty/_match.rs
+++ b/compiler/rustc_middle/src/ty/_match.rs
@@ -36,6 +36,11 @@ impl<'tcx> TypeRelation<'tcx> for Match<'tcx> {
fn tcx(&self) -> TyCtxt<'tcx> {
self.tcx
}
+
+ fn intercrate(&self) -> bool {
+ false
+ }
+
fn param_env(&self) -> ty::ParamEnv<'tcx> {
self.param_env
}
@@ -43,6 +48,10 @@ impl<'tcx> TypeRelation<'tcx> for Match<'tcx> {
true
} // irrelevant
+ fn mark_ambiguous(&mut self) {
+ bug!()
+ }
+
fn relate_with_variance<T: Relate<'tcx>>(
&mut self,
_: ty::Variance,
diff --git a/compiler/rustc_middle/src/ty/abstract_const.rs b/compiler/rustc_middle/src/ty/abstract_const.rs
index 1aa4df778..5de758ad9 100644
--- a/compiler/rustc_middle/src/ty/abstract_const.rs
+++ b/compiler/rustc_middle/src/ty/abstract_const.rs
@@ -1,98 +1,13 @@
//! A subset of a mir body used for const evaluatability checking.
-use crate::mir;
-use crate::ty::visit::TypeVisitable;
-use crate::ty::{self, DelaySpanBugEmitted, EarlyBinder, SubstsRef, Ty, TyCtxt};
+use crate::ty::{
+ self, Const, EarlyBinder, Ty, TyCtxt, TypeFoldable, TypeFolder, TypeSuperFoldable,
+ TypeVisitable,
+};
use rustc_errors::ErrorGuaranteed;
use rustc_hir::def_id::DefId;
-use std::cmp;
-use std::ops::ControlFlow;
-rustc_index::newtype_index! {
- /// An index into an `AbstractConst`.
- pub struct NodeId {
- derive [HashStable]
- DEBUG_FORMAT = "n{}",
- }
-}
-
-/// A tree representing an anonymous constant.
-///
-/// This is only able to represent a subset of `MIR`,
-/// and should not leak any information about desugarings.
-#[derive(Debug, Clone, Copy)]
-pub struct AbstractConst<'tcx> {
- // FIXME: Consider adding something like `IndexSlice`
- // and use this here.
- inner: &'tcx [Node<'tcx>],
- substs: SubstsRef<'tcx>,
-}
-
-impl<'tcx> AbstractConst<'tcx> {
- pub fn new(
- tcx: TyCtxt<'tcx>,
- uv: ty::UnevaluatedConst<'tcx>,
- ) -> Result<Option<AbstractConst<'tcx>>, ErrorGuaranteed> {
- let inner = tcx.thir_abstract_const_opt_const_arg(uv.def)?;
- debug!("AbstractConst::new({:?}) = {:?}", uv, inner);
- Ok(inner.map(|inner| AbstractConst { inner, substs: tcx.erase_regions(uv.substs) }))
- }
-
- pub fn from_const(
- tcx: TyCtxt<'tcx>,
- ct: ty::Const<'tcx>,
- ) -> Result<Option<AbstractConst<'tcx>>, ErrorGuaranteed> {
- match ct.kind() {
- ty::ConstKind::Unevaluated(uv) => AbstractConst::new(tcx, uv),
- ty::ConstKind::Error(DelaySpanBugEmitted { reported, .. }) => Err(reported),
- _ => Ok(None),
- }
- }
-
- #[inline]
- pub fn subtree(self, node: NodeId) -> AbstractConst<'tcx> {
- AbstractConst { inner: &self.inner[..=node.index()], substs: self.substs }
- }
-
- #[inline]
- pub fn root(self, tcx: TyCtxt<'tcx>) -> Node<'tcx> {
- let node = self.inner.last().copied().unwrap();
- match node {
- Node::Leaf(leaf) => Node::Leaf(EarlyBinder(leaf).subst(tcx, self.substs)),
- Node::Cast(kind, operand, ty) => {
- Node::Cast(kind, operand, EarlyBinder(ty).subst(tcx, self.substs))
- }
- // Don't perform substitution on the following as they can't directly contain generic params
- Node::Binop(_, _, _) | Node::UnaryOp(_, _) | Node::FunctionCall(_, _) => node,
- }
- }
-
- pub fn unify_failure_kind(self, tcx: TyCtxt<'tcx>) -> FailureKind {
- let mut failure_kind = FailureKind::Concrete;
- walk_abstract_const::<!, _>(tcx, self, |node| {
- match node.root(tcx) {
- Node::Leaf(leaf) => {
- if leaf.has_non_region_infer() {
- failure_kind = FailureKind::MentionsInfer;
- } else if leaf.has_non_region_param() {
- failure_kind = cmp::min(failure_kind, FailureKind::MentionsParam);
- }
- }
- Node::Cast(_, _, ty) => {
- if ty.has_non_region_infer() {
- failure_kind = FailureKind::MentionsInfer;
- } else if ty.has_non_region_param() {
- failure_kind = cmp::min(failure_kind, FailureKind::MentionsParam);
- }
- }
- Node::Binop(_, _, _) | Node::UnaryOp(_, _) | Node::FunctionCall(_, _) => {}
- }
- ControlFlow::CONTINUE
- });
- failure_kind
- }
-}
-
-#[derive(Debug, Clone, Copy, PartialEq, Eq, HashStable, TyEncodable, TyDecodable)]
+#[derive(Hash, Debug, Clone, Copy, Ord, PartialOrd, PartialEq, Eq)]
+#[derive(TyDecodable, TyEncodable, HashStable, TypeVisitable, TypeFoldable)]
pub enum CastKind {
/// thir::ExprKind::As
As,
@@ -100,16 +15,6 @@ pub enum CastKind {
Use,
}
-/// A node of an `AbstractConst`.
-#[derive(Debug, Clone, Copy, PartialEq, Eq, HashStable, TyEncodable, TyDecodable)]
-pub enum Node<'tcx> {
- Leaf(ty::Const<'tcx>),
- Binop(mir::BinOp, NodeId, NodeId),
- UnaryOp(mir::UnOp, NodeId),
- FunctionCall(NodeId, &'tcx [NodeId]),
- Cast(CastKind, NodeId, Ty<'tcx>),
-}
-
#[derive(Debug, Copy, Clone, PartialEq, Eq, HashStable, TyEncodable, TyDecodable)]
pub enum NotConstEvaluatable {
Error(ErrorGuaranteed),
@@ -127,68 +32,53 @@ TrivialTypeTraversalAndLiftImpls! {
NotConstEvaluatable,
}
+pub type BoundAbstractConst<'tcx> = Result<Option<EarlyBinder<ty::Const<'tcx>>>, ErrorGuaranteed>;
+
impl<'tcx> TyCtxt<'tcx> {
- #[inline]
- pub fn thir_abstract_const_opt_const_arg(
+ /// Returns the abstract const for this item as an `EarlyBinder`, i.e. without substs applied.
+ pub fn bound_abstract_const(
self,
- def: ty::WithOptConstParam<DefId>,
- ) -> Result<Option<&'tcx [Node<'tcx>]>, ErrorGuaranteed> {
- if let Some((did, param_did)) = def.as_const_arg() {
+ uv: ty::WithOptConstParam<DefId>,
+ ) -> BoundAbstractConst<'tcx> {
+ let ac = if let Some((did, param_did)) = uv.as_const_arg() {
self.thir_abstract_const_of_const_arg((did, param_did))
} else {
- self.thir_abstract_const(def.did)
- }
+ self.thir_abstract_const(uv.did)
+ };
+ Ok(ac?.map(|ac| EarlyBinder(ac)))
}
-}
-#[instrument(skip(tcx, f), level = "debug")]
-pub fn walk_abstract_const<'tcx, R, F>(
- tcx: TyCtxt<'tcx>,
- ct: AbstractConst<'tcx>,
- mut f: F,
-) -> ControlFlow<R>
-where
- F: FnMut(AbstractConst<'tcx>) -> ControlFlow<R>,
-{
- #[instrument(skip(tcx, f), level = "debug")]
- fn recurse<'tcx, R>(
- tcx: TyCtxt<'tcx>,
- ct: AbstractConst<'tcx>,
- f: &mut dyn FnMut(AbstractConst<'tcx>) -> ControlFlow<R>,
- ) -> ControlFlow<R> {
- f(ct)?;
- let root = ct.root(tcx);
- debug!(?root);
- match root {
- Node::Leaf(_) => ControlFlow::CONTINUE,
- Node::Binop(_, l, r) => {
- recurse(tcx, ct.subtree(l), f)?;
- recurse(tcx, ct.subtree(r), f)
+ pub fn expand_abstract_consts<T: TypeFoldable<'tcx>>(self, ac: T) -> T {
+ struct Expander<'tcx> {
+ tcx: TyCtxt<'tcx>,
+ }
+
+ impl<'tcx> TypeFolder<'tcx> for Expander<'tcx> {
+ fn tcx(&self) -> TyCtxt<'tcx> {
+ self.tcx
}
- Node::UnaryOp(_, v) => recurse(tcx, ct.subtree(v), f),
- Node::FunctionCall(func, args) => {
- recurse(tcx, ct.subtree(func), f)?;
- args.iter().try_for_each(|&arg| recurse(tcx, ct.subtree(arg), f))
+ fn fold_ty(&mut self, ty: Ty<'tcx>) -> Ty<'tcx> {
+ if ty.has_type_flags(ty::TypeFlags::HAS_CT_PROJECTION) {
+ ty.super_fold_with(self)
+ } else {
+ ty
+ }
+ }
+ fn fold_const(&mut self, c: Const<'tcx>) -> Const<'tcx> {
+ let ct = match c.kind() {
+ ty::ConstKind::Unevaluated(uv) => match self.tcx.bound_abstract_const(uv.def) {
+ Err(e) => self.tcx.const_error_with_guaranteed(c.ty(), e),
+ Ok(Some(bac)) => {
+ let substs = self.tcx.erase_regions(uv.substs);
+ bac.subst(self.tcx, substs)
+ }
+ Ok(None) => c,
+ },
+ _ => c,
+ };
+ ct.super_fold_with(self)
}
- Node::Cast(_, operand, _) => recurse(tcx, ct.subtree(operand), f),
}
+ ac.fold_with(&mut Expander { tcx: self })
}
-
- recurse(tcx, ct, &mut f)
-}
-
-// We were unable to unify the abstract constant with
-// a constant found in the caller bounds, there are
-// now three possible cases here.
-#[derive(Debug, Copy, Clone, PartialEq, Eq, PartialOrd, Ord)]
-pub enum FailureKind {
- /// The abstract const still references an inference
- /// variable, in this case we return `TooGeneric`.
- MentionsInfer,
- /// The abstract const references a generic parameter,
- /// this means that we emit an error here.
- MentionsParam,
- /// The substs are concrete enough that we can simply
- /// try and evaluate the given constant.
- Concrete,
}
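
The new `expand_abstract_consts` above replaces the removed `AbstractConst`/`walk_abstract_const` machinery: instead of walking a separate node tree, callers fold a value and every `ConstKind::Unevaluated` const is swapped for its substituted abstract-const body. A minimal sketch of a caller, with the wrapper function itself being hypothetical:

use rustc_middle::ty::{self, TyCtxt};

// Hypothetical helper: normalize a const by inlining its abstract-const body.
fn expand<'tcx>(tcx: TyCtxt<'tcx>, ct: ty::Const<'tcx>) -> ty::Const<'tcx> {
    // Folds `ct`, replacing `ConstKind::Unevaluated` occurrences with the
    // `EarlyBinder` body from `bound_abstract_const`, instantiated with the
    // region-erased substs (as `Expander::fold_const` does above).
    tcx.expand_abstract_consts(ct)
}
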
diff --git a/compiler/rustc_middle/src/ty/adjustment.rs b/compiler/rustc_middle/src/ty/adjustment.rs
index 4682ac96b..7036c4a7b 100644
--- a/compiler/rustc_middle/src/ty/adjustment.rs
+++ b/compiler/rustc_middle/src/ty/adjustment.rs
@@ -1,7 +1,5 @@
-use crate::ty::subst::SubstsRef;
use crate::ty::{self, Ty, TyCtxt};
use rustc_hir as hir;
-use rustc_hir::def_id::DefId;
use rustc_hir::lang_items::LangItem;
use rustc_macros::HashStable;
use rustc_span::Span;
@@ -121,7 +119,8 @@ pub struct OverloadedDeref<'tcx> {
}
impl<'tcx> OverloadedDeref<'tcx> {
- pub fn method_call(&self, tcx: TyCtxt<'tcx>, source: Ty<'tcx>) -> (DefId, SubstsRef<'tcx>) {
+ /// Returns the ZST function item type for this method call.
+ pub fn method_call(&self, tcx: TyCtxt<'tcx>, source: Ty<'tcx>) -> Ty<'tcx> {
let trait_def_id = match self.mutbl {
hir::Mutability::Not => tcx.require_lang_item(LangItem::Deref, None),
hir::Mutability::Mut => tcx.require_lang_item(LangItem::DerefMut, None),
@@ -132,7 +131,7 @@ impl<'tcx> OverloadedDeref<'tcx> {
.find(|m| m.kind == ty::AssocKind::Fn)
.unwrap()
.def_id;
- (method_def_id, tcx.mk_substs_trait(source, &[]))
+ tcx.mk_fn_def(method_def_id, tcx.mk_substs_trait(source, []))
}
}
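
Because `method_call` now returns the ZST function item type rather than a `(DefId, SubstsRef)` pair, a caller that still needs those parts can destructure the returned `FnDef`. A hedged sketch of such a caller (the function and its names are illustrative, not part of this patch):

use rustc_hir::def_id::DefId;
use rustc_middle::ty::{self, Ty, TyCtxt};

// Hypothetical caller of the new `OverloadedDeref::method_call`.
fn deref_parts<'tcx>(
    tcx: TyCtxt<'tcx>,
    deref: ty::adjustment::OverloadedDeref<'tcx>,
    source: Ty<'tcx>,
) -> (DefId, ty::SubstsRef<'tcx>) {
    let method_ty = deref.method_call(tcx, source);
    // The returned type is always a zero-sized `FnDef`, so this cannot fail.
    let &ty::FnDef(def_id, substs) = method_ty.kind() else { unreachable!() };
    (def_id, substs)
}
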
@@ -160,6 +159,18 @@ pub enum AutoBorrowMutability {
Not,
}
+impl AutoBorrowMutability {
+ /// Creates an `AutoBorrowMutability` from a mutability and allowance of two phase borrows.
+ ///
+ /// Note that when `mutbl.is_not()`, `allow_two_phase_borrow` is ignored.
+ pub fn new(mutbl: hir::Mutability, allow_two_phase_borrow: AllowTwoPhase) -> Self {
+ match mutbl {
+ hir::Mutability::Not => Self::Not,
+ hir::Mutability::Mut => Self::Mut { allow_two_phase_borrow },
+ }
+ }
+}
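
The new constructor lets call sites collapse the usual two-arm match on mutability. A minimal sketch; the wrapper function and the choice of `AllowTwoPhase::Yes` are illustrative only:

use rustc_hir as hir;
use rustc_middle::ty::adjustment::{AllowTwoPhase, AutoBorrowMutability};

// Equivalent to matching on `mutbl` by hand; for `Mutability::Not` the
// two-phase-borrow flag is simply ignored, as documented above.
fn auto_borrow(mutbl: hir::Mutability) -> AutoBorrowMutability {
    AutoBorrowMutability::new(mutbl, AllowTwoPhase::Yes)
}
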
+
impl From<AutoBorrowMutability> for hir::Mutability {
fn from(m: AutoBorrowMutability) -> Self {
match m {
diff --git a/compiler/rustc_middle/src/ty/adt.rs b/compiler/rustc_middle/src/ty/adt.rs
index b0a2412ab..d3d667f68 100644
--- a/compiler/rustc_middle/src/ty/adt.rs
+++ b/compiler/rustc_middle/src/ty/adt.rs
@@ -14,7 +14,7 @@ use rustc_index::vec::{Idx, IndexVec};
use rustc_query_system::ich::StableHashingContext;
use rustc_session::DataTypeKind;
use rustc_span::symbol::sym;
-use rustc_target::abi::VariantIdx;
+use rustc_target::abi::{ReprOptions, VariantIdx};
use std::cell::RefCell;
use std::cmp::Ordering;
@@ -22,9 +22,7 @@ use std::hash::{Hash, Hasher};
use std::ops::Range;
use std::str;
-use super::{
- Destructor, FieldDef, GenericPredicates, ReprOptions, Ty, TyCtxt, VariantDef, VariantDiscr,
-};
+use super::{Destructor, FieldDef, GenericPredicates, Ty, TyCtxt, VariantDef, VariantDiscr};
bitflags! {
#[derive(HashStable, TyEncodable, TyDecodable)]
@@ -230,7 +228,7 @@ impl AdtDefData {
AdtKind::Struct => AdtFlags::IS_STRUCT,
};
- if kind == AdtKind::Struct && variants[VariantIdx::new(0)].ctor_def_id.is_some() {
+ if kind == AdtKind::Struct && variants[VariantIdx::new(0)].ctor.is_some() {
flags |= AdtFlags::HAS_CTOR;
}
@@ -386,11 +384,9 @@ impl<'tcx> AdtDef<'tcx> {
// Baz = 3,
// }
// ```
- if self
- .variants()
- .iter()
- .any(|v| matches!(v.discr, VariantDiscr::Explicit(_)) && v.ctor_kind != CtorKind::Const)
- {
+ if self.variants().iter().any(|v| {
+ matches!(v.discr, VariantDiscr::Explicit(_)) && v.ctor_kind() != Some(CtorKind::Const)
+ }) {
return false;
}
self.variants().iter().all(|v| v.fields.is_empty())
@@ -405,7 +401,7 @@ impl<'tcx> AdtDef<'tcx> {
pub fn variant_with_ctor_id(self, cid: DefId) -> &'tcx VariantDef {
self.variants()
.iter()
- .find(|v| v.ctor_def_id == Some(cid))
+ .find(|v| v.ctor_def_id() == Some(cid))
.expect("variant_with_ctor_id: unknown variant")
}
@@ -422,7 +418,7 @@ impl<'tcx> AdtDef<'tcx> {
pub fn variant_index_with_ctor_id(self, cid: DefId) -> VariantIdx {
self.variants()
.iter_enumerated()
- .find(|(_, v)| v.ctor_def_id == Some(cid))
+ .find(|(_, v)| v.ctor_def_id() == Some(cid))
.expect("variant_index_with_ctor_id: unknown variant")
.0
}
@@ -463,9 +459,7 @@ impl<'tcx> AdtDef<'tcx> {
}
Err(err) => {
let msg = match err {
- ErrorHandled::Reported(_) | ErrorHandled::Linted => {
- "enum discriminant evaluation failed"
- }
+ ErrorHandled::Reported(_) => "enum discriminant evaluation failed",
ErrorHandled::TooGeneric => "enum discriminant depends on generics",
};
tcx.sess.delay_span_bug(tcx.def_span(expr_did), msg);
diff --git a/compiler/rustc_middle/src/ty/closure.rs b/compiler/rustc_middle/src/ty/closure.rs
index 0d6c26a58..d00553cba 100644
--- a/compiler/rustc_middle/src/ty/closure.rs
+++ b/compiler/rustc_middle/src/ty/closure.rs
@@ -5,6 +5,7 @@ use crate::{mir, ty};
use std::fmt::Write;
+use hir::LangItem;
use rustc_data_structures::fx::{FxHashMap, FxIndexMap};
use rustc_hir as hir;
use rustc_hir::def_id::{DefId, LocalDefId};
@@ -14,8 +15,8 @@ use super::{Ty, TyCtxt};
use self::BorrowKind::*;
-// Captures are represented using fields inside a structure.
-// This represents accessing self in the closure structure
+/// Captures are represented using fields inside a structure.
+/// This represents accessing `self` in the closure structure.
pub const CAPTURE_STRUCT_LOCAL: mir::Local = mir::Local::from_u32(1);
#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, TyEncodable, TyDecodable, HashStable)]
@@ -90,20 +91,26 @@ pub enum ClosureKind {
}
impl<'tcx> ClosureKind {
- // This is the initial value used when doing upvar inference.
+ /// This is the initial value used when doing upvar inference.
pub const LATTICE_BOTTOM: ClosureKind = ClosureKind::Fn;
/// Returns `true` if a type that impls this closure kind
/// must also implement `other`.
pub fn extends(self, other: ty::ClosureKind) -> bool {
- matches!(
- (self, other),
- (ClosureKind::Fn, ClosureKind::Fn)
- | (ClosureKind::Fn, ClosureKind::FnMut)
- | (ClosureKind::Fn, ClosureKind::FnOnce)
- | (ClosureKind::FnMut, ClosureKind::FnMut)
- | (ClosureKind::FnMut, ClosureKind::FnOnce)
- | (ClosureKind::FnOnce, ClosureKind::FnOnce)
+ self <= other
+ }
+
+ /// Converts `self` to a [`DefId`] of the corresponding trait.
+ ///
+ /// Note: the inverse of this function is [`TyCtxt::fn_trait_kind_from_def_id`].
+ pub fn to_def_id(&self, tcx: TyCtxt<'_>) -> DefId {
+ tcx.require_lang_item(
+ match self {
+ ClosureKind::Fn => LangItem::Fn,
+ ClosureKind::FnMut => LangItem::FnMut,
+ ClosureKind::FnOnce => LangItem::FnOnce,
+ },
+ None,
)
}
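
As the new doc comment notes, `to_def_id` and `TyCtxt::fn_trait_kind_from_def_id` are inverses (the removed version mapped `Fn` to the `FnOnce` lang item and vice versa, which this rewrite corrects). A small sketch of the round trip, assuming `fn_trait_kind_from_def_id` returns `Option<ClosureKind>` as the doc comment implies:

use rustc_middle::ty::{self, TyCtxt};

// Sketch: round-trip a closure kind through its trait `DefId`.
fn check_roundtrip<'tcx>(tcx: TyCtxt<'tcx>) {
    let kind = ty::ClosureKind::FnMut;
    let def_id = kind.to_def_id(tcx);
    debug_assert_eq!(tcx.fn_trait_kind_from_def_id(def_id), Some(kind));
}
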
@@ -116,26 +123,6 @@ impl<'tcx> ClosureKind {
ClosureKind::FnOnce => tcx.types.i32,
}
}
-
- pub fn from_def_id(tcx: TyCtxt<'_>, def_id: DefId) -> Option<ClosureKind> {
- if Some(def_id) == tcx.lang_items().fn_once_trait() {
- Some(ClosureKind::FnOnce)
- } else if Some(def_id) == tcx.lang_items().fn_mut_trait() {
- Some(ClosureKind::FnMut)
- } else if Some(def_id) == tcx.lang_items().fn_trait() {
- Some(ClosureKind::Fn)
- } else {
- None
- }
- }
-
- pub fn to_def_id(&self, tcx: TyCtxt<'_>) -> DefId {
- match self {
- ClosureKind::Fn => tcx.lang_items().fn_once_trait().unwrap(),
- ClosureKind::FnMut => tcx.lang_items().fn_mut_trait().unwrap(),
- ClosureKind::FnOnce => tcx.lang_items().fn_trait().unwrap(),
- }
- }
}
/// A composite describing a `Place` that is captured by a closure.
diff --git a/compiler/rustc_middle/src/ty/codec.rs b/compiler/rustc_middle/src/ty/codec.rs
index 14ec88b7e..75f2d45ea 100644
--- a/compiler/rustc_middle/src/ty/codec.rs
+++ b/compiler/rustc_middle/src/ty/codec.rs
@@ -298,7 +298,7 @@ impl<'tcx, D: TyDecoder<I = TyCtxt<'tcx>>> RefDecodable<'tcx, D> for ty::List<Ty
}
impl<'tcx, D: TyDecoder<I = TyCtxt<'tcx>>> RefDecodable<'tcx, D>
- for ty::List<ty::Binder<'tcx, ty::ExistentialPredicate<'tcx>>>
+ for ty::List<ty::PolyExistentialPredicate<'tcx>>
{
fn decode(decoder: &mut D) -> &'tcx Self {
let len = decoder.read_usize();
@@ -310,7 +310,8 @@ impl<'tcx, D: TyDecoder<I = TyCtxt<'tcx>>> RefDecodable<'tcx, D>
impl<'tcx, D: TyDecoder<I = TyCtxt<'tcx>>> Decodable<D> for ty::Const<'tcx> {
fn decode(decoder: &mut D) -> Self {
- decoder.interner().mk_const(Decodable::decode(decoder))
+ let consts: ty::ConstS<'tcx> = Decodable::decode(decoder);
+ decoder.interner().mk_const(consts.kind, consts.ty)
}
}
@@ -344,9 +345,7 @@ impl<'tcx, D: TyDecoder<I = TyCtxt<'tcx>>> RefDecodable<'tcx, D>
}
}
-impl<'tcx, D: TyDecoder<I = TyCtxt<'tcx>>> RefDecodable<'tcx, D>
- for [ty::abstract_const::Node<'tcx>]
-{
+impl<'tcx, D: TyDecoder<I = TyCtxt<'tcx>>> RefDecodable<'tcx, D> for [(ty::Clause<'tcx>, Span)] {
fn decode(decoder: &mut D) -> &'tcx Self {
decoder.interner().arena.alloc_from_iter(
(0..decoder.read_usize()).map(|_| Decodable::decode(decoder)).collect::<Vec<_>>(),
@@ -355,30 +354,29 @@ impl<'tcx, D: TyDecoder<I = TyCtxt<'tcx>>> RefDecodable<'tcx, D>
}
impl<'tcx, D: TyDecoder<I = TyCtxt<'tcx>>> RefDecodable<'tcx, D>
- for [ty::abstract_const::NodeId]
+ for ty::List<ty::BoundVariableKind>
{
fn decode(decoder: &mut D) -> &'tcx Self {
- decoder.interner().arena.alloc_from_iter(
- (0..decoder.read_usize()).map(|_| Decodable::decode(decoder)).collect::<Vec<_>>(),
+ let len = decoder.read_usize();
+ decoder.interner().mk_bound_variable_kinds(
+ (0..len).map::<ty::BoundVariableKind, _>(|_| Decodable::decode(decoder)),
)
}
}
-impl<'tcx, D: TyDecoder<I = TyCtxt<'tcx>>> RefDecodable<'tcx, D>
- for ty::List<ty::BoundVariableKind>
-{
+impl<'tcx, D: TyDecoder<I = TyCtxt<'tcx>>> RefDecodable<'tcx, D> for ty::List<ty::Const<'tcx>> {
fn decode(decoder: &mut D) -> &'tcx Self {
let len = decoder.read_usize();
- decoder.interner().mk_bound_variable_kinds(
- (0..len).map::<ty::BoundVariableKind, _>(|_| Decodable::decode(decoder)),
- )
+ decoder
+ .interner()
+ .mk_const_list((0..len).map::<ty::Const<'tcx>, _>(|_| Decodable::decode(decoder)))
}
}
impl_decodable_via_ref! {
&'tcx ty::TypeckResults<'tcx>,
&'tcx ty::List<Ty<'tcx>>,
- &'tcx ty::List<ty::Binder<'tcx, ty::ExistentialPredicate<'tcx>>>,
+ &'tcx ty::List<ty::PolyExistentialPredicate<'tcx>>,
&'tcx traits::ImplSource<'tcx, ()>,
&'tcx mir::Body<'tcx>,
&'tcx mir::UnsafetyCheckResult,
diff --git a/compiler/rustc_middle/src/ty/consts.rs b/compiler/rustc_middle/src/ty/consts.rs
index f998e6083..c2be08e49 100644
--- a/compiler/rustc_middle/src/ty/consts.rs
+++ b/compiler/rustc_middle/src/ty/consts.rs
@@ -1,8 +1,6 @@
use crate::mir::interpret::LitToConstInput;
-use crate::mir::ConstantKind;
-use crate::ty::{self, InternalSubsts, ParamEnv, ParamEnvAnd, Ty, TyCtxt};
+use crate::ty::{self, DefIdTree, InternalSubsts, ParamEnv, ParamEnvAnd, Ty, TyCtxt};
use rustc_data_structures::intern::Interned;
-use rustc_errors::ErrorGuaranteed;
use rustc_hir as hir;
use rustc_hir::def_id::{DefId, LocalDefId};
use rustc_macros::HashStable;
@@ -77,13 +75,13 @@ impl<'tcx> Const<'tcx> {
match Self::try_eval_lit_or_param(tcx, ty, expr) {
Some(v) => v,
- None => tcx.mk_const(ty::ConstS {
- kind: ty::ConstKind::Unevaluated(ty::UnevaluatedConst {
+ None => tcx.mk_const(
+ ty::UnevaluatedConst {
def: def.to_global(),
substs: InternalSubsts::identity_for_item(tcx, def.did.to_def_id()),
- }),
+ },
ty,
- }),
+ ),
}
}
@@ -132,27 +130,16 @@ impl<'tcx> Const<'tcx> {
ExprKind::Path(QPath::Resolved(_, &Path { res: Res::Def(ConstParam, def_id), .. })) => {
// Find the name and index of the const parameter by indexing the generics of
// the parent item and construct a `ParamConst`.
- let hir_id = tcx.hir().local_def_id_to_hir_id(def_id.expect_local());
- let item_id = tcx.hir().get_parent_node(hir_id);
- let item_def_id = tcx.hir().local_def_id(item_id);
- let generics = tcx.generics_of(item_def_id.to_def_id());
+ let item_def_id = tcx.parent(def_id);
+ let generics = tcx.generics_of(item_def_id);
let index = generics.param_def_id_to_index[&def_id];
- let name = tcx.hir().name(hir_id);
- Some(tcx.mk_const(ty::ConstS {
- kind: ty::ConstKind::Param(ty::ParamConst::new(index, name)),
- ty,
- }))
+ let name = tcx.item_name(def_id);
+ Some(tcx.mk_const(ty::ParamConst::new(index, name), ty))
}
_ => None,
}
}
- /// Interns the given value as a constant.
- #[inline]
- pub fn from_value(tcx: TyCtxt<'tcx>, val: ty::ValTree<'tcx>, ty: Ty<'tcx>) -> Self {
- tcx.mk_const(ConstS { kind: ConstKind::Value(val), ty })
- }
-
/// Panics if self.kind != ty::ConstKind::Value
pub fn to_valtree(self) -> ty::ValTree<'tcx> {
match self.kind() {
@@ -161,11 +148,6 @@ impl<'tcx> Const<'tcx> {
}
}
- pub fn from_scalar_int(tcx: TyCtxt<'tcx>, i: ScalarInt, ty: Ty<'tcx>) -> Self {
- let valtree = ty::ValTree::from_scalar_int(i);
- Self::from_value(tcx, valtree, ty)
- }
-
#[inline]
/// Creates a constant with the given integer value and interns it.
pub fn from_bits(tcx: TyCtxt<'tcx>, bits: u128, ty: ParamEnvAnd<'tcx, Ty<'tcx>>) -> Self {
@@ -173,14 +155,16 @@ impl<'tcx> Const<'tcx> {
.layout_of(ty)
.unwrap_or_else(|e| panic!("could not compute layout for {:?}: {:?}", ty, e))
.size;
- Self::from_scalar_int(tcx, ScalarInt::try_from_uint(bits, size).unwrap(), ty.value)
+ tcx.mk_const(
+ ty::ValTree::from_scalar_int(ScalarInt::try_from_uint(bits, size).unwrap()),
+ ty.value,
+ )
}
#[inline]
/// Creates an interned zst constant.
pub fn zero_sized(tcx: TyCtxt<'tcx>, ty: Ty<'tcx>) -> Self {
- let valtree = ty::ValTree::zst();
- Self::from_value(tcx, valtree, ty)
+ tcx.mk_const(ty::ValTree::zst(), ty)
}
#[inline]
@@ -227,8 +211,8 @@ impl<'tcx> Const<'tcx> {
pub fn eval(self, tcx: TyCtxt<'tcx>, param_env: ParamEnv<'tcx>) -> Const<'tcx> {
if let Some(val) = self.kind().try_eval_for_typeck(tcx, param_env) {
match val {
- Ok(val) => Const::from_value(tcx, val, self.ty()),
- Err(ErrorGuaranteed { .. }) => tcx.const_error(self.ty()),
+ Ok(val) => tcx.mk_const(val, self.ty()),
+ Err(guar) => tcx.const_error_with_guaranteed(self.ty(), guar),
}
} else {
// Either the constant isn't evaluatable or ValTree creation failed.
@@ -237,20 +221,6 @@ impl<'tcx> Const<'tcx> {
}
#[inline]
- /// Tries to evaluate the constant if it is `Unevaluated` and creates a ConstValue if the
- /// evaluation succeeds. If it doesn't succeed, returns the unevaluated constant.
- pub fn eval_for_mir(self, tcx: TyCtxt<'tcx>, param_env: ParamEnv<'tcx>) -> ConstantKind<'tcx> {
- if let Some(val) = self.kind().try_eval_for_mir(tcx, param_env) {
- match val {
- Ok(const_val) => ConstantKind::from_value(const_val, self.ty()),
- Err(ErrorGuaranteed { .. }) => ConstantKind::Ty(tcx.const_error(self.ty())),
- }
- } else {
- ConstantKind::Ty(self)
- }
- }
-
- #[inline]
/// Panics if the value cannot be evaluated or doesn't contain a valid integer of the given type.
pub fn eval_bits(self, tcx: TyCtxt<'tcx>, param_env: ParamEnv<'tcx>, ty: Ty<'tcx>) -> u128 {
self.try_eval_bits(tcx, param_env, ty)
@@ -272,9 +242,9 @@ impl<'tcx> Const<'tcx> {
pub fn const_param_default<'tcx>(tcx: TyCtxt<'tcx>, def_id: DefId) -> Const<'tcx> {
let default_def_id = match tcx.hir().get_by_def_id(def_id.expect_local()) {
hir::Node::GenericParam(hir::GenericParam {
- kind: hir::GenericParamKind::Const { ty: _, default: Some(ac) },
+ kind: hir::GenericParamKind::Const { default: Some(ac), .. },
..
- }) => tcx.hir().local_def_id(ac.hir_id),
+ }) => ac.def_id,
_ => span_bug!(
tcx.def_span(def_id),
"`const_param_default` expected a generic parameter with a constant"
diff --git a/compiler/rustc_middle/src/ty/consts/int.rs b/compiler/rustc_middle/src/ty/consts/int.rs
index 7436f0f6f..f3186e1c3 100644
--- a/compiler/rustc_middle/src/ty/consts/int.rs
+++ b/compiler/rustc_middle/src/ty/consts/int.rs
@@ -245,6 +245,18 @@ impl ScalarInt {
self.to_bits(size)
}
+ // Tries to convert the `ScalarInt` to `bool`. Fails if the `size` of the `ScalarInt`
+ // is not equal to `Size { raw: 1 }` or if the value is neither 0 nor 1, and returns the
+ // `size` of the `ScalarInt` in that case.
+ #[inline]
+ pub fn try_to_bool(self) -> Result<bool, Size> {
+ match self.try_to_u8()? {
+ 0 => Ok(false),
+ 1 => Ok(true),
+ _ => Err(self.size()),
+ }
+ }
+
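
A quick sketch of the new helper, reusing `ScalarInt::try_from_uint` as seen elsewhere in this patch; the one-byte `Size` is spelled out for illustration:

use rustc_middle::ty::ScalarInt;
use rustc_target::abi::Size;

// One-byte scalars holding 0 or 1 convert; anything else reports its size back.
fn demo() {
    let one = ScalarInt::try_from_uint(1u128, Size::from_bytes(1)).unwrap();
    assert_eq!(one.try_to_bool(), Ok(true));
}
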
// Tries to convert the `ScalarInt` to `u8`. Fails if the `size` of the `ScalarInt`
 // is not equal to `Size { raw: 1 }` and returns the `size` value of the `ScalarInt` in
// that case.
diff --git a/compiler/rustc_middle/src/ty/consts/kind.rs b/compiler/rustc_middle/src/ty/consts/kind.rs
index 4ab761e07..becc2b805 100644
--- a/compiler/rustc_middle/src/ty/consts/kind.rs
+++ b/compiler/rustc_middle/src/ty/consts/kind.rs
@@ -1,10 +1,12 @@
use std::convert::TryInto;
+use super::Const;
use crate::mir;
use crate::mir::interpret::{AllocId, ConstValue, Scalar};
+use crate::ty::abstract_const::CastKind;
use crate::ty::subst::{InternalSubsts, SubstsRef};
use crate::ty::ParamEnv;
-use crate::ty::{self, TyCtxt, TypeVisitable};
+use crate::ty::{self, List, Ty, TyCtxt, TypeVisitable};
use rustc_data_structures::stable_hasher::{HashStable, StableHasher};
use rustc_errors::ErrorGuaranteed;
use rustc_hir::def_id::DefId;
@@ -47,6 +49,7 @@ impl<'tcx> UnevaluatedConst<'tcx> {
/// Represents a constant in Rust.
#[derive(Copy, Clone, Debug, Eq, PartialEq, PartialOrd, Ord, TyEncodable, TyDecodable)]
#[derive(Hash, HashStable, TypeFoldable, TypeVisitable)]
+#[derive(derive_more::From)]
pub enum ConstKind<'tcx> {
/// A const generic parameter.
Param(ty::ParamConst),
@@ -69,9 +72,31 @@ pub enum ConstKind<'tcx> {
/// A placeholder for a const which could not be computed; this is
/// propagated to avoid useless error messages.
- Error(ty::DelaySpanBugEmitted),
+ #[from(ignore)]
+ Error(ErrorGuaranteed),
+
+ /// A const expression built from operands that have only been partially evaluated.
+ Expr(Expr<'tcx>),
+}
+
+impl<'tcx> From<ty::ConstVid<'tcx>> for ConstKind<'tcx> {
+ fn from(const_vid: ty::ConstVid<'tcx>) -> Self {
+ InferConst::Var(const_vid).into()
+ }
}
+#[derive(Copy, Clone, Debug, Eq, PartialEq, PartialOrd, Ord, Hash)]
+#[derive(HashStable, TyEncodable, TyDecodable, TypeVisitable, TypeFoldable)]
+pub enum Expr<'tcx> {
+ Binop(mir::BinOp, Const<'tcx>, Const<'tcx>),
+ UnOp(mir::UnOp, Const<'tcx>),
+ FunctionCall(Const<'tcx>, &'tcx List<Const<'tcx>>),
+ Cast(CastKind, Const<'tcx>, Ty<'tcx>),
+}
+
+#[cfg(all(target_arch = "x86_64", target_pointer_width = "64"))]
+static_assert_size!(Expr<'_>, 24);
+
#[cfg(all(target_arch = "x86_64", target_pointer_width = "64"))]
static_assert_size!(ConstKind<'_>, 32);
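
With `derive_more::From` on `ConstKind` and the new two-argument `mk_const` (see the `context.rs` hunk below), a partially evaluated expression const can be built directly from its operands. A hedged sketch; the `ty::Expr` re-export path and the wrapper function are assumptions:

use rustc_middle::mir;
use rustc_middle::ty::{self, Ty, TyCtxt};

// Builds the partially evaluated const `a + b` of type `ty`.
fn add_expr<'tcx>(
    tcx: TyCtxt<'tcx>,
    a: ty::Const<'tcx>,
    b: ty::Const<'tcx>,
    ty: Ty<'tcx>,
) -> ty::Const<'tcx> {
    tcx.mk_const(ty::ConstKind::Expr(ty::Expr::Binop(mir::BinOp::Add, a, b)), ty)
}
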
@@ -226,7 +251,7 @@ impl<'tcx> ConstKind<'tcx> {
// (which may be identity substs, see above),
// can leak through `val` into the const we return.
Ok(val) => Some(Ok(EvalResult::ValTree(val?))),
- Err(ErrorHandled::TooGeneric | ErrorHandled::Linted) => None,
+ Err(ErrorHandled::TooGeneric) => None,
Err(ErrorHandled::Reported(e)) => Some(Err(e)),
}
}
@@ -237,7 +262,7 @@ impl<'tcx> ConstKind<'tcx> {
// (which may be identity substs, see above),
// can leak through `val` into the const we return.
Ok(val) => Some(Ok(EvalResult::ConstVal(val))),
- Err(ErrorHandled::TooGeneric | ErrorHandled::Linted) => None,
+ Err(ErrorHandled::TooGeneric) => None,
Err(ErrorHandled::Reported(e)) => Some(Err(e)),
}
}
diff --git a/compiler/rustc_middle/src/ty/context.rs b/compiler/rustc_middle/src/ty/context.rs
index 3d7e2a083..b44bc14ec 100644
--- a/compiler/rustc_middle/src/ty/context.rs
+++ b/compiler/rustc_middle/src/ty/context.rs
@@ -1,5 +1,7 @@
//! Type context book-keeping.
+#![allow(rustc::usage_of_ty_tykind)]
+
use crate::arena::Arena;
use crate::dep_graph::{DepGraph, DepKindStruct};
use crate::hir::place::Place as HirPlace;
@@ -17,17 +19,17 @@ use crate::traits;
use crate::ty::query::{self, TyCtxtAt};
use crate::ty::{
self, AdtDef, AdtDefData, AdtKind, Binder, BindingMode, BoundVar, CanonicalPolyFnSig,
- ClosureSizeProfileData, Const, ConstS, ConstVid, DefIdTree, ExistentialPredicate, FloatTy,
- FloatVar, FloatVid, GenericParamDefKind, InferConst, InferTy, IntTy, IntVar, IntVid, List,
- ParamConst, ParamTy, PolyFnSig, Predicate, PredicateKind, PredicateS, ProjectionTy, Region,
- RegionKind, ReprOptions, TraitObjectVisitor, Ty, TyKind, TyS, TyVar, TyVid, TypeAndMut, UintTy,
+ ClosureSizeProfileData, Const, ConstS, DefIdTree, FloatTy, FloatVar, FloatVid,
+ GenericParamDefKind, InferTy, IntTy, IntVar, IntVid, List, ParamConst, ParamTy,
+ PolyExistentialPredicate, PolyFnSig, Predicate, PredicateKind, ProjectionTy, Region,
+ RegionKind, ReprOptions, TraitObjectVisitor, Ty, TyKind, TyVar, TyVid, TypeAndMut, UintTy,
Visibility,
};
use crate::ty::{GenericArg, GenericArgKind, InternalSubsts, SubstsRef, UserSubsts};
use rustc_ast as ast;
use rustc_data_structures::fingerprint::Fingerprint;
use rustc_data_structures::fx::{FxHashMap, FxHashSet};
-use rustc_data_structures::intern::{Interned, WithStableHash};
+use rustc_data_structures::intern::Interned;
use rustc_data_structures::memmap::Mmap;
use rustc_data_structures::profiling::SelfProfilerRef;
use rustc_data_structures::sharded::{IntoPointer, ShardedHashMap};
@@ -41,7 +43,7 @@ use rustc_errors::{
};
use rustc_hir as hir;
use rustc_hir::def::{DefKind, Res};
-use rustc_hir::def_id::{CrateNum, DefId, DefIdMap, LocalDefId, LOCAL_CRATE};
+use rustc_hir::def_id::{CrateNum, DefId, LocalDefId, LocalDefIdMap, LOCAL_CRATE};
use rustc_hir::definitions::Definitions;
use rustc_hir::hir_id::OwnerId;
use rustc_hir::intravisit::Visitor;
@@ -53,6 +55,7 @@ use rustc_hir::{
use rustc_index::vec::{Idx, IndexVec};
use rustc_macros::HashStable;
use rustc_middle::mir::FakeReadCause;
+use rustc_query_system::dep_graph::DepNodeIndex;
use rustc_query_system::ich::StableHashingContext;
use rustc_serialize::opaque::{FileEncodeResult, FileEncoder};
use rustc_session::config::{CrateType, OutputFilenames};
@@ -67,6 +70,7 @@ use rustc_span::{Span, DUMMY_SP};
use rustc_target::abi::{Layout, LayoutS, TargetDataLayout, VariantIdx};
use rustc_target::spec::abi;
use rustc_type_ir::sty::TyKind::*;
+use rustc_type_ir::WithCachedTypeInfo;
use rustc_type_ir::{DynKind, InternAs, InternIteratorElement, Interner, TypeFlags};
use std::any::Any;
@@ -80,7 +84,7 @@ use std::mem;
use std::ops::{Bound, Deref};
use std::sync::Arc;
-use super::{ImplPolarity, ResolverOutputs, RvalueScopes};
+use super::{ImplPolarity, RvalueScopes};
pub trait OnDiskCache<'tcx>: rustc_data_structures::sync::Sync {
/// Creates a new `OnDiskCache` instance from the serialized data in `data`.
@@ -109,7 +113,7 @@ impl<'tcx> Interner for TyCtxt<'tcx> {
type Mutability = hir::Mutability;
type Movability = hir::Movability;
type PolyFnSig = PolyFnSig<'tcx>;
- type ListBinderExistentialPredicate = &'tcx List<Binder<'tcx, ExistentialPredicate<'tcx>>>;
+ type ListBinderExistentialPredicate = &'tcx List<PolyExistentialPredicate<'tcx>>;
type BinderListTy = Binder<'tcx, &'tcx List<Ty<'tcx>>>;
type ListTy = &'tcx List<Ty<'tcx>>;
type ProjectionTy = ty::ProjectionTy<'tcx>;
@@ -117,7 +121,7 @@ impl<'tcx> Interner for TyCtxt<'tcx> {
type BoundTy = ty::BoundTy;
type PlaceholderType = ty::PlaceholderType;
type InferTy = InferTy;
- type DelaySpanBugEmitted = DelaySpanBugEmitted;
+ type ErrorGuaranteed = ErrorGuaranteed;
type PredicateKind = ty::PredicateKind<'tcx>;
type AllocId = crate::mir::interpret::AllocId;
@@ -128,15 +132,6 @@ impl<'tcx> Interner for TyCtxt<'tcx> {
type PlaceholderRegion = ty::PlaceholderRegion;
}
-/// A type that is not publicly constructable. This prevents people from making [`TyKind::Error`]s
-/// except through the error-reporting functions on a [`tcx`][TyCtxt].
-#[derive(Copy, Clone, Debug, Eq, Hash, PartialEq, PartialOrd, Ord)]
-#[derive(TyEncodable, TyDecodable, HashStable)]
-pub struct DelaySpanBugEmitted {
- pub reported: ErrorGuaranteed,
- _priv: (),
-}
-
type InternedSet<'tcx, T> = ShardedHashMap<InternedInSet<'tcx, T>, ()>;
pub struct CtxtInterners<'tcx> {
@@ -145,20 +140,20 @@ pub struct CtxtInterners<'tcx> {
// Specifically use a speedy hash algorithm for these hash sets, since
// they're accessed quite often.
- type_: InternedSet<'tcx, WithStableHash<TyS<'tcx>>>,
+ type_: InternedSet<'tcx, WithCachedTypeInfo<TyKind<'tcx>>>,
+ const_lists: InternedSet<'tcx, List<ty::Const<'tcx>>>,
substs: InternedSet<'tcx, InternalSubsts<'tcx>>,
canonical_var_infos: InternedSet<'tcx, List<CanonicalVarInfo<'tcx>>>,
region: InternedSet<'tcx, RegionKind<'tcx>>,
- poly_existential_predicates:
- InternedSet<'tcx, List<ty::Binder<'tcx, ExistentialPredicate<'tcx>>>>,
- predicate: InternedSet<'tcx, PredicateS<'tcx>>,
+ poly_existential_predicates: InternedSet<'tcx, List<PolyExistentialPredicate<'tcx>>>,
+ predicate: InternedSet<'tcx, WithCachedTypeInfo<ty::Binder<'tcx, PredicateKind<'tcx>>>>,
predicates: InternedSet<'tcx, List<Predicate<'tcx>>>,
projs: InternedSet<'tcx, List<ProjectionKind>>,
place_elems: InternedSet<'tcx, List<PlaceElem<'tcx>>>,
const_: InternedSet<'tcx, ConstS<'tcx>>,
const_allocation: InternedSet<'tcx, Allocation>,
bound_variable_kinds: InternedSet<'tcx, List<ty::BoundVariableKind>>,
- layout: InternedSet<'tcx, LayoutS<'tcx>>,
+ layout: InternedSet<'tcx, LayoutS<VariantIdx>>,
adt_def: InternedSet<'tcx, AdtDefData>,
}
@@ -167,6 +162,7 @@ impl<'tcx> CtxtInterners<'tcx> {
CtxtInterners {
arena,
type_: Default::default(),
+ const_lists: Default::default(),
substs: Default::default(),
region: Default::default(),
poly_existential_predicates: Default::default(),
@@ -198,53 +194,64 @@ impl<'tcx> CtxtInterners<'tcx> {
self.type_
.intern(kind, |kind| {
let flags = super::flags::FlagComputation::for_kind(&kind);
+ let stable_hash =
+ self.stable_hash(&flags, sess, definitions, cstore, source_span, &kind);
- // It's impossible to hash inference variables (and will ICE), so we don't need to try to cache them.
- // Without incremental, we rarely stable-hash types, so let's not do it proactively.
- let stable_hash = if flags.flags.intersects(TypeFlags::NEEDS_INFER)
- || sess.opts.incremental.is_none()
- {
- Fingerprint::ZERO
- } else {
- let mut hasher = StableHasher::new();
- let mut hcx = StableHashingContext::ignore_spans(
- sess,
- definitions,
- cstore,
- source_span,
- );
- kind.hash_stable(&mut hcx, &mut hasher);
- hasher.finish()
- };
-
- let ty_struct = TyS {
- kind,
+ InternedInSet(self.arena.alloc(WithCachedTypeInfo {
+ internee: kind,
+ stable_hash,
flags: flags.flags,
outer_exclusive_binder: flags.outer_exclusive_binder,
- };
-
- InternedInSet(
- self.arena.alloc(WithStableHash { internee: ty_struct, stable_hash }),
- )
+ }))
})
.0,
))
}
+ fn stable_hash<'a, T: HashStable<StableHashingContext<'a>>>(
+ &self,
+ flags: &ty::flags::FlagComputation,
+ sess: &'a Session,
+ definitions: &'a rustc_hir::definitions::Definitions,
+ cstore: &'a CrateStoreDyn,
+ source_span: &'a IndexVec<LocalDefId, Span>,
+ val: &T,
+ ) -> Fingerprint {
+ // It's impossible to hash inference variables (and will ICE), so we don't need to try to cache them.
+ // Without incremental, we rarely stable-hash types, so let's not do it proactively.
+ if flags.flags.intersects(TypeFlags::NEEDS_INFER) || sess.opts.incremental.is_none() {
+ Fingerprint::ZERO
+ } else {
+ let mut hasher = StableHasher::new();
+ let mut hcx = StableHashingContext::new(sess, definitions, cstore, source_span);
+ val.hash_stable(&mut hcx, &mut hasher);
+ hasher.finish()
+ }
+ }
+
#[inline(never)]
- fn intern_predicate(&self, kind: Binder<'tcx, PredicateKind<'tcx>>) -> Predicate<'tcx> {
+ fn intern_predicate(
+ &self,
+ kind: Binder<'tcx, PredicateKind<'tcx>>,
+ sess: &Session,
+ definitions: &rustc_hir::definitions::Definitions,
+ cstore: &CrateStoreDyn,
+ source_span: &IndexVec<LocalDefId, Span>,
+ ) -> Predicate<'tcx> {
Predicate(Interned::new_unchecked(
self.predicate
.intern(kind, |kind| {
let flags = super::flags::FlagComputation::for_predicate(kind);
- let predicate_struct = PredicateS {
- kind,
+ let stable_hash =
+ self.stable_hash(&flags, sess, definitions, cstore, source_span, &kind);
+
+ InternedInSet(self.arena.alloc(WithCachedTypeInfo {
+ internee: kind,
+ stable_hash,
flags: flags.flags,
outer_exclusive_binder: flags.outer_exclusive_binder,
- };
-
- InternedInSet(self.arena.alloc(predicate_struct))
+ }))
})
.0,
))
@@ -456,7 +463,7 @@ pub struct TypeckResults<'tcx> {
/// Stores the canonicalized types provided by the user. See also
/// `AscribeUserType` statement in MIR.
- pub user_provided_sigs: DefIdMap<CanonicalPolyFnSig<'tcx>>,
+ pub user_provided_sigs: LocalDefIdMap<CanonicalPolyFnSig<'tcx>>,
adjustments: ItemLocalMap<Vec<ty::adjustment::Adjustment<'tcx>>>,
@@ -660,6 +667,14 @@ impl<'tcx> TypeckResults<'tcx> {
LocalTableInContextMut { hir_owner: self.hir_owner, data: &mut self.field_indices }
}
+ pub fn field_index(&self, id: hir::HirId) -> usize {
+ self.field_indices().get(id).cloned().expect("no index for a field")
+ }
+
+ pub fn opt_field_index(&self, id: hir::HirId) -> Option<usize> {
+ self.field_indices().get(id).cloned()
+ }
+
pub fn user_provided_types(&self) -> LocalTableInContext<'_, CanonicalUserType<'tcx>> {
LocalTableInContext { hir_owner: self.hir_owner, data: &self.user_provided_types }
}
@@ -725,22 +740,24 @@ impl<'tcx> TypeckResults<'tcx> {
self.node_substs.get(&id.local_id).cloned()
}
- // Returns the type of a pattern as a monotype. Like @expr_ty, this function
- // doesn't provide type parameter substitutions.
+ /// Returns the type of a pattern as a monotype. Like [`expr_ty`], this function
+ /// doesn't provide type parameter substitutions.
+ ///
+ /// [`expr_ty`]: TypeckResults::expr_ty
pub fn pat_ty(&self, pat: &hir::Pat<'_>) -> Ty<'tcx> {
self.node_type(pat.hir_id)
}
- // Returns the type of an expression as a monotype.
- //
- // NB (1): This is the PRE-ADJUSTMENT TYPE for the expression. That is, in
- // some cases, we insert `Adjustment` annotations such as auto-deref or
- // auto-ref. The type returned by this function does not consider such
- // adjustments. See `expr_ty_adjusted()` instead.
- //
- // NB (2): This type doesn't provide type parameter substitutions; e.g., if you
- // ask for the type of "id" in "id(3)", it will return "fn(&isize) -> isize"
- // instead of "fn(ty) -> T with T = isize".
+ /// Returns the type of an expression as a monotype.
+ ///
+ /// NB (1): This is the PRE-ADJUSTMENT TYPE for the expression. That is, in
+ /// some cases, we insert `Adjustment` annotations such as auto-deref or
+ /// auto-ref. The type returned by this function does not consider such
+ /// adjustments. See `expr_ty_adjusted()` instead.
+ ///
+ /// NB (2): This type doesn't provide type parameter substitutions; e.g., if you
+ /// ask for the type of `id` in `id(3)`, it will return `fn(&isize) -> isize`
+ /// instead of `fn(ty) -> T with T = isize`.
pub fn expr_ty(&self, expr: &hir::Expr<'_>) -> Ty<'tcx> {
self.node_type(expr.hir_id)
}
@@ -1007,18 +1024,46 @@ impl<'tcx> CommonConsts<'tcx> {
}
}
-// This struct contains information regarding the `ReFree(FreeRegion)` corresponding to a lifetime
-// conflict.
+/// This struct contains information regarding the `ReFree(FreeRegion)` corresponding to a lifetime
+/// conflict.
#[derive(Debug)]
pub struct FreeRegionInfo {
- // `LocalDefId` corresponding to FreeRegion
+ /// The `LocalDefId` corresponding to the free region.
pub def_id: LocalDefId,
- // the bound region corresponding to FreeRegion
+ /// The bound region corresponding to the free region.
pub boundregion: ty::BoundRegionKind,
- // checks if bound region is in Impl Item
+ /// Whether the bound region is in an impl item.
pub is_impl_item: bool,
}
+/// This struct should only be created by `create_def`.
+#[derive(Copy, Clone)]
+pub struct TyCtxtFeed<'tcx, KEY: Copy> {
+ pub tcx: TyCtxt<'tcx>,
+ // Do not allow direct access, as downstream code must not mutate this field.
+ key: KEY,
+}
+
+impl<'tcx> TyCtxt<'tcx> {
+ pub fn feed_unit_query(self) -> TyCtxtFeed<'tcx, ()> {
+ TyCtxtFeed { tcx: self, key: () }
+ }
+}
+
+impl<'tcx, KEY: Copy> TyCtxtFeed<'tcx, KEY> {
+ #[inline(always)]
+ pub fn key(&self) -> KEY {
+ self.key
+ }
+}
+
+impl<'tcx> TyCtxtFeed<'tcx, LocalDefId> {
+ #[inline(always)]
+ pub fn def_id(&self) -> LocalDefId {
+ self.key
+ }
+}
+
/// The central data structure of the compiler. It stores references
/// to the various **arenas** and also houses the results of the
/// various **compiler queries** that have been performed. See the
@@ -1071,7 +1116,6 @@ pub struct GlobalCtxt<'tcx> {
/// Output of the resolver.
pub(crate) untracked_resolutions: ty::ResolverGlobalCtxt,
- untracked_resolver_for_lowering: Steal<ty::ResolverAstLowering>,
/// The entire crate as AST. This field serves as the input for the hir_crate query,
/// which lowers it from AST to HIR. It must not be read or used by anything else.
pub untracked_crate: Steal<Lrc<ast::Crate>>,
@@ -1201,8 +1245,9 @@ impl<'tcx> TyCtxt<'tcx> {
debug!("layout_scalar_valid_range: attr={:?}", attr);
if let Some(
&[
- ast::NestedMetaItem::Literal(ast::Lit {
- kind: ast::LitKind::Int(a, _), ..
+ ast::NestedMetaItem::Lit(ast::MetaItemLit {
+ kind: ast::LitKind::Int(a, _),
+ ..
}),
],
) = attr.meta_item_list().as_deref()
@@ -1233,21 +1278,17 @@ impl<'tcx> TyCtxt<'tcx> {
lint_store: Lrc<dyn Any + sync::Send + sync::Sync>,
arena: &'tcx WorkerLocal<Arena<'tcx>>,
hir_arena: &'tcx WorkerLocal<hir::Arena<'tcx>>,
- resolver_outputs: ResolverOutputs,
+ definitions: Definitions,
+ untracked_resolutions: ty::ResolverGlobalCtxt,
krate: Lrc<ast::Crate>,
dep_graph: DepGraph,
on_disk_cache: Option<&'tcx dyn OnDiskCache<'tcx>>,
queries: &'tcx dyn query::QueryEngine<'tcx>,
query_kinds: &'tcx [DepKindStruct<'tcx>],
- crate_name: &str,
+ crate_name: Symbol,
output_filenames: OutputFilenames,
) -> GlobalCtxt<'tcx> {
- let ResolverOutputs {
- definitions,
- global_ctxt: untracked_resolutions,
- ast_lowering: untracked_resolver_for_lowering,
- } = resolver_outputs;
- let data_layout = TargetDataLayout::parse(&s.target).unwrap_or_else(|err| {
+ let data_layout = s.target.parse_data_layout().unwrap_or_else(|err| {
s.emit_fatal(err);
});
let interners = CtxtInterners::new(arena);
@@ -1275,7 +1316,6 @@ impl<'tcx> TyCtxt<'tcx> {
lifetimes: common_lifetimes,
consts: common_consts,
untracked_resolutions,
- untracked_resolver_for_lowering: Steal::new(untracked_resolver_for_lowering),
untracked_crate: Steal::new(krate),
on_disk_cache,
queries,
@@ -1285,13 +1325,19 @@ impl<'tcx> TyCtxt<'tcx> {
pred_rcache: Default::default(),
selection_cache: Default::default(),
evaluation_cache: Default::default(),
- crate_name: Symbol::intern(crate_name),
+ crate_name,
data_layout,
alloc_map: Lock::new(interpret::AllocMap::new()),
output_filenames: Arc::new(output_filenames),
}
}
+ /// Constructs a `TyKind::Error` type from an already-emitted `ErrorGuaranteed`.
+ #[track_caller]
+ pub fn ty_error_with_guaranteed(self, reported: ErrorGuaranteed) -> Ty<'tcx> {
+ self.mk_ty(Error(reported))
+ }
+
/// Constructs a `TyKind::Error` type and registers a `delay_span_bug` to ensure it gets used.
#[track_caller]
pub fn ty_error(self) -> Ty<'tcx> {
@@ -1303,7 +1349,17 @@ impl<'tcx> TyCtxt<'tcx> {
#[track_caller]
pub fn ty_error_with_message<S: Into<MultiSpan>>(self, span: S, msg: &str) -> Ty<'tcx> {
let reported = self.sess.delay_span_bug(span, msg);
- self.mk_ty(Error(DelaySpanBugEmitted { reported, _priv: () }))
+ self.mk_ty(Error(reported))
+ }
+
+ /// Like [`TyCtxt::ty_error`] but for constants, reusing an already-emitted `ErrorGuaranteed`.
+ #[track_caller]
+ pub fn const_error_with_guaranteed(
+ self,
+ ty: Ty<'tcx>,
+ reported: ErrorGuaranteed,
+ ) -> Const<'tcx> {
+ self.mk_const(ty::ConstKind::Error(reported), ty)
}
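
The `_with_guaranteed` constructors let callers thread through an `ErrorGuaranteed` that was already reported instead of delaying a fresh span bug; `expand_abstract_consts` earlier in this patch uses exactly this for failed `bound_abstract_const` lookups. A small sketch of the pattern (the helper function itself is illustrative):

use rustc_errors::ErrorGuaranteed;
use rustc_middle::ty::{self, Ty, TyCtxt};

// Propagate an existing error as an error const of the expected type.
fn const_or_error<'tcx>(
    tcx: TyCtxt<'tcx>,
    res: Result<ty::Const<'tcx>, ErrorGuaranteed>,
    ty: Ty<'tcx>,
) -> ty::Const<'tcx> {
    res.unwrap_or_else(|guar| tcx.const_error_with_guaranteed(ty, guar))
}
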
/// Like [TyCtxt::ty_error] but for constants.
@@ -1325,10 +1381,7 @@ impl<'tcx> TyCtxt<'tcx> {
msg: &str,
) -> Const<'tcx> {
let reported = self.sess.delay_span_bug(span, msg);
- self.mk_const(ty::ConstS {
- kind: ty::ConstKind::Error(DelaySpanBugEmitted { reported, _priv: () }),
- ty,
- })
+ self.mk_const(ty::ConstKind::Error(reported), ty)
}
pub fn consider_optimizing<T: Fn() -> String>(self, msg: T) -> bool {
@@ -1357,6 +1410,11 @@ impl<'tcx> TyCtxt<'tcx> {
self.diagnostic_items(did.krate).name_to_id.get(&name) == Some(&did)
}
+ /// Returns `true` if the node pointed to by `def_id` is a generator for an async construct.
+ pub fn generator_is_async(self, def_id: DefId) -> bool {
+ matches!(self.generator_kind(def_id), Some(hir::GeneratorKind::Async(_)))
+ }
+
pub fn stability(self) -> &'tcx stability::Index {
self.stability_index(())
}
@@ -1460,14 +1518,19 @@ impl<'tcx> TyCtxt<'tcx> {
self.def_path(def_id).to_string_no_crate_verbose()
)
}
+}
+impl<'tcx> TyCtxtAt<'tcx> {
/// Create a new definition within the incr. comp. engine.
- pub fn create_def(self, parent: LocalDefId, data: hir::definitions::DefPathData) -> LocalDefId {
+ pub fn create_def(
+ self,
+ parent: LocalDefId,
+ data: hir::definitions::DefPathData,
+ ) -> TyCtxtFeed<'tcx, LocalDefId> {
// This function modifies `self.definitions` using a side-effect.
// We need to ensure that these side effects are re-run by the incr. comp. engine.
// Depending on the forever-red node will tell the graph that the calling query
// needs to be re-evaluated.
- use rustc_query_system::dep_graph::DepNodeIndex;
self.dep_graph.read_index(DepNodeIndex::FOREVER_RED_NODE);
// The following call has the side effect of modifying the tables inside `definitions`.
@@ -1484,23 +1547,42 @@ impl<'tcx> TyCtxt<'tcx> {
// This is fine because:
// - those queries are `eval_always` so we won't miss their result changing;
// - this write will have happened before these queries are called.
- self.definitions.write().create_def(parent, data)
+ let key = self.definitions.write().create_def(parent, data);
+
+ let feed = TyCtxtFeed { tcx: self.tcx, key };
+ feed.def_span(self.span);
+ feed
}
+}
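
`create_def` now lives on `TyCtxtAt` and hands back a `TyCtxtFeed<LocalDefId>` so queries for the fresh definition (here, `def_span`) can be fed immediately. A sketch of a caller; the `parent` and `data` parameters are placeholders:

use rustc_hir::def_id::LocalDefId;
use rustc_hir::definitions::DefPathData;
use rustc_middle::ty::TyCtxt;
use rustc_span::Span;

// Create a definition at `span` and return its `LocalDefId`.
fn new_def<'tcx>(
    tcx: TyCtxt<'tcx>,
    span: Span,
    parent: LocalDefId,
    data: DefPathData,
) -> LocalDefId {
    let feed = tcx.at(span).create_def(parent, data);
    feed.def_id()
}
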
+impl<'tcx> TyCtxt<'tcx> {
pub fn iter_local_def_id(self) -> impl Iterator<Item = LocalDefId> + 'tcx {
- // Create a dependency to the crate to be sure we re-execute this when the amount of
+ // Create a dependency to the red node to be sure we re-execute this when the amount of
// definitions change.
- self.ensure().hir_crate(());
- // Leak a read lock once we start iterating on definitions, to prevent adding new ones
- // while iterating. If some query needs to add definitions, it should be `ensure`d above.
- let definitions = self.definitions.leak();
- definitions.iter_local_def_id()
+ self.dep_graph.read_index(DepNodeIndex::FOREVER_RED_NODE);
+
+ let definitions = &self.definitions;
+ std::iter::from_generator(|| {
+ let mut i = 0;
+
+ // Recompute the number of definitions each time, because our caller may be creating
+ // new ones.
+ while i < { definitions.read().num_definitions() } {
+ let local_def_index = rustc_span::def_id::DefIndex::from_usize(i);
+ yield LocalDefId { local_def_index };
+ i += 1;
+ }
+
+ // Leak a read lock once we finish iterating on definitions, to prevent adding new ones.
+ definitions.leak();
+ })
}
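
The generator-based rewrite re-reads `num_definitions` on every step, so iteration now tolerates definitions being created by the loop body, and the read lock is only leaked once the iterator is exhausted. A sketch of the intended usage (the loop body is illustrative):

use rustc_middle::ty::TyCtxt;

fn visit_all_defs<'tcx>(tcx: TyCtxt<'tcx>) {
    // Definitions created inside the loop are picked up on later iterations.
    for def_id in tcx.iter_local_def_id() {
        let _ = tcx.def_kind(def_id);
    }
}
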
pub fn def_path_table(self) -> &'tcx rustc_hir::definitions::DefPathTable {
// Create a dependency to the crate to be sure we re-execute this when the amount of
// definitions change.
- self.ensure().hir_crate(());
+ self.dep_graph.read_index(DepNodeIndex::FOREVER_RED_NODE);
+
// Leak a read lock once we start iterating on definitions, to prevent adding new ones
// while iterating. If some query needs to add definitions, it should be `ensure`d above.
let definitions = self.definitions.leak();
@@ -1659,7 +1741,7 @@ impl<'tcx> TyCtxt<'tcx> {
}
}
- // Checks if the bound region is in Impl Item.
+ /// Checks if the bound region is in an impl item.
pub fn is_bound_region_in_impl_item(self, suitable_region_binding_scope: LocalDefId) -> bool {
let container_id = self.parent(suitable_region_binding_scope.to_def_id());
if self.impl_trait_ref(container_id).is_some() {
@@ -1810,7 +1892,7 @@ nop_lift! {const_; Const<'a> => Const<'tcx>}
nop_lift! {const_allocation; ConstAllocation<'a> => ConstAllocation<'tcx>}
nop_lift! {predicate; Predicate<'a> => Predicate<'tcx>}
-nop_list_lift! {poly_existential_predicates; ty::Binder<'a, ExistentialPredicate<'a>> => ty::Binder<'tcx, ExistentialPredicate<'tcx>>}
+nop_list_lift! {poly_existential_predicates; PolyExistentialPredicate<'a> => PolyExistentialPredicate<'tcx>}
nop_list_lift! {predicates; Predicate<'a> => Predicate<'tcx>}
nop_list_lift! {canonical_var_infos; CanonicalVarInfo<'a> => CanonicalVarInfo<'tcx>}
nop_list_lift! {projs; ProjectionKind => ProjectionKind}
@@ -2026,7 +2108,7 @@ macro_rules! sty_debug_print {
let shards = tcx.interners.type_.lock_shards();
let types = shards.iter().flat_map(|shard| shard.keys());
for &InternedInSet(t) in types {
- let variant = match t.kind {
+ let variant = match t.internee {
ty::Bool | ty::Char | ty::Int(..) | ty::Uint(..) |
ty::Float(..) | ty::Str | ty::Never => continue,
ty::Error(_) => /* unimportant */ continue,
@@ -2136,49 +2218,26 @@ impl<'tcx, T: 'tcx + ?Sized> IntoPointer for InternedInSet<'tcx, T> {
}
#[allow(rustc::usage_of_ty_tykind)]
-impl<'tcx> Borrow<TyKind<'tcx>> for InternedInSet<'tcx, WithStableHash<TyS<'tcx>>> {
- fn borrow<'a>(&'a self) -> &'a TyKind<'tcx> {
- &self.0.kind
- }
-}
-
-impl<'tcx> PartialEq for InternedInSet<'tcx, WithStableHash<TyS<'tcx>>> {
- fn eq(&self, other: &InternedInSet<'tcx, WithStableHash<TyS<'tcx>>>) -> bool {
- // The `Borrow` trait requires that `x.borrow() == y.borrow()` equals
- // `x == y`.
- self.0.kind == other.0.kind
- }
-}
-
-impl<'tcx> Eq for InternedInSet<'tcx, WithStableHash<TyS<'tcx>>> {}
-
-impl<'tcx> Hash for InternedInSet<'tcx, WithStableHash<TyS<'tcx>>> {
- fn hash<H: Hasher>(&self, s: &mut H) {
- // The `Borrow` trait requires that `x.borrow().hash(s) == x.hash(s)`.
- self.0.kind.hash(s)
- }
-}
-
-impl<'tcx> Borrow<Binder<'tcx, PredicateKind<'tcx>>> for InternedInSet<'tcx, PredicateS<'tcx>> {
- fn borrow<'a>(&'a self) -> &'a Binder<'tcx, PredicateKind<'tcx>> {
- &self.0.kind
+impl<'tcx, T> Borrow<T> for InternedInSet<'tcx, WithCachedTypeInfo<T>> {
+ fn borrow<'a>(&'a self) -> &'a T {
+ &self.0.internee
}
}
-impl<'tcx> PartialEq for InternedInSet<'tcx, PredicateS<'tcx>> {
- fn eq(&self, other: &InternedInSet<'tcx, PredicateS<'tcx>>) -> bool {
+impl<'tcx, T: PartialEq> PartialEq for InternedInSet<'tcx, WithCachedTypeInfo<T>> {
+ fn eq(&self, other: &InternedInSet<'tcx, WithCachedTypeInfo<T>>) -> bool {
// The `Borrow` trait requires that `x.borrow() == y.borrow()` equals
// `x == y`.
- self.0.kind == other.0.kind
+ self.0.internee == other.0.internee
}
}
-impl<'tcx> Eq for InternedInSet<'tcx, PredicateS<'tcx>> {}
+impl<'tcx, T: Eq> Eq for InternedInSet<'tcx, WithCachedTypeInfo<T>> {}
-impl<'tcx> Hash for InternedInSet<'tcx, PredicateS<'tcx>> {
+impl<'tcx, T: Hash> Hash for InternedInSet<'tcx, WithCachedTypeInfo<T>> {
fn hash<H: Hasher>(&self, s: &mut H) {
// The `Borrow` trait requires that `x.borrow().hash(s) == x.hash(s)`.
- self.0.kind.hash(s)
+ self.0.internee.hash(s)
}
}
@@ -2243,9 +2302,9 @@ macro_rules! direct_interners {
direct_interners! {
region: mk_region(RegionKind<'tcx>): Region -> Region<'tcx>,
- const_: mk_const(ConstS<'tcx>): Const -> Const<'tcx>,
+ const_: mk_const_internal(ConstS<'tcx>): Const -> Const<'tcx>,
const_allocation: intern_const_alloc(Allocation): ConstAllocation -> ConstAllocation<'tcx>,
- layout: intern_layout(LayoutS<'tcx>): Layout -> Layout<'tcx>,
+ layout: intern_layout(LayoutS<VariantIdx>): Layout -> Layout<'tcx>,
adt_def: intern_adt_def(AdtDefData): AdtDef -> AdtDef<'tcx>,
}
@@ -2262,10 +2321,11 @@ macro_rules! slice_interners {
}
slice_interners!(
+ const_lists: _intern_const_list(Const<'tcx>),
substs: _intern_substs(GenericArg<'tcx>),
canonical_var_infos: _intern_canonical_var_infos(CanonicalVarInfo<'tcx>),
poly_existential_predicates:
- _intern_poly_existential_predicates(ty::Binder<'tcx, ExistentialPredicate<'tcx>>),
+ _intern_poly_existential_predicates(PolyExistentialPredicate<'tcx>),
predicates: _intern_predicates(Predicate<'tcx>),
projs: _intern_projs(ProjectionKind),
place_elems: _intern_place_elems(PlaceElem<'tcx>),
@@ -2294,10 +2354,10 @@ impl<'tcx> TyCtxt<'tcx> {
/// Given a `ty`, return whether it's an `impl Future<...>`.
pub fn ty_is_opaque_future(self, ty: Ty<'_>) -> bool {
let ty::Opaque(def_id, _) = ty.kind() else { return false };
- let future_trait = self.lang_items().future_trait().unwrap();
+ let future_trait = self.require_lang_item(LangItem::Future, None);
self.explicit_item_bounds(def_id).iter().any(|(predicate, _)| {
- let ty::PredicateKind::Trait(trait_predicate) = predicate.kind().skip_binder() else {
+ let ty::PredicateKind::Clause(ty::Clause::Trait(trait_predicate)) = predicate.kind().skip_binder() else {
return false;
};
trait_predicate.trait_ref.def_id == future_trait
@@ -2320,7 +2380,9 @@ impl<'tcx> TyCtxt<'tcx> {
let generic_predicates = self.super_predicates_of(trait_did);
for (predicate, _) in generic_predicates.predicates {
- if let ty::PredicateKind::Trait(data) = predicate.kind().skip_binder() {
+ if let ty::PredicateKind::Clause(ty::Clause::Trait(data)) =
+ predicate.kind().skip_binder()
+ {
if set.insert(data.def_id()) {
stack.push(data.def_id());
}
@@ -2374,7 +2436,14 @@ impl<'tcx> TyCtxt<'tcx> {
#[inline]
pub fn mk_predicate(self, binder: Binder<'tcx, PredicateKind<'tcx>>) -> Predicate<'tcx> {
- self.interners.intern_predicate(binder)
+ self.interners.intern_predicate(
+ binder,
+ self.sess,
+ &self.definitions.read(),
+ &*self.untracked_resolutions.cstore,
+ // This is only used to create a stable hashing context.
+ &self.untracked_resolutions.source_span,
+ )
}
#[inline]
@@ -2456,7 +2525,7 @@ impl<'tcx> TyCtxt<'tcx> {
#[inline]
pub fn mk_lang_item(self, ty: Ty<'tcx>, item: LangItem) -> Option<Ty<'tcx>> {
- let def_id = self.lang_items().require(item).ok()?;
+ let def_id = self.lang_items().get(item)?;
Some(self.mk_generic_adt(def_id, ty))
}
@@ -2517,7 +2586,7 @@ impl<'tcx> TyCtxt<'tcx> {
self.mk_ty(Tuple(self.intern_type_list(&ts)))
}
- pub fn mk_tup<I: InternAs<[Ty<'tcx>], Ty<'tcx>>>(self, iter: I) -> I::Output {
+ pub fn mk_tup<I: InternAs<Ty<'tcx>, Ty<'tcx>>>(self, iter: I) -> I::Output {
iter.intern_with(|ts| self.mk_ty(Tuple(self.intern_type_list(&ts))))
}
@@ -2533,6 +2602,11 @@ impl<'tcx> TyCtxt<'tcx> {
#[inline]
pub fn mk_fn_def(self, def_id: DefId, substs: SubstsRef<'tcx>) -> Ty<'tcx> {
+ debug_assert_eq!(
+ self.generics_of(def_id).count(),
+ substs.len(),
+ "wrong number of generic parameters for {def_id:?}: {substs:?}",
+ );
self.mk_ty(FnDef(def_id, substs))
}
@@ -2544,7 +2618,7 @@ impl<'tcx> TyCtxt<'tcx> {
#[inline]
pub fn mk_dynamic(
self,
- obj: &'tcx List<ty::Binder<'tcx, ExistentialPredicate<'tcx>>>,
+ obj: &'tcx List<PolyExistentialPredicate<'tcx>>,
reg: ty::Region<'tcx>,
repr: DynKind,
) -> Ty<'tcx> {
@@ -2553,6 +2627,11 @@ impl<'tcx> TyCtxt<'tcx> {
#[inline]
pub fn mk_projection(self, item_def_id: DefId, substs: SubstsRef<'tcx>) -> Ty<'tcx> {
+ debug_assert_eq!(
+ self.generics_of(item_def_id).count(),
+ substs.len(),
+ "wrong number of generic parameters for {item_def_id:?}: {substs:?}",
+ );
self.mk_ty(Projection(ProjectionTy { item_def_id, substs }))
}
@@ -2582,8 +2661,8 @@ impl<'tcx> TyCtxt<'tcx> {
}
#[inline]
- pub fn mk_const_var(self, v: ConstVid<'tcx>, ty: Ty<'tcx>) -> Const<'tcx> {
- self.mk_const(ty::ConstS { kind: ty::ConstKind::Infer(InferConst::Var(v)), ty })
+ pub fn mk_const(self, kind: impl Into<ty::ConstKind<'tcx>>, ty: Ty<'tcx>) -> Const<'tcx> {
+ self.mk_const_internal(ty::ConstS { kind: kind.into(), ty })
}
#[inline]
@@ -2602,29 +2681,22 @@ impl<'tcx> TyCtxt<'tcx> {
}
#[inline]
- pub fn mk_const_infer(self, ic: InferConst<'tcx>, ty: Ty<'tcx>) -> ty::Const<'tcx> {
- self.mk_const(ty::ConstS { kind: ty::ConstKind::Infer(ic), ty })
- }
-
- #[inline]
pub fn mk_ty_param(self, index: u32, name: Symbol) -> Ty<'tcx> {
self.mk_ty(Param(ParamTy { index, name }))
}
- #[inline]
- pub fn mk_const_param(self, index: u32, name: Symbol, ty: Ty<'tcx>) -> Const<'tcx> {
- self.mk_const(ty::ConstS { kind: ty::ConstKind::Param(ParamConst { index, name }), ty })
- }
-
pub fn mk_param_from_def(self, param: &ty::GenericParamDef) -> GenericArg<'tcx> {
match param.kind {
GenericParamDefKind::Lifetime => {
self.mk_region(ty::ReEarlyBound(param.to_early_bound_region_data())).into()
}
GenericParamDefKind::Type { .. } => self.mk_ty_param(param.index, param.name).into(),
- GenericParamDefKind::Const { .. } => {
- self.mk_const_param(param.index, param.name, self.type_of(param.def_id)).into()
- }
+ GenericParamDefKind::Const { .. } => self
+ .mk_const(
+ ParamConst { index: param.index, name: param.name },
+ self.type_of(param.def_id),
+ )
+ .into(),
}
}
@@ -2677,8 +2749,8 @@ impl<'tcx> TyCtxt<'tcx> {
pub fn intern_poly_existential_predicates(
self,
- eps: &[ty::Binder<'tcx, ExistentialPredicate<'tcx>>],
- ) -> &'tcx List<ty::Binder<'tcx, ExistentialPredicate<'tcx>>> {
+ eps: &[PolyExistentialPredicate<'tcx>],
+ ) -> &'tcx List<PolyExistentialPredicate<'tcx>> {
assert!(!eps.is_empty());
assert!(
eps.array_windows()
@@ -2700,6 +2772,17 @@ impl<'tcx> TyCtxt<'tcx> {
}
}
+ pub fn mk_const_list<I: InternAs<ty::Const<'tcx>, &'tcx List<ty::Const<'tcx>>>>(
+ self,
+ iter: I,
+ ) -> I::Output {
+ iter.intern_with(|xs| self.intern_const_list(xs))
+ }
+
+ pub fn intern_const_list(self, cs: &[ty::Const<'tcx>]) -> &'tcx List<ty::Const<'tcx>> {
+ if cs.is_empty() { List::empty() } else { self._intern_const_list(cs) }
+ }
+
pub fn intern_type_list(self, ts: &[Ty<'tcx>]) -> &'tcx List<Ty<'tcx>> {
if ts.is_empty() {
List::empty()
@@ -2762,10 +2845,7 @@ impl<'tcx> TyCtxt<'tcx> {
}
pub fn mk_poly_existential_predicates<
- I: InternAs<
- [ty::Binder<'tcx, ExistentialPredicate<'tcx>>],
- &'tcx List<ty::Binder<'tcx, ExistentialPredicate<'tcx>>>,
- >,
+ I: InternAs<PolyExistentialPredicate<'tcx>, &'tcx List<PolyExistentialPredicate<'tcx>>>,
>(
self,
iter: I,
@@ -2773,37 +2853,58 @@ impl<'tcx> TyCtxt<'tcx> {
iter.intern_with(|xs| self.intern_poly_existential_predicates(xs))
}
- pub fn mk_predicates<I: InternAs<[Predicate<'tcx>], &'tcx List<Predicate<'tcx>>>>(
+ pub fn mk_predicates<I: InternAs<Predicate<'tcx>, &'tcx List<Predicate<'tcx>>>>(
self,
iter: I,
) -> I::Output {
iter.intern_with(|xs| self.intern_predicates(xs))
}
- pub fn mk_type_list<I: InternAs<[Ty<'tcx>], &'tcx List<Ty<'tcx>>>>(self, iter: I) -> I::Output {
+ pub fn mk_type_list<I: InternAs<Ty<'tcx>, &'tcx List<Ty<'tcx>>>>(self, iter: I) -> I::Output {
iter.intern_with(|xs| self.intern_type_list(xs))
}
- pub fn mk_substs<I: InternAs<[GenericArg<'tcx>], &'tcx List<GenericArg<'tcx>>>>(
+ pub fn mk_substs<I: InternAs<GenericArg<'tcx>, &'tcx List<GenericArg<'tcx>>>>(
self,
iter: I,
) -> I::Output {
iter.intern_with(|xs| self.intern_substs(xs))
}
- pub fn mk_place_elems<I: InternAs<[PlaceElem<'tcx>], &'tcx List<PlaceElem<'tcx>>>>(
+ pub fn mk_place_elems<I: InternAs<PlaceElem<'tcx>, &'tcx List<PlaceElem<'tcx>>>>(
self,
iter: I,
) -> I::Output {
iter.intern_with(|xs| self.intern_place_elems(xs))
}
- pub fn mk_substs_trait(self, self_ty: Ty<'tcx>, rest: &[GenericArg<'tcx>]) -> SubstsRef<'tcx> {
- self.mk_substs(iter::once(self_ty.into()).chain(rest.iter().cloned()))
+ pub fn mk_substs_trait(
+ self,
+ self_ty: Ty<'tcx>,
+ rest: impl IntoIterator<Item = GenericArg<'tcx>>,
+ ) -> SubstsRef<'tcx> {
+ self.mk_substs(iter::once(self_ty.into()).chain(rest))
+ }
+
+ pub fn mk_trait_ref(
+ self,
+ trait_def_id: DefId,
+ substs: impl IntoIterator<Item = impl Into<GenericArg<'tcx>>>,
+ ) -> ty::TraitRef<'tcx> {
+ let substs = substs.into_iter().map(Into::into);
+ let n = self.generics_of(trait_def_id).count();
+ debug_assert_eq!(
+ (n, Some(n)),
+ substs.size_hint(),
+ "wrong number of generic parameters for {trait_def_id:?}: {:?} \nDid you accidentally include the self-type in the params list?",
+ substs.collect::<Vec<_>>(),
+ );
+ let substs = self.mk_substs(substs);
+ ty::TraitRef::new(trait_def_id, substs)
}
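
`mk_trait_ref` checks the parameter count up front and, following the usual `TraitRef` convention, takes the complete substs with the self type first. A hedged example for a trait with one extra type parameter; `trait_def_id` and `self_ty` are placeholders:

use rustc_hir::def_id::DefId;
use rustc_middle::ty::{self, Ty, TyCtxt};

// Builds `SomeTrait<u32>` for `self_ty`, i.e. substs = [self_ty, u32].
fn example_trait_ref<'tcx>(
    tcx: TyCtxt<'tcx>,
    trait_def_id: DefId,
    self_ty: Ty<'tcx>,
) -> ty::TraitRef<'tcx> {
    tcx.mk_trait_ref(trait_def_id, [self_ty, tcx.types.u32])
}
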
pub fn mk_bound_variable_kinds<
- I: InternAs<[ty::BoundVariableKind], &'tcx List<ty::BoundVariableKind>>,
+ I: InternAs<ty::BoundVariableKind, &'tcx List<ty::BoundVariableKind>>,
>(
self,
iter: I,
@@ -2820,7 +2921,9 @@ impl<'tcx> TyCtxt<'tcx> {
span: impl Into<MultiSpan>,
decorator: impl for<'a> DecorateLint<'a, ()>,
) {
- self.struct_span_lint_hir(lint, hir_id, span, decorator.msg(), |diag| {
+ let msg = decorator.msg();
+ let (level, src) = self.lint_level_at_node(lint, hir_id);
+ struct_lint_level(self.sess, lint, level, src, Some(span.into()), msg, |diag| {
decorator.decorate_lint(diag)
})
}
@@ -2830,6 +2933,7 @@ impl<'tcx> TyCtxt<'tcx> {
/// Return value of the `decorate` closure is ignored, see [`struct_lint_level`] for a detailed explanation.
///
/// [`struct_lint_level`]: rustc_middle::lint::struct_lint_level#decorate-signature
+ #[rustc_lint_diagnostics]
pub fn struct_span_lint_hir(
self,
lint: &'static Lint,
@@ -2860,6 +2964,7 @@ impl<'tcx> TyCtxt<'tcx> {
/// Return value of the `decorate` closure is ignored, see [`struct_lint_level`] for a detailed explanation.
///
/// [`struct_lint_level`]: rustc_middle::lint::struct_lint_level#decorate-signature
+ #[rustc_lint_diagnostics]
pub fn struct_lint_node(
self,
lint: &'static Lint,
@@ -2954,6 +3059,15 @@ impl<'tcx> TyCtxtAt<'tcx> {
pub fn ty_error_with_message(self, msg: &str) -> Ty<'tcx> {
self.tcx.ty_error_with_message(self.span, msg)
}
+
+ pub fn mk_trait_ref(
+ self,
+ trait_lang_item: LangItem,
+ substs: impl IntoIterator<Item = impl Into<ty::GenericArg<'tcx>>>,
+ ) -> ty::TraitRef<'tcx> {
+ let trait_def_id = self.require_lang_item(trait_lang_item, Some(self.span));
+ self.tcx.mk_trait_ref(trait_def_id, substs)
+ }
}
/// Parameter attributes that can only be determined by examining the body of a function instead
@@ -2979,7 +3093,6 @@ fn ptr_eq<T, U>(t: *const T, u: *const U) -> bool {
pub fn provide(providers: &mut ty::query::Providers) {
providers.resolutions = |tcx, ()| &tcx.untracked_resolutions;
- providers.resolver_for_lowering = |tcx, ()| &tcx.untracked_resolver_for_lowering;
providers.module_reexports =
|tcx, id| tcx.resolutions(()).reexport_map.get(&id).map(|v| &v[..]);
providers.crate_name = |tcx, id| {
diff --git a/compiler/rustc_middle/src/ty/diagnostics.rs b/compiler/rustc_middle/src/ty/diagnostics.rs
index b8fd01e6a..511d51cd6 100644
--- a/compiler/rustc_middle/src/ty/diagnostics.rs
+++ b/compiler/rustc_middle/src/ty/diagnostics.rs
@@ -151,7 +151,6 @@ enum SuggestChangingConstraintsMessage<'a> {
}
fn suggest_removing_unsized_bound(
- tcx: TyCtxt<'_>,
generics: &hir::Generics<'_>,
suggestions: &mut Vec<(Span, String, SuggestChangingConstraintsMessage<'_>)>,
param: &hir::GenericParam<'_>,
@@ -160,17 +159,16 @@ fn suggest_removing_unsized_bound(
// See if there's a `?Sized` bound that can be removed to suggest that.
// First look at the `where` clause because we can have `where T: ?Sized`,
// then look at params.
- let param_def_id = tcx.hir().local_def_id(param.hir_id);
for (where_pos, predicate) in generics.predicates.iter().enumerate() {
let WherePredicate::BoundPredicate(predicate) = predicate else {
continue;
};
- if !predicate.is_param_bound(param_def_id.to_def_id()) {
+ if !predicate.is_param_bound(param.def_id.to_def_id()) {
continue;
};
for (pos, bound) in predicate.bounds.iter().enumerate() {
- let hir::GenericBound::Trait(poly, hir::TraitBoundModifier::Maybe) = bound else {
+ let hir::GenericBound::Trait(poly, hir::TraitBoundModifier::Maybe) = bound else {
continue;
};
if poly.trait_ref.trait_def_id() != def_id {
@@ -232,7 +230,7 @@ pub fn suggest_constraining_type_params<'a>(
param.span,
&format!("this type parameter needs to be `{}`", constraint),
);
- suggest_removing_unsized_bound(tcx, generics, &mut suggestions, param, def_id);
+ suggest_removing_unsized_bound(generics, &mut suggestions, param, def_id);
}
}
@@ -283,8 +281,7 @@ pub fn suggest_constraining_type_params<'a>(
// --
// |
// replace with: `T: Bar +`
- let param_def_id = tcx.hir().local_def_id(param.hir_id);
- if let Some(span) = generics.bounds_span_for_suggestions(param_def_id) {
+ if let Some(span) = generics.bounds_span_for_suggestions(param.def_id) {
suggest_restrict(span, true);
continue;
}
@@ -358,6 +355,12 @@ pub fn suggest_constraining_type_params<'a>(
));
}
+ // FIXME: remove the suggestions that are from derive, as the span is not correct
+ suggestions = suggestions
+ .into_iter()
+ .filter(|(span, _, _)| !span.in_derive_expansion())
+ .collect::<Vec<_>>();
+
if suggestions.len() == 1 {
let (span, suggestion, msg) = suggestions.pop().unwrap();
@@ -400,7 +403,7 @@ impl<'v> hir::intravisit::Visitor<'v> for TraitObjectVisitor<'v> {
hir::TyKind::TraitObject(
_,
hir::Lifetime {
- name:
+ res:
hir::LifetimeName::ImplicitObjectLifetimeDefault | hir::LifetimeName::Static,
..
},
@@ -424,10 +427,9 @@ pub struct StaticLifetimeVisitor<'tcx>(pub Vec<Span>, pub crate::hir::map::Map<'
impl<'v> hir::intravisit::Visitor<'v> for StaticLifetimeVisitor<'v> {
fn visit_lifetime(&mut self, lt: &'v hir::Lifetime) {
- if let hir::LifetimeName::ImplicitObjectLifetimeDefault | hir::LifetimeName::Static =
- lt.name
+ if let hir::LifetimeName::ImplicitObjectLifetimeDefault | hir::LifetimeName::Static = lt.res
{
- self.0.push(lt.span);
+ self.0.push(lt.ident.span);
}
}
}
@@ -511,11 +513,3 @@ impl<'tcx> TypeVisitor<'tcx> for IsSuggestableVisitor<'tcx> {
c.super_visit_with(self)
}
}
-
-#[derive(Diagnostic)]
-#[diag(borrowck_const_not_used_in_type_alias)]
-pub(super) struct ConstNotUsedTraitAlias {
- pub ct: String,
- #[primary_span]
- pub span: Span,
-}
diff --git a/compiler/rustc_middle/src/ty/error.rs b/compiler/rustc_middle/src/ty/error.rs
index 4e6cdb786..aa61c39b8 100644
--- a/compiler/rustc_middle/src/ty/error.rs
+++ b/compiler/rustc_middle/src/ty/error.rs
@@ -12,7 +12,12 @@ use rustc_span::{BytePos, Span};
use rustc_target::spec::abi;
use std::borrow::Cow;
+use std::collections::hash_map::DefaultHasher;
use std::fmt;
+use std::hash::{Hash, Hasher};
+use std::path::PathBuf;
+
+use super::print::PrettyPrinter;
#[derive(Clone, Copy, Debug, PartialEq, Eq, TypeFoldable, TypeVisitable, Lift)]
pub struct ExpectedFound<T> {
@@ -64,10 +69,7 @@ pub enum TypeError<'tcx> {
CyclicTy(Ty<'tcx>),
CyclicConst(ty::Const<'tcx>),
ProjectionMismatched(ExpectedFound<DefId>),
- ExistentialMismatch(
- ExpectedFound<&'tcx ty::List<ty::Binder<'tcx, ty::ExistentialPredicate<'tcx>>>>,
- ),
- ObjectUnsafeCoercion(DefId),
+ ExistentialMismatch(ExpectedFound<&'tcx ty::List<ty::PolyExistentialPredicate<'tcx>>>),
ConstMismatch(ExpectedFound<ty::Const<'tcx>>),
IntrinsicCast,
@@ -219,7 +221,6 @@ impl<'tcx> fmt::Display for TypeError<'tcx> {
f,
"cannot coerce functions with `#[target_feature]` to safe function pointers"
),
- ObjectUnsafeCoercion(_) => write!(f, "coercion to object-unsafe trait object"),
}
}
}
@@ -246,8 +247,7 @@ impl<'tcx> TypeError<'tcx> {
| ProjectionMismatched(_)
| ExistentialMismatch(_)
| ConstMismatch(_)
- | IntrinsicCast
- | ObjectUnsafeCoercion(_) => true,
+ | IntrinsicCast => true,
}
}
}
@@ -430,7 +430,9 @@ impl<'tcx> TyCtxt<'tcx> {
(ty::Projection(_), ty::Projection(_)) => {
diag.note("an associated type was expected, but a different one was found");
}
- (ty::Param(p), ty::Projection(proj)) | (ty::Projection(proj), ty::Param(p)) => {
+ (ty::Param(p), ty::Projection(proj)) | (ty::Projection(proj), ty::Param(p))
+ if self.def_kind(proj.item_def_id) != DefKind::ImplTraitPlaceholder =>
+ {
let generics = self.generics_of(body_owner_def_id);
let p_span = self.def_span(generics.type_param(p, self).def_id);
if !sp.contains(p_span) {
@@ -983,6 +985,47 @@ fn foo(&self) -> Self::T { String::new() }
false
}
+ pub fn short_ty_string(self, ty: Ty<'tcx>) -> (String, Option<PathBuf>) {
+ let width = self.sess.diagnostic_width();
+ let length_limit = width.saturating_sub(30);
+ let mut type_limit = 50;
+ let regular = FmtPrinter::new(self, hir::def::Namespace::TypeNS)
+ .pretty_print_type(ty)
+ .expect("could not write to `String`")
+ .into_buffer();
+ if regular.len() <= width {
+ return (regular, None);
+ }
+ let mut short;
+ loop {
+ // Look for the longest properly trimmed path that still fits in length_limit.
+ short = FmtPrinter::new_with_limit(
+ self,
+ hir::def::Namespace::TypeNS,
+ rustc_session::Limit(type_limit),
+ )
+ .pretty_print_type(ty)
+ .expect("could not write to `String`")
+ .into_buffer();
+ if short.len() <= length_limit || type_limit == 0 {
+ break;
+ }
+ type_limit -= 1;
+ }
+ if regular == short {
+ return (regular, None);
+ }
+ // Multiple types might be shortened in a single error, ensure we create a file for each.
+ let mut s = DefaultHasher::new();
+ ty.hash(&mut s);
+ let hash = s.finish();
+ let path = self.output_filenames(()).temp_path_ext(&format!("long-type-{hash}.txt"), None);
+ match std::fs::write(&path, &regular) {
+ Ok(_) => (short, Some(path)),
+ Err(_) => (regular, None),
+ }
+ }
+
fn format_generic_args(self, args: &[ty::GenericArg<'tcx>]) -> String {
FmtPrinter::new(self, hir::def::Namespace::TypeNS)
.path_generic_args(Ok, args)
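The new `short_ty_string` above shrinks the pretty-printed type until it fits the diagnostic width and, if it had to be truncated, dumps the full rendering to a hash-named file. A self-contained sketch of the same idea over plain strings (the `shorten_for_diagnostic` helper and the character-truncating stand-in for the limit-aware printer are illustrative assumptions, not rustc APIs):

use std::collections::hash_map::DefaultHasher;
use std::hash::{Hash, Hasher};
use std::path::PathBuf;

/// Stand-in for `FmtPrinter::new_with_limit`: render `full` but keep at most `limit` characters.
fn render_with_limit(full: &str, limit: usize) -> String {
    if full.chars().count() <= limit {
        full.to_string()
    } else {
        let head: String = full.chars().take(limit).collect();
        format!("{head}...")
    }
}

/// Returns a rendering that fits `width`, plus the path of a file holding the
/// full text when it had to be truncated.
fn shorten_for_diagnostic(full: &str, width: usize) -> (String, Option<PathBuf>) {
    if full.len() <= width {
        return (full.to_string(), None);
    }
    let length_limit = width.saturating_sub(30);
    let mut limit = 50;
    let mut short = render_with_limit(full, limit);
    // Look for the largest limit whose rendering still fits, mirroring the loop above.
    while short.len() > length_limit && limit > 0 {
        limit -= 1;
        short = render_with_limit(full, limit);
    }
    if short == full {
        return (full.to_string(), None);
    }
    // Hash the full text so several shortened values in one error get distinct files.
    let mut hasher = DefaultHasher::new();
    full.hash(&mut hasher);
    let path = std::env::temp_dir().join(format!("long-type-{}.txt", hasher.finish()));
    match std::fs::write(&path, full) {
        Ok(()) => (short, Some(path)),
        Err(_) => (full.to_string(), None),
    }
}

fn main() {
    let long = "Iterator<Item = Result<Vec<(String, u64)>, Box<dyn std::error::Error + Send>>>".repeat(3);
    let (shown, file) = shorten_for_diagnostic(&long, 80);
    println!("shown: {shown}");
    println!("full text written to: {file:?}");
}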
diff --git a/compiler/rustc_middle/src/ty/fast_reject.rs b/compiler/rustc_middle/src/ty/fast_reject.rs
index 3be0bc4de..c9c09c93a 100644
--- a/compiler/rustc_middle/src/ty/fast_reject.rs
+++ b/compiler/rustc_middle/src/ty/fast_reject.rs
@@ -42,7 +42,6 @@ where
ClosureSimplifiedType(D),
GeneratorSimplifiedType(D),
GeneratorWitnessSimplifiedType(usize),
- OpaqueSimplifiedType(D),
FunctionSimplifiedType(usize),
PlaceholderSimplifiedType,
}
@@ -127,7 +126,7 @@ pub fn simplify_type<'tcx>(
TreatParams::AsPlaceholder => Some(PlaceholderSimplifiedType),
TreatParams::AsInfer => None,
},
- ty::Projection(_) => match treat_params {
+ ty::Opaque(..) | ty::Projection(_) => match treat_params {
// When treating `ty::Param` as a placeholder, projections also
// don't unify with anything else as long as they are fully normalized.
//
@@ -138,7 +137,6 @@ pub fn simplify_type<'tcx>(
}
TreatParams::AsPlaceholder | TreatParams::AsInfer => None,
},
- ty::Opaque(def_id, _) => Some(OpaqueSimplifiedType(def_id)),
ty::Foreign(def_id) => Some(ForeignSimplifiedType(def_id)),
ty::Bound(..) | ty::Infer(_) | ty::Error(_) => None,
}
@@ -151,8 +149,7 @@ impl<D: Copy + Debug + Eq> SimplifiedTypeGen<D> {
| ForeignSimplifiedType(d)
| TraitSimplifiedType(d)
| ClosureSimplifiedType(d)
- | GeneratorSimplifiedType(d)
- | OpaqueSimplifiedType(d) => Some(d),
+ | GeneratorSimplifiedType(d) => Some(d),
_ => None,
}
}
@@ -182,7 +179,6 @@ impl<D: Copy + Debug + Eq> SimplifiedTypeGen<D> {
ClosureSimplifiedType(d) => ClosureSimplifiedType(map(d)),
GeneratorSimplifiedType(d) => GeneratorSimplifiedType(map(d)),
GeneratorWitnessSimplifiedType(n) => GeneratorWitnessSimplifiedType(n),
- OpaqueSimplifiedType(d) => OpaqueSimplifiedType(map(d)),
FunctionSimplifiedType(n) => FunctionSimplifiedType(n),
PlaceholderSimplifiedType => PlaceholderSimplifiedType,
}
@@ -229,7 +225,7 @@ impl DeepRejectCtxt {
match impl_ty.kind() {
// Start by checking whether the type in the impl may unify with
// pretty much everything. Just return `true` in that case.
- ty::Param(_) | ty::Projection(_) | ty::Error(_) => return true,
+ ty::Param(_) | ty::Projection(_) | ty::Error(_) | ty::Opaque(..) => return true,
// These types only unify with inference variables or their own
// variant.
ty::Bool
@@ -247,8 +243,7 @@ impl DeepRejectCtxt {
| ty::Never
| ty::Tuple(..)
| ty::FnPtr(..)
- | ty::Foreign(..)
- | ty::Opaque(..) => {}
+ | ty::Foreign(..) => {}
ty::FnDef(..)
| ty::Closure(..)
| ty::Generator(..)
@@ -328,10 +323,7 @@ impl DeepRejectCtxt {
_ => false,
},
- // Opaque types in impls should be forbidden, but that doesn't
- // stop compilation. So this match arm should never return true
- // if compilation succeeds.
- ty::Opaque(..) => matches!(k, ty::Opaque(..)),
+ ty::Opaque(..) => true,
// Impls cannot contain these types as these cannot be named directly.
ty::FnDef(..) | ty::Closure(..) | ty::Generator(..) => false,
@@ -364,7 +356,10 @@ impl DeepRejectCtxt {
pub fn consts_may_unify(self, obligation_ct: ty::Const<'_>, impl_ct: ty::Const<'_>) -> bool {
match impl_ct.kind() {
- ty::ConstKind::Param(_) | ty::ConstKind::Unevaluated(_) | ty::ConstKind::Error(_) => {
+ ty::ConstKind::Expr(_)
+ | ty::ConstKind::Param(_)
+ | ty::ConstKind::Unevaluated(_)
+ | ty::ConstKind::Error(_) => {
return true;
}
ty::ConstKind::Value(_) => {}
@@ -382,7 +377,9 @@ impl DeepRejectCtxt {
// As we don't necessarily eagerly evaluate constants,
// they might unify with any value.
- ty::ConstKind::Unevaluated(_) | ty::ConstKind::Error(_) => true,
+ ty::ConstKind::Expr(_) | ty::ConstKind::Unevaluated(_) | ty::ConstKind::Error(_) => {
+ true
+ }
ty::ConstKind::Value(obl) => match k {
ty::ConstKind::Value(imp) => obl == imp,
_ => true,
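The fast_reject.rs hunks above stop giving `ty::Opaque` its own simplified type and instead treat it like a parameter that may unify with anything. The underlying technique, reduced to a toy standalone form (the `Ty`, `SimplifiedKey` and `may_unify` names are invented for illustration):

#[derive(Debug, PartialEq, Eq, Clone, Copy)]
enum Ty {
    Bool,
    Int,
    Param,  // generic parameter: may unify with anything
    Opaque, // opaque type: treated conservatively after this change
}

#[derive(Debug, PartialEq, Eq, Clone, Copy)]
enum SimplifiedKey {
    Bool,
    Int,
}

/// Map a type to a coarse key, or `None` for types that may unify with anything.
fn simplify(ty: Ty) -> Option<SimplifiedKey> {
    match ty {
        Ty::Bool => Some(SimplifiedKey::Bool),
        Ty::Int => Some(SimplifiedKey::Int),
        Ty::Param | Ty::Opaque => None,
    }
}

/// Fast reject: answer "no" only when both sides have a key and the keys differ.
fn may_unify(a: Ty, b: Ty) -> bool {
    match (simplify(a), simplify(b)) {
        (Some(ka), Some(kb)) => ka == kb,
        _ => true,
    }
}

fn main() {
    assert!(!may_unify(Ty::Bool, Ty::Int));  // definitely distinct types
    assert!(may_unify(Ty::Opaque, Ty::Int)); // opaque types stay conservative
    assert!(may_unify(Ty::Param, Ty::Bool)); // parameters may be anything
    println!("fast-reject sketch ok");
}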
diff --git a/compiler/rustc_middle/src/ty/flags.rs b/compiler/rustc_middle/src/ty/flags.rs
index 7201737be..046a2660a 100644
--- a/compiler/rustc_middle/src/ty/flags.rs
+++ b/compiler/rustc_middle/src/ty/flags.rs
@@ -6,7 +6,7 @@ use std::slice;
pub struct FlagComputation {
pub flags: TypeFlags,
- // see `Ty::outer_exclusive_binder` for details
+ /// see `Ty::outer_exclusive_binder` for details
pub outer_exclusive_binder: ty::DebruijnIndex,
}
@@ -216,14 +216,17 @@ impl FlagComputation {
fn add_predicate_atom(&mut self, atom: ty::PredicateKind<'_>) {
match atom {
- ty::PredicateKind::Trait(trait_pred) => {
+ ty::PredicateKind::Clause(ty::Clause::Trait(trait_pred)) => {
self.add_substs(trait_pred.trait_ref.substs);
}
- ty::PredicateKind::RegionOutlives(ty::OutlivesPredicate(a, b)) => {
+ ty::PredicateKind::Clause(ty::Clause::RegionOutlives(ty::OutlivesPredicate(a, b))) => {
self.add_region(a);
self.add_region(b);
}
- ty::PredicateKind::TypeOutlives(ty::OutlivesPredicate(ty, region)) => {
+ ty::PredicateKind::Clause(ty::Clause::TypeOutlives(ty::OutlivesPredicate(
+ ty,
+ region,
+ ))) => {
self.add_ty(ty);
self.add_region(region);
}
@@ -235,7 +238,10 @@ impl FlagComputation {
self.add_ty(a);
self.add_ty(b);
}
- ty::PredicateKind::Projection(ty::ProjectionPredicate { projection_ty, term }) => {
+ ty::PredicateKind::Clause(ty::Clause::Projection(ty::ProjectionPredicate {
+ projection_ty,
+ term,
+ })) => {
self.add_projection_ty(projection_ty);
match term.unpack() {
ty::TermKind::Ty(ty) => self.add_ty(ty),
@@ -259,6 +265,7 @@ impl FlagComputation {
ty::PredicateKind::TypeWellFormedFromEnv(ty) => {
self.add_ty(ty);
}
+ ty::PredicateKind::Ambiguous => {}
}
}
@@ -306,6 +313,26 @@ impl FlagComputation {
self.add_flags(TypeFlags::STILL_FURTHER_SPECIALIZABLE);
}
ty::ConstKind::Value(_) => {}
+ ty::ConstKind::Expr(e) => {
+ use ty::Expr;
+ match e {
+ Expr::Binop(_, l, r) => {
+ self.add_const(l);
+ self.add_const(r);
+ }
+ Expr::UnOp(_, v) => self.add_const(v),
+ Expr::FunctionCall(f, args) => {
+ self.add_const(f);
+ for arg in args {
+ self.add_const(arg);
+ }
+ }
+ Expr::Cast(_, c, t) => {
+ self.add_ty(t);
+ self.add_const(c);
+ }
+ }
+ }
ty::ConstKind::Error(_) => self.add_flags(TypeFlags::HAS_ERROR),
}
}
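The new `ConstKind::Expr` arm above simply recurses into every operand and unions the operands' flags into the accumulator. A self-contained sketch of that accumulation pattern over a toy expression tree (the flag constants and the `FlagComputation` struct here are stand-ins, not the real `TypeFlags`):

/// Illustrative flag bits.
const HAS_ERROR: u32 = 1 << 0;
const HAS_PARAM: u32 = 1 << 1;

enum Expr {
    Param,
    Error,
    Value(i64),
    Binop(Box<Expr>, Box<Expr>),
    UnOp(Box<Expr>),
    Call(Box<Expr>, Vec<Expr>),
}

#[derive(Default)]
struct FlagComputation {
    flags: u32,
}

impl FlagComputation {
    /// Walk the expression tree and union in the flags of every sub-expression,
    /// mirroring how the `ConstKind::Expr` arm recurses into its operands.
    fn add_expr(&mut self, e: &Expr) {
        match e {
            Expr::Param => self.flags |= HAS_PARAM,
            Expr::Error => self.flags |= HAS_ERROR,
            Expr::Value(_) => {}
            Expr::Binop(l, r) => {
                self.add_expr(l);
                self.add_expr(r);
            }
            Expr::UnOp(v) => self.add_expr(v),
            Expr::Call(f, args) => {
                self.add_expr(f);
                for arg in args {
                    self.add_expr(arg);
                }
            }
        }
    }
}

fn main() {
    let e = Expr::Binop(
        Box::new(Expr::UnOp(Box::new(Expr::Param))),
        Box::new(Expr::Call(Box::new(Expr::Value(1)), vec![Expr::Value(2)])),
    );
    let mut comp = FlagComputation::default();
    comp.add_expr(&e);
    assert_eq!(comp.flags, HAS_PARAM);

    let bad = Expr::Call(Box::new(Expr::Error), vec![]);
    let mut comp2 = FlagComputation::default();
    comp2.add_expr(&bad);
    assert_eq!(comp2.flags, HAS_ERROR);
    println!("flags = {:#b}", comp.flags);
}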
diff --git a/compiler/rustc_middle/src/ty/fold.rs b/compiler/rustc_middle/src/ty/fold.rs
index 54f1499eb..d431d008d 100644
--- a/compiler/rustc_middle/src/ty/fold.rs
+++ b/compiler/rustc_middle/src/ty/fold.rs
@@ -407,6 +407,7 @@ where
match *t.kind() {
ty::Bound(debruijn, bound_ty) if debruijn == self.current_index => {
let ty = self.delegate.replace_ty(bound_ty);
+ debug_assert!(!ty.has_vars_bound_above(ty::INNERMOST));
ty::fold::shift_vars(self.tcx, ty, self.current_index.as_u32())
}
_ if t.has_vars_bound_at_or_above(self.current_index) => t.super_fold_with(self),
@@ -437,6 +438,7 @@ where
match ct.kind() {
ty::ConstKind::Bound(debruijn, bound_const) if debruijn == self.current_index => {
let ct = self.delegate.replace_const(bound_const, ct.ty());
+ debug_assert!(!ct.has_vars_bound_above(ty::INNERMOST));
ty::fold::shift_vars(self.tcx, ct, self.current_index.as_u32())
}
_ => ct.super_fold_with(self),
@@ -566,10 +568,7 @@ impl<'tcx> TyCtxt<'tcx> {
))
},
consts: &mut |c, ty: Ty<'tcx>| {
- self.mk_const(ty::ConstS {
- kind: ty::ConstKind::Bound(ty::INNERMOST, shift_bv(c)),
- ty,
- })
+ self.mk_const(ty::ConstKind::Bound(ty::INNERMOST, shift_bv(c)), ty)
},
},
)
@@ -601,7 +600,7 @@ impl<'tcx> TyCtxt<'tcx> {
.replace_late_bound_regions(sig, |_| {
let br = ty::BoundRegion {
var: ty::BoundVar::from_u32(counter),
- kind: ty::BrAnon(counter),
+ kind: ty::BrAnon(counter, None),
};
let r = self.mk_region(ty::ReLateBound(ty::INNERMOST, br));
counter += 1;
@@ -609,7 +608,7 @@ impl<'tcx> TyCtxt<'tcx> {
})
.0;
let bound_vars = self.mk_bound_variable_kinds(
- (0..counter).map(|i| ty::BoundVariableKind::Region(ty::BrAnon(i))),
+ (0..counter).map(|i| ty::BoundVariableKind::Region(ty::BrAnon(i, None))),
);
Binder::bind_with_vars(inner, bound_vars)
}
@@ -629,7 +628,9 @@ impl<'tcx> TyCtxt<'tcx> {
let index = entry.index();
let var = ty::BoundVar::from_usize(index);
let kind = entry
- .or_insert_with(|| ty::BoundVariableKind::Region(ty::BrAnon(index as u32)))
+ .or_insert_with(|| {
+ ty::BoundVariableKind::Region(ty::BrAnon(index as u32, None))
+ })
.expect_region();
let br = ty::BoundRegion { var, kind };
self.tcx.mk_region(ty::ReLateBound(ty::INNERMOST, br))
@@ -648,7 +649,7 @@ impl<'tcx> TyCtxt<'tcx> {
let index = entry.index();
let var = ty::BoundVar::from_usize(index);
let () = entry.or_insert_with(|| ty::BoundVariableKind::Const).expect_const();
- self.tcx.mk_const(ty::ConstS { ty, kind: ty::ConstKind::Bound(ty::INNERMOST, var) })
+ self.tcx.mk_const(ty::ConstKind::Bound(ty::INNERMOST, var), ty)
}
}
@@ -698,14 +699,10 @@ impl<'tcx> TypeFolder<'tcx> for Shifter<'tcx> {
fn fold_region(&mut self, r: ty::Region<'tcx>) -> ty::Region<'tcx> {
match *r {
- ty::ReLateBound(debruijn, br) => {
- if self.amount == 0 || debruijn < self.current_index {
- r
- } else {
- let debruijn = debruijn.shifted_in(self.amount);
- let shifted = ty::ReLateBound(debruijn, br);
- self.tcx.mk_region(shifted)
- }
+ ty::ReLateBound(debruijn, br) if debruijn >= self.current_index => {
+ let debruijn = debruijn.shifted_in(self.amount);
+ let shifted = ty::ReLateBound(debruijn, br);
+ self.tcx.mk_region(shifted)
}
_ => r,
}
@@ -713,34 +710,30 @@ impl<'tcx> TypeFolder<'tcx> for Shifter<'tcx> {
fn fold_ty(&mut self, ty: Ty<'tcx>) -> Ty<'tcx> {
match *ty.kind() {
- ty::Bound(debruijn, bound_ty) => {
- if self.amount == 0 || debruijn < self.current_index {
- ty
- } else {
- let debruijn = debruijn.shifted_in(self.amount);
- self.tcx.mk_ty(ty::Bound(debruijn, bound_ty))
- }
+ ty::Bound(debruijn, bound_ty) if debruijn >= self.current_index => {
+ let debruijn = debruijn.shifted_in(self.amount);
+ self.tcx.mk_ty(ty::Bound(debruijn, bound_ty))
}
- _ => ty.super_fold_with(self),
+ _ if ty.has_vars_bound_at_or_above(self.current_index) => ty.super_fold_with(self),
+ _ => ty,
}
}
fn fold_const(&mut self, ct: ty::Const<'tcx>) -> ty::Const<'tcx> {
- if let ty::ConstKind::Bound(debruijn, bound_ct) = ct.kind() {
- if self.amount == 0 || debruijn < self.current_index {
- ct
- } else {
- let debruijn = debruijn.shifted_in(self.amount);
- self.tcx.mk_const(ty::ConstS {
- kind: ty::ConstKind::Bound(debruijn, bound_ct),
- ty: ct.ty(),
- })
- }
+ if let ty::ConstKind::Bound(debruijn, bound_ct) = ct.kind()
+ && debruijn >= self.current_index
+ {
+ let debruijn = debruijn.shifted_in(self.amount);
+ self.tcx.mk_const(ty::ConstKind::Bound(debruijn, bound_ct), ct.ty())
} else {
ct.super_fold_with(self)
}
}
+
+ fn fold_predicate(&mut self, p: ty::Predicate<'tcx>) -> ty::Predicate<'tcx> {
+ if p.has_vars_bound_at_or_above(self.current_index) { p.super_fold_with(self) } else { p }
+ }
}
pub fn shift_region<'tcx>(
@@ -762,5 +755,9 @@ where
{
debug!("shift_vars(value={:?}, amount={})", value, amount);
+ if amount == 0 || !value.has_escaping_bound_vars() {
+ return value;
+ }
+
value.fold_with(&mut Shifter::new(tcx, amount))
}
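`shift_vars` above now returns early when `amount == 0` or the value has no escaping bound variables, and the `Shifter` arms only rebuild terms whose De Bruijn index actually points past the current binder depth. A toy standalone version of that shifting logic (the `Term` enum and helper names are illustrative only):

/// A toy term language with De Bruijn-indexed bound variables.
#[derive(Debug, Clone, PartialEq)]
enum Term {
    Bound(u32),     // De Bruijn index counting enclosing binders
    Lam(Box<Term>), // introduces one binder
    App(Box<Term>, Box<Term>),
}

/// Does the term contain an index at or above `depth`, i.e. one that escapes that many binders?
fn has_vars_bound_at_or_above(t: &Term, depth: u32) -> bool {
    match t {
        Term::Bound(i) => *i >= depth,
        Term::Lam(body) => has_vars_bound_at_or_above(body, depth + 1),
        Term::App(f, a) => {
            has_vars_bound_at_or_above(f, depth) || has_vars_bound_at_or_above(a, depth)
        }
    }
}

fn shift_inner(t: &Term, amount: u32, cutoff: u32) -> Term {
    match t {
        // Only indices that point past `cutoff` binders refer to the outside and get shifted.
        Term::Bound(i) if *i >= cutoff => Term::Bound(*i + amount),
        Term::Bound(i) => Term::Bound(*i),
        Term::Lam(body) => Term::Lam(Box::new(shift_inner(body, amount, cutoff + 1))),
        Term::App(f, a) => Term::App(
            Box::new(shift_inner(f, amount, cutoff)),
            Box::new(shift_inner(a, amount, cutoff)),
        ),
    }
}

/// Shift all escaping bound variables up by `amount`, skipping the traversal
/// entirely when there is nothing to do, matching the early return added above.
fn shift_vars(t: Term, amount: u32) -> Term {
    if amount == 0 || !has_vars_bound_at_or_above(&t, 0) {
        return t;
    }
    shift_inner(&t, amount, 0)
}

fn main() {
    // \x. (x y)  where `y` is free (index 1 under the lambda).
    let t = Term::Lam(Box::new(Term::App(
        Box::new(Term::Bound(0)),
        Box::new(Term::Bound(1)),
    )));
    let shifted = shift_vars(t, 1);
    // Only the escaping index was bumped; the variable bound by the lambda stays 0.
    assert_eq!(
        shifted,
        Term::Lam(Box::new(Term::App(
            Box::new(Term::Bound(0)),
            Box::new(Term::Bound(2)),
        )))
    );
    println!("{shifted:?}");
}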
diff --git a/compiler/rustc_middle/src/ty/generics.rs b/compiler/rustc_middle/src/ty/generics.rs
index 19754d145..48329da3e 100644
--- a/compiler/rustc_middle/src/ty/generics.rs
+++ b/compiler/rustc_middle/src/ty/generics.rs
@@ -3,7 +3,7 @@ use crate::ty::{EarlyBinder, SubstsRef};
use rustc_ast as ast;
use rustc_data_structures::fx::FxHashMap;
use rustc_hir::def_id::DefId;
-use rustc_span::symbol::Symbol;
+use rustc_span::symbol::{kw, Symbol};
use rustc_span::Span;
use super::{EarlyBoundRegion, InstantiatedPredicates, ParamConst, ParamTy, Predicate, TyCtxt};
@@ -78,6 +78,15 @@ impl GenericParamDef {
}
}
+ pub fn is_anonymous_lifetime(&self) -> bool {
+ match self.kind {
+ GenericParamDefKind::Lifetime => {
+ self.name == kw::UnderscoreLifetime || self.name == kw::Empty
+ }
+ _ => false,
+ }
+ }
+
pub fn default_value<'tcx>(
&self,
tcx: TyCtxt<'tcx>,
@@ -92,6 +101,20 @@ impl GenericParamDef {
_ => None,
}
}
+
+ pub fn to_error<'tcx>(
+ &self,
+ tcx: TyCtxt<'tcx>,
+ preceding_substs: &[ty::GenericArg<'tcx>],
+ ) -> ty::GenericArg<'tcx> {
+ match &self.kind {
+ ty::GenericParamDefKind::Lifetime => tcx.lifetimes.re_static.into(),
+ ty::GenericParamDefKind::Type { .. } => tcx.ty_error().into(),
+ ty::GenericParamDefKind::Const { .. } => {
+ tcx.const_error(tcx.bound_type_of(self.def_id).subst(tcx, preceding_substs)).into()
+ }
+ }
+ }
}
#[derive(Default)]
diff --git a/compiler/rustc_middle/src/ty/impls_ty.rs b/compiler/rustc_middle/src/ty/impls_ty.rs
index d1c0d62ac..3e59c0b96 100644
--- a/compiler/rustc_middle/src/ty/impls_ty.rs
+++ b/compiler/rustc_middle/src/ty/impls_ty.rs
@@ -112,19 +112,6 @@ impl<'a> HashStable<StableHashingContext<'a>> for mir::interpret::AllocId {
}
}
-// `Relocations` with default type parameters is a sorted map.
-impl<'a, Prov> HashStable<StableHashingContext<'a>> for mir::interpret::ProvenanceMap<Prov>
-where
- Prov: HashStable<StableHashingContext<'a>>,
-{
- fn hash_stable(&self, hcx: &mut StableHashingContext<'a>, hasher: &mut StableHasher) {
- self.len().hash_stable(hcx, hasher);
- for reloc in self.iter() {
- reloc.hash_stable(hcx, hasher);
- }
- }
-}
-
impl<'a> ToStableHashKey<StableHashingContext<'a>> for region::Scope {
type KeyType = region::Scope;
diff --git a/compiler/rustc_middle/src/ty/inhabitedness/inhabited_predicate.rs b/compiler/rustc_middle/src/ty/inhabitedness/inhabited_predicate.rs
index b7aa45572..33f727297 100644
--- a/compiler/rustc_middle/src/ty/inhabitedness/inhabited_predicate.rs
+++ b/compiler/rustc_middle/src/ty/inhabitedness/inhabited_predicate.rs
@@ -41,6 +41,13 @@ impl<'tcx> InhabitedPredicate<'tcx> {
self.apply_inner(tcx, param_env, &|_| Err(())).ok()
}
+ /// Same as `apply`, but `NotInModule(_)` predicates yield `false`. That is,
+ /// privately uninhabited types are considered always uninhabited.
+ pub fn apply_ignore_module(self, tcx: TyCtxt<'tcx>, param_env: ParamEnv<'tcx>) -> bool {
+ let Ok(result) = self.apply_inner::<!>(tcx, param_env, &|_| Ok(true));
+ result
+ }
+
fn apply_inner<E>(
self,
tcx: TyCtxt<'tcx>,
diff --git a/compiler/rustc_middle/src/ty/inhabitedness/mod.rs b/compiler/rustc_middle/src/ty/inhabitedness/mod.rs
index 279a728ea..ace81bc4f 100644
--- a/compiler/rustc_middle/src/ty/inhabitedness/mod.rs
+++ b/compiler/rustc_middle/src/ty/inhabitedness/mod.rs
@@ -5,7 +5,7 @@
//!
//! # Example
//! ```rust
-//! enum Void {}
+//! #![feature(never_type)]
//! mod a {
//! pub mod b {
//! pub struct SecretlyUninhabited {
@@ -15,6 +15,7 @@
//! }
//!
//! mod c {
+//! enum Void {}
//! pub struct AlsoSecretlyUninhabited {
//! _priv: Void,
//! }
@@ -28,14 +29,14 @@
//! }
//! ```
//! In this code, the type `Foo` will only be visibly uninhabited inside the
-//! modules `b`, `c` and `d`. Calling `uninhabited_predicate` on `Foo` will
+//! modules `b`, `c` and `d`. Calling `inhabited_predicate` on `Foo` will
//! return `NotInModule(b) AND NotInModule(c)`.
//!
//! We need this information for pattern-matching on `Foo` or types that contain
//! `Foo`.
//!
//! # Example
-//! ```rust
+//! ```ignore(illustrative)
//! let foo_result: Result<T, Foo> = ... ;
//! let Ok(t) = foo_result;
//! ```
@@ -56,57 +57,6 @@ pub(crate) fn provide(providers: &mut ty::query::Providers) {
ty::query::Providers { inhabited_predicate_adt, inhabited_predicate_type, ..*providers };
}
-impl<'tcx> TyCtxt<'tcx> {
- /// Checks whether a type is visibly uninhabited from a particular module.
- ///
- /// # Example
- /// ```
- /// #![feature(never_type)]
- /// # fn main() {}
- /// enum Void {}
- /// mod a {
- /// pub mod b {
- /// pub struct SecretlyUninhabited {
- /// _priv: !,
- /// }
- /// }
- /// }
- ///
- /// mod c {
- /// use super::Void;
- /// pub struct AlsoSecretlyUninhabited {
- /// _priv: Void,
- /// }
- /// mod d {
- /// }
- /// }
- ///
- /// struct Foo {
- /// x: a::b::SecretlyUninhabited,
- /// y: c::AlsoSecretlyUninhabited,
- /// }
- /// ```
- /// In this code, the type `Foo` will only be visibly uninhabited inside the
- /// modules b, c and d. This effects pattern-matching on `Foo` or types that
- /// contain `Foo`.
- ///
- /// # Example
- /// ```ignore (illustrative)
- /// let foo_result: Result<T, Foo> = ... ;
- /// let Ok(t) = foo_result;
- /// ```
- /// This code should only compile in modules where the uninhabitedness of Foo is
- /// visible.
- pub fn is_ty_uninhabited_from(
- self,
- module: DefId,
- ty: Ty<'tcx>,
- param_env: ty::ParamEnv<'tcx>,
- ) -> bool {
- !ty.inhabited_predicate(self).apply(self, param_env, module)
- }
-}
-
/// Returns an `InhabitedPredicate` that is generic over type parameters and
/// requires calling [`InhabitedPredicate::subst`]
fn inhabited_predicate_adt(tcx: TyCtxt<'_>, def_id: DefId) -> InhabitedPredicate<'_> {
@@ -170,6 +120,64 @@ impl<'tcx> Ty<'tcx> {
_ => InhabitedPredicate::True,
}
}
+
+ /// Checks whether a type is visibly uninhabited from a particular module.
+ ///
+ /// # Example
+ /// ```
+ /// #![feature(never_type)]
+ /// # fn main() {}
+ /// enum Void {}
+ /// mod a {
+ /// pub mod b {
+ /// pub struct SecretlyUninhabited {
+ /// _priv: !,
+ /// }
+ /// }
+ /// }
+ ///
+ /// mod c {
+ /// use super::Void;
+ /// pub struct AlsoSecretlyUninhabited {
+ /// _priv: Void,
+ /// }
+ /// mod d {
+ /// }
+ /// }
+ ///
+ /// struct Foo {
+ /// x: a::b::SecretlyUninhabited,
+ /// y: c::AlsoSecretlyUninhabited,
+ /// }
+ /// ```
+ /// In this code, the type `Foo` will only be visibly uninhabited inside the
+ /// modules b, c and d. This affects pattern-matching on `Foo` or types that
+ /// contain `Foo`.
+ ///
+ /// # Example
+ /// ```ignore (illustrative)
+ /// let foo_result: Result<T, Foo> = ... ;
+ /// let Ok(t) = foo_result;
+ /// ```
+ /// This code should only compile in modules where the uninhabitedness of Foo is
+ /// visible.
+ pub fn is_inhabited_from(
+ self,
+ tcx: TyCtxt<'tcx>,
+ module: DefId,
+ param_env: ty::ParamEnv<'tcx>,
+ ) -> bool {
+ self.inhabited_predicate(tcx).apply(tcx, param_env, module)
+ }
+
+ /// Returns true if the type is uninhabited without regard to visibility
+ pub fn is_privately_uninhabited(
+ self,
+ tcx: TyCtxt<'tcx>,
+ param_env: ty::ParamEnv<'tcx>,
+ ) -> bool {
+ !self.inhabited_predicate(tcx).apply_ignore_module(tcx, param_env)
+ }
}
/// N.B. this query should only be called through `Ty::inhabited_predicate`
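`apply_ignore_module` above reuses the fallible `apply_inner` traversal with the never type `!` as the error, so the `let Ok(result)` pattern is statically irrefutable. The same shape works on stable Rust with `Infallible` and an empty match; a standalone sketch in which the `Predicate` tree and `apply_inner` helper are purely illustrative:

use std::convert::Infallible;

/// A toy predicate tree with leaves whose meaning the caller chooses.
enum Predicate {
    True,
    False,
    NotInModule(u32),
    And(Box<Predicate>, Box<Predicate>),
}

impl Predicate {
    /// Generic evaluation: the closure decides what a `NotInModule` leaf means
    /// and may fail with an arbitrary error type `E`.
    fn apply_inner<E, F: Fn(u32) -> Result<bool, E>>(&self, leaf: &F) -> Result<bool, E> {
        match self {
            Predicate::True => Ok(true),
            Predicate::False => Ok(false),
            Predicate::NotInModule(id) => leaf(*id),
            Predicate::And(a, b) => Ok(a.apply_inner(leaf)? && b.apply_inner(leaf)?),
        }
    }

    /// Evaluate while treating every `NotInModule` leaf as `true`. With
    /// `E = Infallible` the error branch is statically unreachable.
    fn apply_ignore_module(&self) -> bool {
        match self.apply_inner::<Infallible, _>(&|_| Ok(true)) {
            Ok(result) => result,
            Err(never) => match never {}, // no value of type `Infallible` exists
        }
    }
}

fn main() {
    let p = Predicate::And(
        Box::new(Predicate::NotInModule(3)),
        Box::new(Predicate::True),
    );
    assert!(p.apply_ignore_module());
    println!("result: {}", p.apply_ignore_module());
}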
diff --git a/compiler/rustc_middle/src/ty/instance.rs b/compiler/rustc_middle/src/ty/instance.rs
index 6c1414f7b..586460986 100644
--- a/compiler/rustc_middle/src/ty/instance.rs
+++ b/compiler/rustc_middle/src/ty/instance.rs
@@ -276,28 +276,45 @@ impl<'tcx> InstanceDef<'tcx> {
}
}
-impl<'tcx> fmt::Display for Instance<'tcx> {
+fn fmt_instance(
+ f: &mut fmt::Formatter<'_>,
+ instance: &Instance<'_>,
+ type_length: rustc_session::Limit,
+) -> fmt::Result {
+ ty::tls::with(|tcx| {
+ let substs = tcx.lift(instance.substs).expect("could not lift for printing");
+
+ let s = FmtPrinter::new_with_limit(tcx, Namespace::ValueNS, type_length)
+ .print_def_path(instance.def_id(), substs)?
+ .into_buffer();
+ f.write_str(&s)
+ })?;
+
+ match instance.def {
+ InstanceDef::Item(_) => Ok(()),
+ InstanceDef::VTableShim(_) => write!(f, " - shim(vtable)"),
+ InstanceDef::ReifyShim(_) => write!(f, " - shim(reify)"),
+ InstanceDef::Intrinsic(_) => write!(f, " - intrinsic"),
+ InstanceDef::Virtual(_, num) => write!(f, " - virtual#{}", num),
+ InstanceDef::FnPtrShim(_, ty) => write!(f, " - shim({})", ty),
+ InstanceDef::ClosureOnceShim { .. } => write!(f, " - shim"),
+ InstanceDef::DropGlue(_, None) => write!(f, " - shim(None)"),
+ InstanceDef::DropGlue(_, Some(ty)) => write!(f, " - shim(Some({}))", ty),
+ InstanceDef::CloneShim(_, ty) => write!(f, " - shim({})", ty),
+ }
+}
+
+pub struct ShortInstance<'a, 'tcx>(pub &'a Instance<'tcx>, pub usize);
+
+impl<'a, 'tcx> fmt::Display for ShortInstance<'a, 'tcx> {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
- ty::tls::with(|tcx| {
- let substs = tcx.lift(self.substs).expect("could not lift for printing");
- let s = FmtPrinter::new(tcx, Namespace::ValueNS)
- .print_def_path(self.def_id(), substs)?
- .into_buffer();
- f.write_str(&s)
- })?;
+ fmt_instance(f, self.0, rustc_session::Limit(self.1))
+ }
+}
- match self.def {
- InstanceDef::Item(_) => Ok(()),
- InstanceDef::VTableShim(_) => write!(f, " - shim(vtable)"),
- InstanceDef::ReifyShim(_) => write!(f, " - shim(reify)"),
- InstanceDef::Intrinsic(_) => write!(f, " - intrinsic"),
- InstanceDef::Virtual(_, num) => write!(f, " - virtual#{}", num),
- InstanceDef::FnPtrShim(_, ty) => write!(f, " - shim({})", ty),
- InstanceDef::ClosureOnceShim { .. } => write!(f, " - shim"),
- InstanceDef::DropGlue(_, None) => write!(f, " - shim(None)"),
- InstanceDef::DropGlue(_, Some(ty)) => write!(f, " - shim(Some({}))", ty),
- InstanceDef::CloneShim(_, ty) => write!(f, " - shim({})", ty),
- }
+impl<'tcx> fmt::Display for Instance<'tcx> {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ ty::tls::with(|tcx| fmt_instance(f, self, tcx.type_length_limit()))
}
}
@@ -511,12 +528,12 @@ impl<'tcx> Instance<'tcx> {
Instance::resolve(tcx, ty::ParamEnv::reveal_all(), def_id, substs).unwrap().unwrap()
}
+ #[instrument(level = "debug", skip(tcx), ret)]
pub fn fn_once_adapter_instance(
tcx: TyCtxt<'tcx>,
closure_did: DefId,
substs: ty::SubstsRef<'tcx>,
) -> Option<Instance<'tcx>> {
- debug!("fn_once_adapter_shim({:?}, {:?})", closure_did, substs);
let fn_once = tcx.require_lang_item(LangItem::FnOnce, None);
let call_once = tcx
.associated_items(fn_once)
@@ -534,9 +551,9 @@ impl<'tcx> Instance<'tcx> {
let sig =
tcx.try_normalize_erasing_late_bound_regions(ty::ParamEnv::reveal_all(), sig).ok()?;
assert_eq!(sig.inputs().len(), 1);
- let substs = tcx.mk_substs_trait(self_ty, &[sig.inputs()[0].into()]);
+ let substs = tcx.mk_substs_trait(self_ty, [sig.inputs()[0].into()]);
- debug!("fn_once_adapter_shim: self_ty={:?} sig={:?}", self_ty, sig);
+ debug!(?self_ty, ?sig);
Some(Instance { def, substs })
}
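`ShortInstance` above is a second `Display` wrapper that routes through the shared `fmt_instance` helper with a caller-supplied limit instead of the session's type-length limit. The same wrapper pattern over plain std types, with every name below made up for the sketch:

use std::fmt;

/// Shared formatting logic, parameterised over a length limit.
fn fmt_path(f: &mut fmt::Formatter<'_>, segments: &[&str], limit: usize) -> fmt::Result {
    let full = segments.join("::");
    if full.len() <= limit {
        f.write_str(&full)
    } else {
        // Keep the tail, which is usually the most informative part of a path.
        write!(f, "...{}", &full[full.len() - limit..])
    }
}

struct Path<'a>(Vec<&'a str>);

/// Default `Display`: a generous built-in limit, playing the role of the global limit.
impl fmt::Display for Path<'_> {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        fmt_path(f, &self.0, 120)
    }
}

/// Wrapper that prints the same data with an explicit, usually smaller, limit.
struct ShortPath<'a, 'b>(&'a Path<'b>, usize);

impl fmt::Display for ShortPath<'_, '_> {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        let Path(segments) = self.0;
        fmt_path(f, segments, self.1)
    }
}

fn main() {
    let p = Path(vec!["core", "iter", "adapters", "flatten", "FlatMap"]);
    println!("{p}");                   // full rendering
    println!("{}", ShortPath(&p, 16)); // length-limited rendering
}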
diff --git a/compiler/rustc_middle/src/ty/layout.rs b/compiler/rustc_middle/src/ty/layout.rs
index 3312f44c6..488fd5678 100644
--- a/compiler/rustc_middle/src/ty/layout.rs
+++ b/compiler/rustc_middle/src/ty/layout.rs
@@ -1,8 +1,6 @@
use crate::middle::codegen_fn_attrs::CodegenFnAttrFlags;
use crate::ty::normalize_erasing_regions::NormalizationError;
use crate::ty::{self, ReprOptions, Ty, TyCtxt, TypeVisitable};
-use rustc_ast as ast;
-use rustc_attr as attr;
use rustc_errors::{DiagnosticBuilder, Handler, IntoDiagnostic};
use rustc_hir as hir;
use rustc_hir::def_id::DefId;
@@ -20,7 +18,6 @@ use std::ops::Bound;
pub trait IntegerExt {
fn to_ty<'tcx>(&self, tcx: TyCtxt<'tcx>, signed: bool) -> Ty<'tcx>;
- fn from_attr<C: HasDataLayout>(cx: &C, ity: attr::IntType) -> Integer;
fn from_int_ty<C: HasDataLayout>(cx: &C, ity: ty::IntTy) -> Integer;
fn from_uint_ty<C: HasDataLayout>(cx: &C, uty: ty::UintTy) -> Integer;
fn repr_discr<'tcx>(
@@ -49,22 +46,6 @@ impl IntegerExt for Integer {
}
}
- /// Gets the Integer type from an attr::IntType.
- fn from_attr<C: HasDataLayout>(cx: &C, ity: attr::IntType) -> Integer {
- let dl = cx.data_layout();
-
- match ity {
- attr::SignedInt(ast::IntTy::I8) | attr::UnsignedInt(ast::UintTy::U8) => I8,
- attr::SignedInt(ast::IntTy::I16) | attr::UnsignedInt(ast::UintTy::U16) => I16,
- attr::SignedInt(ast::IntTy::I32) | attr::UnsignedInt(ast::UintTy::U32) => I32,
- attr::SignedInt(ast::IntTy::I64) | attr::UnsignedInt(ast::UintTy::U64) => I64,
- attr::SignedInt(ast::IntTy::I128) | attr::UnsignedInt(ast::UintTy::U128) => I128,
- attr::SignedInt(ast::IntTy::Isize) | attr::UnsignedInt(ast::UintTy::Usize) => {
- dl.ptr_sized_integer()
- }
- }
- }
-
fn from_int_ty<C: HasDataLayout>(cx: &C, ity: ty::IntTy) -> Integer {
match ity {
ty::IntTy::I8 => I8,
@@ -189,8 +170,8 @@ pub enum LayoutError<'tcx> {
NormalizationFailure(Ty<'tcx>, NormalizationError<'tcx>),
}
-impl<'a> IntoDiagnostic<'a, !> for LayoutError<'a> {
- fn into_diagnostic(self, handler: &'a Handler) -> DiagnosticBuilder<'a, !> {
+impl IntoDiagnostic<'_, !> for LayoutError<'_> {
+ fn into_diagnostic(self, handler: &Handler) -> DiagnosticBuilder<'_, !> {
let mut diag = handler.struct_fatal("");
match self {
@@ -237,6 +218,18 @@ pub struct LayoutCx<'tcx, C> {
pub param_env: ty::ParamEnv<'tcx>,
}
+impl<'tcx> LayoutCalculator for LayoutCx<'tcx, TyCtxt<'tcx>> {
+ type TargetDataLayoutRef = &'tcx TargetDataLayout;
+
+ fn delay_bug(&self, txt: &str) {
+ self.tcx.sess.delay_span_bug(DUMMY_SP, txt);
+ }
+
+ fn current_data_layout(&self) -> Self::TargetDataLayoutRef {
+ &self.tcx.data_layout
+ }
+}
+
/// Type size "skeleton", i.e., the only information determining a type's size.
/// While this is conservative, (aside from constant sizes, only pointers,
/// newtypes thereof and null pointer optimized enums are allowed), it is
@@ -610,7 +603,7 @@ where
})
}
- Variants::Multiple { ref variants, .. } => variants[variant_index],
+ Variants::Multiple { ref variants, .. } => cx.tcx().intern_layout(variants[variant_index].clone()),
};
assert_eq!(*layout.variants(), Variants::Single { index: variant_index });
@@ -1126,8 +1119,8 @@ impl<'tcx> fmt::Display for FnAbiError<'tcx> {
}
}
-impl<'tcx> IntoDiagnostic<'tcx, !> for FnAbiError<'tcx> {
- fn into_diagnostic(self, handler: &'tcx Handler) -> DiagnosticBuilder<'tcx, !> {
+impl IntoDiagnostic<'_, !> for FnAbiError<'_> {
+ fn into_diagnostic(self, handler: &Handler) -> DiagnosticBuilder<'_, !> {
handler.struct_fatal(self.to_string())
}
}
diff --git a/compiler/rustc_middle/src/ty/mod.rs b/compiler/rustc_middle/src/ty/mod.rs
index a42d05706..3f99efd25 100644
--- a/compiler/rustc_middle/src/ty/mod.rs
+++ b/compiler/rustc_middle/src/ty/mod.rs
@@ -9,6 +9,8 @@
//!
//! ["The `ty` module: representing types"]: https://rustc-dev-guide.rust-lang.org/ty.html
+#![allow(rustc::usage_of_ty_tykind)]
+
pub use self::fold::{FallibleTypeFolder, TypeFoldable, TypeFolder, TypeSuperFoldable};
pub use self::visit::{TypeSuperVisitable, TypeVisitable, TypeVisitor};
pub use self::AssocItemContainer::*;
@@ -26,13 +28,12 @@ use crate::ty::util::Discr;
pub use adt::*;
pub use assoc::*;
pub use generics::*;
-use hir::OpaqueTyOrigin;
use rustc_ast as ast;
use rustc_ast::node_id::NodeMap;
use rustc_attr as attr;
use rustc_data_structures::fingerprint::Fingerprint;
use rustc_data_structures::fx::{FxHashMap, FxHashSet, FxIndexMap, FxIndexSet};
-use rustc_data_structures::intern::{Interned, WithStableHash};
+use rustc_data_structures::intern::Interned;
use rustc_data_structures::stable_hasher::{HashStable, StableHasher};
use rustc_data_structures::tagged_ptr::CopyTaggedPtr;
use rustc_hir as hir;
@@ -48,7 +49,9 @@ use rustc_session::cstore::CrateStoreDyn;
use rustc_span::hygiene::MacroKind;
use rustc_span::symbol::{kw, sym, Ident, Symbol};
use rustc_span::{ExpnId, Span};
-use rustc_target::abi::{Align, VariantIdx};
+use rustc_target::abi::{Align, Integer, IntegerType, VariantIdx};
+pub use rustc_target::abi::{ReprFlags, ReprOptions};
+use rustc_type_ir::WithCachedTypeInfo;
pub use subst::*;
pub use vtable::*;
@@ -76,15 +79,15 @@ pub use self::closure::{
CAPTURE_STRUCT_LOCAL,
};
pub use self::consts::{
- Const, ConstInt, ConstKind, ConstS, InferConst, ScalarInt, UnevaluatedConst, ValTree,
+ Const, ConstInt, ConstKind, ConstS, Expr, InferConst, ScalarInt, UnevaluatedConst, ValTree,
};
pub use self::context::{
tls, CanonicalUserType, CanonicalUserTypeAnnotation, CanonicalUserTypeAnnotations,
- CtxtInterners, DeducedParamAttrs, DelaySpanBugEmitted, FreeRegionInfo, GeneratorDiagnosticData,
- GeneratorInteriorTypeCause, GlobalCtxt, Lift, OnDiskCache, TyCtxt, TypeckResults, UserType,
- UserTypeAnnotationIndex,
+ CtxtInterners, DeducedParamAttrs, FreeRegionInfo, GeneratorDiagnosticData,
+ GeneratorInteriorTypeCause, GlobalCtxt, Lift, OnDiskCache, TyCtxt, TyCtxtFeed, TypeckResults,
+ UserType, UserTypeAnnotationIndex,
};
-pub use self::instance::{Instance, InstanceDef};
+pub use self::instance::{Instance, InstanceDef, ShortInstance};
pub use self::list::List;
pub use self::parameterized::ParameterizedOverTcx;
pub use self::rvalue_scopes::RvalueScopes;
@@ -94,9 +97,10 @@ pub use self::sty::{
BoundVariableKind, CanonicalPolyFnSig, ClosureSubsts, ClosureSubstsParts, ConstVid,
EarlyBoundRegion, ExistentialPredicate, ExistentialProjection, ExistentialTraitRef, FnSig,
FreeRegion, GenSig, GeneratorSubsts, GeneratorSubstsParts, InlineConstSubsts,
- InlineConstSubstsParts, ParamConst, ParamTy, PolyExistentialProjection,
- PolyExistentialTraitRef, PolyFnSig, PolyGenSig, PolyTraitRef, ProjectionTy, Region, RegionKind,
- RegionVid, TraitRef, TyKind, TypeAndMut, UpvarSubsts, VarianceDiagInfo,
+ InlineConstSubstsParts, ParamConst, ParamTy, PolyExistentialPredicate,
+ PolyExistentialProjection, PolyExistentialTraitRef, PolyFnSig, PolyGenSig, PolyTraitRef,
+ ProjectionTy, Region, RegionKind, RegionVid, TraitRef, TyKind, TypeAndMut, UpvarSubsts,
+ VarianceDiagInfo,
};
pub use self::trait_def::TraitDef;
@@ -206,6 +210,8 @@ pub struct ResolverAstLowering {
/// A small map keeping true kinds of built-in macros that appear to be fn-like on
/// the surface (`macro` items in libcore), but are actually attributes or derives.
pub builtin_macro_kinds: FxHashMap<LocalDefId, MacroKind>,
+ /// Lists functions and methods for which lifetime elision was successful.
+ pub lifetime_elision_allowed: FxHashSet<ast::NodeId>,
}
#[derive(Clone, Copy, Debug)]
@@ -410,7 +416,7 @@ impl Visibility<DefId> {
self.map_id(|id| id.expect_local())
}
- // Returns `true` if this item is visible anywhere in the local crate.
+ /// Returns `true` if this item is visible anywhere in the local crate.
pub fn is_visible_locally(self) -> bool {
match self {
Visibility::Public => true,
@@ -441,118 +447,44 @@ pub struct CReaderCacheKey {
pub pos: usize,
}
-/// Represents a type.
-///
-/// IMPORTANT:
-/// - This is a very "dumb" struct (with no derives and no `impls`).
-/// - Values of this type are always interned and thus unique, and are stored
-/// as an `Interned<TyS>`.
-/// - `Ty` (which contains a reference to a `Interned<TyS>`) or `Interned<TyS>`
-/// should be used everywhere instead of `TyS`. In particular, `Ty` has most
-/// of the relevant methods.
-#[derive(PartialEq, Eq, PartialOrd, Ord)]
-#[allow(rustc::usage_of_ty_tykind)]
-pub(crate) struct TyS<'tcx> {
- /// This field shouldn't be used directly and may be removed in the future.
- /// Use `Ty::kind()` instead.
- kind: TyKind<'tcx>,
-
- /// This field provides fast access to information that is also contained
- /// in `kind`.
- ///
- /// This field shouldn't be used directly and may be removed in the future.
- /// Use `Ty::flags()` instead.
- flags: TypeFlags,
-
- /// This field provides fast access to information that is also contained
- /// in `kind`.
- ///
- /// This is a kind of confusing thing: it stores the smallest
- /// binder such that
- ///
- /// (a) the binder itself captures nothing but
- /// (b) all the late-bound things within the type are captured
- /// by some sub-binder.
- ///
- /// So, for a type without any late-bound things, like `u32`, this
- /// will be *innermost*, because that is the innermost binder that
- /// captures nothing. But for a type `&'D u32`, where `'D` is a
- /// late-bound region with De Bruijn index `D`, this would be `D + 1`
- /// -- the binder itself does not capture `D`, but `D` is captured
- /// by an inner binder.
- ///
- /// We call this concept an "exclusive" binder `D` because all
- /// De Bruijn indices within the type are contained within `0..D`
- /// (exclusive).
- outer_exclusive_binder: ty::DebruijnIndex,
-}
-
-/// Use this rather than `TyS`, whenever possible.
+/// Use this rather than `TyKind`, whenever possible.
#[derive(Copy, Clone, PartialEq, Eq, PartialOrd, Ord, Hash, HashStable)]
#[rustc_diagnostic_item = "Ty"]
#[rustc_pass_by_value]
-pub struct Ty<'tcx>(Interned<'tcx, WithStableHash<TyS<'tcx>>>);
+pub struct Ty<'tcx>(Interned<'tcx, WithCachedTypeInfo<TyKind<'tcx>>>);
impl<'tcx> TyCtxt<'tcx> {
/// A "bool" type used in rustc_mir_transform unit tests when we
/// have not spun up a TyCtxt.
- pub const BOOL_TY_FOR_UNIT_TESTING: Ty<'tcx> = Ty(Interned::new_unchecked(&WithStableHash {
- internee: TyS {
- kind: ty::Bool,
+ pub const BOOL_TY_FOR_UNIT_TESTING: Ty<'tcx> =
+ Ty(Interned::new_unchecked(&WithCachedTypeInfo {
+ internee: ty::Bool,
+ stable_hash: Fingerprint::ZERO,
flags: TypeFlags::empty(),
outer_exclusive_binder: DebruijnIndex::from_usize(0),
- },
- stable_hash: Fingerprint::ZERO,
- }));
-}
-
-impl<'a, 'tcx> HashStable<StableHashingContext<'a>> for TyS<'tcx> {
- #[inline]
- fn hash_stable(&self, hcx: &mut StableHashingContext<'a>, hasher: &mut StableHasher) {
- let TyS {
- kind,
-
- // The other fields just provide fast access to information that is
- // also contained in `kind`, so no need to hash them.
- flags: _,
-
- outer_exclusive_binder: _,
- } = self;
-
- kind.hash_stable(hcx, hasher)
- }
+ }));
}
impl ty::EarlyBoundRegion {
/// Does this early bound region have a name? Early bound regions normally
/// always have names except when using anonymous lifetimes (`'_`).
pub fn has_name(&self) -> bool {
- self.name != kw::UnderscoreLifetime
+ self.name != kw::UnderscoreLifetime && self.name != kw::Empty
}
}
-/// Represents a predicate.
-///
-/// See comments on `TyS`, which apply here too (albeit for
-/// `PredicateS`/`Predicate` rather than `TyS`/`Ty`).
-#[derive(Debug)]
-pub(crate) struct PredicateS<'tcx> {
- kind: Binder<'tcx, PredicateKind<'tcx>>,
- flags: TypeFlags,
- /// See the comment for the corresponding field of [TyS].
- outer_exclusive_binder: ty::DebruijnIndex,
-}
-
-/// Use this rather than `PredicateS`, whenever possible.
-#[derive(Clone, Copy, PartialEq, Eq, Hash)]
+/// Use this rather than `PredicateKind`, whenever possible.
+#[derive(Clone, Copy, PartialEq, Eq, Hash, HashStable)]
#[rustc_pass_by_value]
-pub struct Predicate<'tcx>(Interned<'tcx, PredicateS<'tcx>>);
+pub struct Predicate<'tcx>(
+ Interned<'tcx, WithCachedTypeInfo<ty::Binder<'tcx, PredicateKind<'tcx>>>>,
+);
impl<'tcx> Predicate<'tcx> {
/// Gets the inner `Binder<'tcx, PredicateKind<'tcx>>`.
#[inline]
pub fn kind(self) -> Binder<'tcx, PredicateKind<'tcx>> {
- self.0.kind
+ self.0.internee
}
#[inline(always)]
@@ -572,13 +504,15 @@ impl<'tcx> Predicate<'tcx> {
let kind = self
.kind()
.map_bound(|kind| match kind {
- PredicateKind::Trait(TraitPredicate { trait_ref, constness, polarity }) => {
- Some(PredicateKind::Trait(TraitPredicate {
- trait_ref,
- constness,
- polarity: polarity.flip()?,
- }))
- }
+ PredicateKind::Clause(Clause::Trait(TraitPredicate {
+ trait_ref,
+ constness,
+ polarity,
+ })) => Some(PredicateKind::Clause(Clause::Trait(TraitPredicate {
+ trait_ref,
+ constness,
+ polarity: polarity.flip()?,
+ }))),
_ => None,
})
@@ -588,14 +522,14 @@ impl<'tcx> Predicate<'tcx> {
}
pub fn without_const(mut self, tcx: TyCtxt<'tcx>) -> Self {
- if let PredicateKind::Trait(TraitPredicate { trait_ref, constness, polarity }) = self.kind().skip_binder()
+ if let PredicateKind::Clause(Clause::Trait(TraitPredicate { trait_ref, constness, polarity })) = self.kind().skip_binder()
&& constness != BoundConstness::NotConst
{
- self = tcx.mk_predicate(self.kind().rebind(PredicateKind::Trait(TraitPredicate {
+ self = tcx.mk_predicate(self.kind().rebind(PredicateKind::Clause(Clause::Trait(TraitPredicate {
trait_ref,
constness: BoundConstness::NotConst,
polarity,
- })));
+ }))));
}
self
}
@@ -609,36 +543,22 @@ impl<'tcx> Predicate<'tcx> {
pub fn allow_normalization(self) -> bool {
match self.kind().skip_binder() {
PredicateKind::WellFormed(_) => false,
- PredicateKind::Trait(_)
- | PredicateKind::RegionOutlives(_)
- | PredicateKind::TypeOutlives(_)
- | PredicateKind::Projection(_)
+ PredicateKind::Clause(Clause::Trait(_))
+ | PredicateKind::Clause(Clause::RegionOutlives(_))
+ | PredicateKind::Clause(Clause::TypeOutlives(_))
+ | PredicateKind::Clause(Clause::Projection(_))
| PredicateKind::ObjectSafe(_)
| PredicateKind::ClosureKind(_, _, _)
| PredicateKind::Subtype(_)
| PredicateKind::Coerce(_)
| PredicateKind::ConstEvaluatable(_)
| PredicateKind::ConstEquate(_, _)
+ | PredicateKind::Ambiguous
| PredicateKind::TypeWellFormedFromEnv(_) => true,
}
}
}
-impl<'a, 'tcx> HashStable<StableHashingContext<'a>> for Predicate<'tcx> {
- fn hash_stable(&self, hcx: &mut StableHashingContext<'a>, hasher: &mut StableHasher) {
- let PredicateS {
- ref kind,
-
- // The other fields just provide fast access to information that is
- // also contained in `kind`, so no need to hash them.
- flags: _,
- outer_exclusive_binder: _,
- } = self.0.0;
-
- kind.hash_stable(hcx, hasher);
- }
-}
-
impl rustc_errors::IntoDiagnosticArg for Predicate<'_> {
fn into_diagnostic_arg(self) -> rustc_errors::DiagnosticArgValue<'static> {
rustc_errors::DiagnosticArgValue::Str(std::borrow::Cow::Owned(self.to_string()))
@@ -647,7 +567,9 @@ impl rustc_errors::IntoDiagnosticArg for Predicate<'_> {
#[derive(Clone, Copy, PartialEq, Eq, Hash, TyEncodable, TyDecodable)]
#[derive(HashStable, TypeFoldable, TypeVisitable, Lift)]
-pub enum PredicateKind<'tcx> {
+/// A clause is something that can appear in where bounds or be inferred
+/// by implied bounds.
+pub enum Clause<'tcx> {
/// Corresponds to `where Foo: Bar<A, B, C>`. `Foo` here would be
/// the `Self` type of the trait reference and `A`, `B`, and `C`
/// would be the type parameters.
@@ -662,6 +584,13 @@ pub enum PredicateKind<'tcx> {
/// `where <T as TraitRef>::Name == X`, approximately.
/// See the `ProjectionPredicate` struct for details.
Projection(ProjectionPredicate<'tcx>),
+}
+
+#[derive(Clone, Copy, PartialEq, Eq, Hash, TyEncodable, TyDecodable)]
+#[derive(HashStable, TypeFoldable, TypeVisitable, Lift)]
+pub enum PredicateKind<'tcx> {
+ /// Prove a clause
+ Clause(Clause<'tcx>),
/// No syntax: `T` well-formed.
WellFormed(GenericArg<'tcx>),
@@ -701,6 +630,10 @@ pub enum PredicateKind<'tcx> {
///
/// Only used for Chalk.
TypeWellFormedFromEnv(Ty<'tcx>),
+
+ /// A marker predicate that is always ambiguous.
+ /// Used for coherence to mark opaque types as possibly equal to each other but ambiguous.
+ Ambiguous,
}
/// The crate outlives map is computed during typeck and contains the
@@ -714,7 +647,7 @@ pub struct CratePredicatesMap<'tcx> {
/// For each struct with outlive bounds, maps to a vector of the
/// predicate of its outlive bounds. If an item has no outlives
/// bounds, it will have no entry.
- pub predicates: FxHashMap<DefId, &'tcx [(Predicate<'tcx>, Span)]>,
+ pub predicates: FxHashMap<DefId, &'tcx [(Clause<'tcx>, Span)]>,
}
impl<'tcx> Predicate<'tcx> {
@@ -851,6 +784,10 @@ impl<'tcx> TraitPredicate<'tcx> {
}
}
+ pub fn with_self_type(self, tcx: TyCtxt<'tcx>, self_ty: Ty<'tcx>) -> Self {
+ Self { trait_ref: self.trait_ref.with_self_type(tcx, self_ty), ..self }
+ }
+
pub fn def_id(self) -> DefId {
self.trait_ref.def_id
}
@@ -902,9 +839,10 @@ impl<'tcx> PolyTraitPredicate<'tcx> {
}
}
+/// `A: B`
#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Debug, TyEncodable, TyDecodable)]
#[derive(HashStable, TypeFoldable, TypeVisitable, Lift)]
-pub struct OutlivesPredicate<A, B>(pub A, pub B); // `A: B`
+pub struct OutlivesPredicate<A, B>(pub A, pub B);
pub type RegionOutlivesPredicate<'tcx> = OutlivesPredicate<ty::Region<'tcx>, ty::Region<'tcx>>;
pub type TypeOutlivesPredicate<'tcx> = OutlivesPredicate<Ty<'tcx>, ty::Region<'tcx>>;
pub type PolyRegionOutlivesPredicate<'tcx> = ty::Binder<'tcx, RegionOutlivesPredicate<'tcx>>;
@@ -1003,7 +941,7 @@ impl<'tcx> Term<'tcx> {
unsafe {
match ptr & TAG_MASK {
TYPE_TAG => TermKind::Ty(Ty(Interned::new_unchecked(
- &*((ptr & !TAG_MASK) as *const WithStableHash<ty::TyS<'tcx>>),
+ &*((ptr & !TAG_MASK) as *const WithCachedTypeInfo<ty::TyKind<'tcx>>),
))),
CONST_TAG => TermKind::Const(ty::Const(Interned::new_unchecked(
&*((ptr & !TAG_MASK) as *const ty::ConstS<'tcx>),
@@ -1047,7 +985,7 @@ impl<'tcx> TermKind<'tcx> {
TermKind::Ty(ty) => {
// Ensure we can use the tag bits.
assert_eq!(mem::align_of_val(&*ty.0.0) & TAG_MASK, 0);
- (TYPE_TAG, ty.0.0 as *const WithStableHash<ty::TyS<'tcx>> as usize)
+ (TYPE_TAG, ty.0.0 as *const WithCachedTypeInfo<ty::TyKind<'tcx>> as usize)
}
TermKind::Const(ct) => {
// Ensure we can use the tag bits.
@@ -1125,12 +1063,12 @@ impl<'tcx> ToPolyTraitRef<'tcx> for PolyTraitPredicate<'tcx> {
}
}
-pub trait ToPredicate<'tcx> {
- fn to_predicate(self, tcx: TyCtxt<'tcx>) -> Predicate<'tcx>;
+pub trait ToPredicate<'tcx, P = Predicate<'tcx>> {
+ fn to_predicate(self, tcx: TyCtxt<'tcx>) -> P;
}
-impl<'tcx> ToPredicate<'tcx> for Predicate<'tcx> {
- fn to_predicate(self, _tcx: TyCtxt<'tcx>) -> Predicate<'tcx> {
+impl<'tcx, T> ToPredicate<'tcx, T> for T {
+ fn to_predicate(self, _tcx: TyCtxt<'tcx>) -> T {
self
}
}
@@ -1142,27 +1080,53 @@ impl<'tcx> ToPredicate<'tcx> for Binder<'tcx, PredicateKind<'tcx>> {
}
}
+impl<'tcx> ToPredicate<'tcx> for Clause<'tcx> {
+ #[inline(always)]
+ fn to_predicate(self, tcx: TyCtxt<'tcx>) -> Predicate<'tcx> {
+ tcx.mk_predicate(ty::Binder::dummy(ty::PredicateKind::Clause(self)))
+ }
+}
+
+impl<'tcx> ToPredicate<'tcx> for Binder<'tcx, TraitRef<'tcx>> {
+ #[inline(always)]
+ fn to_predicate(self, tcx: TyCtxt<'tcx>) -> Predicate<'tcx> {
+ let pred: PolyTraitPredicate<'tcx> = self.to_predicate(tcx);
+ pred.to_predicate(tcx)
+ }
+}
+
+impl<'tcx> ToPredicate<'tcx, PolyTraitPredicate<'tcx>> for Binder<'tcx, TraitRef<'tcx>> {
+ #[inline(always)]
+ fn to_predicate(self, _: TyCtxt<'tcx>) -> PolyTraitPredicate<'tcx> {
+ self.map_bound(|trait_ref| TraitPredicate {
+ trait_ref,
+ constness: ty::BoundConstness::NotConst,
+ polarity: ty::ImplPolarity::Positive,
+ })
+ }
+}
+
impl<'tcx> ToPredicate<'tcx> for PolyTraitPredicate<'tcx> {
fn to_predicate(self, tcx: TyCtxt<'tcx>) -> Predicate<'tcx> {
- self.map_bound(PredicateKind::Trait).to_predicate(tcx)
+ self.map_bound(|p| PredicateKind::Clause(Clause::Trait(p))).to_predicate(tcx)
}
}
impl<'tcx> ToPredicate<'tcx> for PolyRegionOutlivesPredicate<'tcx> {
fn to_predicate(self, tcx: TyCtxt<'tcx>) -> Predicate<'tcx> {
- self.map_bound(PredicateKind::RegionOutlives).to_predicate(tcx)
+ self.map_bound(|p| PredicateKind::Clause(Clause::RegionOutlives(p))).to_predicate(tcx)
}
}
impl<'tcx> ToPredicate<'tcx> for PolyTypeOutlivesPredicate<'tcx> {
fn to_predicate(self, tcx: TyCtxt<'tcx>) -> Predicate<'tcx> {
- self.map_bound(PredicateKind::TypeOutlives).to_predicate(tcx)
+ self.map_bound(|p| PredicateKind::Clause(Clause::TypeOutlives(p))).to_predicate(tcx)
}
}
impl<'tcx> ToPredicate<'tcx> for PolyProjectionPredicate<'tcx> {
fn to_predicate(self, tcx: TyCtxt<'tcx>) -> Predicate<'tcx> {
- self.map_bound(PredicateKind::Projection).to_predicate(tcx)
+ self.map_bound(|p| PredicateKind::Clause(Clause::Projection(p))).to_predicate(tcx)
}
}
@@ -1170,17 +1134,18 @@ impl<'tcx> Predicate<'tcx> {
pub fn to_opt_poly_trait_pred(self) -> Option<PolyTraitPredicate<'tcx>> {
let predicate = self.kind();
match predicate.skip_binder() {
- PredicateKind::Trait(t) => Some(predicate.rebind(t)),
- PredicateKind::Projection(..)
+ PredicateKind::Clause(Clause::Trait(t)) => Some(predicate.rebind(t)),
+ PredicateKind::Clause(Clause::Projection(..))
| PredicateKind::Subtype(..)
| PredicateKind::Coerce(..)
- | PredicateKind::RegionOutlives(..)
+ | PredicateKind::Clause(Clause::RegionOutlives(..))
| PredicateKind::WellFormed(..)
| PredicateKind::ObjectSafe(..)
| PredicateKind::ClosureKind(..)
- | PredicateKind::TypeOutlives(..)
+ | PredicateKind::Clause(Clause::TypeOutlives(..))
| PredicateKind::ConstEvaluatable(..)
| PredicateKind::ConstEquate(..)
+ | PredicateKind::Ambiguous
| PredicateKind::TypeWellFormedFromEnv(..) => None,
}
}
@@ -1188,17 +1153,18 @@ impl<'tcx> Predicate<'tcx> {
pub fn to_opt_poly_projection_pred(self) -> Option<PolyProjectionPredicate<'tcx>> {
let predicate = self.kind();
match predicate.skip_binder() {
- PredicateKind::Projection(t) => Some(predicate.rebind(t)),
- PredicateKind::Trait(..)
+ PredicateKind::Clause(Clause::Projection(t)) => Some(predicate.rebind(t)),
+ PredicateKind::Clause(Clause::Trait(..))
| PredicateKind::Subtype(..)
| PredicateKind::Coerce(..)
- | PredicateKind::RegionOutlives(..)
+ | PredicateKind::Clause(Clause::RegionOutlives(..))
| PredicateKind::WellFormed(..)
| PredicateKind::ObjectSafe(..)
| PredicateKind::ClosureKind(..)
- | PredicateKind::TypeOutlives(..)
+ | PredicateKind::Clause(Clause::TypeOutlives(..))
| PredicateKind::ConstEvaluatable(..)
| PredicateKind::ConstEquate(..)
+ | PredicateKind::Ambiguous
| PredicateKind::TypeWellFormedFromEnv(..) => None,
}
}
@@ -1206,17 +1172,18 @@ impl<'tcx> Predicate<'tcx> {
pub fn to_opt_type_outlives(self) -> Option<PolyTypeOutlivesPredicate<'tcx>> {
let predicate = self.kind();
match predicate.skip_binder() {
- PredicateKind::TypeOutlives(data) => Some(predicate.rebind(data)),
- PredicateKind::Trait(..)
- | PredicateKind::Projection(..)
+ PredicateKind::Clause(Clause::TypeOutlives(data)) => Some(predicate.rebind(data)),
+ PredicateKind::Clause(Clause::Trait(..))
+ | PredicateKind::Clause(Clause::Projection(..))
| PredicateKind::Subtype(..)
| PredicateKind::Coerce(..)
- | PredicateKind::RegionOutlives(..)
+ | PredicateKind::Clause(Clause::RegionOutlives(..))
| PredicateKind::WellFormed(..)
| PredicateKind::ObjectSafe(..)
| PredicateKind::ClosureKind(..)
| PredicateKind::ConstEvaluatable(..)
| PredicateKind::ConstEquate(..)
+ | PredicateKind::Ambiguous
| PredicateKind::TypeWellFormedFromEnv(..) => None,
}
}
@@ -1257,7 +1224,7 @@ impl<'tcx> InstantiatedPredicates<'tcx> {
}
}
-#[derive(Copy, Clone, Debug, PartialEq, Eq, HashStable, TyEncodable, TyDecodable, Lift)]
+#[derive(Copy, Clone, Debug, PartialEq, Eq, Hash, HashStable, TyEncodable, TyDecodable, Lift)]
#[derive(TypeFoldable, TypeVisitable)]
pub struct OpaqueTypeKey<'tcx> {
pub def_id: LocalDefId,
@@ -1319,7 +1286,6 @@ impl<'tcx> OpaqueHiddenType<'tcx> {
tcx: TyCtxt<'tcx>,
// typeck errors have subpar spans for opaque types, so delay error reporting until borrowck.
ignore_errors: bool,
- origin: OpaqueTyOrigin,
) -> Self {
let OpaqueTypeKey { def_id, substs } = opaque_type_key;
@@ -1332,78 +1298,10 @@ impl<'tcx> OpaqueHiddenType<'tcx> {
let id_substs = InternalSubsts::identity_for_item(tcx, def_id.to_def_id());
debug!(?id_substs);
- let map = substs.iter().zip(id_substs);
-
- let map: FxHashMap<GenericArg<'tcx>, GenericArg<'tcx>> = match origin {
- // HACK: The HIR lowering for async fn does not generate
- // any `+ Captures<'x>` bounds for the `impl Future<...>`, so all async fns with lifetimes
- // would now fail to compile. We should probably just make hir lowering fill this in properly.
- OpaqueTyOrigin::AsyncFn(_) => map.collect(),
- OpaqueTyOrigin::FnReturn(_) | OpaqueTyOrigin::TyAlias => {
- // Opaque types may only use regions that are bound. So for
- // ```rust
- // type Foo<'a, 'b, 'c> = impl Trait<'a> + 'b;
- // ```
- // we may not use `'c` in the hidden type.
- struct OpaqueTypeLifetimeCollector<'tcx> {
- lifetimes: FxHashSet<ty::Region<'tcx>>,
- }
-
- impl<'tcx> ty::TypeVisitor<'tcx> for OpaqueTypeLifetimeCollector<'tcx> {
- fn visit_region(&mut self, r: ty::Region<'tcx>) -> ControlFlow<Self::BreakTy> {
- self.lifetimes.insert(r);
- r.super_visit_with(self)
- }
- }
-
- let mut collector = OpaqueTypeLifetimeCollector { lifetimes: Default::default() };
-
- for pred in tcx.bound_explicit_item_bounds(def_id.to_def_id()).transpose_iter() {
- let pred = pred.map_bound(|(pred, _)| *pred).subst(tcx, id_substs);
-
- trace!(pred=?pred.kind());
-
- // We only ignore opaque type substs if the opaque type is the outermost type.
- // The opaque type may be nested within itself via recursion in e.g.
- // type Foo<'a> = impl PartialEq<Foo<'a>>;
- // which thus mentions `'a` and should thus accept hidden types that borrow 'a
- // instead of requiring an additional `+ 'a`.
- match pred.kind().skip_binder() {
- ty::PredicateKind::Trait(TraitPredicate {
- trait_ref: ty::TraitRef { def_id: _, substs },
- constness: _,
- polarity: _,
- }) => {
- trace!(?substs);
- for subst in &substs[1..] {
- subst.visit_with(&mut collector);
- }
- }
- ty::PredicateKind::Projection(ty::ProjectionPredicate {
- projection_ty: ty::ProjectionTy { substs, item_def_id: _ },
- term,
- }) => {
- for subst in &substs[1..] {
- subst.visit_with(&mut collector);
- }
- term.visit_with(&mut collector);
- }
- _ => {
- pred.visit_with(&mut collector);
- }
- }
- }
- let lifetimes = collector.lifetimes;
- trace!(?lifetimes);
- map.filter(|(_, v)| {
- let ty::GenericArgKind::Lifetime(lt) = v.unpack() else {
- return true;
- };
- lifetimes.contains(&lt)
- })
- .collect()
- }
- };
+ // This zip may have several times the same lifetime in `substs` paired with a different
+ // lifetime from `id_substs`. Simply `collect`ing the iterator is the correct behaviour:
+ // it will pick the last one, which is the one we introduced in the impl-trait desugaring.
+ let map = substs.iter().zip(id_substs).collect();
debug!("map = {:#?}", map);
// Convert the type from the function into a type valid outside
@@ -1748,7 +1646,7 @@ impl<'tcx> ParamEnv<'tcx> {
}
ParamEnv::new(
- tcx.normalize_opaque_types(self.caller_bounds()),
+ tcx.reveal_opaque_types_in_bounds(self.caller_bounds()),
Reveal::All,
self.constness(),
)
@@ -1852,15 +1750,13 @@ pub struct VariantDef {
pub def_id: DefId,
/// `DefId` that identifies the variant's constructor.
/// If this variant is a struct variant, then this is `None`.
- pub ctor_def_id: Option<DefId>,
+ pub ctor: Option<(CtorKind, DefId)>,
/// Variant or struct name.
pub name: Symbol,
/// Discriminant of this variant.
pub discr: VariantDiscr,
/// Fields of this variant.
pub fields: Vec<FieldDef>,
- /// Type of constructor of variant.
- pub ctor_kind: CtorKind,
/// Flags of the variant (e.g. is field list non-exhaustive)?
flags: VariantFlags,
}
@@ -1885,19 +1781,18 @@ impl VariantDef {
pub fn new(
name: Symbol,
variant_did: Option<DefId>,
- ctor_def_id: Option<DefId>,
+ ctor: Option<(CtorKind, DefId)>,
discr: VariantDiscr,
fields: Vec<FieldDef>,
- ctor_kind: CtorKind,
adt_kind: AdtKind,
parent_did: DefId,
recovered: bool,
is_field_list_non_exhaustive: bool,
) -> Self {
debug!(
- "VariantDef::new(name = {:?}, variant_did = {:?}, ctor_def_id = {:?}, discr = {:?},
- fields = {:?}, ctor_kind = {:?}, adt_kind = {:?}, parent_did = {:?})",
- name, variant_did, ctor_def_id, discr, fields, ctor_kind, adt_kind, parent_did,
+ "VariantDef::new(name = {:?}, variant_did = {:?}, ctor = {:?}, discr = {:?},
+ fields = {:?}, adt_kind = {:?}, parent_did = {:?})",
+ name, variant_did, ctor, discr, fields, adt_kind, parent_did,
);
let mut flags = VariantFlags::NO_VARIANT_FLAGS;
@@ -1909,15 +1804,7 @@ impl VariantDef {
flags |= VariantFlags::IS_RECOVERED;
}
- VariantDef {
- def_id: variant_did.unwrap_or(parent_did),
- ctor_def_id,
- name,
- discr,
- fields,
- ctor_kind,
- flags,
- }
+ VariantDef { def_id: variant_did.unwrap_or(parent_did), ctor, name, discr, fields, flags }
}
/// Is this field list non-exhaustive?
@@ -1936,6 +1823,16 @@ impl VariantDef {
pub fn ident(&self, tcx: TyCtxt<'_>) -> Ident {
Ident::new(self.name, tcx.def_ident_span(self.def_id).unwrap())
}
+
+ #[inline]
+ pub fn ctor_kind(&self) -> Option<CtorKind> {
+ self.ctor.map(|(kind, _)| kind)
+ }
+
+ #[inline]
+ pub fn ctor_def_id(&self) -> Option<DefId> {
+ self.ctor.map(|(_, def_id)| def_id)
+ }
}
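
The two new accessors keep call sites pattern-matching on the constructor kind while the storage collapses into a single `Option`; "no constructor" (the old `CtorKind::Fictive`) is now simply `None`. A standalone sketch of that shape, with a stand-in type rather than the real `VariantDef`/`DefId`:

// u32 stands in for DefId in this sketch.
#[derive(Clone, Copy, Debug, PartialEq)]
enum CtorKind {
    Fn,
    Const,
}

struct Variant {
    ctor: Option<(CtorKind, u32)>,
}

impl Variant {
    fn ctor_kind(&self) -> Option<CtorKind> {
        self.ctor.map(|(kind, _)| kind)
    }
    fn ctor_def_id(&self) -> Option<u32> {
        self.ctor.map(|(_, def_id)| def_id)
    }
}

fn main() {
    let struct_variant = Variant { ctor: None };
    let tuple_variant = Variant { ctor: Some((CtorKind::Fn, 42)) };
    let unit_variant = Variant { ctor: Some((CtorKind::Const, 7)) };
    assert_eq!(struct_variant.ctor_kind(), None); // was CtorKind::Fictive before this change
    assert_eq!(tuple_variant.ctor_def_id(), Some(42));
    assert_eq!(unit_variant.ctor_kind(), Some(CtorKind::Const));
}
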
impl PartialEq for VariantDef {
@@ -1948,26 +1845,8 @@ impl PartialEq for VariantDef {
// definition of `VariantDef` changes, a compile-error will be produced,
// reminding us to revisit this assumption.
- let Self {
- def_id: lhs_def_id,
- ctor_def_id: _,
- name: _,
- discr: _,
- fields: _,
- ctor_kind: _,
- flags: _,
- } = &self;
-
- let Self {
- def_id: rhs_def_id,
- ctor_def_id: _,
- name: _,
- discr: _,
- fields: _,
- ctor_kind: _,
- flags: _,
- } = other;
-
+ let Self { def_id: lhs_def_id, ctor: _, name: _, discr: _, fields: _, flags: _ } = &self;
+ let Self { def_id: rhs_def_id, ctor: _, name: _, discr: _, fields: _, flags: _ } = other;
lhs_def_id == rhs_def_id
}
}
@@ -1984,9 +1863,7 @@ impl Hash for VariantDef {
// of `VariantDef` changes, a compile-error will be produced, reminding
// us to revisit this assumption.
- let Self { def_id, ctor_def_id: _, name: _, discr: _, fields: _, ctor_kind: _, flags: _ } =
- &self;
-
+ let Self { def_id, ctor: _, name: _, discr: _, fields: _, flags: _ } = &self;
def_id.hash(s)
}
}
@@ -2047,163 +1924,6 @@ impl Hash for FieldDef {
}
}
-bitflags! {
- #[derive(TyEncodable, TyDecodable, Default, HashStable)]
- pub struct ReprFlags: u8 {
- const IS_C = 1 << 0;
- const IS_SIMD = 1 << 1;
- const IS_TRANSPARENT = 1 << 2;
- // Internal only for now. If true, don't reorder fields.
- const IS_LINEAR = 1 << 3;
- // If true, the type's layout can be randomized using
- // the seed stored in `ReprOptions.layout_seed`
- const RANDOMIZE_LAYOUT = 1 << 4;
- // Any of these flags being set prevent field reordering optimisation.
- const IS_UNOPTIMISABLE = ReprFlags::IS_C.bits
- | ReprFlags::IS_SIMD.bits
- | ReprFlags::IS_LINEAR.bits;
- }
-}
-
-/// Represents the repr options provided by the user,
-#[derive(Copy, Clone, Debug, Eq, PartialEq, TyEncodable, TyDecodable, Default, HashStable)]
-pub struct ReprOptions {
- pub int: Option<attr::IntType>,
- pub align: Option<Align>,
- pub pack: Option<Align>,
- pub flags: ReprFlags,
- /// The seed to be used for randomizing a type's layout
- ///
- /// Note: This could technically be a `[u8; 16]` (a `u128`) which would
- /// be the "most accurate" hash as it'd encompass the item and crate
- /// hash without loss, but it does pay the price of being larger.
- /// Everything's a tradeoff, a `u64` seed should be sufficient for our
- /// purposes (primarily `-Z randomize-layout`)
- pub field_shuffle_seed: u64,
-}
-
-impl ReprOptions {
- pub fn new(tcx: TyCtxt<'_>, did: DefId) -> ReprOptions {
- let mut flags = ReprFlags::empty();
- let mut size = None;
- let mut max_align: Option<Align> = None;
- let mut min_pack: Option<Align> = None;
-
- // Generate a deterministically-derived seed from the item's path hash
- // to allow for cross-crate compilation to actually work
- let mut field_shuffle_seed = tcx.def_path_hash(did).0.to_smaller_hash();
-
- // If the user defined a custom seed for layout randomization, xor the item's
- // path hash with the user defined seed, this will allowing determinism while
- // still allowing users to further randomize layout generation for e.g. fuzzing
- if let Some(user_seed) = tcx.sess.opts.unstable_opts.layout_seed {
- field_shuffle_seed ^= user_seed;
- }
-
- for attr in tcx.get_attrs(did, sym::repr) {
- for r in attr::parse_repr_attr(&tcx.sess, attr) {
- flags.insert(match r {
- attr::ReprC => ReprFlags::IS_C,
- attr::ReprPacked(pack) => {
- let pack = Align::from_bytes(pack as u64).unwrap();
- min_pack = Some(if let Some(min_pack) = min_pack {
- min_pack.min(pack)
- } else {
- pack
- });
- ReprFlags::empty()
- }
- attr::ReprTransparent => ReprFlags::IS_TRANSPARENT,
- attr::ReprSimd => ReprFlags::IS_SIMD,
- attr::ReprInt(i) => {
- size = Some(i);
- ReprFlags::empty()
- }
- attr::ReprAlign(align) => {
- max_align = max_align.max(Some(Align::from_bytes(align as u64).unwrap()));
- ReprFlags::empty()
- }
- });
- }
- }
-
- // If `-Z randomize-layout` was enabled for the type definition then we can
- // consider performing layout randomization
- if tcx.sess.opts.unstable_opts.randomize_layout {
- flags.insert(ReprFlags::RANDOMIZE_LAYOUT);
- }
-
- // This is here instead of layout because the choice must make it into metadata.
- if !tcx.consider_optimizing(|| format!("Reorder fields of {:?}", tcx.def_path_str(did))) {
- flags.insert(ReprFlags::IS_LINEAR);
- }
-
- Self { int: size, align: max_align, pack: min_pack, flags, field_shuffle_seed }
- }
-
- #[inline]
- pub fn simd(&self) -> bool {
- self.flags.contains(ReprFlags::IS_SIMD)
- }
-
- #[inline]
- pub fn c(&self) -> bool {
- self.flags.contains(ReprFlags::IS_C)
- }
-
- #[inline]
- pub fn packed(&self) -> bool {
- self.pack.is_some()
- }
-
- #[inline]
- pub fn transparent(&self) -> bool {
- self.flags.contains(ReprFlags::IS_TRANSPARENT)
- }
-
- #[inline]
- pub fn linear(&self) -> bool {
- self.flags.contains(ReprFlags::IS_LINEAR)
- }
-
- /// Returns the discriminant type, given these `repr` options.
- /// This must only be called on enums!
- pub fn discr_type(&self) -> attr::IntType {
- self.int.unwrap_or(attr::SignedInt(ast::IntTy::Isize))
- }
-
- /// Returns `true` if this `#[repr()]` should inhabit "smart enum
- /// layout" optimizations, such as representing `Foo<&T>` as a
- /// single pointer.
- pub fn inhibit_enum_layout_opt(&self) -> bool {
- self.c() || self.int.is_some()
- }
-
- /// Returns `true` if this `#[repr()]` should inhibit struct field reordering
- /// optimizations, such as with `repr(C)`, `repr(packed(1))`, or `repr(<int>)`.
- pub fn inhibit_struct_field_reordering_opt(&self) -> bool {
- if let Some(pack) = self.pack {
- if pack.bytes() == 1 {
- return true;
- }
- }
-
- self.flags.intersects(ReprFlags::IS_UNOPTIMISABLE) || self.int.is_some()
- }
-
- /// Returns `true` if this type is valid for reordering and `-Z randomize-layout`
- /// was enabled for its declaration crate
- pub fn can_randomize_type_layout(&self) -> bool {
- !self.inhibit_struct_field_reordering_opt()
- && self.flags.contains(ReprFlags::RANDOMIZE_LAYOUT)
- }
-
- /// Returns `true` if this `#[repr()]` should inhibit union ABI optimisations.
- pub fn inhibit_union_abi_opt(&self) -> bool {
- self.c()
- }
-}
-
impl<'tcx> FieldDef {
/// Returns the type of this field. The resulting type is not normalized. The `subst` is
/// typically obtained via the second field of [`TyKind::Adt`].
@@ -2271,6 +1991,81 @@ impl<'tcx> TyCtxt<'tcx> {
.filter(move |item| item.kind == AssocKind::Fn && item.defaultness(self).has_value())
}
+ pub fn repr_options_of_def(self, did: DefId) -> ReprOptions {
+ let mut flags = ReprFlags::empty();
+ let mut size = None;
+ let mut max_align: Option<Align> = None;
+ let mut min_pack: Option<Align> = None;
+
+ // Generate a deterministically-derived seed from the item's path hash
+ // to allow for cross-crate compilation to actually work
+ let mut field_shuffle_seed = self.def_path_hash(did).0.to_smaller_hash();
+
+ // If the user defined a custom seed for layout randomization, xor the item's
+ // path hash with the user-defined seed; this preserves determinism while
+ // still allowing users to further randomize layout generation, e.g. for fuzzing.
+ if let Some(user_seed) = self.sess.opts.unstable_opts.layout_seed {
+ field_shuffle_seed ^= user_seed;
+ }
+
+ for attr in self.get_attrs(did, sym::repr) {
+ for r in attr::parse_repr_attr(&self.sess, attr) {
+ flags.insert(match r {
+ attr::ReprC => ReprFlags::IS_C,
+ attr::ReprPacked(pack) => {
+ let pack = Align::from_bytes(pack as u64).unwrap();
+ min_pack = Some(if let Some(min_pack) = min_pack {
+ min_pack.min(pack)
+ } else {
+ pack
+ });
+ ReprFlags::empty()
+ }
+ attr::ReprTransparent => ReprFlags::IS_TRANSPARENT,
+ attr::ReprSimd => ReprFlags::IS_SIMD,
+ attr::ReprInt(i) => {
+ size = Some(match i {
+ attr::IntType::SignedInt(x) => match x {
+ ast::IntTy::Isize => IntegerType::Pointer(true),
+ ast::IntTy::I8 => IntegerType::Fixed(Integer::I8, true),
+ ast::IntTy::I16 => IntegerType::Fixed(Integer::I16, true),
+ ast::IntTy::I32 => IntegerType::Fixed(Integer::I32, true),
+ ast::IntTy::I64 => IntegerType::Fixed(Integer::I64, true),
+ ast::IntTy::I128 => IntegerType::Fixed(Integer::I128, true),
+ },
+ attr::IntType::UnsignedInt(x) => match x {
+ ast::UintTy::Usize => IntegerType::Pointer(false),
+ ast::UintTy::U8 => IntegerType::Fixed(Integer::I8, false),
+ ast::UintTy::U16 => IntegerType::Fixed(Integer::I16, false),
+ ast::UintTy::U32 => IntegerType::Fixed(Integer::I32, false),
+ ast::UintTy::U64 => IntegerType::Fixed(Integer::I64, false),
+ ast::UintTy::U128 => IntegerType::Fixed(Integer::I128, false),
+ },
+ });
+ ReprFlags::empty()
+ }
+ attr::ReprAlign(align) => {
+ max_align = max_align.max(Some(Align::from_bytes(align as u64).unwrap()));
+ ReprFlags::empty()
+ }
+ });
+ }
+ }
+
+ // If `-Z randomize-layout` was enabled for the type definition then we can
+ // consider performing layout randomization
+ if self.sess.opts.unstable_opts.randomize_layout {
+ flags.insert(ReprFlags::RANDOMIZE_LAYOUT);
+ }
+
+ // This is here instead of layout because the choice must make it into metadata.
+ if !self.consider_optimizing(|| format!("Reorder fields of {:?}", self.def_path_str(did))) {
+ flags.insert(ReprFlags::IS_LINEAR);
+ }
+
+ ReprOptions { int: size, align: max_align, pack: min_pack, flags, field_shuffle_seed }
+ }
+
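
The loop above folds every `#[repr(...)]` hint into one `ReprOptions` value: flags are unioned, `packed(n)` keeps the smallest packing seen, and `align(n)` keeps the largest alignment seen. A standalone sketch of that accumulation, with plain bools and byte counts standing in for `ReprFlags` and `Align`:

#[derive(Default, Debug)]
struct ReprSketch {
    is_c: bool,
    pack_bytes: Option<u64>,
    align_bytes: Option<u64>,
}

enum Hint {
    C,
    Packed(u64),
    Align(u64),
}

fn fold_hints(hints: &[Hint]) -> ReprSketch {
    let mut out = ReprSketch::default();
    for hint in hints {
        match hint {
            Hint::C => out.is_c = true,
            // `packed(n)` tightens the bound: keep the smallest packing seen.
            Hint::Packed(n) => out.pack_bytes = Some(out.pack_bytes.map_or(*n, |p| p.min(*n))),
            // `align(n)` raises the bound: keep the largest alignment seen.
            Hint::Align(n) => out.align_bytes = Some(out.align_bytes.map_or(*n, |a| a.max(*n))),
        }
    }
    out
}

fn main() {
    // Roughly `#[repr(C, packed(4), packed(2), align(8))]`.
    let r = fold_hints(&[Hint::C, Hint::Packed(4), Hint::Packed(2), Hint::Align(8)]);
    assert!(r.is_c);
    assert_eq!(r.pack_bytes, Some(2));
    assert_eq!(r.align_bytes, Some(8));
}
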
/// Look up the name of a definition across crates. This does not look at HIR.
pub fn opt_item_name(self, def_id: DefId) -> Option<Symbol> {
if let Some(cnum) = def_id.as_crate_root() {
@@ -2322,10 +2117,6 @@ impl<'tcx> TyCtxt<'tcx> {
}
}
- pub fn field_index(self, hir_id: hir::HirId, typeck_results: &TypeckResults<'_>) -> usize {
- typeck_results.field_indices().get(hir_id).cloned().expect("no index for a field")
- }
-
pub fn find_field_index(self, ident: Ident, variant: &VariantDef) -> Option<usize> {
variant
.fields
@@ -2506,6 +2297,10 @@ impl<'tcx> TyCtxt<'tcx> {
self.trait_def(trait_def_id).has_auto_impl
}
+ pub fn trait_is_coinductive(self, trait_def_id: DefId) -> bool {
+ self.trait_is_auto(trait_def_id) || self.lang_items().sized_trait() == Some(trait_def_id)
+ }
+
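
`trait_is_coinductive` matters because auto traits (and `Sized`) must accept self-referential proof cycles instead of reporting overflow. A small illustration in plain Rust (not compiler internals) of the kind of cycle this permits:

struct Foo(Box<Foo>);

fn assert_send<T: Send>() {}

fn main() {
    // Proving `Foo: Send` needs `Box<Foo>: Send`, which needs `Foo: Send` again;
    // the cycle is accepted because `Send` is an auto trait, i.e. coinductive.
    assert_send::<Foo>();
}
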
/// Returns layout of a generator. Layout might be unavailable if the
/// generator is tainted by errors.
pub fn generator_layout(self, def_id: DefId) -> Option<&'tcx GeneratorLayout<'tcx>> {
@@ -2550,11 +2345,11 @@ impl<'tcx> TyCtxt<'tcx> {
/// Looks up the span of `impl_did` if the impl is local; otherwise returns `Err`
/// with the name of the crate containing the impl.
- pub fn span_of_impl(self, impl_did: DefId) -> Result<Span, Symbol> {
- if let Some(impl_did) = impl_did.as_local() {
- Ok(self.def_span(impl_did))
+ pub fn span_of_impl(self, impl_def_id: DefId) -> Result<Span, Symbol> {
+ if let Some(impl_def_id) = impl_def_id.as_local() {
+ Ok(self.def_span(impl_def_id))
} else {
- Err(self.crate_name(impl_did.krate))
+ Err(self.crate_name(impl_def_id.krate))
}
}
@@ -2782,8 +2577,7 @@ mod size_asserts {
use super::*;
use rustc_data_structures::static_assert_size;
// tidy-alphabetical-start
- static_assert_size!(PredicateS<'_>, 48);
- static_assert_size!(TyS<'_>, 40);
- static_assert_size!(WithStableHash<TyS<'_>>, 56);
+ static_assert_size!(PredicateKind<'_>, 32);
+ static_assert_size!(WithCachedTypeInfo<TyKind<'_>>, 56);
// tidy-alphabetical-end
}
diff --git a/compiler/rustc_middle/src/ty/opaque_types.rs b/compiler/rustc_middle/src/ty/opaque_types.rs
index b05c63109..98cd92007 100644
--- a/compiler/rustc_middle/src/ty/opaque_types.rs
+++ b/compiler/rustc_middle/src/ty/opaque_types.rs
@@ -1,7 +1,8 @@
+use crate::error::ConstNotUsedTraitAlias;
+use crate::ty::fold::{TypeFolder, TypeSuperFoldable};
+use crate::ty::subst::{GenericArg, GenericArgKind};
+use crate::ty::{self, Ty, TyCtxt, TypeFoldable};
use rustc_data_structures::fx::FxHashMap;
-use rustc_middle::ty::fold::{TypeFolder, TypeSuperFoldable};
-use rustc_middle::ty::subst::{GenericArg, GenericArgKind};
-use rustc_middle::ty::{self, Ty, TyCtxt, TypeFoldable};
use rustc_span::Span;
/// Converts generic params of a TypeFoldable from one
@@ -201,7 +202,7 @@ impl<'tcx> TypeFolder<'tcx> for ReverseMapper<'tcx> {
Some(u) => panic!("const mapped to unexpected kind: {:?}", u),
None => {
if !self.ignore_errors {
- self.tcx.sess.emit_err(ty::ConstNotUsedTraitAlias {
+ self.tcx.sess.emit_err(ConstNotUsedTraitAlias {
ct: ct.to_string(),
span: self.span,
});
diff --git a/compiler/rustc_middle/src/ty/parameterized.rs b/compiler/rustc_middle/src/ty/parameterized.rs
index e1e705a92..9e69544d2 100644
--- a/compiler/rustc_middle/src/ty/parameterized.rs
+++ b/compiler/rustc_middle/src/ty/parameterized.rs
@@ -4,9 +4,8 @@ use rustc_index::vec::{Idx, IndexVec};
use crate::middle::exported_symbols::ExportedSymbol;
use crate::mir::Body;
-use crate::ty::abstract_const::Node;
use crate::ty::{
- self, Const, FnSig, GeneratorDiagnosticData, GenericPredicates, Predicate, TraitRef, Ty,
+ self, Clause, Const, FnSig, GeneratorDiagnosticData, GenericPredicates, Predicate, TraitRef, Ty,
};
pub trait ParameterizedOverTcx: 'static {
@@ -54,6 +53,7 @@ trivially_parameterized_over_tcx! {
usize,
(),
u32,
+ bool,
std::string::String,
crate::metadata::ModChild,
crate::middle::codegen_fn_attrs::CodegenFnAttrs,
@@ -70,7 +70,7 @@ trivially_parameterized_over_tcx! {
ty::adjustment::CoerceUnsizedInfo,
ty::fast_reject::SimplifiedTypeGen<DefId>,
rustc_ast::Attribute,
- rustc_ast::MacArgs,
+ rustc_ast::DelimArgs,
rustc_attr::ConstStability,
rustc_attr::DefaultBodyStability,
rustc_attr::Deprecation,
@@ -122,8 +122,8 @@ parameterized_over_tcx! {
TraitRef,
Const,
Predicate,
+ Clause,
GeneratorDiagnosticData,
Body,
- Node,
ExportedSymbol,
}
diff --git a/compiler/rustc_middle/src/ty/print/mod.rs b/compiler/rustc_middle/src/ty/print/mod.rs
index 44b9548db..667298b9b 100644
--- a/compiler/rustc_middle/src/ty/print/mod.rs
+++ b/compiler/rustc_middle/src/ty/print/mod.rs
@@ -63,7 +63,7 @@ pub trait Printer<'tcx>: Sized {
fn print_dyn_existential(
self,
- predicates: &'tcx ty::List<ty::Binder<'tcx, ty::ExistentialPredicate<'tcx>>>,
+ predicates: &'tcx ty::List<ty::PolyExistentialPredicate<'tcx>>,
) -> Result<Self::DynExistential, Self::Error>;
fn print_const(self, ct: ty::Const<'tcx>) -> Result<Self::Const, Self::Error>;
@@ -308,9 +308,7 @@ impl<'tcx, P: Printer<'tcx>> Print<'tcx, P> for Ty<'tcx> {
}
}
-impl<'tcx, P: Printer<'tcx>> Print<'tcx, P>
- for &'tcx ty::List<ty::Binder<'tcx, ty::ExistentialPredicate<'tcx>>>
-{
+impl<'tcx, P: Printer<'tcx>> Print<'tcx, P> for &'tcx ty::List<ty::PolyExistentialPredicate<'tcx>> {
type Output = P::DynExistential;
type Error = P::Error;
fn print(&self, cx: P) -> Result<Self::Output, Self::Error> {
diff --git a/compiler/rustc_middle/src/ty/print/pretty.rs b/compiler/rustc_middle/src/ty/print/pretty.rs
index ef9aa236b..5303341ba 100644
--- a/compiler/rustc_middle/src/ty/print/pretty.rs
+++ b/compiler/rustc_middle/src/ty/print/pretty.rs
@@ -11,8 +11,10 @@ use rustc_hir as hir;
use rustc_hir::def::{self, CtorKind, DefKind, Namespace};
use rustc_hir::def_id::{DefId, DefIdSet, CRATE_DEF_ID, LOCAL_CRATE};
use rustc_hir::definitions::{DefPathData, DefPathDataName, DisambiguatedDefPathData};
+use rustc_hir::LangItem;
use rustc_session::config::TrimmedDefPaths;
use rustc_session::cstore::{ExternCrate, ExternCrateSource};
+use rustc_session::Limit;
use rustc_span::symbol::{kw, Ident, Symbol};
use rustc_target::abi::Size;
use rustc_target::spec::abi::Abi;
@@ -63,7 +65,6 @@ thread_local! {
static NO_TRIMMED_PATH: Cell<bool> = const { Cell::new(false) };
static NO_QUERIES: Cell<bool> = const { Cell::new(false) };
static NO_VISIBLE_PATH: Cell<bool> = const { Cell::new(false) };
- static NO_VERBOSE_CONSTANTS: Cell<bool> = const { Cell::new(false) };
}
macro_rules! define_helper {
@@ -118,9 +119,6 @@ define_helper!(
/// Prevent selection of visible paths. `Display` impl of DefId will prefer
/// visible (public) reexports of types as paths.
fn with_no_visible_paths(NoVisibleGuard, NO_VISIBLE_PATH);
- /// Prevent verbose printing of constants. Verbose printing of constants is
- /// never desirable in some contexts like `std::any::type_name`.
- fn with_no_verbose_constants(NoVerboseConstantsGuard, NO_VERBOSE_CONSTANTS);
);
/// The "region highlights" are used to control region printing during
@@ -600,7 +598,7 @@ pub trait PrettyPrinter<'tcx>:
}
ty::FnPtr(ref bare_fn) => p!(print(bare_fn)),
ty::Infer(infer_ty) => {
- let verbose = self.tcx().sess.verbose();
+ let verbose = self.should_print_verbose();
if let ty::TyVar(ty_vid) = infer_ty {
if let Some(name) = self.ty_infer_name(ty_vid) {
p!(write("{}", name))
@@ -642,7 +640,7 @@ pub trait PrettyPrinter<'tcx>:
p!(print_def_path(def_id, &[]));
}
ty::Projection(ref data) => {
- if !(self.tcx().sess.verbose() || NO_QUERIES.with(|q| q.get()))
+ if !(self.should_print_verbose() || NO_QUERIES.with(|q| q.get()))
&& self.tcx().def_kind(data.item_def_id) == DefKind::ImplTraitPlaceholder
{
return self.pretty_print_opaque_impl_type(data.item_def_id, data.substs);
@@ -658,7 +656,7 @@ pub trait PrettyPrinter<'tcx>:
// only affect certain debug messages (e.g. messages printed
// from `rustc_middle::ty` during the computation of `tcx.predicates_of`),
// and should have no effect on any compiler output.
- if self.tcx().sess.verbose() || NO_QUERIES.with(|q| q.get()) {
+ if self.should_print_verbose() || NO_QUERIES.with(|q| q.get()) {
p!(write("Opaque({:?}, {:?})", def_id, substs));
return Ok(self);
}
@@ -684,13 +682,19 @@ pub trait PrettyPrinter<'tcx>:
ty::Str => p!("str"),
ty::Generator(did, substs, movability) => {
p!(write("["));
- match movability {
- hir::Movability::Movable => {}
- hir::Movability::Static => p!("static "),
+ let generator_kind = self.tcx().generator_kind(did).unwrap();
+ let should_print_movability =
+ self.should_print_verbose() || generator_kind == hir::GeneratorKind::Gen;
+
+ if should_print_movability {
+ match movability {
+ hir::Movability::Movable => {}
+ hir::Movability::Static => p!("static "),
+ }
}
- if !self.tcx().sess.verbose() {
- p!("generator");
+ if !self.should_print_verbose() {
+ p!(write("{}", generator_kind));
// FIXME(eddyb) should use `def_span`.
if let Some(did) = did.as_local() {
let span = self.tcx().def_span(did);
@@ -725,7 +729,7 @@ pub trait PrettyPrinter<'tcx>:
}
ty::Closure(did, substs) => {
p!(write("["));
- if !self.tcx().sess.verbose() {
+ if !self.should_print_verbose() {
p!(write("closure"));
// FIXME(eddyb) should use `def_span`.
if let Some(did) = did.as_local() {
@@ -763,7 +767,7 @@ pub trait PrettyPrinter<'tcx>:
}
ty::Array(ty, sz) => {
p!("[", print(ty), "; ");
- if !NO_VERBOSE_CONSTANTS.with(|flag| flag.get()) && self.tcx().sess.verbose() {
+ if self.should_print_verbose() {
p!(write("{:?}", sz));
} else if let ty::ConstKind::Unevaluated(..) = sz.kind() {
// Do not try to evaluate unevaluated constants. If we are const evaluating an
@@ -805,7 +809,7 @@ pub trait PrettyPrinter<'tcx>:
let bound_predicate = predicate.kind();
match bound_predicate.skip_binder() {
- ty::PredicateKind::Trait(pred) => {
+ ty::PredicateKind::Clause(ty::Clause::Trait(pred)) => {
let trait_ref = bound_predicate.rebind(pred.trait_ref);
// Don't print + Sized, but rather + ?Sized if absent.
@@ -816,7 +820,7 @@ pub trait PrettyPrinter<'tcx>:
self.insert_trait_and_projection(trait_ref, None, &mut traits, &mut fn_traits);
}
- ty::PredicateKind::Projection(pred) => {
+ ty::PredicateKind::Clause(ty::Clause::Projection(pred)) => {
let proj_ref = bound_predicate.rebind(pred);
let trait_ref = proj_ref.required_poly_trait_ref(tcx);
@@ -830,7 +834,7 @@ pub trait PrettyPrinter<'tcx>:
&mut fn_traits,
);
}
- ty::PredicateKind::TypeOutlives(outlives) => {
+ ty::PredicateKind::Clause(ty::Clause::TypeOutlives(outlives)) => {
lifetimes.push(outlives.1);
}
_ => {}
@@ -892,7 +896,7 @@ pub trait PrettyPrinter<'tcx>:
// Group the return ty with its def id, if we had one.
entry
.return_ty
- .map(|ty| (tcx.lang_items().fn_once_output().unwrap(), ty)),
+ .map(|ty| (tcx.require_lang_item(LangItem::FnOnce, None), ty)),
);
}
if let Some(trait_ref) = entry.fn_mut_trait_ref {
@@ -1063,7 +1067,7 @@ pub trait PrettyPrinter<'tcx>:
fn pretty_print_dyn_existential(
mut self,
- predicates: &'tcx ty::List<ty::Binder<'tcx, ty::ExistentialPredicate<'tcx>>>,
+ predicates: &'tcx ty::List<ty::PolyExistentialPredicate<'tcx>>,
) -> Result<Self::DynExistential, Self::Error> {
// Generate the main trait ref, including associated types.
let mut first = true;
@@ -1076,8 +1080,8 @@ pub trait PrettyPrinter<'tcx>:
let mut resugared = false;
// Special-case `Fn(...) -> ...` and re-sugar it.
- let fn_trait_kind = cx.tcx().fn_trait_kind_from_lang_item(principal.def_id);
- if !cx.tcx().sess.verbose() && fn_trait_kind.is_some() {
+ let fn_trait_kind = cx.tcx().fn_trait_kind_from_def_id(principal.def_id);
+ if !cx.should_print_verbose() && fn_trait_kind.is_some() {
if let ty::Tuple(tys) = principal.substs.type_at(0).kind() {
let mut projections = predicates.projection_bounds();
if let (Some(proj), None) = (projections.next(), projections.next()) {
@@ -1141,7 +1145,7 @@ pub trait PrettyPrinter<'tcx>:
//
// To avoid causing instabilities in compiletest
// output, sort the auto-traits alphabetically.
- auto_traits.sort_by_cached_key(|did| self.tcx().def_path_str(*did));
+ auto_traits.sort_by_cached_key(|did| with_no_trimmed_paths!(self.tcx().def_path_str(*did)));
for def_id in auto_traits {
if !first {
@@ -1185,7 +1189,7 @@ pub trait PrettyPrinter<'tcx>:
) -> Result<Self::Const, Self::Error> {
define_scoped_cx!(self);
- if !NO_VERBOSE_CONSTANTS.with(|flag| flag.get()) && self.tcx().sess.verbose() {
+ if self.should_print_verbose() {
p!(write("Const({:?}: {:?})", ct.kind(), ct.ty()));
return Ok(self);
}
@@ -1244,6 +1248,9 @@ pub trait PrettyPrinter<'tcx>:
self.pretty_print_bound_var(debruijn, bound_var)?
}
ty::ConstKind::Placeholder(placeholder) => p!(write("Placeholder({:?})", placeholder)),
+ // FIXME(generic_const_exprs):
+ // write out some legible representation of an abstract const?
+ ty::ConstKind::Expr(_) => p!("[Const Expr]"),
ty::ConstKind::Error(_) => p!("[const error]"),
};
Ok(self)
@@ -1420,7 +1427,7 @@ pub trait PrettyPrinter<'tcx>:
) -> Result<Self::Const, Self::Error> {
define_scoped_cx!(self);
- if !NO_VERBOSE_CONSTANTS.with(|flag| flag.get()) && self.tcx().sess.verbose() {
+ if self.should_print_verbose() {
p!(write("ValTree({:?}: ", valtree), print(ty), ")");
return Ok(self);
}
@@ -1461,8 +1468,7 @@ pub trait PrettyPrinter<'tcx>:
}
// Aggregates, printed as array/tuple/struct/variant construction syntax.
(ty::ValTree::Branch(_), ty::Array(..) | ty::Tuple(..) | ty::Adt(..)) => {
- let contents =
- self.tcx().destructure_const(ty::Const::from_value(self.tcx(), valtree, ty));
+ let contents = self.tcx().destructure_const(self.tcx().mk_const(valtree, ty));
let fields = contents.fields.iter().copied();
match *ty.kind() {
ty::Array(..) => {
@@ -1490,12 +1496,12 @@ pub trait PrettyPrinter<'tcx>:
contents.variant.expect("destructed const of adt without variant idx");
let variant_def = &def.variant(variant_idx);
p!(print_value_path(variant_def.def_id, substs));
- match variant_def.ctor_kind {
- CtorKind::Const => {}
- CtorKind::Fn => {
+ match variant_def.ctor_kind() {
+ Some(CtorKind::Const) => {}
+ Some(CtorKind::Fn) => {
p!("(", comma_sep(fields), ")");
}
- CtorKind::Fictive => {
+ None => {
p!(" {{ ");
let mut first = true;
for (field_def, field) in iter::zip(&variant_def.fields, fields) {
@@ -1564,6 +1570,10 @@ pub trait PrettyPrinter<'tcx>:
Ok(cx)
})
}
+
+ fn should_print_verbose(&self) -> bool {
+ self.tcx().sess.verbose()
+ }
}
// HACK(eddyb) boxed to avoid moving around a large struct by-value.
@@ -1583,6 +1593,8 @@ pub struct FmtPrinterData<'a, 'tcx> {
region_index: usize,
binder_depth: usize,
printed_type_count: usize,
+ type_length_limit: Limit,
+ truncated: bool,
pub region_highlight_mode: RegionHighlightMode<'tcx>,
@@ -1605,6 +1617,10 @@ impl DerefMut for FmtPrinter<'_, '_> {
impl<'a, 'tcx> FmtPrinter<'a, 'tcx> {
pub fn new(tcx: TyCtxt<'tcx>, ns: Namespace) -> Self {
+ Self::new_with_limit(tcx, ns, tcx.type_length_limit())
+ }
+
+ pub fn new_with_limit(tcx: TyCtxt<'tcx>, ns: Namespace, type_length_limit: Limit) -> Self {
FmtPrinter(Box::new(FmtPrinterData {
tcx,
// Estimated reasonable capacity to allocate upfront based on a few
@@ -1617,6 +1633,8 @@ impl<'a, 'tcx> FmtPrinter<'a, 'tcx> {
region_index: 0,
binder_depth: 0,
printed_type_count: 0,
+ type_length_limit,
+ truncated: false,
region_highlight_mode: RegionHighlightMode::new(tcx),
ty_infer_name_resolver: None,
const_infer_name_resolver: None,
@@ -1659,6 +1677,12 @@ impl<'t> TyCtxt<'t> {
debug!("def_path_str: def_id={:?}, ns={:?}", def_id, ns);
FmtPrinter::new(self, ns).print_def_path(def_id, substs).unwrap().into_buffer()
}
+
+ pub fn value_path_str_with_substs(self, def_id: DefId, substs: &'t [GenericArg<'t>]) -> String {
+ let ns = guess_def_namespace(self, def_id);
+ debug!("value_path_str: def_id={:?}, ns={:?}", def_id, ns);
+ FmtPrinter::new(self, ns).print_value_path(def_id, substs).unwrap().into_buffer()
+ }
}
impl fmt::Write for FmtPrinter<'_, '_> {
@@ -1745,11 +1769,11 @@ impl<'tcx> Printer<'tcx> for FmtPrinter<'_, 'tcx> {
}
fn print_type(mut self, ty: Ty<'tcx>) -> Result<Self::Type, Self::Error> {
- let type_length_limit = self.tcx.type_length_limit();
- if type_length_limit.value_within_limit(self.printed_type_count) {
+ if self.type_length_limit.value_within_limit(self.printed_type_count) {
self.printed_type_count += 1;
self.pretty_print_type(ty)
} else {
+ self.truncated = true;
write!(self, "...")?;
Ok(self)
}
@@ -1757,7 +1781,7 @@ impl<'tcx> Printer<'tcx> for FmtPrinter<'_, 'tcx> {
fn print_dyn_existential(
self,
- predicates: &'tcx ty::List<ty::Binder<'tcx, ty::ExistentialPredicate<'tcx>>>,
+ predicates: &'tcx ty::List<ty::PolyExistentialPredicate<'tcx>>,
) -> Result<Self::DynExistential, Self::Error> {
self.pretty_print_dyn_existential(predicates)
}
@@ -1839,7 +1863,7 @@ impl<'tcx> Printer<'tcx> for FmtPrinter<'_, 'tcx> {
}
}
- let verbose = self.tcx.sess.verbose();
+ let verbose = self.should_print_verbose();
disambiguated_data.fmt_maybe_verbose(&mut self, verbose)?;
self.empty_path = false;
@@ -1940,24 +1964,20 @@ impl<'tcx> PrettyPrinter<'tcx> for FmtPrinter<'_, 'tcx> {
return true;
}
- if self.tcx.sess.verbose() {
+ if self.should_print_verbose() {
return true;
}
let identify_regions = self.tcx.sess.opts.unstable_opts.identify_regions;
match *region {
- ty::ReEarlyBound(ref data) => {
- data.name != kw::Empty && data.name != kw::UnderscoreLifetime
- }
+ ty::ReEarlyBound(ref data) => data.has_name(),
ty::ReLateBound(_, ty::BoundRegion { kind: br, .. })
| ty::ReFree(ty::FreeRegion { bound_region: br, .. })
| ty::RePlaceholder(ty::Placeholder { name: br, .. }) => {
- if let ty::BrNamed(_, name) = br {
- if name != kw::Empty && name != kw::UnderscoreLifetime {
- return true;
- }
+ if br.is_named() {
+ return true;
}
if let Some((region, _)) = highlight.highlight_bound_region {
@@ -2012,7 +2032,7 @@ impl<'tcx> FmtPrinter<'_, 'tcx> {
return Ok(self);
}
- if self.tcx.sess.verbose() {
+ if self.should_print_verbose() {
p!(write("{:?}", region));
return Ok(self);
}
@@ -2033,11 +2053,9 @@ impl<'tcx> FmtPrinter<'_, 'tcx> {
ty::ReLateBound(_, ty::BoundRegion { kind: br, .. })
| ty::ReFree(ty::FreeRegion { bound_region: br, .. })
| ty::RePlaceholder(ty::Placeholder { name: br, .. }) => {
- if let ty::BrNamed(_, name) = br {
- if name != kw::Empty && name != kw::UnderscoreLifetime {
- p!(write("{}", name));
- return Ok(self);
- }
+ if let ty::BrNamed(_, name) = br && br.is_named() {
+ p!(write("{}", name));
+ return Ok(self);
}
if let Some((region, counter)) = highlight.highlight_bound_region {
@@ -2115,7 +2133,7 @@ impl<'a, 'tcx> ty::TypeFolder<'tcx> for RegionFolder<'a, 'tcx> {
// If this is an anonymous placeholder, don't rename. Otherwise, in some
// async fns, we get a `for<'r> Send` bound
match kind {
- ty::BrAnon(_) | ty::BrEnv => r,
+ ty::BrAnon(..) | ty::BrEnv => r,
_ => {
// Index doesn't matter, since this is just for naming and these never get bound
let br = ty::BoundRegion { var: ty::BoundVar::from_u32(0), kind };
@@ -2188,11 +2206,9 @@ impl<'tcx> FmtPrinter<'_, 'tcx> {
define_scoped_cx!(self);
- let possible_names =
- ('a'..='z').rev().map(|s| Symbol::intern(&format!("'{s}"))).collect::<Vec<_>>();
+ let possible_names = ('a'..='z').rev().map(|s| Symbol::intern(&format!("'{s}")));
let mut available_names = possible_names
- .into_iter()
.filter(|name| !self.used_region_names.contains(&name))
.collect::<Vec<_>>();
debug!(?available_names);
@@ -2218,47 +2234,13 @@ impl<'tcx> FmtPrinter<'_, 'tcx> {
// aren't named. Eventually, we might just want this as the default, but
// this is not *quite* right and changes the ordering of some output
// anyways.
- let (new_value, map) = if self.tcx().sess.verbose() {
- let regions: Vec<_> = value
- .bound_vars()
- .into_iter()
- .map(|var| {
- let ty::BoundVariableKind::Region(var) = var else {
- // This doesn't really matter because it doesn't get used,
- // it's just an empty value
- return ty::BrAnon(0);
- };
- match var {
- ty::BrAnon(_) | ty::BrEnv => {
- start_or_continue(&mut self, "for<", ", ");
- let name = next_name(&self);
- debug!(?name);
- do_continue(&mut self, name);
- ty::BrNamed(CRATE_DEF_ID.to_def_id(), name)
- }
- ty::BrNamed(def_id, kw::UnderscoreLifetime) => {
- start_or_continue(&mut self, "for<", ", ");
- let name = next_name(&self);
- do_continue(&mut self, name);
- ty::BrNamed(def_id, name)
- }
- ty::BrNamed(def_id, name) => {
- start_or_continue(&mut self, "for<", ", ");
- do_continue(&mut self, name);
- ty::BrNamed(def_id, name)
- }
- }
- })
- .collect();
+ let (new_value, map) = if self.should_print_verbose() {
+ for var in value.bound_vars().iter() {
+ start_or_continue(&mut self, "for<", ", ");
+ write!(self, "{:?}", var)?;
+ }
start_or_continue(&mut self, "", "> ");
-
- self.tcx.replace_late_bound_regions(value.clone(), |br| {
- let kind = regions[br.var.as_usize()];
- self.tcx.mk_region(ty::ReLateBound(
- ty::INNERMOST,
- ty::BoundRegion { var: br.var, kind },
- ))
- })
+ (value.clone().skip_binder(), BTreeMap::default())
} else {
let tcx = self.tcx;
@@ -2271,7 +2253,7 @@ impl<'tcx> FmtPrinter<'_, 'tcx> {
binder_level_idx: ty::DebruijnIndex,
br: ty::BoundRegion| {
let (name, kind) = match br.kind {
- ty::BrAnon(_) | ty::BrEnv => {
+ ty::BrAnon(..) | ty::BrEnv => {
let name = next_name(&self);
if let Some(lt_idx) = lifetime_idx {
@@ -2286,7 +2268,7 @@ impl<'tcx> FmtPrinter<'_, 'tcx> {
(name, ty::BrNamed(CRATE_DEF_ID.to_def_id(), name))
}
- ty::BrNamed(def_id, kw::UnderscoreLifetime) => {
+ ty::BrNamed(def_id, kw::UnderscoreLifetime | kw::Empty) => {
let name = next_name(&self);
if let Some(lt_idx) = lifetime_idx {
@@ -2551,12 +2533,12 @@ pub struct PrintClosureAsImpl<'tcx> {
forward_display_to_print! {
ty::Region<'tcx>,
Ty<'tcx>,
- &'tcx ty::List<ty::Binder<'tcx, ty::ExistentialPredicate<'tcx>>>,
+ &'tcx ty::List<ty::PolyExistentialPredicate<'tcx>>,
ty::Const<'tcx>,
// HACK(eddyb) these are exhaustive instead of generic,
// because `for<'tcx>` isn't possible yet.
- ty::Binder<'tcx, ty::ExistentialPredicate<'tcx>>,
+ ty::PolyExistentialPredicate<'tcx>,
ty::Binder<'tcx, ty::TraitRef<'tcx>>,
ty::Binder<'tcx, ty::ExistentialTraitRef<'tcx>>,
ty::Binder<'tcx, TraitRefPrintOnlyTraitPath<'tcx>>,
@@ -2698,14 +2680,14 @@ define_print_and_forward_display! {
ty::PredicateKind<'tcx> {
match *self {
- ty::PredicateKind::Trait(ref data) => {
+ ty::PredicateKind::Clause(ty::Clause::Trait(ref data)) => {
p!(print(data))
}
ty::PredicateKind::Subtype(predicate) => p!(print(predicate)),
ty::PredicateKind::Coerce(predicate) => p!(print(predicate)),
- ty::PredicateKind::RegionOutlives(predicate) => p!(print(predicate)),
- ty::PredicateKind::TypeOutlives(predicate) => p!(print(predicate)),
- ty::PredicateKind::Projection(predicate) => p!(print(predicate)),
+ ty::PredicateKind::Clause(ty::Clause::RegionOutlives(predicate)) => p!(print(predicate)),
+ ty::PredicateKind::Clause(ty::Clause::TypeOutlives(predicate)) => p!(print(predicate)),
+ ty::PredicateKind::Clause(ty::Clause::Projection(predicate)) => p!(print(predicate)),
ty::PredicateKind::WellFormed(arg) => p!(print(arg), " well-formed"),
ty::PredicateKind::ObjectSafe(trait_def_id) => {
p!("the trait `", print_def_path(trait_def_id, &[]), "` is object-safe")
@@ -2724,6 +2706,7 @@ define_print_and_forward_display! {
ty::PredicateKind::TypeWellFormedFromEnv(ty) => {
p!("the type `", print(ty), "` is found in the environment")
}
+ ty::PredicateKind::Ambiguous => p!("ambiguous"),
}
}
diff --git a/compiler/rustc_middle/src/ty/query.rs b/compiler/rustc_middle/src/ty/query.rs
index ec90590ad..642900d3a 100644
--- a/compiler/rustc_middle/src/ty/query.rs
+++ b/compiler/rustc_middle/src/ty/query.rs
@@ -15,6 +15,7 @@ use crate::mir::interpret::{
};
use crate::mir::interpret::{LitToConstError, LitToConstInput};
use crate::mir::mono::CodegenUnit;
+use crate::query::Key;
use crate::thir;
use crate::traits::query::{
CanonicalPredicateGoal, CanonicalProjectionGoal, CanonicalTyGoal,
@@ -27,6 +28,7 @@ use crate::traits::query::{
};
use crate::traits::specialization_graph;
use crate::traits::{self, ImplSource};
+use crate::ty::context::TyCtxtFeed;
use crate::ty::fast_reject::SimplifiedType;
use crate::ty::layout::TyAndLayout;
use crate::ty::subst::{GenericArg, SubstsRef};
@@ -121,10 +123,10 @@ macro_rules! query_helper_param_ty {
macro_rules! query_storage {
([][$K:ty, $V:ty]) => {
- <DefaultCacheSelector as CacheSelector<$K, $V>>::Cache
+ <<$K as Key>::CacheSelector as CacheSelector<'tcx, $V>>::Cache
};
([(arena_cache) $($rest:tt)*][$K:ty, $V:ty]) => {
- <ArenaCacheSelector<'tcx> as CacheSelector<$K, $V>>::Cache
+ <<$K as Key>::CacheSelector as CacheSelector<'tcx, $V>>::ArenaCache
};
([$other:tt $($modifiers:tt)*][$($args:tt)*]) => {
query_storage!([$($modifiers)*][$($args)*])
@@ -275,13 +277,16 @@ macro_rules! define_callbacks {
impl Default for Providers {
fn default() -> Self {
+ use crate::query::Key;
+
Providers {
$($name: |_, key| bug!(
- "`tcx.{}({:?})` is not supported for external or local crate;\n
- hint: Queries can be either made to the local crate, or the external crate. This error means you tried to use it for one that's not supported (likely the local crate).\n
+ "`tcx.{}({:?})` is not supported for {} crate;\n
+ hint: Queries can either be made to the local crate or to the external crate. This error means you tried to use it for one that's not supported.\n
If that's not the case, {} was likely never assigned to a provider function.\n",
stringify!($name),
key,
+ if key.query_crate_is_local() { "local" } else { "external" },
stringify!($name),
),)*
}
@@ -323,6 +328,56 @@ macro_rules! define_callbacks {
};
}
+macro_rules! hash_result {
+ ([]) => {{
+ Some(dep_graph::hash_result)
+ }};
+ ([(no_hash) $($rest:tt)*]) => {{
+ None
+ }};
+ ([$other:tt $($modifiers:tt)*]) => {
+ hash_result!([$($modifiers)*])
+ };
+}
+
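
`hash_result!` dispatches on the query's modifier list with the usual tt-muncher idiom: match a recognized modifier at the head of the list, otherwise drop one token tree and recurse until the list is empty. A hypothetical macro (not part of this codebase) showing the same idiom in isolation:

macro_rules! pick_mode {
    ([]) => {
        "default"
    };
    ([(no_hash) $($rest:tt)*]) => {
        "no_hash"
    };
    ([$other:tt $($rest:tt)*]) => {
        pick_mode!([$($rest)*])
    };
}

fn main() {
    assert_eq!(pick_mode!([]), "default");
    assert_eq!(pick_mode!([(arena_cache) (no_hash)]), "no_hash");
    assert_eq!(pick_mode!([(arena_cache)]), "default");
}
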
+macro_rules! define_feedable {
+ ($($(#[$attr:meta])* [$($modifiers:tt)*] fn $name:ident($($K:tt)*) -> $V:ty,)*) => {
+ $(impl<'tcx, K: IntoQueryParam<$($K)*> + Copy> TyCtxtFeed<'tcx, K> {
+ $(#[$attr])*
+ #[inline(always)]
+ pub fn $name(self, value: $V) -> query_stored::$name<'tcx> {
+ let key = self.key().into_query_param();
+ opt_remap_env_constness!([$($modifiers)*][key]);
+
+ let tcx = self.tcx;
+ let cache = &tcx.query_caches.$name;
+
+ let cached = try_get_cached(tcx, cache, &key, copy);
+
+ match cached {
+ Ok(old) => {
+ bug!(
+ "Trying to feed an already recorded value for query {} key={key:?}:\nold value: {old:?}\nnew value: {value:?}",
+ stringify!($name),
+ );
+ }
+ Err(()) => (),
+ }
+
+ let dep_node = dep_graph::DepNode::construct(tcx, dep_graph::DepKind::$name, &key);
+ let dep_node_index = tcx.dep_graph.with_feed_task(
+ dep_node,
+ tcx,
+ key,
+ &value,
+ hash_result!([$($modifiers)*]),
+ );
+ cache.complete(key, value, dep_node_index)
+ }
+ })*
+ }
+}
+
// Each of these queries corresponds to a function pointer field in the
// `Providers` struct for requesting a value of that type, and a method
// on `tcx: TyCtxt` (and `tcx.at(span)`) for doing that request in a way
@@ -336,6 +391,7 @@ macro_rules! define_callbacks {
// as they will raise an fatal error on query cycles instead.
rustc_query_append! { define_callbacks! }
+rustc_feedable_queries! { define_feedable! }
mod sealed {
use super::{DefId, LocalDefId, OwnerId};
diff --git a/compiler/rustc_middle/src/ty/relate.rs b/compiler/rustc_middle/src/ty/relate.rs
index b25b4bd4f..c759fb6d5 100644
--- a/compiler/rustc_middle/src/ty/relate.rs
+++ b/compiler/rustc_middle/src/ty/relate.rs
@@ -5,7 +5,7 @@
//! subtyping, type equality, etc.
use crate::ty::error::{ExpectedFound, TypeError};
-use crate::ty::{self, ImplSubject, Term, TermKind, Ty, TyCtxt, TypeFoldable};
+use crate::ty::{self, Expr, ImplSubject, Term, TermKind, Ty, TyCtxt, TypeFoldable};
use crate::ty::{GenericArg, GenericArgKind, SubstsRef};
use rustc_hir as ast;
use rustc_hir::def_id::DefId;
@@ -23,6 +23,8 @@ pub enum Cause {
pub trait TypeRelation<'tcx>: Sized {
fn tcx(&self) -> TyCtxt<'tcx>;
+ fn intercrate(&self) -> bool;
+
fn param_env(&self) -> ty::ParamEnv<'tcx>;
/// Returns a static string we can use for printouts.
@@ -32,6 +34,9 @@ pub trait TypeRelation<'tcx>: Sized {
/// relation. Just affects error messages.
fn a_is_expected(&self) -> bool;
+ /// Used during coherence. If called, must emit an always-ambiguous obligation.
+ fn mark_ambiguous(&mut self);
+
fn with_cause<F, R>(&mut self, _cause: Cause, f: F) -> R
where
F: FnOnce(&mut Self) -> R,
@@ -60,7 +65,7 @@ pub trait TypeRelation<'tcx>: Sized {
let tcx = self.tcx();
let opt_variances = tcx.variances_of(item_def_id);
- relate_substs_with_variances(self, item_def_id, opt_variances, a_subst, b_subst)
+ relate_substs_with_variances(self, item_def_id, opt_variances, a_subst, b_subst, true)
}
/// Switch variance for the purpose of relating `a` and `b`.
@@ -151,13 +156,14 @@ pub fn relate_substs_with_variances<'tcx, R: TypeRelation<'tcx>>(
variances: &[ty::Variance],
a_subst: SubstsRef<'tcx>,
b_subst: SubstsRef<'tcx>,
+ fetch_ty_for_diag: bool,
) -> RelateResult<'tcx, SubstsRef<'tcx>> {
let tcx = relation.tcx();
let mut cached_ty = None;
let params = iter::zip(a_subst, b_subst).enumerate().map(|(i, (a, b))| {
let variance = variances[i];
- let variance_info = if variance == ty::Invariant {
+ let variance_info = if variance == ty::Invariant && fetch_ty_for_diag {
let ty =
*cached_ty.get_or_insert_with(|| tcx.bound_type_of(ty_def_id).subst(tcx, a_subst));
ty::VarianceDiagInfo::Invariant { ty, param_index: i.try_into().unwrap() }
@@ -561,8 +567,23 @@ pub fn super_relate_tys<'tcx, R: TypeRelation<'tcx>>(
(&ty::Opaque(a_def_id, a_substs), &ty::Opaque(b_def_id, b_substs))
if a_def_id == b_def_id =>
{
- let substs = relate_substs(relation, a_substs, b_substs)?;
- Ok(tcx.mk_opaque(a_def_id, substs))
+ if relation.intercrate() {
+ // During coherence, opaque types should be treated as equal to each other, even if their
+ // generic params differ, as they could still resolve to the same hidden type.
+ relation.mark_ambiguous();
+ Ok(a)
+ } else {
+ let opt_variances = tcx.variances_of(a_def_id);
+ let substs = relate_substs_with_variances(
+ relation,
+ a_def_id,
+ opt_variances,
+ a_substs,
+ b_substs,
+ false, // do not fetch `type_of(a_def_id)`, as it will cause a cycle
+ )?;
+ Ok(tcx.mk_opaque(a_def_id, substs))
+ }
}
_ => Err(TypeError::Sorts(expected_found(relation, a, b))),
@@ -592,7 +613,10 @@ pub fn super_relate_consts<'tcx, R: TypeRelation<'tcx>>(
if a_ty != b_ty {
relation.tcx().sess.delay_span_bug(
DUMMY_SP,
- &format!("cannot relate constants of different types: {} != {}", a_ty, b_ty),
+ &format!(
+ "cannot relate constants ({:?}, {:?}) of different types: {} != {}",
+ a, b, a_ty, b_ty
+ ),
);
}
@@ -602,11 +626,16 @@ pub fn super_relate_consts<'tcx, R: TypeRelation<'tcx>>(
// an unnormalized (i.e. unevaluated) const in the param-env.
// FIXME(generic_const_exprs): Once we always lazily unify unevaluated constants
// these `eval` calls can be removed.
- if !relation.tcx().features().generic_const_exprs {
+ if !tcx.features().generic_const_exprs {
a = a.eval(tcx, relation.param_env());
b = b.eval(tcx, relation.param_env());
}
+ if tcx.features().generic_const_exprs {
+ a = tcx.expand_abstract_consts(a);
+ b = tcx.expand_abstract_consts(b);
+ }
+
// Currently, the values that can be unified are primitive types,
// and those that derive both `PartialEq` and `Eq`, corresponding
// to structural-match types.
@@ -623,33 +652,69 @@ pub fn super_relate_consts<'tcx, R: TypeRelation<'tcx>>(
(ty::ConstKind::Placeholder(p1), ty::ConstKind::Placeholder(p2)) => p1 == p2,
(ty::ConstKind::Value(a_val), ty::ConstKind::Value(b_val)) => a_val == b_val,
- (ty::ConstKind::Unevaluated(au), ty::ConstKind::Unevaluated(bu))
- if tcx.features().generic_const_exprs =>
- {
- tcx.try_unify_abstract_consts(relation.param_env().and((au, bu)))
- }
-
// While this is slightly incorrect, it shouldn't matter for `min_const_generics`
// and is the better alternative to waiting until `generic_const_exprs` can
// be stabilized.
(ty::ConstKind::Unevaluated(au), ty::ConstKind::Unevaluated(bu)) if au.def == bu.def => {
+ assert_eq!(a.ty(), b.ty());
let substs = relation.relate_with_variance(
ty::Variance::Invariant,
ty::VarianceDiagInfo::default(),
au.substs,
bu.substs,
)?;
- return Ok(tcx.mk_const(ty::ConstS {
- kind: ty::ConstKind::Unevaluated(ty::UnevaluatedConst { def: au.def, substs }),
- ty: a.ty(),
- }));
+ return Ok(tcx.mk_const(ty::UnevaluatedConst { def: au.def, substs }, a.ty()));
+ }
+ // Before calling relate on exprs, it is necessary to ensure that the nested consts
+ // have identical types.
+ (ty::ConstKind::Expr(ae), ty::ConstKind::Expr(be)) => {
+ let r = relation;
+
+ // FIXME(generic_const_exprs): is it possible to relate two consts which are not identical
+ // exprs? Should we care about that?
+ let expr = match (ae, be) {
+ (Expr::Binop(a_op, al, ar), Expr::Binop(b_op, bl, br))
+ if a_op == b_op && al.ty() == bl.ty() && ar.ty() == br.ty() =>
+ {
+ Expr::Binop(a_op, r.consts(al, bl)?, r.consts(ar, br)?)
+ }
+ (Expr::UnOp(a_op, av), Expr::UnOp(b_op, bv))
+ if a_op == b_op && av.ty() == bv.ty() =>
+ {
+ Expr::UnOp(a_op, r.consts(av, bv)?)
+ }
+ (Expr::Cast(ak, av, at), Expr::Cast(bk, bv, bt))
+ if ak == bk && av.ty() == bv.ty() =>
+ {
+ Expr::Cast(ak, r.consts(av, bv)?, r.tys(at, bt)?)
+ }
+ (Expr::FunctionCall(af, aa), Expr::FunctionCall(bf, ba))
+ if aa.len() == ba.len()
+ && af.ty() == bf.ty()
+ && aa
+ .iter()
+ .zip(ba.iter())
+ .all(|(a_arg, b_arg)| a_arg.ty() == b_arg.ty()) =>
+ {
+ let func = r.consts(af, bf)?;
+ let mut related_args = Vec::with_capacity(aa.len());
+ for (a_arg, b_arg) in aa.iter().zip(ba.iter()) {
+ related_args.push(r.consts(a_arg, b_arg)?);
+ }
+ let related_args = tcx.mk_const_list(related_args.iter());
+ Expr::FunctionCall(func, related_args)
+ }
+ _ => return Err(TypeError::ConstMismatch(expected_found(r, a, b))),
+ };
+ let kind = ty::ConstKind::Expr(expr);
+ return Ok(tcx.mk_const(kind, a.ty()));
}
_ => false,
};
if is_match { Ok(a) } else { Err(TypeError::ConstMismatch(expected_found(relation, a, b))) }
}
-impl<'tcx> Relate<'tcx> for &'tcx ty::List<ty::Binder<'tcx, ty::ExistentialPredicate<'tcx>>> {
+impl<'tcx> Relate<'tcx> for &'tcx ty::List<ty::PolyExistentialPredicate<'tcx>> {
fn relate<R: TypeRelation<'tcx>>(
relation: &mut R,
a: Self,
diff --git a/compiler/rustc_middle/src/ty/structural_impls.rs b/compiler/rustc_middle/src/ty/structural_impls.rs
index 2cad333e3..9f70b4f1f 100644
--- a/compiler/rustc_middle/src/ty/structural_impls.rs
+++ b/compiler/rustc_middle/src/ty/structural_impls.rs
@@ -68,7 +68,7 @@ impl<'tcx> fmt::Debug for ty::adjustment::Adjustment<'tcx> {
impl fmt::Debug for ty::BoundRegionKind {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
match *self {
- ty::BrAnon(n) => write!(f, "BrAnon({:?})", n),
+ ty::BrAnon(n, span) => write!(f, "BrAnon({n:?}, {span:?})"),
ty::BrNamed(did, name) => {
if did.is_crate_root() {
write!(f, "BrNamed({})", name)
@@ -150,15 +150,23 @@ impl<'tcx> fmt::Debug for ty::Predicate<'tcx> {
}
}
+impl<'tcx> fmt::Debug for ty::Clause<'tcx> {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ match *self {
+ ty::Clause::Trait(ref a) => a.fmt(f),
+ ty::Clause::RegionOutlives(ref pair) => pair.fmt(f),
+ ty::Clause::TypeOutlives(ref pair) => pair.fmt(f),
+ ty::Clause::Projection(ref pair) => pair.fmt(f),
+ }
+ }
+}
+
impl<'tcx> fmt::Debug for ty::PredicateKind<'tcx> {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
match *self {
- ty::PredicateKind::Trait(ref a) => a.fmt(f),
+ ty::PredicateKind::Clause(ref a) => a.fmt(f),
ty::PredicateKind::Subtype(ref pair) => pair.fmt(f),
ty::PredicateKind::Coerce(ref pair) => pair.fmt(f),
- ty::PredicateKind::RegionOutlives(ref pair) => pair.fmt(f),
- ty::PredicateKind::TypeOutlives(ref pair) => pair.fmt(f),
- ty::PredicateKind::Projection(ref pair) => pair.fmt(f),
ty::PredicateKind::WellFormed(data) => write!(f, "WellFormed({:?})", data),
ty::PredicateKind::ObjectSafe(trait_def_id) => {
write!(f, "ObjectSafe({:?})", trait_def_id)
@@ -173,6 +181,7 @@ impl<'tcx> fmt::Debug for ty::PredicateKind<'tcx> {
ty::PredicateKind::TypeWellFormedFromEnv(ty) => {
write!(f, "TypeWellFormedFromEnv({:?})", ty)
}
+ ty::PredicateKind::Ambiguous => write!(f, "Ambiguous"),
}
}
}
@@ -240,7 +249,6 @@ TrivialTypeTraversalAndLiftImpls! {
Field,
interpret::Scalar,
rustc_target::abi::Size,
- ty::DelaySpanBugEmitted,
rustc_type_ir::DebruijnIndex,
ty::BoundVar,
ty::Placeholder<ty::BoundVar>,
@@ -587,12 +595,18 @@ impl<'tcx, T: TypeVisitable<'tcx>> TypeSuperVisitable<'tcx> for ty::Binder<'tcx,
}
}
-impl<'tcx> TypeFoldable<'tcx> for &'tcx ty::List<ty::Binder<'tcx, ty::ExistentialPredicate<'tcx>>> {
+impl<'tcx> TypeFoldable<'tcx> for &'tcx ty::List<ty::PolyExistentialPredicate<'tcx>> {
fn try_fold_with<F: FallibleTypeFolder<'tcx>>(self, folder: &mut F) -> Result<Self, F::Error> {
ty::util::fold_list(self, folder, |tcx, v| tcx.intern_poly_existential_predicates(v))
}
}
+impl<'tcx> TypeFoldable<'tcx> for &'tcx ty::List<ty::Const<'tcx>> {
+ fn try_fold_with<F: FallibleTypeFolder<'tcx>>(self, folder: &mut F) -> Result<Self, F::Error> {
+ ty::util::fold_list(self, folder, |tcx, v| tcx.mk_const_list(v.iter()))
+ }
+}
+
impl<'tcx> TypeFoldable<'tcx> for &'tcx ty::List<ProjectionKind> {
fn try_fold_with<F: FallibleTypeFolder<'tcx>>(self, folder: &mut F) -> Result<Self, F::Error> {
ty::util::fold_list(self, folder, |tcx, v| tcx.intern_projs(v))
@@ -806,7 +820,7 @@ impl<'tcx> TypeSuperFoldable<'tcx> for ty::Const<'tcx> {
let ty = self.ty().try_fold_with(folder)?;
let kind = self.kind().try_fold_with(folder)?;
if ty != self.ty() || kind != self.kind() {
- Ok(folder.tcx().mk_const(ty::ConstS { ty, kind }))
+ Ok(folder.tcx().mk_const(kind, ty))
} else {
Ok(self)
}
diff --git a/compiler/rustc_middle/src/ty/sty.rs b/compiler/rustc_middle/src/ty/sty.rs
index cf420bafe..9cbda95a4 100644
--- a/compiler/rustc_middle/src/ty/sty.rs
+++ b/compiler/rustc_middle/src/ty/sty.rs
@@ -17,9 +17,11 @@ use rustc_data_structures::captures::Captures;
use rustc_data_structures::intern::Interned;
use rustc_hir as hir;
use rustc_hir::def_id::DefId;
+use rustc_hir::LangItem;
use rustc_index::vec::Idx;
use rustc_macros::HashStable;
use rustc_span::symbol::{kw, sym, Symbol};
+use rustc_span::Span;
use rustc_target::abi::VariantIdx;
use rustc_target::spec::abi;
use std::borrow::Cow;
@@ -58,7 +60,7 @@ pub struct FreeRegion {
#[derive(HashStable)]
pub enum BoundRegionKind {
/// An anonymous region parameter for a given fn (&T)
- BrAnon(u32),
+ BrAnon(u32, Option<Span>),
/// Named region parameters for functions (a in &'a T)
///
@@ -81,7 +83,9 @@ pub struct BoundRegion {
impl BoundRegionKind {
pub fn is_named(&self) -> bool {
match *self {
- BoundRegionKind::BrNamed(_, name) => name != kw::UnderscoreLifetime,
+ BoundRegionKind::BrNamed(_, name) => {
+ name != kw::UnderscoreLifetime && name != kw::Empty
+ }
_ => false,
}
}
@@ -702,7 +706,9 @@ impl<'tcx> ExistentialPredicate<'tcx> {
}
}
-impl<'tcx> Binder<'tcx, ExistentialPredicate<'tcx>> {
+pub type PolyExistentialPredicate<'tcx> = Binder<'tcx, ExistentialPredicate<'tcx>>;
+
+impl<'tcx> PolyExistentialPredicate<'tcx> {
/// Given an existential predicate like `?Self: PartialEq<u32>` (e.g., derived from `dyn PartialEq<u32>`),
/// and a concrete type `self_ty`, returns a full predicate where the existentially quantified variable `?Self`
/// has been replaced with `self_ty` (e.g., `self_ty: PartialEq<u32>`, in our example).
@@ -716,17 +722,23 @@ impl<'tcx> Binder<'tcx, ExistentialPredicate<'tcx>> {
self.rebind(p.with_self_ty(tcx, self_ty)).to_predicate(tcx)
}
ExistentialPredicate::AutoTrait(did) => {
- let trait_ref = self.rebind(ty::TraitRef {
- def_id: did,
- substs: tcx.mk_substs_trait(self_ty, &[]),
- });
- trait_ref.without_const().to_predicate(tcx)
+ let generics = tcx.generics_of(did);
+ let trait_ref = if generics.params.len() == 1 {
+ tcx.mk_trait_ref(did, [self_ty])
+ } else {
+ // If this is an ill-formed auto trait, then synthesize
+ // new error substs for the missing generics.
+ let err_substs =
+ ty::InternalSubsts::extend_with_error(tcx, did, &[self_ty.into()]);
+ tcx.mk_trait_ref(did, err_substs)
+ };
+ self.rebind(trait_ref).without_const().to_predicate(tcx)
}
}
}
}
-impl<'tcx> List<ty::Binder<'tcx, ExistentialPredicate<'tcx>>> {
+impl<'tcx> List<ty::PolyExistentialPredicate<'tcx>> {
/// Returns the "principal `DefId`" of this set of existential predicates.
///
/// A Rust trait object type consists (in addition to a lifetime bound)
@@ -811,6 +823,13 @@ impl<'tcx> TraitRef<'tcx> {
TraitRef { def_id, substs }
}
+ pub fn with_self_type(self, tcx: TyCtxt<'tcx>, self_ty: Ty<'tcx>) -> Self {
+ tcx.mk_trait_ref(
+ self.def_id,
+ [self_ty.into()].into_iter().chain(self.substs.iter().skip(1)),
+ )
+ }
+
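
`with_self_type` rebuilds the trait ref's substitution list with a new `Self` type in the first slot and everything after it preserved. A standalone sketch of that list surgery, with strings standing in for generic args:

fn with_self<T: Clone>(args: &[T], new_self: T) -> Vec<T> {
    // Keep everything after the first slot, but put the new Self type up front.
    std::iter::once(new_self).chain(args.iter().skip(1).cloned()).collect()
}

fn main() {
    let args = vec!["OldSelf", "A", "B"];
    assert_eq!(with_self(&args, "NewSelf"), vec!["NewSelf", "A", "B"]);
}
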
/// Returns a `TraitRef` of the form `P0: Foo<P1..Pn>` where `Pi`
/// are the parameters defined on trait.
pub fn identity(tcx: TyCtxt<'tcx>, def_id: DefId) -> Binder<'tcx, TraitRef<'tcx>> {
@@ -845,23 +864,6 @@ impl<'tcx> PolyTraitRef<'tcx> {
pub fn def_id(&self) -> DefId {
self.skip_binder().def_id
}
-
- pub fn to_poly_trait_predicate(&self) -> ty::PolyTraitPredicate<'tcx> {
- self.map_bound(|trait_ref| ty::TraitPredicate {
- trait_ref,
- constness: ty::BoundConstness::NotConst,
- polarity: ty::ImplPolarity::Positive,
- })
- }
-
- /// Same as [`PolyTraitRef::to_poly_trait_predicate`] but sets a negative polarity instead.
- pub fn to_poly_trait_predicate_negative_polarity(&self) -> ty::PolyTraitPredicate<'tcx> {
- self.map_bound(|trait_ref| ty::TraitPredicate {
- trait_ref,
- constness: ty::BoundConstness::NotConst,
- polarity: ty::ImplPolarity::Negative,
- })
- }
}
impl rustc_errors::IntoDiagnosticArg for PolyTraitRef<'_> {
@@ -906,7 +908,7 @@ impl<'tcx> ExistentialTraitRef<'tcx> {
// otherwise the escaping vars would be captured by the binder
// debug_assert!(!self_ty.has_escaping_bound_vars());
- ty::TraitRef { def_id: self.def_id, substs: tcx.mk_substs_trait(self_ty, self.substs) }
+ tcx.mk_trait_ref(self.def_id, [self_ty.into()].into_iter().chain(self.substs.iter()))
}
}
@@ -1282,6 +1284,12 @@ impl<'tcx> ParamTy {
pub fn to_ty(self, tcx: TyCtxt<'tcx>) -> Ty<'tcx> {
tcx.mk_ty_param(self.index, self.name)
}
+
+ pub fn span_from_generics(&self, tcx: TyCtxt<'tcx>, item_with_generics: DefId) -> Span {
+ let generics = tcx.generics_of(item_with_generics);
+ let type_param = generics.type_param(self, tcx);
+ tcx.def_span(type_param.def_id)
+ }
}
#[derive(Copy, Clone, Hash, TyEncodable, TyDecodable, Eq, PartialEq, Ord, PartialOrd)]
@@ -1603,7 +1611,7 @@ impl<'tcx> Region<'tcx> {
impl<'tcx> Ty<'tcx> {
#[inline(always)]
pub fn kind(self) -> &'tcx TyKind<'tcx> {
- &self.0.0.kind
+ &self.0.0
}
#[inline(always)]
@@ -2095,7 +2103,7 @@ impl<'tcx> Ty<'tcx> {
ty::Str | ty::Slice(_) => (tcx.types.usize, false),
ty::Dynamic(..) => {
- let dyn_metadata = tcx.lang_items().dyn_metadata().unwrap();
+ let dyn_metadata = tcx.require_lang_item(LangItem::DynMetadata, None);
(tcx.bound_type_of(dyn_metadata).subst(tcx, &[tail.into()]), false)
},
@@ -2117,8 +2125,7 @@ impl<'tcx> Ty<'tcx> {
/// parameter. This is kind of a phantom type, except that the
/// most convenient thing for us to are the integral types. This
/// function converts such a special type into the closure
- /// kind. To go the other way, use
- /// `tcx.closure_kind_ty(closure_kind)`.
+ /// kind. To go the other way, use `closure_kind.to_ty(tcx)`.
///
/// Note that during type checking, we use an inference variable
/// to represent the closure kind, because it has not yet been
@@ -2244,7 +2251,7 @@ impl<'tcx> Ty<'tcx> {
}
}
- // If `self` is a primitive, return its [`Symbol`].
+ /// If `self` is a primitive, return its [`Symbol`].
pub fn primitive_symbol(self) -> Option<Symbol> {
match self.kind() {
ty::Bool => Some(sym::bool),
diff --git a/compiler/rustc_middle/src/ty/subst.rs b/compiler/rustc_middle/src/ty/subst.rs
index 0660e9b79..a1b084a5e 100644
--- a/compiler/rustc_middle/src/ty/subst.rs
+++ b/compiler/rustc_middle/src/ty/subst.rs
@@ -6,11 +6,11 @@ use crate::ty::sty::{ClosureSubsts, GeneratorSubsts, InlineConstSubsts};
use crate::ty::visit::{TypeVisitable, TypeVisitor};
use crate::ty::{self, Lift, List, ParamConst, Ty, TyCtxt};
-use rustc_data_structures::captures::Captures;
-use rustc_data_structures::intern::{Interned, WithStableHash};
+use rustc_data_structures::intern::Interned;
use rustc_hir::def_id::DefId;
use rustc_macros::HashStable;
use rustc_serialize::{self, Decodable, Encodable};
+use rustc_type_ir::WithCachedTypeInfo;
use smallvec::SmallVec;
use core::intrinsics;
@@ -19,7 +19,7 @@ use std::fmt;
use std::marker::PhantomData;
use std::mem;
use std::num::NonZeroUsize;
-use std::ops::ControlFlow;
+use std::ops::{ControlFlow, Deref};
use std::slice;
/// An entity in the Rust type system, which can be one of
@@ -85,7 +85,7 @@ impl<'tcx> GenericArgKind<'tcx> {
GenericArgKind::Type(ty) => {
// Ensure we can use the tag bits.
assert_eq!(mem::align_of_val(&*ty.0.0) & TAG_MASK, 0);
- (TYPE_TAG, ty.0.0 as *const WithStableHash<ty::TyS<'tcx>> as usize)
+ (TYPE_TAG, ty.0.0 as *const WithCachedTypeInfo<ty::TyKind<'tcx>> as usize)
}
GenericArgKind::Const(ct) => {
// Ensure we can use the tag bits.
@@ -141,6 +141,15 @@ impl<'tcx> From<ty::Const<'tcx>> for GenericArg<'tcx> {
}
}
+impl<'tcx> From<ty::Term<'tcx>> for GenericArg<'tcx> {
+ fn from(value: ty::Term<'tcx>) -> Self {
+ match value.unpack() {
+ ty::TermKind::Ty(t) => t.into(),
+ ty::TermKind::Const(c) => c.into(),
+ }
+ }
+}
+
impl<'tcx> GenericArg<'tcx> {
#[inline]
pub fn unpack(self) -> GenericArgKind<'tcx> {
@@ -154,7 +163,7 @@ impl<'tcx> GenericArg<'tcx> {
&*((ptr & !TAG_MASK) as *const ty::RegionKind<'tcx>),
))),
TYPE_TAG => GenericArgKind::Type(Ty(Interned::new_unchecked(
- &*((ptr & !TAG_MASK) as *const WithStableHash<ty::TyS<'tcx>>),
+ &*((ptr & !TAG_MASK) as *const WithCachedTypeInfo<ty::TyKind<'tcx>>),
))),
CONST_TAG => GenericArgKind::Const(ty::Const(Interned::new_unchecked(
&*((ptr & !TAG_MASK) as *const ty::ConstS<'tcx>),
@@ -344,6 +353,22 @@ impl<'tcx> InternalSubsts<'tcx> {
}
}
+ // Extend an `original_substs` list to the full number of substs expected by `def_id`,
+ // filling in the missing parameters with error ty/ct or 'static regions.
+ pub fn extend_with_error(
+ tcx: TyCtxt<'tcx>,
+ def_id: DefId,
+ original_substs: &[GenericArg<'tcx>],
+ ) -> SubstsRef<'tcx> {
+ ty::InternalSubsts::for_item(tcx, def_id, |def, substs| {
+ if let Some(subst) = original_substs.get(def.index as usize) {
+ *subst
+ } else {
+ def.to_error(tcx, substs)
+ }
+ })
+ }
+
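
`extend_with_error` fills every parameter slot: positions covered by `original_substs` are copied through, and the rest get per-parameter error or `'static` placeholders. A standalone sketch of the same "use the prefix where present, synthesize the rest" pattern:

fn extend_with_fallback(expected: usize, provided: &[i32], fallback: i32) -> Vec<i32> {
    // Use the caller-provided prefix where it exists; synthesize the rest.
    (0..expected).map(|i| provided.get(i).copied().unwrap_or(fallback)).collect()
}

fn main() {
    assert_eq!(extend_with_fallback(4, &[7, 8], -1), vec![7, 8, -1, -1]);
}
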
#[inline]
pub fn types(&'tcx self) -> impl DoubleEndedIterator<Item = Ty<'tcx>> + 'tcx {
self.iter()
@@ -506,6 +531,9 @@ impl<'tcx, T: TypeVisitable<'tcx>> TypeVisitable<'tcx> for &'tcx ty::List<T> {
}
}
+/// Similar to [`super::Binder`] except that it tracks early bound generics, i.e. `struct Foo<T>(T)`
+/// needs `T` substituted immediately. This type primarily exists to avoid forgetting to call
+/// `subst`.
#[derive(Copy, Clone, PartialEq, Eq, PartialOrd, Ord, Hash, Debug)]
#[derive(Encodable, Decodable, HashStable)]
pub struct EarlyBinder<T>(pub T);
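
The doc comment's point is that the payload should be unreachable until substitution happens. A minimal sketch of that pattern with a toy wrapper (not the rustc `EarlyBinder`):

struct EarlyBound<T>(T);

impl<T> EarlyBound<T> {
    // The payload can only be taken out by going through the substitution step,
    // so "forgot to subst" becomes a type error rather than a silently wrong value.
    fn subst(self, substitute: impl FnOnce(T) -> T) -> T {
        substitute(self.0)
    }
}

fn main() {
    let bound = EarlyBound(String::from("Vec<T>"));
    let concrete = bound.subst(|s| s.replace('T', "u32"));
    assert_eq!(concrete, "Vec<u32>");
}
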
@@ -559,25 +587,94 @@ impl<T, U> EarlyBinder<(T, U)> {
}
}
-impl<'tcx, 's, T: IntoIterator<Item = I>, I: TypeFoldable<'tcx>> EarlyBinder<T> {
+impl<'tcx, 's, I: IntoIterator> EarlyBinder<I>
+where
+ I::Item: TypeFoldable<'tcx>,
+{
pub fn subst_iter(
self,
tcx: TyCtxt<'tcx>,
substs: &'s [GenericArg<'tcx>],
- ) -> impl Iterator<Item = I> + Captures<'s> + Captures<'tcx> {
- self.0.into_iter().map(move |t| EarlyBinder(t).subst(tcx, substs))
+ ) -> SubstIter<'s, 'tcx, I> {
+ SubstIter { it: self.0.into_iter(), tcx, substs }
+ }
+}
+
+pub struct SubstIter<'s, 'tcx, I: IntoIterator> {
+ it: I::IntoIter,
+ tcx: TyCtxt<'tcx>,
+ substs: &'s [GenericArg<'tcx>],
+}
+
+impl<'tcx, I: IntoIterator> Iterator for SubstIter<'_, 'tcx, I>
+where
+ I::Item: TypeFoldable<'tcx>,
+{
+ type Item = I::Item;
+
+ fn next(&mut self) -> Option<Self::Item> {
+ Some(EarlyBinder(self.it.next()?).subst(self.tcx, self.substs))
+ }
+
+ fn size_hint(&self) -> (usize, Option<usize>) {
+ self.it.size_hint()
}
}
-impl<'tcx, 's, 'a, T: IntoIterator<Item = &'a I>, I: Copy + TypeFoldable<'tcx> + 'a>
- EarlyBinder<T>
+impl<'tcx, I: IntoIterator> DoubleEndedIterator for SubstIter<'_, 'tcx, I>
+where
+ I::IntoIter: DoubleEndedIterator,
+ I::Item: TypeFoldable<'tcx>,
+{
+ fn next_back(&mut self) -> Option<Self::Item> {
+ Some(EarlyBinder(self.it.next_back()?).subst(self.tcx, self.substs))
+ }
+}
+
+impl<'tcx, 's, I: IntoIterator> EarlyBinder<I>
+where
+ I::Item: Deref,
+ <I::Item as Deref>::Target: Copy + TypeFoldable<'tcx>,
{
pub fn subst_iter_copied(
self,
tcx: TyCtxt<'tcx>,
substs: &'s [GenericArg<'tcx>],
- ) -> impl Iterator<Item = I> + Captures<'s> + Captures<'tcx> + Captures<'a> {
- self.0.into_iter().map(move |t| EarlyBinder(*t).subst(tcx, substs))
+ ) -> SubstIterCopied<'s, 'tcx, I> {
+ SubstIterCopied { it: self.0.into_iter(), tcx, substs }
+ }
+}
+
+pub struct SubstIterCopied<'a, 'tcx, I: IntoIterator> {
+ it: I::IntoIter,
+ tcx: TyCtxt<'tcx>,
+ substs: &'a [GenericArg<'tcx>],
+}
+
+impl<'tcx, I: IntoIterator> Iterator for SubstIterCopied<'_, 'tcx, I>
+where
+ I::Item: Deref,
+ <I::Item as Deref>::Target: Copy + TypeFoldable<'tcx>,
+{
+ type Item = <I::Item as Deref>::Target;
+
+ fn next(&mut self) -> Option<Self::Item> {
+ Some(EarlyBinder(*self.it.next()?).subst(self.tcx, self.substs))
+ }
+
+ fn size_hint(&self) -> (usize, Option<usize>) {
+ self.it.size_hint()
+ }
+}
+
+impl<'tcx, I: IntoIterator> DoubleEndedIterator for SubstIterCopied<'_, 'tcx, I>
+where
+ I::IntoIter: DoubleEndedIterator,
+ I::Item: Deref,
+ <I::Item as Deref>::Target: Copy + TypeFoldable<'tcx>,
+{
+ fn next_back(&mut self) -> Option<Self::Item> {
+ Some(EarlyBinder(*self.it.next_back()?).subst(self.tcx, self.substs))
}
}
@@ -597,6 +694,10 @@ impl<T: Iterator> Iterator for EarlyBinderIter<T> {
fn next(&mut self) -> Option<Self::Item> {
self.t.next().map(|i| EarlyBinder(i))
}
+
+ fn size_hint(&self) -> (usize, Option<usize>) {
+ self.t.size_hint()
+ }
}
impl<'tcx, T: TypeFoldable<'tcx>> ty::EarlyBinder<T> {
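
The `subst.rs` hunks above swap the `impl Iterator<Item = _> + Captures<'_>` return types of `subst_iter`/`subst_iter_copied` for named iterator structs, so the substituting wrappers can keep `DoubleEndedIterator` and forward `size_hint`. A minimal, self-contained sketch of that pattern (simplified, illustrative names; not rustc code):

```rust
// A named iterator struct wrapping `I::IntoIter` plus some borrowed context,
// mirroring the shape of `SubstIter` above.
struct ScaleIter<'a, I: IntoIterator> {
    it: I::IntoIter,
    factor: &'a i64,
}

// Constructor analogous to `subst_iter`: it pins down `I` for inference.
fn scale_iter<I: IntoIterator<Item = i64>>(values: I, factor: &i64) -> ScaleIter<'_, I> {
    ScaleIter { it: values.into_iter(), factor }
}

impl<'a, I: IntoIterator<Item = i64>> Iterator for ScaleIter<'a, I> {
    type Item = i64;

    fn next(&mut self) -> Option<i64> {
        Some(self.it.next()? * self.factor)
    }

    // Forwarding `size_hint` keeps `collect` allocations exact, like the
    // forwarding added to `SubstIter`/`EarlyBinderIter` above.
    fn size_hint(&self) -> (usize, Option<usize>) {
        self.it.size_hint()
    }
}

// Possible only because the iterator type is named: an `impl Iterator` return
// type could not be given an extra `DoubleEndedIterator` implementation.
impl<'a, I: IntoIterator<Item = i64>> DoubleEndedIterator for ScaleIter<'a, I>
where
    I::IntoIter: DoubleEndedIterator,
{
    fn next_back(&mut self) -> Option<i64> {
        Some(self.it.next_back()? * self.factor)
    }
}

fn main() {
    let factor = 3_i64;
    let scaled = scale_iter(vec![1_i64, 2, 3], &factor);
    assert_eq!(scaled.rev().collect::<Vec<_>>(), vec![9, 6, 3]);
}
```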
diff --git a/compiler/rustc_middle/src/ty/trait_def.rs b/compiler/rustc_middle/src/ty/trait_def.rs
index ac79949fc..b38a5fbf2 100644
--- a/compiler/rustc_middle/src/ty/trait_def.rs
+++ b/compiler/rustc_middle/src/ty/trait_def.rs
@@ -211,7 +211,7 @@ impl<'tcx> TyCtxt<'tcx> {
}
}
-// Query provider for `trait_impls_of`.
+/// Query provider for `trait_impls_of`.
pub(super) fn trait_impls_of_provider(tcx: TyCtxt<'_>, trait_id: DefId) -> TraitImpls {
let mut impls = TraitImpls::default();
@@ -255,7 +255,7 @@ pub(super) fn trait_impls_of_provider(tcx: TyCtxt<'_>, trait_id: DefId) -> Trait
impls
}
-// Query provider for `incoherent_impls`.
+/// Query provider for `incoherent_impls`.
pub(super) fn incoherent_impls_provider(tcx: TyCtxt<'_>, simp: SimplifiedType) -> &[DefId] {
let mut impls = Vec::new();
diff --git a/compiler/rustc_middle/src/ty/util.rs b/compiler/rustc_middle/src/ty/util.rs
index f72e236ed..47c1ce807 100644
--- a/compiler/rustc_middle/src/ty/util.rs
+++ b/compiler/rustc_middle/src/ty/util.rs
@@ -8,8 +8,6 @@ use crate::ty::{
};
use crate::ty::{GenericArgKind, SubstsRef};
use rustc_apfloat::Float as _;
-use rustc_ast as ast;
-use rustc_attr::{self as attr, SignedInt, UnsignedInt};
use rustc_data_structures::fx::{FxHashMap, FxHashSet};
use rustc_data_structures::stable_hasher::{HashStable, StableHasher};
use rustc_errors::ErrorGuaranteed;
@@ -19,7 +17,7 @@ use rustc_hir::def_id::DefId;
use rustc_index::bit_set::GrowableBitSet;
use rustc_macros::HashStable;
use rustc_span::{sym, DUMMY_SP};
-use rustc_target::abi::{Integer, Size, TargetDataLayout};
+use rustc_target::abi::{Integer, IntegerType, Size, TargetDataLayout};
use rustc_target::spec::abi::Abi;
use smallvec::SmallVec;
use std::{fmt, iter};
@@ -104,21 +102,12 @@ pub trait IntTypeExt {
fn initial_discriminant<'tcx>(&self, tcx: TyCtxt<'tcx>) -> Discr<'tcx>;
}
-impl IntTypeExt for attr::IntType {
+impl IntTypeExt for IntegerType {
fn to_ty<'tcx>(&self, tcx: TyCtxt<'tcx>) -> Ty<'tcx> {
- match *self {
- SignedInt(ast::IntTy::I8) => tcx.types.i8,
- SignedInt(ast::IntTy::I16) => tcx.types.i16,
- SignedInt(ast::IntTy::I32) => tcx.types.i32,
- SignedInt(ast::IntTy::I64) => tcx.types.i64,
- SignedInt(ast::IntTy::I128) => tcx.types.i128,
- SignedInt(ast::IntTy::Isize) => tcx.types.isize,
- UnsignedInt(ast::UintTy::U8) => tcx.types.u8,
- UnsignedInt(ast::UintTy::U16) => tcx.types.u16,
- UnsignedInt(ast::UintTy::U32) => tcx.types.u32,
- UnsignedInt(ast::UintTy::U64) => tcx.types.u64,
- UnsignedInt(ast::UintTy::U128) => tcx.types.u128,
- UnsignedInt(ast::UintTy::Usize) => tcx.types.usize,
+ match self {
+ IntegerType::Pointer(true) => tcx.types.isize,
+ IntegerType::Pointer(false) => tcx.types.usize,
+ IntegerType::Fixed(i, s) => i.to_ty(tcx, *s),
}
}
@@ -1219,11 +1208,11 @@ pub fn is_trivially_const_drop<'tcx>(ty: Ty<'tcx>) -> bool {
}
}
-// Does the equivalent of
-// ```
-// let v = self.iter().map(|p| p.fold_with(folder)).collect::<SmallVec<[_; 8]>>();
-// folder.tcx().intern_*(&v)
-// ```
+/// Does the equivalent of
+/// ```ignore (illustrative)
+/// let v = self.iter().map(|p| p.fold_with(folder)).collect::<SmallVec<[_; 8]>>();
+/// folder.tcx().intern_*(&v)
+/// ```
pub fn fold_list<'tcx, F, T>(
list: &'tcx ty::List<T>,
folder: &mut F,
@@ -1259,9 +1248,9 @@ where
#[derive(Copy, Clone, Debug, HashStable, TyEncodable, TyDecodable)]
pub struct AlwaysRequiresDrop;
-/// Normalizes all opaque types in the given value, replacing them
+/// Reveals all opaque types in the given value, replacing them
/// with their underlying types.
-pub fn normalize_opaque_types<'tcx>(
+pub fn reveal_opaque_types_in_bounds<'tcx>(
tcx: TyCtxt<'tcx>,
val: &'tcx ty::List<ty::Predicate<'tcx>>,
) -> &'tcx ty::List<ty::Predicate<'tcx>> {
@@ -1298,7 +1287,7 @@ pub fn is_intrinsic(tcx: TyCtxt<'_>, def_id: DefId) -> bool {
pub fn provide(providers: &mut ty::query::Providers) {
*providers = ty::query::Providers {
- normalize_opaque_types,
+ reveal_opaque_types_in_bounds,
is_doc_hidden,
is_doc_notable_trait,
is_intrinsic,
diff --git a/compiler/rustc_middle/src/ty/visit.rs b/compiler/rustc_middle/src/ty/visit.rs
index c09f71f9a..4cdfd9e59 100644
--- a/compiler/rustc_middle/src/ty/visit.rs
+++ b/compiler/rustc_middle/src/ty/visit.rs
@@ -72,12 +72,18 @@ pub trait TypeVisitable<'tcx>: fmt::Debug + Clone {
self.visit_with(&mut HasEscapingVarsVisitor { outer_index: binder }).is_break()
}
- /// Returns `true` if this `self` has any regions that escape `binder` (and
+ /// Returns `true` if this type has any regions that escape `binder` (and
/// hence are not bound by it).
fn has_vars_bound_above(&self, binder: ty::DebruijnIndex) -> bool {
self.has_vars_bound_at_or_above(binder.shifted_in(1))
}
+ /// Returns `true` if this type has regions that are not a part of the type.
+ /// For example, `for<'a> fn(&'a i32)` returns `false`, while `fn(&'a i32)`
+ /// would return `true`. The latter can occur when traversing through the
+ /// former.
+ ///
+ /// See [`HasEscapingVarsVisitor`] for more information.
fn has_escaping_bound_vars(&self) -> bool {
self.has_vars_bound_at_or_above(ty::INNERMOST)
}
@@ -95,11 +101,15 @@ pub trait TypeVisitable<'tcx>: fmt::Debug + Clone {
fn references_error(&self) -> bool {
self.has_type_flags(TypeFlags::HAS_ERROR)
}
- fn error_reported(&self) -> Option<ErrorGuaranteed> {
+ fn error_reported(&self) -> Result<(), ErrorGuaranteed> {
if self.references_error() {
- Some(ErrorGuaranteed::unchecked_claim_error_was_emitted())
+ if let Some(reported) = ty::tls::with(|tcx| tcx.sess.is_compilation_going_to_fail()) {
+ Err(reported)
+ } else {
+ bug!("expected `tcx.sess.is_compilation_going_to_fail` to return `Some`");
+ }
} else {
- None
+ Ok(())
}
}
fn has_non_region_param(&self) -> bool {
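
The `error_reported` change above turns an `Option<ErrorGuaranteed>` into a `Result<(), ErrorGuaranteed>`, so call sites can thread the guarantee token with `?`. A self-contained analogue of that design choice (toy types standing in for rustc's; not the real trait):

```rust
// Stands in for `ErrorGuaranteed`: proof that an error was already emitted.
#[derive(Clone, Copy, Debug)]
struct Guaranteed;

trait Visitable {
    // New shape: `Ok(())` when no error type is referenced, `Err(token)` otherwise.
    fn error_reported(&self) -> Result<(), Guaranteed>;
}

struct Thing {
    has_error: bool,
}

impl Visitable for Thing {
    fn error_reported(&self) -> Result<(), Guaranteed> {
        if self.has_error { Err(Guaranteed) } else { Ok(()) }
    }
}

fn check(t: &Thing) -> Result<(), Guaranteed> {
    // Previously: `if let Some(guar) = t.error_reported() { return Err(guar); }`
    t.error_reported()?;
    // ... logic that may rely on the value being error-free ...
    Ok(())
}

fn main() {
    assert!(check(&Thing { has_error: false }).is_ok());
    assert!(check(&Thing { has_error: true }).is_err());
}
```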
diff --git a/compiler/rustc_middle/src/ty/vtable.rs b/compiler/rustc_middle/src/ty/vtable.rs
index 5ca51c25a..6eae94511 100644
--- a/compiler/rustc_middle/src/ty/vtable.rs
+++ b/compiler/rustc_middle/src/ty/vtable.rs
@@ -66,7 +66,7 @@ pub(super) fn vtable_allocation_provider<'tcx>(
let layout = tcx
.layout_of(ty::ParamEnv::reveal_all().and(ty))
.expect("failed to build vtable representation");
- assert!(!layout.is_unsized(), "can't create a vtable for an unsized type");
+ assert!(layout.is_sized(), "can't create a vtable for an unsized type");
let size = layout.size.bytes();
let align = layout.align.abi.bytes();
diff --git a/compiler/rustc_middle/src/ty/walk.rs b/compiler/rustc_middle/src/ty/walk.rs
index 91db9698c..4fab5abe9 100644
--- a/compiler/rustc_middle/src/ty/walk.rs
+++ b/compiler/rustc_middle/src/ty/walk.rs
@@ -214,6 +214,24 @@ fn push_inner<'tcx>(stack: &mut TypeWalkerStack<'tcx>, parent: GenericArg<'tcx>)
| ty::ConstKind::Value(_)
| ty::ConstKind::Error(_) => {}
+ ty::ConstKind::Expr(expr) => match expr {
+ ty::Expr::UnOp(_, v) => push_inner(stack, v.into()),
+ ty::Expr::Binop(_, l, r) => {
+ push_inner(stack, r.into());
+ push_inner(stack, l.into())
+ }
+ ty::Expr::FunctionCall(func, args) => {
+ for a in args.iter().rev() {
+ push_inner(stack, a.into());
+ }
+ push_inner(stack, func.into());
+ }
+ ty::Expr::Cast(_, c, t) => {
+ push_inner(stack, t.into());
+ push_inner(stack, c.into());
+ }
+ },
+
ty::ConstKind::Unevaluated(ct) => {
stack.extend(ct.substs.iter().rev());
}
diff --git a/compiler/rustc_middle/src/values.rs b/compiler/rustc_middle/src/values.rs
index f4b4c3fb0..70b98e59a 100644
--- a/compiler/rustc_middle/src/values.rs
+++ b/compiler/rustc_middle/src/values.rs
@@ -32,13 +32,23 @@ impl<'tcx> Value<TyCtxt<'tcx>> for ty::SymbolName<'_> {
}
impl<'tcx> Value<TyCtxt<'tcx>> for ty::Binder<'_, ty::FnSig<'_>> {
- fn from_cycle_error(tcx: TyCtxt<'tcx>, _: &[QueryInfo]) -> Self {
+ fn from_cycle_error(tcx: TyCtxt<'tcx>, stack: &[QueryInfo]) -> Self {
let err = tcx.ty_error();
- // FIXME(compiler-errors): It would be nice if we could get the
- // query key, so we could at least generate a fn signature that
- // has the right arity.
+
+ let arity = if let Some(frame) = stack.get(0)
+ && frame.query.name == "fn_sig"
+ && let Some(def_id) = frame.query.def_id
+ && let Some(node) = tcx.hir().get_if_local(def_id)
+ && let Some(sig) = node.fn_sig()
+ {
+ sig.decl.inputs.len() + sig.decl.implicit_self.has_implicit_self() as usize
+ } else {
+ tcx.sess.abort_if_errors();
+ unreachable!()
+ };
+
let fn_sig = ty::Binder::dummy(tcx.mk_fn_sig(
- [].into_iter(),
+ std::iter::repeat(err).take(arity),
err,
false,
rustc_hir::Unsafety::Normal,
@@ -185,7 +195,8 @@ fn find_item_ty_spans(
});
if check_params && let Some(args) = path.segments.last().unwrap().args {
let params_in_repr = tcx.params_in_repr(def_id);
- for (i, arg) in args.args.iter().enumerate() {
+ // the domain size check is needed because the HIR may not be well-formed at this point
+ for (i, arg) in args.args.iter().enumerate().take(params_in_repr.domain_size()) {
if let hir::GenericArg::Type(ty) = arg && params_in_repr.contains(i as u32) {
find_item_ty_spans(tcx, ty, needle, spans, seen_representable);
}
diff --git a/compiler/rustc_mir_build/Cargo.toml b/compiler/rustc_mir_build/Cargo.toml
index c726fa3a3..2baa3bfcb 100644
--- a/compiler/rustc_mir_build/Cargo.toml
+++ b/compiler/rustc_mir_build/Cargo.toml
@@ -8,6 +8,7 @@ edition = "2021"
[dependencies]
rustc_arena = { path = "../rustc_arena" }
tracing = "0.1"
+either = "1"
rustc_middle = { path = "../rustc_middle" }
rustc_apfloat = { path = "../rustc_apfloat" }
rustc_attr = { path = "../rustc_attr" }
diff --git a/compiler/rustc_mir_build/src/build/block.rs b/compiler/rustc_mir_build/src/build/block.rs
index 183db56d7..49d7136a2 100644
--- a/compiler/rustc_mir_build/src/build/block.rs
+++ b/compiler/rustc_mir_build/src/build/block.rs
@@ -1,4 +1,3 @@
-use crate::build::matches::ArmHasGuard;
use crate::build::ForGuard::OutsideGuard;
use crate::build::{BlockAnd, BlockAndExtension, BlockFrame, Builder};
use rustc_middle::middle::region::Scope;
@@ -118,7 +117,7 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
else_block: Some(else_block),
} => {
// When lowering the statement `let <pat> = <expr> else { <else> };`,
- // the `<else>` block is nested in the parent scope enclosing this statment.
+ // the `<else>` block is nested in the parent scope enclosing this statement.
// That scope is usually either the enclosing block scope,
// or the remainder scope of the last statement.
// This is to make sure that temporaries instantiated in `<expr>` are dropped
@@ -231,7 +230,7 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
visibility_scope,
remainder_span,
pattern,
- ArmHasGuard(false),
+ None,
Some((None, initializer_span)),
);
this.visit_primary_bindings(
@@ -308,7 +307,7 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
visibility_scope,
remainder_span,
pattern,
- ArmHasGuard(false),
+ None,
Some((None, initializer_span)),
);
this.expr_into_pattern(block, &pattern, init)
@@ -324,7 +323,7 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
visibility_scope,
remainder_span,
pattern,
- ArmHasGuard(false),
+ None,
None,
);
block.unit()
diff --git a/compiler/rustc_mir_build/src/build/custom/mod.rs b/compiler/rustc_mir_build/src/build/custom/mod.rs
new file mode 100644
index 000000000..eb021f477
--- /dev/null
+++ b/compiler/rustc_mir_build/src/build/custom/mod.rs
@@ -0,0 +1,155 @@
+//! Provides the implementation of the `custom_mir` attribute.
+//!
+//! Up until MIR building, this attribute has absolutely no effect. The `mir!` macro is a normal
+//! decl macro that expands like any other, and the code goes through parsing, name resolution and
+//! type checking like all other code. In MIR building we finally detect whether this attribute is
+//! present, and if so we branch off into this module, which implements the attribute via
+//! a custom lowering from THIR to MIR.
+//!
+//! The result of this lowering is returned "normally" from the `mir_built` query, with the only
+//! notable difference being that the `injected` field in the body is set. Various components of the
+//! MIR pipeline, like borrowck and the pass manager, will then consult this field (via
+//! `body.should_skip()`) to skip the parts of the MIR pipeline that precede the MIR phase the user
+//! specified.
+//!
+//! This file defines the general framework for the custom parsing. The parsing for all the
+//! "top-level" constructs can be found in the `parse` submodule, while the parsing for statements,
+//! terminators, and everything below can be found in the `parse::instruction` submodule.
+//!
+
+use rustc_ast::Attribute;
+use rustc_data_structures::fx::FxHashMap;
+use rustc_hir::def_id::DefId;
+use rustc_index::vec::IndexVec;
+use rustc_middle::{
+ mir::*,
+ thir::*,
+ ty::{Ty, TyCtxt},
+};
+use rustc_span::Span;
+
+mod parse;
+
+pub(super) fn build_custom_mir<'tcx>(
+ tcx: TyCtxt<'tcx>,
+ did: DefId,
+ thir: &Thir<'tcx>,
+ expr: ExprId,
+ params: &IndexVec<ParamId, Param<'tcx>>,
+ return_ty: Ty<'tcx>,
+ return_ty_span: Span,
+ span: Span,
+ attr: &Attribute,
+) -> Body<'tcx> {
+ let mut body = Body {
+ basic_blocks: BasicBlocks::new(IndexVec::new()),
+ source: MirSource::item(did),
+ phase: MirPhase::Built,
+ source_scopes: IndexVec::new(),
+ generator: None,
+ local_decls: LocalDecls::new(),
+ user_type_annotations: IndexVec::new(),
+ arg_count: params.len(),
+ spread_arg: None,
+ var_debug_info: Vec::new(),
+ span,
+ required_consts: Vec::new(),
+ is_polymorphic: false,
+ tainted_by_errors: None,
+ injection_phase: None,
+ pass_count: 0,
+ };
+
+ body.local_decls.push(LocalDecl::new(return_ty, return_ty_span));
+ body.basic_blocks_mut().push(BasicBlockData::new(None));
+ body.source_scopes.push(SourceScopeData {
+ span,
+ parent_scope: None,
+ inlined: None,
+ inlined_parent_scope: None,
+ local_data: ClearCrossCrate::Clear,
+ });
+ body.injection_phase = Some(parse_attribute(attr));
+
+ let mut pctxt = ParseCtxt {
+ tcx,
+ thir,
+ source_scope: OUTERMOST_SOURCE_SCOPE,
+ body: &mut body,
+ local_map: FxHashMap::default(),
+ block_map: FxHashMap::default(),
+ };
+
+ let res = (|| {
+ pctxt.parse_args(&params)?;
+ pctxt.parse_body(expr)
+ })();
+ if let Err(err) = res {
+ tcx.sess.diagnostic().span_fatal(
+ err.span,
+ format!("Could not parse {}, found: {:?}", err.expected, err.item_description),
+ )
+ }
+
+ body
+}
+
+fn parse_attribute(attr: &Attribute) -> MirPhase {
+ let meta_items = attr.meta_item_list().unwrap();
+ let mut dialect: Option<String> = None;
+ let mut phase: Option<String> = None;
+
+ for nested in meta_items {
+ let name = nested.name_or_empty();
+ let value = nested.value_str().unwrap().as_str().to_string();
+ match name.as_str() {
+ "dialect" => {
+ assert!(dialect.is_none());
+ dialect = Some(value);
+ }
+ "phase" => {
+ assert!(phase.is_none());
+ phase = Some(value);
+ }
+ other => {
+ panic!("Unexpected key {}", other);
+ }
+ }
+ }
+
+ let Some(dialect) = dialect else {
+ assert!(phase.is_none());
+ return MirPhase::Built;
+ };
+
+ MirPhase::parse(dialect, phase)
+}
+
+struct ParseCtxt<'tcx, 'body> {
+ tcx: TyCtxt<'tcx>,
+ thir: &'body Thir<'tcx>,
+ source_scope: SourceScope,
+
+ body: &'body mut Body<'tcx>,
+ local_map: FxHashMap<LocalVarId, Local>,
+ block_map: FxHashMap<LocalVarId, BasicBlock>,
+}
+
+struct ParseError {
+ span: Span,
+ item_description: String,
+ expected: String,
+}
+
+impl<'tcx, 'body> ParseCtxt<'tcx, 'body> {
+ fn expr_error(&self, expr: ExprId, expected: &'static str) -> ParseError {
+ let expr = &self.thir[expr];
+ ParseError {
+ span: expr.span,
+ item_description: format!("{:?}", expr.kind),
+ expected: expected.to_string(),
+ }
+ }
+}
+
+type PResult<T> = Result<T, ParseError>;
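
The new `custom/mod.rs` above only implements the compiler side of the lowering. For orientation, here is a user-side sketch in the style of the feature's test suite; the exact `#[custom_mir]`/`mir!` surface from `core::intrinsics::mir` is an assumption here, not defined by this file:

```rust
// Illustrative, nightly-only sketch of a function whose body is handed to
// `build_custom_mir` above. `dialect`/`phase` are the keys read by
// `parse_attribute`.
#![feature(custom_mir, core_intrinsics)]

use core::intrinsics::mir::*;

#[custom_mir(dialect = "built")]
pub fn identity(x: i32) -> i32 {
    // The macro expands to the nested block structure that `parse_body` in the
    // `parse` submodule walks: block decls, local decls, then block definitions.
    mir!({
        RET = x;
        Return()
    })
}

fn main() {
    assert_eq!(identity(7), 7);
}
```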
diff --git a/compiler/rustc_mir_build/src/build/custom/parse.rs b/compiler/rustc_mir_build/src/build/custom/parse.rs
new file mode 100644
index 000000000..d72770e70
--- /dev/null
+++ b/compiler/rustc_mir_build/src/build/custom/parse.rs
@@ -0,0 +1,256 @@
+use rustc_index::vec::IndexVec;
+use rustc_middle::{mir::*, thir::*, ty::Ty};
+use rustc_span::Span;
+
+use super::{PResult, ParseCtxt, ParseError};
+
+mod instruction;
+
+/// Helper macro for parsing custom MIR.
+///
+/// Example usage looks something like:
+/// ```rust,ignore (incomplete example)
+/// parse_by_kind!(
+/// self, // : &ParseCtxt
+/// expr_id, // what you're matching against
+/// "assignment", // the thing you're trying to parse
+/// @call("mir_assign", args) => { args[0] }, // match invocations of the `mir_assign` special function
+/// ExprKind::Assign { lhs, .. } => { lhs }, // match thir assignment expressions
+/// // no need for fallthrough case - reasonable error is automatically generated
+/// )
+/// ```
+macro_rules! parse_by_kind {
+ (
+ $self:ident,
+ $expr_id:expr,
+ $expr_name:pat,
+ $expected:literal,
+ $(
+ @call($name:literal, $args:ident) => $call_expr:expr,
+ )*
+ $(
+ $pat:pat => $expr:expr,
+ )*
+ ) => {{
+ let expr_id = $self.preparse($expr_id);
+ let expr = &$self.thir[expr_id];
+ debug!("Trying to parse {:?} as {}", expr.kind, $expected);
+ let $expr_name = expr;
+ match &expr.kind {
+ $(
+ ExprKind::Call { ty, fun: _, args: $args, .. } if {
+ match ty.kind() {
+ ty::FnDef(did, _) => {
+ $self.tcx.is_diagnostic_item(rustc_span::Symbol::intern($name), *did)
+ }
+ _ => false,
+ }
+ } => $call_expr,
+ )*
+ $(
+ $pat => $expr,
+ )*
+ #[allow(unreachable_patterns)]
+ _ => return Err($self.expr_error(expr_id, $expected))
+ }
+ }};
+}
+pub(crate) use parse_by_kind;
+
+impl<'tcx, 'body> ParseCtxt<'tcx, 'body> {
+ /// Expressions should only ever be matched on after preparsing them. This removes extra scopes
+ /// we don't care about.
+ fn preparse(&self, expr_id: ExprId) -> ExprId {
+ let expr = &self.thir[expr_id];
+ match expr.kind {
+ ExprKind::Scope { value, .. } => self.preparse(value),
+ _ => expr_id,
+ }
+ }
+
+ fn statement_as_expr(&self, stmt_id: StmtId) -> PResult<ExprId> {
+ match &self.thir[stmt_id].kind {
+ StmtKind::Expr { expr, .. } => Ok(*expr),
+ kind @ StmtKind::Let { pattern, .. } => {
+ return Err(ParseError {
+ span: pattern.span,
+ item_description: format!("{:?}", kind),
+ expected: "expression".to_string(),
+ });
+ }
+ }
+ }
+
+ pub fn parse_args(&mut self, params: &IndexVec<ParamId, Param<'tcx>>) -> PResult<()> {
+ for param in params.iter() {
+ let (var, span) = {
+ let pat = param.pat.as_ref().unwrap();
+ match &pat.kind {
+ PatKind::Binding { var, .. } => (*var, pat.span),
+ _ => {
+ return Err(ParseError {
+ span: pat.span,
+ item_description: format!("{:?}", pat.kind),
+ expected: "local".to_string(),
+ });
+ }
+ }
+ };
+ let decl = LocalDecl::new(param.ty, span);
+ let local = self.body.local_decls.push(decl);
+ self.local_map.insert(var, local);
+ }
+
+ Ok(())
+ }
+
+ /// Bodies are of the form:
+ ///
+ /// ```text
+ /// {
+ /// let bb1: BasicBlock;
+ /// let bb2: BasicBlock;
+ /// {
+ /// let RET: _;
+ /// let local1;
+ /// let local2;
+ ///
+ /// {
+ /// { // entry block
+ /// statement1;
+ /// terminator1
+ /// };
+ ///
+ /// bb1 = {
+ /// statement2;
+ /// terminator2
+ /// };
+ ///
+ /// bb2 = {
+ /// statement3;
+ /// terminator3
+ /// }
+ ///
+ /// RET
+ /// }
+ /// }
+ /// }
+ /// ```
+ ///
+ /// This allows us to easily parse the basic blocks declarations, local declarations, and
+ /// basic block definitions in order.
+ pub fn parse_body(&mut self, expr_id: ExprId) -> PResult<()> {
+ let body = parse_by_kind!(self, expr_id, _, "whole body",
+ ExprKind::Block { block } => self.thir[*block].expr.unwrap(),
+ );
+ let (block_decls, rest) = parse_by_kind!(self, body, _, "body with block decls",
+ ExprKind::Block { block } => {
+ let block = &self.thir[*block];
+ (&block.stmts, block.expr.unwrap())
+ },
+ );
+ self.parse_block_decls(block_decls.iter().copied())?;
+
+ let (local_decls, rest) = parse_by_kind!(self, rest, _, "body with local decls",
+ ExprKind::Block { block } => {
+ let block = &self.thir[*block];
+ (&block.stmts, block.expr.unwrap())
+ },
+ );
+ self.parse_local_decls(local_decls.iter().copied())?;
+
+ let block_defs = parse_by_kind!(self, rest, _, "body with block defs",
+ ExprKind::Block { block } => &self.thir[*block].stmts,
+ );
+ for (i, block_def) in block_defs.iter().enumerate() {
+ let block = self.parse_block_def(self.statement_as_expr(*block_def)?)?;
+ self.body.basic_blocks_mut()[BasicBlock::from_usize(i)] = block;
+ }
+
+ Ok(())
+ }
+
+ fn parse_block_decls(&mut self, stmts: impl Iterator<Item = StmtId>) -> PResult<()> {
+ for stmt in stmts {
+ let (var, _, _) = self.parse_let_statement(stmt)?;
+ let data = BasicBlockData::new(None);
+ let block = self.body.basic_blocks_mut().push(data);
+ self.block_map.insert(var, block);
+ }
+
+ Ok(())
+ }
+
+ fn parse_local_decls(&mut self, mut stmts: impl Iterator<Item = StmtId>) -> PResult<()> {
+ let (ret_var, ..) = self.parse_let_statement(stmts.next().unwrap())?;
+ self.local_map.insert(ret_var, Local::from_u32(0));
+
+ for stmt in stmts {
+ let (var, ty, span) = self.parse_let_statement(stmt)?;
+ let decl = LocalDecl::new(ty, span);
+ let local = self.body.local_decls.push(decl);
+ self.local_map.insert(var, local);
+ }
+
+ Ok(())
+ }
+
+ fn parse_let_statement(&mut self, stmt_id: StmtId) -> PResult<(LocalVarId, Ty<'tcx>, Span)> {
+ let pattern = match &self.thir[stmt_id].kind {
+ StmtKind::Let { pattern, .. } => pattern,
+ StmtKind::Expr { expr, .. } => {
+ return Err(self.expr_error(*expr, "let statement"));
+ }
+ };
+
+ self.parse_var(pattern)
+ }
+
+ fn parse_var(&mut self, mut pat: &Pat<'tcx>) -> PResult<(LocalVarId, Ty<'tcx>, Span)> {
+ // Make sure we throw out any `AscribeUserType` we find
+ loop {
+ match &pat.kind {
+ PatKind::Binding { var, ty, .. } => break Ok((*var, *ty, pat.span)),
+ PatKind::AscribeUserType { subpattern, .. } => {
+ pat = subpattern;
+ }
+ _ => {
+ break Err(ParseError {
+ span: pat.span,
+ item_description: format!("{:?}", pat.kind),
+ expected: "local".to_string(),
+ });
+ }
+ }
+ }
+ }
+
+ fn parse_block_def(&self, expr_id: ExprId) -> PResult<BasicBlockData<'tcx>> {
+ let block = parse_by_kind!(self, expr_id, _, "basic block",
+ ExprKind::Block { block } => &self.thir[*block],
+ );
+
+ let mut data = BasicBlockData::new(None);
+ for stmt_id in &*block.stmts {
+ let stmt = self.statement_as_expr(*stmt_id)?;
+ let span = self.thir[stmt].span;
+ let statement = self.parse_statement(stmt)?;
+ data.statements.push(Statement {
+ source_info: SourceInfo { span, scope: self.source_scope },
+ kind: statement,
+ });
+ }
+
+ let Some(trailing) = block.expr else {
+ return Err(self.expr_error(expr_id, "terminator"))
+ };
+ let span = self.thir[trailing].span;
+ let terminator = self.parse_terminator(trailing)?;
+ data.terminator = Some(Terminator {
+ source_info: SourceInfo { span, scope: self.source_scope },
+ kind: terminator,
+ });
+
+ Ok(data)
+ }
+}
diff --git a/compiler/rustc_mir_build/src/build/custom/parse/instruction.rs b/compiler/rustc_mir_build/src/build/custom/parse/instruction.rs
new file mode 100644
index 000000000..03206af33
--- /dev/null
+++ b/compiler/rustc_mir_build/src/build/custom/parse/instruction.rs
@@ -0,0 +1,105 @@
+use rustc_middle::mir::interpret::{ConstValue, Scalar};
+use rustc_middle::{mir::*, thir::*, ty};
+
+use super::{parse_by_kind, PResult, ParseCtxt};
+
+impl<'tcx, 'body> ParseCtxt<'tcx, 'body> {
+ pub fn parse_statement(&self, expr_id: ExprId) -> PResult<StatementKind<'tcx>> {
+ parse_by_kind!(self, expr_id, _, "statement",
+ @call("mir_retag", args) => {
+ Ok(StatementKind::Retag(RetagKind::Default, Box::new(self.parse_place(args[0])?)))
+ },
+ @call("mir_retag_raw", args) => {
+ Ok(StatementKind::Retag(RetagKind::Raw, Box::new(self.parse_place(args[0])?)))
+ },
+ ExprKind::Assign { lhs, rhs } => {
+ let lhs = self.parse_place(*lhs)?;
+ let rhs = self.parse_rvalue(*rhs)?;
+ Ok(StatementKind::Assign(Box::new((lhs, rhs))))
+ },
+ )
+ }
+
+ pub fn parse_terminator(&self, expr_id: ExprId) -> PResult<TerminatorKind<'tcx>> {
+ parse_by_kind!(self, expr_id, _, "terminator",
+ @call("mir_return", _args) => {
+ Ok(TerminatorKind::Return)
+ },
+ @call("mir_goto", args) => {
+ Ok(TerminatorKind::Goto { target: self.parse_block(args[0])? } )
+ },
+ )
+ }
+
+ fn parse_rvalue(&self, expr_id: ExprId) -> PResult<Rvalue<'tcx>> {
+ parse_by_kind!(self, expr_id, _, "rvalue",
+ ExprKind::Borrow { borrow_kind, arg } => Ok(
+ Rvalue::Ref(self.tcx.lifetimes.re_erased, *borrow_kind, self.parse_place(*arg)?)
+ ),
+ ExprKind::AddressOf { mutability, arg } => Ok(
+ Rvalue::AddressOf(*mutability, self.parse_place(*arg)?)
+ ),
+ _ => self.parse_operand(expr_id).map(Rvalue::Use),
+ )
+ }
+
+ fn parse_operand(&self, expr_id: ExprId) -> PResult<Operand<'tcx>> {
+ parse_by_kind!(self, expr_id, expr, "operand",
+ @call("mir_move", args) => self.parse_place(args[0]).map(Operand::Move),
+ @call("mir_static", args) => self.parse_static(args[0]),
+ @call("mir_static_mut", args) => self.parse_static(args[0]),
+ ExprKind::Literal { .. }
+ | ExprKind::NamedConst { .. }
+ | ExprKind::NonHirLiteral { .. }
+ | ExprKind::ZstLiteral { .. }
+ | ExprKind::ConstParam { .. }
+ | ExprKind::ConstBlock { .. } => {
+ Ok(Operand::Constant(Box::new(
+ crate::build::expr::as_constant::as_constant_inner(expr, |_| None, self.tcx)
+ )))
+ },
+ _ => self.parse_place(expr_id).map(Operand::Copy),
+ )
+ }
+
+ fn parse_place(&self, expr_id: ExprId) -> PResult<Place<'tcx>> {
+ parse_by_kind!(self, expr_id, _, "place",
+ ExprKind::Deref { arg } => Ok(
+ self.parse_place(*arg)?.project_deeper(&[PlaceElem::Deref], self.tcx)
+ ),
+ _ => self.parse_local(expr_id).map(Place::from),
+ )
+ }
+
+ fn parse_local(&self, expr_id: ExprId) -> PResult<Local> {
+ parse_by_kind!(self, expr_id, _, "local",
+ ExprKind::VarRef { id } => Ok(self.local_map[id]),
+ )
+ }
+
+ fn parse_block(&self, expr_id: ExprId) -> PResult<BasicBlock> {
+ parse_by_kind!(self, expr_id, _, "basic block",
+ ExprKind::VarRef { id } => Ok(self.block_map[id]),
+ )
+ }
+
+ fn parse_static(&self, expr_id: ExprId) -> PResult<Operand<'tcx>> {
+ let expr_id = parse_by_kind!(self, expr_id, _, "static",
+ ExprKind::Deref { arg } => *arg,
+ );
+
+ parse_by_kind!(self, expr_id, expr, "static",
+ ExprKind::StaticRef { alloc_id, ty, .. } => {
+ let const_val =
+ ConstValue::Scalar(Scalar::from_pointer((*alloc_id).into(), &self.tcx));
+ let literal = ConstantKind::Val(const_val, *ty);
+
+ Ok(Operand::Constant(Box::new(Constant {
+ span: expr.span,
+ user_ty: None,
+ literal
+ })))
+ },
+ )
+ }
+}
diff --git a/compiler/rustc_mir_build/src/build/expr/as_constant.rs b/compiler/rustc_mir_build/src/build/expr/as_constant.rs
index 37dc1ad9f..717c62315 100644
--- a/compiler/rustc_mir_build/src/build/expr/as_constant.rs
+++ b/compiler/rustc_mir_build/src/build/expr/as_constant.rs
@@ -8,7 +8,10 @@ use rustc_middle::mir::interpret::{
};
use rustc_middle::mir::*;
use rustc_middle::thir::*;
-use rustc_middle::ty::{self, CanonicalUserTypeAnnotation, TyCtxt};
+use rustc_middle::ty::{
+ self, CanonicalUserType, CanonicalUserTypeAnnotation, TyCtxt, UserTypeAnnotationIndex,
+};
+use rustc_span::DUMMY_SP;
use rustc_target::abi::Size;
impl<'a, 'tcx> Builder<'a, 'tcx> {
@@ -18,83 +21,87 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
let this = self;
let tcx = this.tcx;
let Expr { ty, temp_lifetime: _, span, ref kind } = *expr;
- match *kind {
+ match kind {
ExprKind::Scope { region_scope: _, lint_level: _, value } => {
- this.as_constant(&this.thir[value])
- }
- ExprKind::Literal { lit, neg } => {
- let literal =
- match lit_to_mir_constant(tcx, LitToConstInput { lit: &lit.node, ty, neg }) {
- Ok(c) => c,
- Err(LitToConstError::Reported) => ConstantKind::Ty(tcx.const_error(ty)),
- Err(LitToConstError::TypeError) => {
- bug!("encountered type error in `lit_to_mir_constant")
- }
- };
-
- Constant { span, user_ty: None, literal }
+ this.as_constant(&this.thir[*value])
}
- ExprKind::NonHirLiteral { lit, ref user_ty } => {
- let user_ty = user_ty.as_ref().map(|user_ty| {
- this.canonical_user_type_annotations.push(CanonicalUserTypeAnnotation {
+ _ => as_constant_inner(
+ expr,
+ |user_ty| {
+ Some(this.canonical_user_type_annotations.push(CanonicalUserTypeAnnotation {
span,
user_ty: user_ty.clone(),
inferred_ty: ty,
- })
- });
- let literal = ConstantKind::Val(ConstValue::Scalar(Scalar::Int(lit)), ty);
+ }))
+ },
+ tcx,
+ ),
+ }
+ }
+}
- Constant { span, user_ty: user_ty, literal }
- }
- ExprKind::ZstLiteral { ref user_ty } => {
- let user_ty = user_ty.as_ref().map(|user_ty| {
- this.canonical_user_type_annotations.push(CanonicalUserTypeAnnotation {
- span,
- user_ty: user_ty.clone(),
- inferred_ty: ty,
- })
- });
- let literal = ConstantKind::Val(ConstValue::ZeroSized, ty);
+pub fn as_constant_inner<'tcx>(
+ expr: &Expr<'tcx>,
+ push_cuta: impl FnMut(&Box<CanonicalUserType<'tcx>>) -> Option<UserTypeAnnotationIndex>,
+ tcx: TyCtxt<'tcx>,
+) -> Constant<'tcx> {
+ let Expr { ty, temp_lifetime: _, span, ref kind } = *expr;
+ match *kind {
+ ExprKind::Literal { lit, neg } => {
+ let literal =
+ match lit_to_mir_constant(tcx, LitToConstInput { lit: &lit.node, ty, neg }) {
+ Ok(c) => c,
+ Err(LitToConstError::Reported(guar)) => {
+ ConstantKind::Ty(tcx.const_error_with_guaranteed(ty, guar))
+ }
+ Err(LitToConstError::TypeError) => {
+ bug!("encountered type error in `lit_to_mir_constant`")
+ }
+ };
- Constant { span, user_ty: user_ty, literal }
- }
- ExprKind::NamedConst { def_id, substs, ref user_ty } => {
- let user_ty = user_ty.as_ref().map(|user_ty| {
- this.canonical_user_type_annotations.push(CanonicalUserTypeAnnotation {
- span,
- user_ty: user_ty.clone(),
- inferred_ty: ty,
- })
- });
+ Constant { span, user_ty: None, literal }
+ }
+ ExprKind::NonHirLiteral { lit, ref user_ty } => {
+ let user_ty = user_ty.as_ref().map(push_cuta).flatten();
- let uneval =
- mir::UnevaluatedConst::new(ty::WithOptConstParam::unknown(def_id), substs);
- let literal = ConstantKind::Unevaluated(uneval, ty);
+ let literal = ConstantKind::Val(ConstValue::Scalar(Scalar::Int(lit)), ty);
- Constant { user_ty, span, literal }
- }
- ExprKind::ConstParam { param, def_id: _ } => {
- let const_param =
- tcx.mk_const(ty::ConstS { kind: ty::ConstKind::Param(param), ty: expr.ty });
- let literal = ConstantKind::Ty(const_param);
+ Constant { span, user_ty: user_ty, literal }
+ }
+ ExprKind::ZstLiteral { ref user_ty } => {
+ let user_ty = user_ty.as_ref().map(push_cuta).flatten();
- Constant { user_ty: None, span, literal }
- }
- ExprKind::ConstBlock { did: def_id, substs } => {
- let uneval =
- mir::UnevaluatedConst::new(ty::WithOptConstParam::unknown(def_id), substs);
- let literal = ConstantKind::Unevaluated(uneval, ty);
+ let literal = ConstantKind::Val(ConstValue::ZeroSized, ty);
- Constant { user_ty: None, span, literal }
- }
- ExprKind::StaticRef { alloc_id, ty, .. } => {
- let const_val = ConstValue::Scalar(Scalar::from_pointer(alloc_id.into(), &tcx));
- let literal = ConstantKind::Val(const_val, ty);
+ Constant { span, user_ty: user_ty, literal }
+ }
+ ExprKind::NamedConst { def_id, substs, ref user_ty } => {
+ let user_ty = user_ty.as_ref().map(push_cuta).flatten();
- Constant { span, user_ty: None, literal }
- }
- _ => span_bug!(span, "expression is not a valid constant {:?}", kind),
+ let uneval = mir::UnevaluatedConst::new(ty::WithOptConstParam::unknown(def_id), substs);
+ let literal = ConstantKind::Unevaluated(uneval, ty);
+
+ Constant { user_ty, span, literal }
}
+ ExprKind::ConstParam { param, def_id: _ } => {
+ let const_param = tcx.mk_const(ty::ConstKind::Param(param), expr.ty);
+ let literal = ConstantKind::Ty(const_param);
+
+ Constant { user_ty: None, span, literal }
+ }
+ ExprKind::ConstBlock { did: def_id, substs } => {
+ let uneval = mir::UnevaluatedConst::new(ty::WithOptConstParam::unknown(def_id), substs);
+ let literal = ConstantKind::Unevaluated(uneval, ty);
+
+ Constant { user_ty: None, span, literal }
+ }
+ ExprKind::StaticRef { alloc_id, ty, .. } => {
+ let const_val = ConstValue::Scalar(Scalar::from_pointer(alloc_id.into(), &tcx));
+ let literal = ConstantKind::Val(const_val, ty);
+
+ Constant { span, user_ty: None, literal }
+ }
+ _ => span_bug!(span, "expression is not a valid constant {:?}", kind),
}
}
@@ -106,7 +113,15 @@ pub(crate) fn lit_to_mir_constant<'tcx>(
let LitToConstInput { lit, ty, neg } = lit_input;
let trunc = |n| {
let param_ty = ty::ParamEnv::reveal_all().and(ty);
- let width = tcx.layout_of(param_ty).map_err(|_| LitToConstError::Reported)?.size;
+ let width = tcx
+ .layout_of(param_ty)
+ .map_err(|_| {
+ LitToConstError::Reported(tcx.sess.delay_span_bug(
+ DUMMY_SP,
+ format!("couldn't compute width of literal: {:?}", lit_input.lit),
+ ))
+ })?
+ .size;
trace!("trunc {} with size {} and shift {}", n, width.bits(), 128 - width.bits());
let result = width.truncate(n);
trace!("trunc result: {}", result);
@@ -137,12 +152,20 @@ pub(crate) fn lit_to_mir_constant<'tcx>(
(ast::LitKind::Int(n, _), ty::Uint(_)) | (ast::LitKind::Int(n, _), ty::Int(_)) => {
trunc(if neg { (*n as i128).overflowing_neg().0 as u128 } else { *n })?
}
- (ast::LitKind::Float(n, _), ty::Float(fty)) => {
- parse_float_into_constval(*n, *fty, neg).ok_or(LitToConstError::Reported)?
- }
+ (ast::LitKind::Float(n, _), ty::Float(fty)) => parse_float_into_constval(*n, *fty, neg)
+ .ok_or_else(|| {
+ LitToConstError::Reported(tcx.sess.delay_span_bug(
+ DUMMY_SP,
+ format!("couldn't parse float literal: {:?}", lit_input.lit),
+ ))
+ })?,
(ast::LitKind::Bool(b), ty::Bool) => ConstValue::Scalar(Scalar::from_bool(*b)),
(ast::LitKind::Char(c), ty::Char) => ConstValue::Scalar(Scalar::from_char(*c)),
- (ast::LitKind::Err, _) => return Err(LitToConstError::Reported),
+ (ast::LitKind::Err, _) => {
+ return Err(LitToConstError::Reported(
+ tcx.sess.delay_span_bug(DUMMY_SP, "encountered LitKind::Err during mir build"),
+ ));
+ }
_ => return Err(LitToConstError::TypeError),
};
diff --git a/compiler/rustc_mir_build/src/build/expr/as_place.rs b/compiler/rustc_mir_build/src/build/expr/as_place.rs
index 396782d45..edd527286 100644
--- a/compiler/rustc_mir_build/src/build/expr/as_place.rs
+++ b/compiler/rustc_mir_build/src/build/expr/as_place.rs
@@ -65,7 +65,7 @@ pub(crate) enum PlaceBase {
/// `PlaceBuilder` is used to create places during MIR construction. It allows you to "build up" a
/// place by pushing more and more projections onto the end, and then convert the final set into a
-/// place using the `into_place` method.
+/// place using the `to_place` method.
///
/// This is used internally when building a place for an expression like `a.b.c`. The fields `b`
/// and `c` can be progressively pushed onto the place builder that is created when converting `a`.
@@ -167,59 +167,54 @@ fn find_capture_matching_projections<'a, 'tcx>(
})
}
-/// Takes a PlaceBuilder and resolves the upvar (if any) within it, so that the
-/// `PlaceBuilder` now starts from `PlaceBase::Local`.
-///
-/// Returns a Result with the error being the PlaceBuilder (`from_builder`) that was not found.
+/// Takes an upvar place and tries to resolve it into a `PlaceBuilder`
+/// with `PlaceBase::Local`
#[instrument(level = "trace", skip(cx), ret)]
fn to_upvars_resolved_place_builder<'tcx>(
- from_builder: PlaceBuilder<'tcx>,
cx: &Builder<'_, 'tcx>,
-) -> Result<PlaceBuilder<'tcx>, PlaceBuilder<'tcx>> {
- match from_builder.base {
- PlaceBase::Local(_) => Ok(from_builder),
- PlaceBase::Upvar { var_hir_id, closure_def_id } => {
- let Some((capture_index, capture)) =
- find_capture_matching_projections(
- &cx.upvars,
- var_hir_id,
- &from_builder.projection,
- ) else {
- let closure_span = cx.tcx.def_span(closure_def_id);
- if !enable_precise_capture(cx.tcx, closure_span) {
- bug!(
- "No associated capture found for {:?}[{:#?}] even though \
- capture_disjoint_fields isn't enabled",
- var_hir_id,
- from_builder.projection
- )
- } else {
- debug!(
- "No associated capture found for {:?}[{:#?}]",
- var_hir_id, from_builder.projection,
- );
- }
- return Err(from_builder);
- };
+ var_hir_id: LocalVarId,
+ closure_def_id: LocalDefId,
+ projection: &[PlaceElem<'tcx>],
+) -> Option<PlaceBuilder<'tcx>> {
+ let Some((capture_index, capture)) =
+ find_capture_matching_projections(
+ &cx.upvars,
+ var_hir_id,
+ &projection,
+ ) else {
+ let closure_span = cx.tcx.def_span(closure_def_id);
+ if !enable_precise_capture(cx.tcx, closure_span) {
+ bug!(
+ "No associated capture found for {:?}[{:#?}] even though \
+ capture_disjoint_fields isn't enabled",
+ var_hir_id,
+ projection
+ )
+ } else {
+ debug!(
+ "No associated capture found for {:?}[{:#?}]",
+ var_hir_id, projection,
+ );
+ }
+ return None;
+ };
- // Access the capture by accessing the field within the Closure struct.
- let capture_info = &cx.upvars[capture_index];
+ // Access the capture by accessing the field within the Closure struct.
+ let capture_info = &cx.upvars[capture_index];
- let mut upvar_resolved_place_builder = PlaceBuilder::from(capture_info.use_place);
+ let mut upvar_resolved_place_builder = PlaceBuilder::from(capture_info.use_place);
- // We used some of the projections to build the capture itself,
- // now we apply the remaining to the upvar resolved place.
- trace!(?capture.captured_place, ?from_builder.projection);
- let remaining_projections = strip_prefix(
- capture.captured_place.place.base_ty,
- from_builder.projection,
- &capture.captured_place.place.projections,
- );
- upvar_resolved_place_builder.projection.extend(remaining_projections);
+ // We used some of the projections to build the capture itself,
+ // now we apply the remaining to the upvar resolved place.
+ trace!(?capture.captured_place, ?projection);
+ let remaining_projections = strip_prefix(
+ capture.captured_place.place.base_ty,
+ projection,
+ &capture.captured_place.place.projections,
+ );
+ upvar_resolved_place_builder.projection.extend(remaining_projections);
- Ok(upvar_resolved_place_builder)
- }
- }
+ Some(upvar_resolved_place_builder)
}
/// Returns projections remaining after stripping an initial prefix of HIR
@@ -228,13 +223,14 @@ fn to_upvars_resolved_place_builder<'tcx>(
/// Supports only HIR projection kinds that represent a path that might be
/// captured by a closure or a generator, i.e., an `Index` or a `Subslice`
/// projection kinds are unsupported.
-fn strip_prefix<'tcx>(
+fn strip_prefix<'a, 'tcx>(
mut base_ty: Ty<'tcx>,
- projections: Vec<PlaceElem<'tcx>>,
+ projections: &'a [PlaceElem<'tcx>],
prefix_projections: &[HirProjection<'tcx>],
-) -> impl Iterator<Item = PlaceElem<'tcx>> {
+) -> impl Iterator<Item = PlaceElem<'tcx>> + 'a {
let mut iter = projections
- .into_iter()
+ .iter()
+ .copied()
// Filter out opaque casts, they are unnecessary in the prefix.
.filter(|elem| !matches!(elem, ProjectionElem::OpaqueCast(..)));
for projection in prefix_projections {
@@ -258,21 +254,21 @@ fn strip_prefix<'tcx>(
}
impl<'tcx> PlaceBuilder<'tcx> {
- pub(in crate::build) fn into_place(self, cx: &Builder<'_, 'tcx>) -> Place<'tcx> {
- if let PlaceBase::Local(local) = self.base {
- Place { local, projection: cx.tcx.intern_place_elems(&self.projection) }
- } else {
- self.expect_upvars_resolved(cx).into_place(cx)
- }
+ pub(in crate::build) fn to_place(&self, cx: &Builder<'_, 'tcx>) -> Place<'tcx> {
+ self.try_to_place(cx).unwrap()
}
- fn expect_upvars_resolved(self, cx: &Builder<'_, 'tcx>) -> PlaceBuilder<'tcx> {
- to_upvars_resolved_place_builder(self, cx).unwrap()
+ /// Creates a `Place` or returns `None` if an upvar cannot be resolved
+ pub(in crate::build) fn try_to_place(&self, cx: &Builder<'_, 'tcx>) -> Option<Place<'tcx>> {
+ let resolved = self.resolve_upvar(cx);
+ let builder = resolved.as_ref().unwrap_or(self);
+ let PlaceBase::Local(local) = builder.base else { return None };
+ let projection = cx.tcx.intern_place_elems(&builder.projection);
+ Some(Place { local, projection })
}
/// Attempts to resolve the `PlaceBuilder`.
- /// On success, it will return the resolved `PlaceBuilder`.
- /// On failure, it will return itself.
+ /// Returns `None` if this is not an upvar.
///
/// Upvars resolve may fail for a `PlaceBuilder` when attempting to
/// resolve a disjoint field whose root variable is not captured
@@ -281,11 +277,14 @@ impl<'tcx> PlaceBuilder<'tcx> {
/// not captured. This can happen because the final mir that will be
/// generated doesn't require a read for this place. Failures will only
/// happen inside closures.
- pub(in crate::build) fn try_upvars_resolved(
- self,
+ pub(in crate::build) fn resolve_upvar(
+ &self,
cx: &Builder<'_, 'tcx>,
- ) -> Result<PlaceBuilder<'tcx>, PlaceBuilder<'tcx>> {
- to_upvars_resolved_place_builder(self, cx)
+ ) -> Option<PlaceBuilder<'tcx>> {
+ let PlaceBase::Upvar { var_hir_id, closure_def_id } = self.base else {
+ return None;
+ };
+ to_upvars_resolved_place_builder(cx, var_hir_id, closure_def_id, &self.projection)
}
pub(crate) fn base(&self) -> PlaceBase {
@@ -316,6 +315,14 @@ impl<'tcx> PlaceBuilder<'tcx> {
self.projection.push(elem);
self
}
+
+ /// Same as `.clone().project(..)` but more efficient
+ pub(crate) fn clone_project(&self, elem: PlaceElem<'tcx>) -> Self {
+ Self {
+ base: self.base,
+ projection: Vec::from_iter(self.projection.iter().copied().chain([elem])),
+ }
+ }
}
impl<'tcx> From<Local> for PlaceBuilder<'tcx> {
@@ -355,7 +362,7 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
expr: &Expr<'tcx>,
) -> BlockAnd<Place<'tcx>> {
let place_builder = unpack!(block = self.as_place_builder(block, expr));
- block.and(place_builder.into_place(self))
+ block.and(place_builder.to_place(self))
}
/// This is used when constructing a compound `Place`, so that we can avoid creating
@@ -379,7 +386,7 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
expr: &Expr<'tcx>,
) -> BlockAnd<Place<'tcx>> {
let place_builder = unpack!(block = self.as_read_only_place_builder(block, expr));
- block.and(place_builder.into_place(self))
+ block.and(place_builder.to_place(self))
}
/// This is used when constructing a compound `Place`, so that we can avoid creating
@@ -474,7 +481,7 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
inferred_ty: expr.ty,
});
- let place = place_builder.clone().into_place(this);
+ let place = place_builder.to_place(this);
this.cfg.push(
block,
Statement {
@@ -599,7 +606,7 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
let is_outermost_index = fake_borrow_temps.is_none();
let fake_borrow_temps = fake_borrow_temps.unwrap_or(base_fake_borrow_temps);
- let mut base_place =
+ let base_place =
unpack!(block = self.expr_as_place(block, base, mutability, Some(fake_borrow_temps),));
// Making this a *fresh* temporary means we do not have to worry about
@@ -607,14 +614,13 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
// The "retagging" transformation (for Stacked Borrows) relies on this.
let idx = unpack!(block = self.as_temp(block, temp_lifetime, index, Mutability::Not,));
- block = self.bounds_check(block, base_place.clone(), idx, expr_span, source_info);
+ block = self.bounds_check(block, &base_place, idx, expr_span, source_info);
if is_outermost_index {
self.read_fake_borrows(block, fake_borrow_temps, source_info)
} else {
- base_place = base_place.expect_upvars_resolved(self);
self.add_fake_borrows_of_base(
- &base_place,
+ base_place.to_place(self),
block,
fake_borrow_temps,
expr_span,
@@ -628,7 +634,7 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
fn bounds_check(
&mut self,
block: BasicBlock,
- slice: PlaceBuilder<'tcx>,
+ slice: &PlaceBuilder<'tcx>,
index: Local,
expr_span: Span,
source_info: SourceInfo,
@@ -640,7 +646,7 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
let lt = self.temp(bool_ty, expr_span);
// len = len(slice)
- self.cfg.push_assign(block, source_info, len, Rvalue::Len(slice.into_place(self)));
+ self.cfg.push_assign(block, source_info, len, Rvalue::Len(slice.to_place(self)));
// lt = idx < len
self.cfg.push_assign(
block,
@@ -658,19 +664,15 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
fn add_fake_borrows_of_base(
&mut self,
- base_place: &PlaceBuilder<'tcx>,
+ base_place: Place<'tcx>,
block: BasicBlock,
fake_borrow_temps: &mut Vec<Local>,
expr_span: Span,
source_info: SourceInfo,
) {
let tcx = self.tcx;
- let local = match base_place.base {
- PlaceBase::Local(local) => local,
- PlaceBase::Upvar { .. } => bug!("Expected PlacseBase::Local found Upvar"),
- };
- let place_ty = Place::ty_from(local, &base_place.projection, &self.local_decls, tcx);
+ let place_ty = base_place.ty(&self.local_decls, tcx);
if let ty::Slice(_) = place_ty.ty.kind() {
// We need to create fake borrows to ensure that the bounds
// check that we just did stays valid. Since we can't assign to
@@ -680,7 +682,7 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
match elem {
ProjectionElem::Deref => {
let fake_borrow_deref_ty = Place::ty_from(
- local,
+ base_place.local,
&base_place.projection[..idx],
&self.local_decls,
tcx,
@@ -698,14 +700,14 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
Rvalue::Ref(
tcx.lifetimes.re_erased,
BorrowKind::Shallow,
- Place { local, projection },
+ Place { local: base_place.local, projection },
),
);
fake_borrow_temps.push(fake_borrow_temp);
}
ProjectionElem::Index(_) => {
let index_ty = Place::ty_from(
- local,
+ base_place.local,
&base_place.projection[..idx],
&self.local_decls,
tcx,
diff --git a/compiler/rustc_mir_build/src/build/expr/as_rvalue.rs b/compiler/rustc_mir_build/src/build/expr/as_rvalue.rs
index 3dafdcb78..0814793f2 100644
--- a/compiler/rustc_mir_build/src/build/expr/as_rvalue.rs
+++ b/compiler/rustc_mir_build/src/build/expr/as_rvalue.rs
@@ -2,6 +2,7 @@
use rustc_index::vec::Idx;
use rustc_middle::ty::util::IntTypeExt;
+use rustc_target::abi::{Abi, Primitive};
use crate::build::expr::as_place::PlaceBase;
use crate::build::expr::category::{Category, RvalueFunc};
@@ -198,6 +199,7 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
let (source, ty) = if let ty::Adt(adt_def, ..) = source.ty.kind() && adt_def.is_enum() {
let discr_ty = adt_def.repr().discr_type().to_ty(this.tcx);
let temp = unpack!(block = this.as_temp(block, scope, source, Mutability::Not));
+ let layout = this.tcx.layout_of(this.param_env.and(source.ty));
let discr = this.temp(discr_ty, source.span);
this.cfg.push_assign(
block,
@@ -205,8 +207,55 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
discr,
Rvalue::Discriminant(temp.into()),
);
+ let (op, ty) = (Operand::Move(discr), discr_ty);
+
+ if let Abi::Scalar(scalar) = layout.unwrap().abi {
+ if let Primitive::Int(_, signed) = scalar.primitive() {
+ let range = scalar.valid_range(&this.tcx);
+ // FIXME: Handle wraparound cases too.
+ if range.end >= range.start {
+ let mut assumer = |range: u128, bin_op: BinOp| {
+ // We will be overwriting this value if our scalar is signed,
+ // because sign extension on unsigned types might cause unintended results.
+ let mut range_val =
+ ConstantKind::from_bits(this.tcx, range, ty::ParamEnv::empty().and(discr_ty));
+ let bool_ty = this.tcx.types.bool;
+ if signed {
+ let scalar_size_extend = scalar.size(&this.tcx).sign_extend(range);
+ let discr_layout = this.tcx.layout_of(this.param_env.and(discr_ty));
+ let truncated_val = discr_layout.unwrap().size.truncate(scalar_size_extend);
+ range_val = ConstantKind::from_bits(
+ this.tcx,
+ truncated_val,
+ ty::ParamEnv::empty().and(discr_ty),
+ );
+ }
+ let lit_op = this.literal_operand(expr.span, range_val);
+ let is_bin_op = this.temp(bool_ty, expr_span);
+ this.cfg.push_assign(
+ block,
+ source_info,
+ is_bin_op,
+ Rvalue::BinaryOp(bin_op, Box::new(((lit_op), (Operand::Copy(discr))))),
+ );
+ this.cfg.push(
+ block,
+ Statement {
+ source_info,
+ kind: StatementKind::Intrinsic(Box::new(NonDivergingIntrinsic::Assume(
+ Operand::Copy(is_bin_op),
+ ))),
+ },
+ )
+ };
+ assumer(range.end, BinOp::Ge);
+ assumer(range.start, BinOp::Le);
+ }
+ }
+ }
+
+ (op, ty)
- (Operand::Move(discr), discr_ty)
} else {
let ty = source.ty;
let source = unpack!(
@@ -320,8 +369,7 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
let place_builder =
unpack!(block = this.as_place_builder(block, &this.thir[*thir_place]));
- if let Ok(place_builder_resolved) = place_builder.try_upvars_resolved(this) {
- let mir_place = place_builder_resolved.into_place(this);
+ if let Some(mir_place) = place_builder.try_to_place(this) {
this.cfg.push_fake_read(
block,
this.source_info(this.tcx.hir().span(*hir_id)),
@@ -612,7 +660,7 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
// by the parent itself. The mutability of the current capture
// is same as that of the capture in the parent closure.
PlaceBase::Upvar { .. } => {
- let enclosing_upvars_resolved = arg_place_builder.clone().into_place(this);
+ let enclosing_upvars_resolved = arg_place_builder.to_place(this);
match enclosing_upvars_resolved.as_ref() {
PlaceRef {
@@ -649,7 +697,7 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
Mutability::Mut => BorrowKind::Mut { allow_two_phase_borrow: false },
};
- let arg_place = arg_place_builder.into_place(this);
+ let arg_place = arg_place_builder.to_place(this);
this.cfg.push_assign(
block,
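
The `as_rvalue.rs` hunk above makes enum-to-integer casts emit `assume` statements bounding the loaded discriminant to the scalar's valid range. In surface-Rust terms the emitted hint is roughly the following (illustrative analogue only; the real change injects MIR statements, not source code):

```rust
// Toy enum with a contiguous discriminant range 1..=3.
enum Tri {
    A = 1,
    B = 2,
    C = 3,
}

fn as_byte(t: Tri) -> u8 {
    let d = t as u8;
    // Rough source-level analogue of the two assumes the builder now pushes
    // (`range.start <= d` and `d <= range.end`): promise the optimizer the
    // cast result stays inside the valid discriminant range.
    if d < 1 || d > 3 {
        // SAFETY: every `Tri` value has a discriminant in 1..=3.
        unsafe { std::hint::unreachable_unchecked() }
    }
    d
}

fn main() {
    assert_eq!(as_byte(Tri::A), 1);
    assert_eq!(as_byte(Tri::B), 2);
    assert_eq!(as_byte(Tri::C), 3);
}
```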
diff --git a/compiler/rustc_mir_build/src/build/expr/category.rs b/compiler/rustc_mir_build/src/build/expr/category.rs
index a4386319d..d33401f07 100644
--- a/compiler/rustc_mir_build/src/build/expr/category.rs
+++ b/compiler/rustc_mir_build/src/build/expr/category.rs
@@ -2,35 +2,35 @@ use rustc_middle::thir::*;
#[derive(Debug, PartialEq)]
pub(crate) enum Category {
- // An assignable memory location like `x`, `x.f`, `foo()[3]`, that
- // sort of thing. Something that could appear on the LHS of an `=`
- // sign.
+ /// An assignable memory location like `x`, `x.f`, `foo()[3]`, that
+ /// sort of thing. Something that could appear on the LHS of an `=`
+ /// sign.
Place,
- // A literal like `23` or `"foo"`. Does not include constant
- // expressions like `3 + 5`.
+ /// A literal like `23` or `"foo"`. Does not include constant
+ /// expressions like `3 + 5`.
Constant,
- // Something that generates a new value at runtime, like `x + y`
- // or `foo()`.
+ /// Something that generates a new value at runtime, like `x + y`
+ /// or `foo()`.
Rvalue(RvalueFunc),
}
-// Rvalues fall into different "styles" that will determine which fn
-// is best suited to generate them.
+/// Rvalues fall into different "styles" that will determine which fn
+/// is best suited to generate them.
#[derive(Debug, PartialEq)]
pub(crate) enum RvalueFunc {
- // Best generated by `into`. This is generally exprs that
- // cause branching, like `match`, but also includes calls.
+ /// Best generated by `into`. This is generally exprs that
+ /// cause branching, like `match`, but also includes calls.
Into,
- // Best generated by `as_rvalue`. This is usually the case.
+ /// Best generated by `as_rvalue`. This is usually the case.
AsRvalue,
}
-/// Determines the category for a given expression. Note that scope
-/// and paren expressions have no category.
impl Category {
+ /// Determines the category for a given expression. Note that scope
+ /// and paren expressions have no category.
pub(crate) fn of(ek: &ExprKind<'_>) -> Option<Category> {
match *ek {
ExprKind::Scope { .. } => None,
diff --git a/compiler/rustc_mir_build/src/build/expr/into.rs b/compiler/rustc_mir_build/src/build/expr/into.rs
index 24ecd0a53..218a26e62 100644
--- a/compiler/rustc_mir_build/src/build/expr/into.rs
+++ b/compiler/rustc_mir_build/src/build/expr/into.rs
@@ -108,7 +108,7 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
ExprKind::Let { expr, ref pat } => {
let scope = this.local_scope();
let (true_block, false_block) = this.in_if_then_scope(scope, expr_span, |this| {
- this.lower_let_expr(block, &this.thir[expr], pat, scope, None, expr_span)
+ this.lower_let_expr(block, &this.thir[expr], pat, scope, None, expr_span, true)
});
this.cfg.push_assign_constant(
@@ -271,15 +271,10 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
// MIR checks and ultimately whether code is accepted or not. We can only
// omit the return edge if a return type is visibly uninhabited to a module
// that makes the call.
- target: if this.tcx.is_ty_uninhabited_from(
- this.parent_module,
- expr.ty,
- this.param_env,
- ) {
- None
- } else {
- Some(success)
- },
+ target: expr
+ .ty
+ .is_inhabited_from(this.tcx, this.parent_module, this.param_env)
+ .then_some(success),
from_hir_call,
fn_span,
},
@@ -363,10 +358,8 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
.map(|(n, ty)| match fields_map.get(&n) {
Some(v) => v.clone(),
None => {
- let place_builder = place_builder.clone();
- this.consume_by_copy_or_move(
- place_builder.field(n, *ty).into_place(this),
- )
+ let place = place_builder.clone_project(PlaceElem::Field(n, *ty));
+ this.consume_by_copy_or_move(place.to_place(this))
}
})
.collect()
diff --git a/compiler/rustc_mir_build/src/build/matches/mod.rs b/compiler/rustc_mir_build/src/build/matches/mod.rs
index 3f813e0af..691cbee2c 100644
--- a/compiler/rustc_mir_build/src/build/matches/mod.rs
+++ b/compiler/rustc_mir_build/src/build/matches/mod.rs
@@ -84,6 +84,7 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
break_scope,
Some(variable_source_info.scope),
variable_source_info.span,
+ true,
),
_ => {
let temp_scope = temp_scope_override.unwrap_or_else(|| this.local_scope());
@@ -168,7 +169,7 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
let scrutinee_place =
unpack!(block = self.lower_scrutinee(block, scrutinee, scrutinee_span,));
- let mut arm_candidates = self.create_match_candidates(scrutinee_place.clone(), &arms);
+ let mut arm_candidates = self.create_match_candidates(&scrutinee_place, &arms);
let match_has_guard = arm_candidates.iter().any(|(_, candidate)| candidate.has_guard);
let mut candidates =
@@ -220,8 +221,7 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
let cause_matched_place = FakeReadCause::ForMatchedPlace(None);
let source_info = self.source_info(scrutinee_span);
- if let Ok(scrutinee_builder) = scrutinee_place_builder.clone().try_upvars_resolved(self) {
- let scrutinee_place = scrutinee_builder.into_place(self);
+ if let Some(scrutinee_place) = scrutinee_place_builder.try_to_place(self) {
self.cfg.push_fake_read(block, source_info, cause_matched_place, scrutinee_place);
}
@@ -231,7 +231,7 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
/// Create the initial `Candidate`s for a `match` expression.
fn create_match_candidates<'pat>(
&mut self,
- scrutinee: PlaceBuilder<'tcx>,
+ scrutinee: &PlaceBuilder<'tcx>,
arms: &'pat [ArmId],
) -> Vec<(&'pat Arm<'tcx>, Candidate<'pat, 'tcx>)>
where
@@ -334,7 +334,7 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
let arm_scope = (arm.scope, arm_source_info);
let match_scope = self.local_scope();
self.in_scope(arm_scope, arm.lint_level, |this| {
- // `try_upvars_resolved` may fail if it is unable to resolve the given
+ // `try_to_place` may fail if it is unable to resolve the given
// `PlaceBuilder` inside a closure. In this case, we don't want to include
// a scrutinee place. `scrutinee_place_builder` will fail to be resolved
// if the only match arm is a wildcard (`_`).
@@ -345,31 +345,23 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
// match foo { _ => () };
// };
// ```
- let mut opt_scrutinee_place: Option<(Option<&Place<'tcx>>, Span)> = None;
- let scrutinee_place: Place<'tcx>;
- if let Ok(scrutinee_builder) =
- scrutinee_place_builder.clone().try_upvars_resolved(this)
- {
- scrutinee_place = scrutinee_builder.into_place(this);
- opt_scrutinee_place = Some((Some(&scrutinee_place), scrutinee_span));
- }
+ let scrutinee_place = scrutinee_place_builder.try_to_place(this);
+ let opt_scrutinee_place =
+ scrutinee_place.as_ref().map(|place| (Some(place), scrutinee_span));
let scope = this.declare_bindings(
None,
arm.span,
&arm.pattern,
- ArmHasGuard(arm.guard.is_some()),
+ arm.guard.as_ref(),
opt_scrutinee_place,
);
let arm_block = this.bind_pattern(
outer_source_info,
candidate,
- arm.guard.as_ref(),
&fake_borrow_temps,
scrutinee_span,
- Some(arm.span),
- Some(arm.scope),
- Some(match_scope),
+ Some((arm, match_scope)),
false,
);
@@ -410,12 +402,9 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
&mut self,
outer_source_info: SourceInfo,
candidate: Candidate<'_, 'tcx>,
- guard: Option<&Guard<'tcx>>,
fake_borrow_temps: &[(Place<'tcx>, Local)],
scrutinee_span: Span,
- arm_span: Option<Span>,
- arm_scope: Option<region::Scope>,
- match_scope: Option<region::Scope>,
+ arm_match_scope: Option<(&Arm<'tcx>, region::Scope)>,
storages_alive: bool,
) -> BasicBlock {
if candidate.subcandidates.is_empty() {
@@ -424,11 +413,9 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
self.bind_and_guard_matched_candidate(
candidate,
&[],
- guard,
fake_borrow_temps,
scrutinee_span,
- arm_span,
- match_scope,
+ arm_match_scope,
true,
storages_alive,
)
@@ -449,6 +436,7 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
// we lower the guard.
let target_block = self.cfg.start_new_block();
let mut schedule_drops = true;
+ let arm = arm_match_scope.unzip().0;
// We keep a stack of all of the bindings and type ascriptions
// from the parent candidates that we visit, that also need to
// be bound for each candidate.
@@ -456,21 +444,19 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
candidate,
&mut Vec::new(),
&mut |leaf_candidate, parent_bindings| {
- if let Some(arm_scope) = arm_scope {
- self.clear_top_scope(arm_scope);
+ if let Some(arm) = arm {
+ self.clear_top_scope(arm.scope);
}
let binding_end = self.bind_and_guard_matched_candidate(
leaf_candidate,
parent_bindings,
- guard,
&fake_borrow_temps,
scrutinee_span,
- arm_span,
- match_scope,
+ arm_match_scope,
schedule_drops,
storages_alive,
);
- if arm_scope.is_none() {
+ if arm.is_none() {
schedule_drops = false;
}
self.cfg.goto(binding_end, outer_source_info, target_block);
@@ -600,7 +586,7 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
while let Some(next) = {
for binding in &candidate_ref.bindings {
let local = self.var_local_id(binding.var_id, OutsideGuard);
- // `try_upvars_resolved` may fail if it is unable to resolve the given
+ // `try_to_place` may fail if it is unable to resolve the given
// `PlaceBuilder` inside a closure. In this case, we don't want to include
// a scrutinee place. `scrutinee_place_builder` will fail for destructured
// assignments. This is because a closure only captures the precise places
@@ -614,9 +600,7 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
// let (v1, v2) = foo;
// };
// ```
- if let Ok(match_pair_resolved) = initializer.clone().try_upvars_resolved(self) {
- let place = match_pair_resolved.into_place(self);
-
+ if let Some(place) = initializer.try_to_place(self) {
let Some(box LocalInfo::User(ClearCrossCrate::Set(BindingForm::Var(
VarBindingForm { opt_match_place: Some((ref mut match_place, _)), .. },
)))) = self.local_decls[local].local_info else {
@@ -636,12 +620,9 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
self.bind_pattern(
self.source_info(irrefutable_pat.span),
candidate,
- None,
&fake_borrow_temps,
irrefutable_pat.span,
None,
- None,
- None,
false,
)
.unit()
@@ -657,7 +638,7 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
mut visibility_scope: Option<SourceScope>,
scope_span: Span,
pattern: &Pat<'tcx>,
- has_guard: ArmHasGuard,
+ guard: Option<&Guard<'tcx>>,
opt_match_place: Option<(Option<&Place<'tcx>>, Span)>,
) -> Option<SourceScope> {
self.visit_primary_bindings(
@@ -679,12 +660,16 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
var,
ty,
user_ty,
- has_guard,
+ ArmHasGuard(guard.is_some()),
opt_match_place.map(|(x, y)| (x.cloned(), y)),
pattern.span,
);
},
);
+ if let Some(Guard::IfLet(guard_pat, _)) = guard {
+ // FIXME: pass a proper `opt_match_place`
+ self.declare_bindings(visibility_scope, scope_span, guard_pat, None, None);
+ }
visibility_scope
}
@@ -1352,7 +1337,6 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
bug!("Or-patterns should have been sorted to the end");
};
let or_span = match_pair.pattern.span;
- let place = match_pair.place;
first_candidate.visit_leaves(|leaf_candidate| {
self.test_or_pattern(
@@ -1360,7 +1344,7 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
&mut otherwise,
pats,
or_span,
- place.clone(),
+ &match_pair.place,
fake_borrows,
);
});
@@ -1388,7 +1372,7 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
otherwise: &mut Option<BasicBlock>,
pats: &'pat [Box<Pat<'tcx>>],
or_span: Span,
- place: PlaceBuilder<'tcx>,
+ place: &PlaceBuilder<'tcx>,
fake_borrows: &mut Option<FxIndexSet<Place<'tcx>>>,
) {
debug!("candidate={:#?}\npats={:#?}", candidate, pats);
@@ -1606,10 +1590,9 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
}
// Insert a Shallow borrow of any places that is switched on.
- if let Some(fb) = fake_borrows && let Ok(match_place_resolved) =
- match_place.clone().try_upvars_resolved(self)
+ if let Some(fb) = fake_borrows
+ && let Some(resolved_place) = match_place.try_to_place(self)
{
- let resolved_place = match_place_resolved.into_place(self);
fb.insert(resolved_place);
}
@@ -1628,7 +1611,7 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
// encounter a candidate where the test is not relevant; at
// that point, we stop sorting.
while let Some(candidate) = candidates.first_mut() {
- let Some(idx) = self.sort_candidate(&match_place.clone(), &test, candidate) else {
+ let Some(idx) = self.sort_candidate(&match_place, &test, candidate) else {
break;
};
let (candidate, rest) = candidates.split_first_mut().unwrap();
@@ -1697,7 +1680,7 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
target_blocks
};
- self.perform_test(span, scrutinee_span, block, match_place, &test, make_target_blocks);
+ self.perform_test(span, scrutinee_span, block, &match_place, &test, make_target_blocks);
}
/// Determine the fake borrows that are needed from a set of places that
@@ -1778,6 +1761,8 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
// Pat binding - used for `let` and function parameters as well.
impl<'a, 'tcx> Builder<'a, 'tcx> {
+ /// If the bindings have already been declared, set `declare_bindings` to
+ /// `false` to avoid duplicate binding declarations. Used for if-let guards.
pub(crate) fn lower_let_expr(
&mut self,
mut block: BasicBlock,
@@ -1786,6 +1771,7 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
else_target: region::Scope,
source_scope: Option<SourceScope>,
span: Span,
+ declare_bindings: bool,
) -> BlockAnd<()> {
let expr_span = expr.span;
let expr_place_builder = unpack!(block = self.lower_scrutinee(block, expr, expr_span));
@@ -1800,32 +1786,21 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
false,
&mut [&mut guard_candidate, &mut otherwise_candidate],
);
- let mut opt_expr_place: Option<(Option<&Place<'tcx>>, Span)> = None;
- let expr_place: Place<'tcx>;
- if let Ok(expr_builder) = expr_place_builder.try_upvars_resolved(self) {
- expr_place = expr_builder.into_place(self);
- opt_expr_place = Some((Some(&expr_place), expr_span));
- }
+ let expr_place = expr_place_builder.try_to_place(self);
+ let opt_expr_place = expr_place.as_ref().map(|place| (Some(place), expr_span));
let otherwise_post_guard_block = otherwise_candidate.pre_binding_block.unwrap();
self.break_for_else(otherwise_post_guard_block, else_target, self.source_info(expr_span));
- self.declare_bindings(
- source_scope,
- pat.span.to(span),
- pat,
- ArmHasGuard(false),
- opt_expr_place,
- );
+ if declare_bindings {
+ self.declare_bindings(source_scope, pat.span.to(span), pat, None, opt_expr_place);
+ }
let post_guard_block = self.bind_pattern(
self.source_info(pat.span),
guard_candidate,
- None,
&fake_borrow_temps,
expr.span,
None,
- None,
- None,
false,
);
@@ -1844,11 +1819,9 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
&mut self,
candidate: Candidate<'pat, 'tcx>,
parent_bindings: &[(Vec<Binding<'tcx>>, Vec<Ascription<'tcx>>)],
- guard: Option<&Guard<'tcx>>,
fake_borrows: &[(Place<'tcx>, Local)],
scrutinee_span: Span,
- arm_span: Option<Span>,
- match_scope: Option<region::Scope>,
+ arm_match_scope: Option<(&Arm<'tcx>, region::Scope)>,
schedule_drops: bool,
storages_alive: bool,
) -> BasicBlock {
@@ -1960,7 +1933,9 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
// the reference that we create for the arm.
// * So we eagerly create the reference for the arm and then take a
// reference to that.
- if let Some(guard) = guard {
+ if let Some((arm, match_scope)) = arm_match_scope
+ && let Some(guard) = &arm.guard
+ {
let tcx = self.tcx;
let bindings = parent_bindings
.iter()
@@ -1981,8 +1956,6 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
self.cfg.push_assign(block, scrutinee_source_info, Place::from(temp), borrow);
}
- let arm_span = arm_span.unwrap();
- let match_scope = match_scope.unwrap();
let mut guard_span = rustc_span::DUMMY_SP;
let (post_guard_block, otherwise_post_guard_block) =
@@ -1995,13 +1968,13 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
e,
None,
match_scope,
- this.source_info(arm_span),
+ this.source_info(arm.span),
)
}
Guard::IfLet(ref pat, scrutinee) => {
let s = &this.thir[scrutinee];
guard_span = s.span;
- this.lower_let_expr(block, s, pat, match_scope, None, arm_span)
+ this.lower_let_expr(block, s, pat, match_scope, None, arm.span, false)
}
});
@@ -2317,24 +2290,18 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
let matching = this.bind_pattern(
this.source_info(pattern.span),
candidate,
- None,
&fake_borrow_temps,
initializer_span,
None,
- None,
- None,
true,
);
// This block is for the failure case
let failure = this.bind_pattern(
this.source_info(else_block_span),
wildcard,
- None,
&fake_borrow_temps,
initializer_span,
None,
- None,
- None,
true,
);
this.break_for_else(failure, *let_else_scope, this.source_info(initializer_span));
diff --git a/compiler/rustc_mir_build/src/build/matches/simplify.rs b/compiler/rustc_mir_build/src/build/matches/simplify.rs
index 924d2f555..f6b1955fd 100644
--- a/compiler/rustc_mir_build/src/build/matches/simplify.rs
+++ b/compiler/rustc_mir_build/src/build/matches/simplify.rs
@@ -73,8 +73,7 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
{
existing_bindings.extend_from_slice(&new_bindings);
mem::swap(&mut candidate.bindings, &mut existing_bindings);
- candidate.subcandidates =
- self.create_or_subcandidates(candidate, place.clone(), pats);
+ candidate.subcandidates = self.create_or_subcandidates(candidate, &place, pats);
return true;
}
@@ -127,7 +126,7 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
fn create_or_subcandidates<'pat>(
&mut self,
candidate: &Candidate<'pat, 'tcx>,
- place: PlaceBuilder<'tcx>,
+ place: &PlaceBuilder<'tcx>,
pats: &'pat [Box<Pat<'tcx>>],
) -> Vec<Candidate<'pat, 'tcx>> {
pats.iter()
@@ -156,10 +155,10 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
ascription: thir::Ascription { ref annotation, variance },
} => {
// Apply the type ascription to the value at `match_pair.place`, which is the
- if let Ok(place_resolved) = match_pair.place.clone().try_upvars_resolved(self) {
+ if let Some(source) = match_pair.place.try_to_place(self) {
candidate.ascriptions.push(Ascription {
annotation: annotation.clone(),
- source: place_resolved.into_place(self),
+ source,
variance,
});
}
@@ -183,10 +182,10 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
ref subpattern,
is_primary: _,
} => {
- if let Ok(place_resolved) = match_pair.place.clone().try_upvars_resolved(self) {
+ if let Some(source) = match_pair.place.try_to_place(self) {
candidate.bindings.push(Binding {
span: match_pair.pattern.span,
- source: place_resolved.into_place(self),
+ source,
var_id: var,
binding_mode: mode,
});
@@ -264,10 +263,10 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
let irrefutable = adt_def.variants().iter_enumerated().all(|(i, v)| {
i == variant_index || {
self.tcx.features().exhaustive_patterns
- && v.inhabited_predicate(self.tcx, adt_def)
+ && !v
+ .inhabited_predicate(self.tcx, adt_def)
.subst(self.tcx, substs)
- .apply_any_module(self.tcx, self.param_env)
- != Some(true)
+ .apply_ignore_module(self.tcx, self.param_env)
}
}) && (adt_def.did().is_local()
|| !adt_def.is_variant_list_non_exhaustive());
diff --git a/compiler/rustc_mir_build/src/build/matches/test.rs b/compiler/rustc_mir_build/src/build/matches/test.rs
index b597ecfaa..58513bde2 100644
--- a/compiler/rustc_mir_build/src/build/matches/test.rs
+++ b/compiler/rustc_mir_build/src/build/matches/test.rs
@@ -150,11 +150,11 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
match_start_span: Span,
scrutinee_span: Span,
block: BasicBlock,
- place_builder: PlaceBuilder<'tcx>,
+ place_builder: &PlaceBuilder<'tcx>,
test: &Test<'tcx>,
make_target_blocks: impl FnOnce(&mut Self) -> Vec<BasicBlock>,
) {
- let place = place_builder.into_place(self);
+ let place = place_builder.to_place(self);
let place_ty = place.ty(&self.local_decls, self.tcx);
debug!(?place, ?place_ty,);
@@ -240,6 +240,39 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
}
TestKind::Eq { value, ty } => {
+ let tcx = self.tcx;
+ if let ty::Adt(def, _) = ty.kind() && Some(def.did()) == tcx.lang_items().string() {
+ if !tcx.features().string_deref_patterns {
+ bug!("matching on `String` went through without enabling string_deref_patterns");
+ }
+ let re_erased = tcx.lifetimes.re_erased;
+ let ref_string = self.temp(tcx.mk_imm_ref(re_erased, ty), test.span);
+ let ref_str_ty = tcx.mk_imm_ref(re_erased, tcx.types.str_);
+ let ref_str = self.temp(ref_str_ty, test.span);
+ let deref = tcx.require_lang_item(LangItem::Deref, None);
+ let method = trait_method(tcx, deref, sym::deref, [ty]);
+ let eq_block = self.cfg.start_new_block();
+ self.cfg.push_assign(block, source_info, ref_string, Rvalue::Ref(re_erased, BorrowKind::Shared, place));
+ self.cfg.terminate(
+ block,
+ source_info,
+ TerminatorKind::Call {
+ func: Operand::Constant(Box::new(Constant {
+ span: test.span,
+ user_ty: None,
+ literal: method,
+ })),
+ args: vec![Operand::Move(ref_string)],
+ destination: ref_str,
+ target: Some(eq_block),
+ cleanup: None,
+ from_hir_call: false,
+ fn_span: source_info.span
+ }
+ );
+ self.non_scalar_compare(eq_block, make_target_blocks, source_info, value, ref_str, ref_str_ty);
+ return;
+ }
if !ty.is_scalar() {
// Use `PartialEq::eq` instead of `BinOp::Eq`
// (the binop can only handle primitives)
@@ -411,8 +444,8 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
bug!("non_scalar_compare called on non-reference type: {}", ty);
};
- let eq_def_id = self.tcx.require_lang_item(LangItem::PartialEq, None);
- let method = trait_method(self.tcx, eq_def_id, sym::eq, deref_ty, &[deref_ty.into()]);
+ let eq_def_id = self.tcx.require_lang_item(LangItem::PartialEq, Some(source_info.span));
+ let method = trait_method(self.tcx, eq_def_id, sym::eq, [deref_ty, deref_ty]);
let bool_ty = self.tcx.types.bool;
let eq_result = self.temp(bool_ty, source_info.span);
@@ -727,7 +760,8 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
let downcast_place = match_pair.place.downcast(adt_def, variant_index); // `(x as Variant)`
let consequent_match_pairs = subpatterns.iter().map(|subpattern| {
// e.g., `(x as Variant).0`
- let place = downcast_place.clone().field(subpattern.field, subpattern.pattern.ty);
+ let place = downcast_place
+ .clone_project(PlaceElem::Field(subpattern.field, subpattern.pattern.ty));
// e.g., `(x as Variant).0 @ P1`
MatchPair::new(place, &subpattern.pattern, self)
});
@@ -804,10 +838,9 @@ fn trait_method<'tcx>(
tcx: TyCtxt<'tcx>,
trait_def_id: DefId,
method_name: Symbol,
- self_ty: Ty<'tcx>,
- params: &[GenericArg<'tcx>],
+ substs: impl IntoIterator<Item = impl Into<GenericArg<'tcx>>>,
) -> ConstantKind<'tcx> {
- let substs = tcx.mk_substs_trait(self_ty, params);
+ let substs = tcx.mk_substs(substs.into_iter().map(Into::into));
// The unhygienic comparison here is acceptable because this is only
// used on known traits.
@@ -817,8 +850,7 @@ fn trait_method<'tcx>(
.find(|item| item.kind == ty::AssocKind::Fn)
.expect("trait method not found");
- let method_ty = tcx.bound_type_of(item.def_id);
- let method_ty = method_ty.subst(tcx, substs);
+ let method_ty = tcx.mk_fn_def(item.def_id, substs);
ConstantKind::zero_sized(method_ty)
}
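
The `TestKind::Eq` handling above adds a fast path for `String` scrutinees: borrow the `String`, call `Deref::deref` via the lang item to obtain a `&str`, and hand the result to `non_scalar_compare`. Purely as an illustration (not taken from this patch), the nightly surface syntax that exercises this lowering would look roughly like the following, behind the `string_deref_patterns` feature gate named in the check above:

    #![feature(string_deref_patterns)]

    fn main() {
        let owned = String::from("hello");
        // With the feature gate enabled, a `String` scrutinee can be matched
        // directly against `&str` literal patterns; the lowering above derefs
        // the `String` to `&str` before performing the comparison.
        match owned {
            "hello" => println!("matched a String against a str literal"),
            _ => println!("no match"),
        }
    }
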
diff --git a/compiler/rustc_mir_build/src/build/matches/util.rs b/compiler/rustc_mir_build/src/build/matches/util.rs
index b854ba47f..bd435f9ab 100644
--- a/compiler/rustc_mir_build/src/build/matches/util.rs
+++ b/compiler/rustc_mir_build/src/build/matches/util.rs
@@ -18,7 +18,8 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
subpatterns
.iter()
.map(|fieldpat| {
- let place = place.clone().field(fieldpat.field, fieldpat.pattern.ty);
+ let place =
+ place.clone_project(PlaceElem::Field(fieldpat.field, fieldpat.pattern.ty));
MatchPair::new(place, &fieldpat.pattern, self)
})
.collect()
@@ -33,26 +34,24 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
suffix: &'pat [Box<Pat<'tcx>>],
) {
let tcx = self.tcx;
- let (min_length, exact_size) =
- if let Ok(place_resolved) = place.clone().try_upvars_resolved(self) {
- match place_resolved.into_place(self).ty(&self.local_decls, tcx).ty.kind() {
- ty::Array(_, length) => (length.eval_usize(tcx, self.param_env), true),
- _ => ((prefix.len() + suffix.len()).try_into().unwrap(), false),
- }
- } else {
- ((prefix.len() + suffix.len()).try_into().unwrap(), false)
- };
+ let (min_length, exact_size) = if let Some(place_resolved) = place.try_to_place(self) {
+ match place_resolved.ty(&self.local_decls, tcx).ty.kind() {
+ ty::Array(_, length) => (length.eval_usize(tcx, self.param_env), true),
+ _ => ((prefix.len() + suffix.len()).try_into().unwrap(), false),
+ }
+ } else {
+ ((prefix.len() + suffix.len()).try_into().unwrap(), false)
+ };
match_pairs.extend(prefix.iter().enumerate().map(|(idx, subpattern)| {
let elem =
ProjectionElem::ConstantIndex { offset: idx as u64, min_length, from_end: false };
- let place = place.clone().project(elem);
- MatchPair::new(place, subpattern, self)
+ MatchPair::new(place.clone_project(elem), subpattern, self)
}));
if let Some(subslice_pat) = opt_slice {
let suffix_len = suffix.len() as u64;
- let subslice = place.clone().project(ProjectionElem::Subslice {
+ let subslice = place.clone_project(PlaceElem::Subslice {
from: prefix.len() as u64,
to: if exact_size { min_length - suffix_len } else { suffix_len },
from_end: !exact_size,
@@ -67,7 +66,7 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
min_length,
from_end: !exact_size,
};
- let place = place.clone().project(elem);
+ let place = place.clone_project(elem);
MatchPair::new(place, subpattern, self)
}));
}
@@ -97,15 +96,15 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
impl<'pat, 'tcx> MatchPair<'pat, 'tcx> {
pub(in crate::build) fn new(
- place: PlaceBuilder<'tcx>,
+ mut place: PlaceBuilder<'tcx>,
pattern: &'pat Pat<'tcx>,
cx: &Builder<'_, 'tcx>,
) -> MatchPair<'pat, 'tcx> {
// Force the place type to the pattern's type.
// FIXME(oli-obk): can we use this to simplify slice/array pattern hacks?
- let mut place = match place.try_upvars_resolved(cx) {
- Ok(val) | Err(val) => val,
- };
+ if let Some(resolved) = place.resolve_upvar(cx) {
+ place = resolved;
+ }
// Only add the OpaqueCast projection if the given place is an opaque type and the
// expected type from the pattern is not.
diff --git a/compiler/rustc_mir_build/src/build/misc.rs b/compiler/rustc_mir_build/src/build/misc.rs
index 86f466ff7..baeb2718c 100644
--- a/compiler/rustc_mir_build/src/build/misc.rs
+++ b/compiler/rustc_mir_build/src/build/misc.rs
@@ -34,8 +34,8 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
Operand::Constant(constant)
}
- // Returns a zero literal operand for the appropriate type, works for
- // bool, char and integers.
+ /// Returns a zero literal operand for the appropriate type, works for
+ /// bool, char and integers.
pub(crate) fn zero_literal(&mut self, span: Span, ty: Ty<'tcx>) -> Operand<'tcx> {
let literal = ConstantKind::from_bits(self.tcx, 0, ty::ParamEnv::empty().and(ty));
diff --git a/compiler/rustc_mir_build/src/build/mod.rs b/compiler/rustc_mir_build/src/build/mod.rs
index cbcf9cd12..007f3b55e 100644
--- a/compiler/rustc_mir_build/src/build/mod.rs
+++ b/compiler/rustc_mir_build/src/build/mod.rs
@@ -481,6 +481,22 @@ fn construct_fn<'tcx>(
(None, fn_sig.output())
};
+ if let Some(custom_mir_attr) =
+ tcx.hir().attrs(fn_id).iter().find(|attr| attr.name_or_empty() == sym::custom_mir)
+ {
+ return custom::build_custom_mir(
+ tcx,
+ fn_def.did.to_def_id(),
+ thir,
+ expr,
+ arguments,
+ return_ty,
+ return_ty_span,
+ span_with_body,
+ custom_mir_attr,
+ );
+ }
+
let infcx = tcx.infer_ctxt().build();
let mut builder = Builder::new(
thir,
@@ -908,7 +924,7 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
scope,
expr.span,
&pat,
- matches::ArmHasGuard(false),
+ None,
Some((Some(&place), span)),
);
let place_builder = PlaceBuilder::from(local);
@@ -932,20 +948,12 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
original_source_scope: SourceScope,
pattern_span: Span,
) {
- let tcx = self.tcx;
- let current_root = tcx.maybe_lint_level_root_bounded(arg_hir_id, self.hir_id);
- let parent_root = tcx.maybe_lint_level_root_bounded(
- self.source_scopes[original_source_scope]
- .local_data
- .as_ref()
- .assert_crate_local()
- .lint_root,
- self.hir_id,
- );
- if current_root != parent_root {
- self.source_scope =
- self.new_source_scope(pattern_span, LintLevel::Explicit(current_root), None);
- }
+ let parent_id = self.source_scopes[original_source_scope]
+ .local_data
+ .as_ref()
+ .assert_crate_local()
+ .lint_root;
+ self.maybe_new_source_scope(pattern_span, None, arg_hir_id, parent_id);
}
fn get_unit_temp(&mut self) -> Place<'tcx> {
@@ -1033,6 +1041,7 @@ pub(crate) fn parse_float_into_scalar(
mod block;
mod cfg;
+mod custom;
mod expr;
mod matches;
mod misc;
diff --git a/compiler/rustc_mir_build/src/build/scope.rs b/compiler/rustc_mir_build/src/build/scope.rs
index 3cebd5ebe..33f49ffda 100644
--- a/compiler/rustc_mir_build/src/build/scope.rs
+++ b/compiler/rustc_mir_build/src/build/scope.rs
@@ -85,6 +85,7 @@ use std::mem;
use crate::build::{BlockAnd, BlockAndExtension, BlockFrame, Builder, CFG};
use rustc_data_structures::fx::FxHashMap;
+use rustc_hir::HirId;
use rustc_index::vec::IndexVec;
use rustc_middle::middle::region;
use rustc_middle::mir::*;
@@ -443,8 +444,9 @@ impl<'tcx> Scopes<'tcx> {
impl<'a, 'tcx> Builder<'a, 'tcx> {
// Adding and removing scopes
// ==========================
- // Start a breakable scope, which tracks where `continue`, `break` and
- // `return` should branch to.
+
+ /// Start a breakable scope, which tracks where `continue`, `break` and
+ /// `return` should branch to.
pub(crate) fn in_breakable_scope<F>(
&mut self,
loop_block: Option<BasicBlock>,
@@ -566,25 +568,10 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
F: FnOnce(&mut Builder<'a, 'tcx>) -> BlockAnd<R>,
{
let source_scope = self.source_scope;
- let tcx = self.tcx;
if let LintLevel::Explicit(current_hir_id) = lint_level {
- // Use `maybe_lint_level_root_bounded` with `root_lint_level` as a bound
- // to avoid adding Hir dependencies on our parents.
- // We estimate the true lint roots here to avoid creating a lot of source scopes.
-
- let parent_root = tcx.maybe_lint_level_root_bounded(
- self.source_scopes[source_scope].local_data.as_ref().assert_crate_local().lint_root,
- self.hir_id,
- );
- let current_root = tcx.maybe_lint_level_root_bounded(current_hir_id, self.hir_id);
-
- if parent_root != current_root {
- self.source_scope = self.new_source_scope(
- region_scope.1.span,
- LintLevel::Explicit(current_root),
- None,
- );
- }
+ let parent_id =
+ self.source_scopes[source_scope].local_data.as_ref().assert_crate_local().lint_root;
+ self.maybe_new_source_scope(region_scope.1.span, None, current_hir_id, parent_id);
}
self.push_scope(region_scope);
let mut block;
@@ -757,6 +744,40 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
))
}
+ /// Possibly creates a new source scope if `current_root` and `parent_root`
+ /// are different, or if -Zmaximal-hir-to-mir-coverage is enabled.
+ pub(crate) fn maybe_new_source_scope(
+ &mut self,
+ span: Span,
+ safety: Option<Safety>,
+ current_id: HirId,
+ parent_id: HirId,
+ ) {
+ let (current_root, parent_root) =
+ if self.tcx.sess.opts.unstable_opts.maximal_hir_to_mir_coverage {
+ // Some consumers of rustc need to map MIR locations back to HIR nodes. Currently
+ // the only part of rustc that tracks MIR -> HIR is the `SourceScopeLocalData::lint_root`
+ // field that tracks lint levels for MIR locations. Normally the number of source scopes
+ // is limited to the set of nodes with lint annotations. The -Zmaximal-hir-to-mir-coverage
+ // flag changes this behavior to maximize the number of source scopes, increasing the
+ // granularity of the MIR->HIR mapping.
+ (current_id, parent_id)
+ } else {
+ // Use `maybe_lint_level_root_bounded` with `self.hir_id` as a bound
+ // to avoid adding Hir dependencies on our parents.
+ // We estimate the true lint roots here to avoid creating a lot of source scopes.
+ (
+ self.tcx.maybe_lint_level_root_bounded(current_id, self.hir_id),
+ self.tcx.maybe_lint_level_root_bounded(parent_id, self.hir_id),
+ )
+ };
+
+ if current_root != parent_root {
+ let lint_level = LintLevel::Explicit(current_root);
+ self.source_scope = self.new_source_scope(span, lint_level, safety);
+ }
+ }
+
/// Creates a new source scope, nested in the current one.
pub(crate) fn new_source_scope(
&mut self,
@@ -799,6 +820,7 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
// Finding scopes
// ==============
+
/// Returns the scope that we should use as the lifetime of an
/// operand. Basically, an operand must live until it is consumed.
/// This is similar to, but not quite the same as, the temporary
@@ -824,6 +846,7 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
// Scheduling drops
// ================
+
pub(crate) fn schedule_drop_storage_and_value(
&mut self,
span: Span,
@@ -996,6 +1019,7 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
// Other
// =====
+
/// Returns the [DropIdx] for the innermost drop if the function unwound at
/// this point. The `DropIdx` will be created if it doesn't already exist.
fn diverge_cleanup(&mut self) -> DropIdx {
diff --git a/compiler/rustc_mir_build/src/lib.rs b/compiler/rustc_mir_build/src/lib.rs
index b53bd3d07..879752945 100644
--- a/compiler/rustc_mir_build/src/lib.rs
+++ b/compiler/rustc_mir_build/src/lib.rs
@@ -3,6 +3,7 @@
//! This crate also contains the match exhaustiveness and usefulness checking.
#![allow(rustc::potential_query_instability)]
#![feature(assert_matches)]
+#![feature(associated_type_bounds)]
#![feature(box_patterns)]
#![feature(control_flow_enum)]
#![feature(if_let_guard)]
diff --git a/compiler/rustc_mir_build/src/thir/constant.rs b/compiler/rustc_mir_build/src/thir/constant.rs
index f626571b5..a9ed945d4 100644
--- a/compiler/rustc_mir_build/src/thir/constant.rs
+++ b/compiler/rustc_mir_build/src/thir/constant.rs
@@ -1,6 +1,7 @@
use rustc_ast as ast;
use rustc_middle::mir::interpret::{LitToConstError, LitToConstInput};
use rustc_middle::ty::{self, ParamEnv, ScalarInt, TyCtxt};
+use rustc_span::DUMMY_SP;
pub(crate) fn lit_to_const<'tcx>(
tcx: TyCtxt<'tcx>,
@@ -10,7 +11,15 @@ pub(crate) fn lit_to_const<'tcx>(
let trunc = |n| {
let param_ty = ParamEnv::reveal_all().and(ty);
- let width = tcx.layout_of(param_ty).map_err(|_| LitToConstError::Reported)?.size;
+ let width = tcx
+ .layout_of(param_ty)
+ .map_err(|_| {
+ LitToConstError::Reported(tcx.sess.delay_span_bug(
+ DUMMY_SP,
+ format!("couldn't compute width of literal: {:?}", lit_input.lit),
+ ))
+ })?
+ .size;
trace!("trunc {} with size {} and shift {}", n, width.bits(), 128 - width.bits());
let result = width.truncate(n);
trace!("trunc result: {}", result);
@@ -44,9 +53,13 @@ pub(crate) fn lit_to_const<'tcx>(
}
(ast::LitKind::Bool(b), ty::Bool) => ty::ValTree::from_scalar_int((*b).into()),
(ast::LitKind::Char(c), ty::Char) => ty::ValTree::from_scalar_int((*c).into()),
- (ast::LitKind::Err, _) => return Err(LitToConstError::Reported),
+ (ast::LitKind::Err, _) => {
+ return Err(LitToConstError::Reported(
+ tcx.sess.delay_span_bug(DUMMY_SP, "encountered LitKind::Err during mir build"),
+ ));
+ }
_ => return Err(LitToConstError::TypeError),
};
- Ok(ty::Const::from_value(tcx, valtree, ty))
+ Ok(tcx.mk_const(valtree, ty))
}
diff --git a/compiler/rustc_mir_build/src/thir/cx/expr.rs b/compiler/rustc_mir_build/src/thir/cx/expr.rs
index c7a7c3e3f..261b95ba9 100644
--- a/compiler/rustc_mir_build/src/thir/cx/expr.rs
+++ b/compiler/rustc_mir_build/src/thir/cx/expr.rs
@@ -14,11 +14,10 @@ use rustc_middle::thir::*;
use rustc_middle::ty::adjustment::{
Adjust, Adjustment, AutoBorrow, AutoBorrowMutability, PointerCast,
};
-use rustc_middle::ty::subst::{InternalSubsts, SubstsRef};
+use rustc_middle::ty::subst::InternalSubsts;
use rustc_middle::ty::{
self, AdtKind, InlineConstSubsts, InlineConstSubstsParts, ScalarInt, Ty, UpvarSubsts, UserType,
};
-use rustc_span::def_id::DefId;
use rustc_span::Span;
use rustc_target::abi::VariantIdx;
@@ -51,11 +50,17 @@ impl<'tcx> Cx<'tcx> {
trace!(?expr.ty);
// Now apply adjustments, if any.
- for adjustment in self.typeck_results.expr_adjustments(hir_expr) {
- trace!(?expr, ?adjustment);
- let span = expr.span;
- expr =
- self.apply_adjustment(hir_expr, expr, adjustment, adjustment_span.unwrap_or(span));
+ if self.apply_adjustments {
+ for adjustment in self.typeck_results.expr_adjustments(hir_expr) {
+ trace!(?expr, ?adjustment);
+ let span = expr.span;
+ expr = self.apply_adjustment(
+ hir_expr,
+ expr,
+ adjustment,
+ adjustment_span.unwrap_or(span),
+ );
+ }
}
trace!(?expr.ty, "after adjustments");
@@ -481,7 +486,7 @@ impl<'tcx> Cx<'tcx> {
substs,
user_ty,
fields: self.field_refs(fields),
- base: base.as_ref().map(|base| FruInfo {
+ base: base.map(|base| FruInfo {
base: self.mirror_expr(base),
field_types: self.typeck_results().fru_field_types()[expr.hir_id]
.iter()
@@ -584,7 +589,7 @@ impl<'tcx> Cx<'tcx> {
InlineAsmOperand::Out {
reg,
late,
- expr: expr.as_ref().map(|expr| self.mirror_expr(expr)),
+ expr: expr.map(|expr| self.mirror_expr(expr)),
}
}
hir::InlineAsmOperand::InOut { reg, late, ref expr } => {
@@ -599,27 +604,25 @@ impl<'tcx> Cx<'tcx> {
reg,
late,
in_expr: self.mirror_expr(in_expr),
- out_expr: out_expr.as_ref().map(|expr| self.mirror_expr(expr)),
+ out_expr: out_expr.map(|expr| self.mirror_expr(expr)),
},
hir::InlineAsmOperand::Const { ref anon_const } => {
- let anon_const_def_id = tcx.hir().local_def_id(anon_const.hir_id);
let value = mir::ConstantKind::from_anon_const(
tcx,
- anon_const_def_id,
+ anon_const.def_id,
self.param_env,
);
- let span = tcx.hir().span(anon_const.hir_id);
+ let span = tcx.def_span(anon_const.def_id);
InlineAsmOperand::Const { value, span }
}
hir::InlineAsmOperand::SymFn { ref anon_const } => {
- let anon_const_def_id = tcx.hir().local_def_id(anon_const.hir_id);
let value = mir::ConstantKind::from_anon_const(
tcx,
- anon_const_def_id,
+ anon_const.def_id,
self.param_env,
);
- let span = tcx.hir().span(anon_const.hir_id);
+ let span = tcx.def_span(anon_const.def_id);
InlineAsmOperand::SymFn { value, span }
}
@@ -634,7 +637,7 @@ impl<'tcx> Cx<'tcx> {
hir::ExprKind::ConstBlock(ref anon_const) => {
let ty = self.typeck_results().node_type(anon_const.hir_id);
- let did = tcx.hir().local_def_id(anon_const.hir_id).to_def_id();
+ let did = anon_const.def_id.to_def_id();
let typeck_root_def_id = tcx.typeck_root_def_id(did);
let parent_substs =
tcx.erase_regions(InternalSubsts::identity_for_item(tcx, typeck_root_def_id));
@@ -653,13 +656,11 @@ impl<'tcx> Cx<'tcx> {
ExprKind::Repeat { value: self.mirror_expr(v), count: *count }
}
- hir::ExprKind::Ret(ref v) => {
- ExprKind::Return { value: v.as_ref().map(|v| self.mirror_expr(v)) }
- }
+ hir::ExprKind::Ret(ref v) => ExprKind::Return { value: v.map(|v| self.mirror_expr(v)) },
hir::ExprKind::Break(dest, ref value) => match dest.target_id {
Ok(target_id) => ExprKind::Break {
label: region::Scope { id: target_id.local_id, data: region::ScopeData::Node },
- value: value.as_ref().map(|value| self.mirror_expr(value)),
+ value: value.map(|value| self.mirror_expr(value)),
},
Err(err) => bug!("invalid loop id for break: {}", err),
},
@@ -703,7 +704,7 @@ impl<'tcx> Cx<'tcx> {
hir::ExprKind::Field(ref source, ..) => ExprKind::Field {
lhs: self.mirror_expr(source),
variant_index: VariantIdx::new(0),
- name: Field::new(tcx.field_index(expr.hir_id, self.typeck_results)),
+ name: Field::new(self.typeck_results.field_index(expr.hir_id)),
},
hir::ExprKind::Cast(ref source, ref cast_ty) => {
// Check for a user-given type annotation on this `cast`
@@ -802,12 +803,12 @@ impl<'tcx> Cx<'tcx> {
&mut self,
expr: &hir::Expr<'_>,
span: Span,
- overloaded_callee: Option<(DefId, SubstsRef<'tcx>)>,
+ overloaded_callee: Option<Ty<'tcx>>,
) -> Expr<'tcx> {
let temp_lifetime =
self.rvalue_scopes.temporary_scope(self.region_scope_tree, expr.hir_id.local_id);
- let (def_id, substs, user_ty) = match overloaded_callee {
- Some((def_id, substs)) => (def_id, substs, None),
+ let (ty, user_ty) = match overloaded_callee {
+ Some(fn_def) => (fn_def, None),
None => {
let (kind, def_id) =
self.typeck_results().type_dependent_def(expr.hir_id).unwrap_or_else(|| {
@@ -815,10 +816,12 @@ impl<'tcx> Cx<'tcx> {
});
let user_ty = self.user_substs_applied_to_res(expr.hir_id, Res::Def(kind, def_id));
debug!("method_callee: user_ty={:?}", user_ty);
- (def_id, self.typeck_results().node_substs(expr.hir_id), user_ty)
+ (
+ self.tcx().mk_fn_def(def_id, self.typeck_results().node_substs(expr.hir_id)),
+ user_ty,
+ )
}
};
- let ty = self.tcx().mk_fn_def(def_id, substs);
Expr { temp_lifetime, ty, span, kind: ExprKind::ZstLiteral { user_ty } }
}
@@ -853,9 +856,7 @@ impl<'tcx> Cx<'tcx> {
Res::Def(DefKind::ConstParam, def_id) => {
let hir_id = self.tcx.hir().local_def_id_to_hir_id(def_id.expect_local());
- let item_id = self.tcx.hir().get_parent_node(hir_id);
- let item_def_id = self.tcx.hir().local_def_id(item_id);
- let generics = self.tcx.generics_of(item_def_id);
+ let generics = self.tcx.generics_of(hir_id.owner);
let index = generics.param_def_id_to_index[&def_id];
let name = self.tcx.hir().name(hir_id);
let param = ty::ParamConst::new(index, name);
@@ -955,7 +956,7 @@ impl<'tcx> Cx<'tcx> {
&mut self,
expr: &'tcx hir::Expr<'tcx>,
place_ty: Ty<'tcx>,
- overloaded_callee: Option<(DefId, SubstsRef<'tcx>)>,
+ overloaded_callee: Option<Ty<'tcx>>,
args: Box<[ExprId]>,
span: Span,
) -> ExprKind<'tcx> {
@@ -1078,7 +1079,7 @@ impl<'tcx> Cx<'tcx> {
fields
.iter()
.map(|field| FieldExpr {
- name: Field::new(self.tcx.field_index(field.hir_id, self.typeck_results)),
+ name: Field::new(self.typeck_results.field_index(field.hir_id)),
expr: self.mirror_expr(field.expr),
})
.collect()
diff --git a/compiler/rustc_mir_build/src/thir/cx/mod.rs b/compiler/rustc_mir_build/src/thir/cx/mod.rs
index 1d95d6b53..b5c4b7b13 100644
--- a/compiler/rustc_mir_build/src/thir/cx/mod.rs
+++ b/compiler/rustc_mir_build/src/thir/cx/mod.rs
@@ -80,6 +80,9 @@ struct Cx<'tcx> {
/// for the receiver.
adjustment_span: Option<(HirId, Span)>,
+ /// False to indicate that adjustments should not be applied. Only used for `custom_mir`.
+ apply_adjustments: bool,
+
/// The `DefId` of the owner of this body.
body_owner: DefId,
}
@@ -87,6 +90,8 @@ struct Cx<'tcx> {
impl<'tcx> Cx<'tcx> {
fn new(tcx: TyCtxt<'tcx>, def: ty::WithOptConstParam<LocalDefId>) -> Cx<'tcx> {
let typeck_results = tcx.typeck_opt_const_arg(def);
+ let did = def.did;
+ let hir = tcx.hir();
Cx {
tcx,
thir: Thir::new(),
@@ -94,8 +99,12 @@ impl<'tcx> Cx<'tcx> {
region_scope_tree: tcx.region_scope_tree(def.did),
typeck_results,
rvalue_scopes: &typeck_results.rvalue_scopes,
- body_owner: def.did.to_def_id(),
+ body_owner: did.to_def_id(),
adjustment_span: None,
+ apply_adjustments: hir
+ .attrs(hir.local_def_id_to_hir_id(did))
+ .iter()
+ .all(|attr| attr.name_or_empty() != rustc_span::sym::custom_mir),
}
}
diff --git a/compiler/rustc_mir_build/src/thir/pattern/check_match.rs b/compiler/rustc_mir_build/src/thir/pattern/check_match.rs
index 858129c74..e369dba55 100644
--- a/compiler/rustc_mir_build/src/thir/pattern/check_match.rs
+++ b/compiler/rustc_mir_build/src/thir/pattern/check_match.rs
@@ -79,7 +79,10 @@ impl<'tcx> Visitor<'tcx> for MatchVisitor<'_, '_, 'tcx> {
intravisit::walk_local(self, loc);
let els = loc.els;
if let Some(init) = loc.init && els.is_some() {
- self.check_let(&loc.pat, init, loc.span);
+ // Build a span without the else { ... } as we don't want to underline
+ // the entire else block in the IDE setting.
+ let span = loc.span.with_hi(init.span.hi());
+ self.check_let(&loc.pat, init, span);
}
let (msg, sp) = match loc.source {
@@ -507,7 +510,7 @@ impl<'p, 'tcx> MatchVisitor<'_, 'p, 'tcx> {
_ => "aren't",
},
),
- " else { todo!() }".to_string(),
+ " else { todo!() }",
Applicability::HasPlaceholders,
);
}
@@ -561,7 +564,7 @@ fn check_for_bindings_named_same_as_variants(
&& let ty::Adt(edef, _) = pat_ty.kind()
&& edef.is_enum()
&& edef.variants().iter().any(|variant| {
- variant.ident(cx.tcx) == ident && variant.ctor_kind == CtorKind::Const
+ variant.ident(cx.tcx) == ident && variant.ctor_kind() == Some(CtorKind::Const)
})
{
let variant_count = edef.variants().len();
@@ -630,11 +633,6 @@ fn irrefutable_let_patterns(
count: usize,
span: Span,
) {
- let span = match source {
- LetSource::LetElse(span) => span,
- _ => span,
- };
-
macro_rules! emit_diag {
(
$lint:expr,
@@ -680,7 +678,7 @@ fn irrefutable_let_patterns(
"removing the guard and adding a `let` inside the match arm"
);
}
- LetSource::LetElse(..) => {
+ LetSource::LetElse => {
emit_diag!(
lint,
"`let...else`",
@@ -820,7 +818,7 @@ fn non_exhaustive_match<'p, 'tcx>(
}
}
if let ty::Ref(_, sub_ty, _) = scrut_ty.kind() {
- if cx.tcx.is_ty_uninhabited_from(cx.module, *sub_ty, cx.param_env) {
+ if !sub_ty.is_inhabited_from(cx.tcx, cx.module, cx.param_env) {
err.note("references are always considered inhabited");
}
}
@@ -1046,11 +1044,19 @@ fn check_borrow_conflicts_in_at_patterns(cx: &MatchVisitor<'_, '_, '_>, pat: &Pa
name,
typeck_results.node_type(pat.hir_id),
);
- sess.struct_span_err(pat.span, "borrow of moved value")
- .span_label(binding_span, format!("value moved into `{}` here", name))
+ let mut err = sess.struct_span_err(pat.span, "borrow of moved value");
+ err.span_label(binding_span, format!("value moved into `{}` here", name))
.span_label(binding_span, occurs_because)
- .span_labels(conflicts_ref, "value borrowed here after move")
- .emit();
+ .span_labels(conflicts_ref, "value borrowed here after move");
+ if pat.span.contains(binding_span) {
+ err.span_suggestion_verbose(
+ binding_span.shrink_to_lo(),
+ "borrow this binding in the pattern to avoid moving the value",
+ "ref ".to_string(),
+ Applicability::MachineApplicable,
+ );
+ }
+ err.emit();
}
return;
}
@@ -1127,7 +1133,7 @@ pub enum LetSource {
GenericLet,
IfLet,
IfLetGuard,
- LetElse(Span),
+ LetElse,
WhileLet,
}
@@ -1156,8 +1162,8 @@ fn let_source_parent(tcx: TyCtxt<'_>, parent: HirId, pat_id: Option<HirId>) -> L
let parent_parent = hir.get_parent_node(parent);
let parent_parent_node = hir.get(parent_parent);
match parent_parent_node {
- hir::Node::Stmt(hir::Stmt { kind: hir::StmtKind::Local(_), span, .. }) => {
- return LetSource::LetElse(*span);
+ hir::Node::Stmt(hir::Stmt { kind: hir::StmtKind::Local(_), .. }) => {
+ return LetSource::LetElse;
}
hir::Node::Arm(hir::Arm { guard: Some(hir::Guard::If(_)), .. }) => {
return LetSource::IfLetGuard;
diff --git a/compiler/rustc_mir_build/src/thir/pattern/const_to_pat.rs b/compiler/rustc_mir_build/src/thir/pattern/const_to_pat.rs
index ad12e0116..a21f6cd39 100644
--- a/compiler/rustc_mir_build/src/thir/pattern/const_to_pat.rs
+++ b/compiler/rustc_mir_build/src/thir/pattern/const_to_pat.rs
@@ -232,8 +232,7 @@ impl<'tcx> ConstToPat<'tcx> {
ObligationCause::misc(self.span, self.id),
partial_eq_trait_id,
0,
- ty,
- &[],
+ [ty, ty],
);
// FIXME: should this call a `predicate_must_hold` variant instead?
diff --git a/compiler/rustc_mir_build/src/thir/pattern/deconstruct_pat.rs b/compiler/rustc_mir_build/src/thir/pattern/deconstruct_pat.rs
index 595abc8f6..d60e8722c 100644
--- a/compiler/rustc_mir_build/src/thir/pattern/deconstruct_pat.rs
+++ b/compiler/rustc_mir_build/src/thir/pattern/deconstruct_pat.rs
@@ -42,16 +42,17 @@
//! wildcards, see [`SplitWildcard`]; for integer ranges, see [`SplitIntRange`]; for slices, see
//! [`SplitVarLenSlice`].
-use self::Constructor::*;
-use self::SliceKind::*;
+use std::cell::Cell;
+use std::cmp::{self, max, min, Ordering};
+use std::fmt;
+use std::iter::{once, IntoIterator};
+use std::ops::RangeInclusive;
-use super::compare_const_vals;
-use super::usefulness::{MatchCheckCtxt, PatCtxt};
+use smallvec::{smallvec, SmallVec};
use rustc_data_structures::captures::Captures;
-use rustc_index::vec::Idx;
-
use rustc_hir::{HirId, RangeEnd};
+use rustc_index::vec::Idx;
use rustc_middle::mir::{self, Field};
use rustc_middle::thir::{FieldPat, Pat, PatKind, PatRange};
use rustc_middle::ty::layout::IntegerExt;
@@ -61,12 +62,11 @@ use rustc_session::lint;
use rustc_span::{Span, DUMMY_SP};
use rustc_target::abi::{Integer, Size, VariantIdx};
-use smallvec::{smallvec, SmallVec};
-use std::cell::Cell;
-use std::cmp::{self, max, min, Ordering};
-use std::fmt;
-use std::iter::{once, IntoIterator};
-use std::ops::RangeInclusive;
+use self::Constructor::*;
+use self::SliceKind::*;
+
+use super::compare_const_vals;
+use super::usefulness::{MatchCheckCtxt, PatCtxt};
/// Recursively expand this pattern into its subpatterns. Only useful for or-patterns.
fn expand_or_pat<'p, 'tcx>(pat: &'p Pat<'tcx>) -> Vec<&'p Pat<'tcx>> {
@@ -147,11 +147,7 @@ impl IntRange {
// straight to the result, after doing a bit of checking. (We
// could remove this branch and just fall through, which
// is more general but much slower.)
- if let Ok(Ok(bits)) = scalar.to_bits_or_ptr_internal(target_size) {
- return Some(bits);
- } else {
- return None;
- }
+ return scalar.to_bits_or_ptr_internal(target_size).unwrap().left();
}
mir::ConstantKind::Ty(c) => match c.kind() {
ty::ConstKind::Value(_) => bug!(
diff --git a/compiler/rustc_mir_build/src/thir/pattern/mod.rs b/compiler/rustc_mir_build/src/thir/pattern/mod.rs
index 2526522a2..48a231a6c 100644
--- a/compiler/rustc_mir_build/src/thir/pattern/mod.rs
+++ b/compiler/rustc_mir_build/src/thir/pattern/mod.rs
@@ -216,7 +216,7 @@ impl<'a, 'tcx> PatCtxt<'a, 'tcx> {
let lo = lo_expr.map(|e| self.lower_range_expr(e));
let hi = hi_expr.map(|e| self.lower_range_expr(e));
- let (lp, hp) = (lo.as_ref().map(|x| &x.0), hi.as_ref().map(|x| &x.0));
+ let (lp, hp) = (lo.as_ref().map(|(x, _)| x), hi.as_ref().map(|(x, _)| x));
let mut kind = match self.normalize_range_pattern_ends(ty, lp, hp) {
Some((lc, hc)) => self.lower_pattern_range(ty, lc, hc, end, lo_span),
None => {
@@ -321,7 +321,7 @@ impl<'a, 'tcx> PatCtxt<'a, 'tcx> {
let subpatterns = fields
.iter()
.map(|field| FieldPat {
- field: Field::new(self.tcx.field_index(field.hir_id, self.typeck_results)),
+ field: Field::new(self.typeck_results.field_index(field.hir_id)),
pattern: self.lower_pattern(&field.pat),
})
.collect();
@@ -358,7 +358,7 @@ impl<'a, 'tcx> PatCtxt<'a, 'tcx> {
&mut self,
pat: &'tcx Option<&'tcx hir::Pat<'tcx>>,
) -> Option<Box<Pat<'tcx>>> {
- pat.as_ref().map(|p| self.lower_pattern(p))
+ pat.map(|p| self.lower_pattern(p))
}
fn slice_or_array_pattern(
@@ -565,8 +565,7 @@ impl<'a, 'tcx> PatCtxt<'a, 'tcx> {
id: hir::HirId,
span: Span,
) -> PatKind<'tcx> {
- let anon_const_def_id = self.tcx.hir().local_def_id(anon_const.hir_id);
- let value = mir::ConstantKind::from_inline_const(self.tcx, anon_const_def_id);
+ let value = mir::ConstantKind::from_inline_const(self.tcx, anon_const.def_id);
// Evaluate early like we do in `lower_path`.
let value = value.eval(self.tcx, self.param_env);
@@ -577,6 +576,9 @@ impl<'a, 'tcx> PatCtxt<'a, 'tcx> {
self.errors.push(PatternError::ConstParamInPattern(span));
return PatKind::Wild;
}
+ ConstKind::Error(_) => {
+ return PatKind::Wild;
+ }
_ => bug!("Expected ConstKind::Param"),
},
mir::ConstantKind::Val(_, _) => self.const_to_pat(value, id, span, false).kind,
@@ -614,7 +616,7 @@ impl<'a, 'tcx> PatCtxt<'a, 'tcx> {
LitToConstInput { lit: &lit.node, ty: self.typeck_results.expr_ty(expr), neg };
match self.tcx.at(expr.span).lit_to_mir_constant(lit_input) {
Ok(constant) => self.const_to_pat(constant, expr.hir_id, lit.span, false).kind,
- Err(LitToConstError::Reported) => PatKind::Wild,
+ Err(LitToConstError::Reported(_)) => PatKind::Wild,
Err(LitToConstError::TypeError) => bug!("lower_lit: had type error"),
}
}
diff --git a/compiler/rustc_mir_build/src/thir/pattern/usefulness.rs b/compiler/rustc_mir_build/src/thir/pattern/usefulness.rs
index 8dc9976ea..3e370a053 100644
--- a/compiler/rustc_mir_build/src/thir/pattern/usefulness.rs
+++ b/compiler/rustc_mir_build/src/thir/pattern/usefulness.rs
@@ -324,7 +324,7 @@ pub(crate) struct MatchCheckCtxt<'p, 'tcx> {
impl<'a, 'tcx> MatchCheckCtxt<'a, 'tcx> {
pub(super) fn is_uninhabited(&self, ty: Ty<'tcx>) -> bool {
if self.tcx.features().exhaustive_patterns {
- self.tcx.is_ty_uninhabited_from(self.module, ty, self.param_env)
+ !ty.is_inhabited_from(self.tcx, self.module, self.param_env)
} else {
false
}
diff --git a/compiler/rustc_mir_dataflow/src/elaborate_drops.rs b/compiler/rustc_mir_dataflow/src/elaborate_drops.rs
index 23403628c..ce87a1916 100644
--- a/compiler/rustc_mir_dataflow/src/elaborate_drops.rs
+++ b/compiler/rustc_mir_dataflow/src/elaborate_drops.rs
@@ -407,9 +407,8 @@ where
self.drop_ladder(fields, succ, unwind).0
}
+ #[instrument(level = "debug", ret)]
fn open_drop_for_box(&mut self, adt: ty::AdtDef<'tcx>, substs: SubstsRef<'tcx>) -> BasicBlock {
- debug!("open_drop_for_box({:?}, {:?}, {:?})", self, adt, substs);
-
// drop glue is sent straight to codegen
// box cannot be directly dereferenced
let unique_ty = adt.non_enum_variant().fields[0].ty(self.tcx(), substs);
@@ -431,8 +430,8 @@ where
self.drop_subpath(interior, interior_path, succ, unwind_succ)
}
+ #[instrument(level = "debug", ret)]
fn open_drop_for_adt(&mut self, adt: ty::AdtDef<'tcx>, substs: SubstsRef<'tcx>) -> BasicBlock {
- debug!("open_drop_for_adt({:?}, {:?}, {:?})", self, adt, substs);
if adt.variants().is_empty() {
return self.elaborator.patch().new_block(BasicBlockData {
statements: vec![],
@@ -616,7 +615,7 @@ where
let drop_trait = tcx.require_lang_item(LangItem::Drop, None);
let drop_fn = tcx.associated_item_def_ids(drop_trait)[0];
let ty = self.place_ty(self.place);
- let substs = tcx.mk_substs_trait(ty, &[]);
+ let substs = tcx.mk_substs_trait(ty, []);
let ref_ty =
tcx.mk_ref(tcx.lifetimes.re_erased, ty::TypeAndMut { ty, mutbl: hir::Mutability::Mut });
diff --git a/compiler/rustc_mir_dataflow/src/framework/engine.rs b/compiler/rustc_mir_dataflow/src/framework/engine.rs
index bc75645e7..6ddbe69e1 100644
--- a/compiler/rustc_mir_dataflow/src/framework/engine.rs
+++ b/compiler/rustc_mir_dataflow/src/framework/engine.rs
@@ -294,14 +294,7 @@ where
None if tcx.sess.opts.unstable_opts.dump_mir_dataflow
&& dump_enabled(tcx, A::NAME, def_id) =>
{
- create_dump_file(
- tcx,
- ".dot",
- None,
- A::NAME,
- &pass_name.unwrap_or("-----"),
- body.source,
- )?
+ create_dump_file(tcx, ".dot", false, A::NAME, &pass_name.unwrap_or("-----"), body)?
}
_ => return Ok(()),
diff --git a/compiler/rustc_mir_dataflow/src/framework/graphviz.rs b/compiler/rustc_mir_dataflow/src/framework/graphviz.rs
index 579fe68a1..c9d5601f2 100644
--- a/compiler/rustc_mir_dataflow/src/framework/graphviz.rs
+++ b/compiler/rustc_mir_dataflow/src/framework/graphviz.rs
@@ -475,7 +475,10 @@ where
r#"<td colspan="{colspan}" {fmt} align="left">{state}</td>"#,
colspan = this.style.num_state_columns(),
fmt = fmt,
- state = format!("{:?}", DebugWithAdapter { this: state, ctxt: analysis }),
+ state = dot::escape_html(&format!(
+ "{:?}",
+ DebugWithAdapter { this: state, ctxt: analysis }
+ )),
)
})
}
diff --git a/compiler/rustc_mir_dataflow/src/framework/lattice.rs b/compiler/rustc_mir_dataflow/src/framework/lattice.rs
index d6b89eb82..f0e75c53e 100644
--- a/compiler/rustc_mir_dataflow/src/framework/lattice.rs
+++ b/compiler/rustc_mir_dataflow/src/framework/lattice.rs
@@ -73,6 +73,16 @@ pub trait MeetSemiLattice: Eq {
fn meet(&mut self, other: &Self) -> bool;
}
+/// A set that has a "bottom" element, which is less than or equal to any other element.
+pub trait HasBottom {
+ fn bottom() -> Self;
+}
+
+/// A set that has a "top" element, which is greater than or equal to any other element.
+pub trait HasTop {
+ fn top() -> Self;
+}
+
/// A `bool` is a "two-point" lattice with `true` as the top element and `false` as the bottom:
///
/// ```text
@@ -102,6 +112,18 @@ impl MeetSemiLattice for bool {
}
}
+impl HasBottom for bool {
+ fn bottom() -> Self {
+ false
+ }
+}
+
+impl HasTop for bool {
+ fn top() -> Self {
+ true
+ }
+}
+
/// A tuple (or list) of lattices is itself a lattice whose least upper bound is the concatenation
/// of the least upper bounds of each element of the tuple (or list).
///
@@ -250,3 +272,15 @@ impl<T: Clone + Eq> MeetSemiLattice for FlatSet<T> {
true
}
}
+
+impl<T> HasBottom for FlatSet<T> {
+ fn bottom() -> Self {
+ Self::Bottom
+ }
+}
+
+impl<T> HasTop for FlatSet<T> {
+ fn top() -> Self {
+ Self::Top
+ }
+}
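
The `HasBottom`/`HasTop` traits introduced above are later required of `ValueAnalysis::Value` (together with `Clone` and `JoinSemiLattice`) in the new value-analysis framework further down. As a hypothetical sketch only, not part of this patch, a custom three-point domain inside this crate could implement the trio like this (using the existing `JoinSemiLattice::join`, which returns whether `self` changed):

    use crate::lattice::{HasBottom, HasTop, JoinSemiLattice};

    /// Hypothetical domain: unreachable / a single known flag / unknown.
    #[derive(Clone, PartialEq, Eq)]
    enum Flag {
        Bottom,
        Known(bool),
        Top,
    }

    impl JoinSemiLattice for Flag {
        fn join(&mut self, other: &Self) -> bool {
            match (&*self, other) {
                (a, b) if a == b => false,
                (Flag::Top, _) | (_, Flag::Bottom) => false,
                (Flag::Bottom, _) => {
                    *self = other.clone();
                    true
                }
                _ => {
                    *self = Flag::Top;
                    true
                }
            }
        }
    }

    impl HasBottom for Flag {
        fn bottom() -> Self {
            Flag::Bottom
        }
    }

    impl HasTop for Flag {
        fn top() -> Self {
            Flag::Top
        }
    }
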
diff --git a/compiler/rustc_mir_dataflow/src/impls/init_locals.rs b/compiler/rustc_mir_dataflow/src/impls/init_locals.rs
deleted file mode 100644
index 83ce4c44b..000000000
--- a/compiler/rustc_mir_dataflow/src/impls/init_locals.rs
+++ /dev/null
@@ -1,122 +0,0 @@
-//! A less precise version of `MaybeInitializedPlaces` whose domain is entire locals.
-//!
-//! A local will be maybe initialized if *any* projections of that local might be initialized.
-
-use crate::{CallReturnPlaces, GenKill};
-
-use rustc_index::bit_set::BitSet;
-use rustc_middle::mir::visit::{PlaceContext, Visitor};
-use rustc_middle::mir::{self, BasicBlock, Local, Location};
-
-pub struct MaybeInitializedLocals;
-
-impl<'tcx> crate::AnalysisDomain<'tcx> for MaybeInitializedLocals {
- type Domain = BitSet<Local>;
-
- const NAME: &'static str = "maybe_init_locals";
-
- fn bottom_value(&self, body: &mir::Body<'tcx>) -> Self::Domain {
- // bottom = uninit
- BitSet::new_empty(body.local_decls.len())
- }
-
- fn initialize_start_block(&self, body: &mir::Body<'tcx>, entry_set: &mut Self::Domain) {
- // Function arguments are initialized to begin with.
- for arg in body.args_iter() {
- entry_set.insert(arg);
- }
- }
-}
-
-impl<'tcx> crate::GenKillAnalysis<'tcx> for MaybeInitializedLocals {
- type Idx = Local;
-
- fn statement_effect(
- &self,
- trans: &mut impl GenKill<Self::Idx>,
- statement: &mir::Statement<'tcx>,
- loc: Location,
- ) {
- TransferFunction { trans }.visit_statement(statement, loc)
- }
-
- fn terminator_effect(
- &self,
- trans: &mut impl GenKill<Self::Idx>,
- terminator: &mir::Terminator<'tcx>,
- loc: Location,
- ) {
- TransferFunction { trans }.visit_terminator(terminator, loc)
- }
-
- fn call_return_effect(
- &self,
- trans: &mut impl GenKill<Self::Idx>,
- _block: BasicBlock,
- return_places: CallReturnPlaces<'_, 'tcx>,
- ) {
- return_places.for_each(|place| trans.gen(place.local));
- }
-
- /// See `Analysis::apply_yield_resume_effect`.
- fn yield_resume_effect(
- &self,
- trans: &mut impl GenKill<Self::Idx>,
- _resume_block: BasicBlock,
- resume_place: mir::Place<'tcx>,
- ) {
- trans.gen(resume_place.local)
- }
-}
-
-struct TransferFunction<'a, T> {
- trans: &'a mut T,
-}
-
-impl<T> Visitor<'_> for TransferFunction<'_, T>
-where
- T: GenKill<Local>,
-{
- // FIXME: Using `visit_local` here is a bug. For example, on `move _5.field` we mark `_5` as
- // deinitialized, although clearly it is only partially deinitialized. This analysis is not
- // actually used anywhere at the moment, so this is not critical, but this does need to be fixed
- // before it starts being used again.
- fn visit_local(&mut self, local: Local, context: PlaceContext, _: Location) {
- use rustc_middle::mir::visit::{MutatingUseContext, NonMutatingUseContext, NonUseContext};
- match context {
- // These are handled specially in `call_return_effect` and `yield_resume_effect`.
- PlaceContext::MutatingUse(
- MutatingUseContext::Call
- | MutatingUseContext::AsmOutput
- | MutatingUseContext::Yield,
- ) => {}
-
- // If it's deinitialized, it's no longer init
- PlaceContext::MutatingUse(MutatingUseContext::Deinit) => self.trans.kill(local),
-
- // Otherwise, when a place is mutated, we must consider it possibly initialized.
- PlaceContext::MutatingUse(_) => self.trans.gen(local),
-
- // If the local is moved out of, or if it gets marked `StorageDead`, consider it no
- // longer initialized.
- PlaceContext::NonUse(NonUseContext::StorageDead)
- | PlaceContext::NonMutatingUse(NonMutatingUseContext::Move) => self.trans.kill(local),
-
- // All other uses do not affect this analysis.
- PlaceContext::NonUse(
- NonUseContext::StorageLive
- | NonUseContext::AscribeUserTy
- | NonUseContext::VarDebugInfo,
- )
- | PlaceContext::NonMutatingUse(
- NonMutatingUseContext::Inspect
- | NonMutatingUseContext::Copy
- | NonMutatingUseContext::SharedBorrow
- | NonMutatingUseContext::ShallowBorrow
- | NonMutatingUseContext::UniqueBorrow
- | NonMutatingUseContext::AddressOf
- | NonMutatingUseContext::Projection,
- ) => {}
- }
- }
-}
diff --git a/compiler/rustc_mir_dataflow/src/impls/mod.rs b/compiler/rustc_mir_dataflow/src/impls/mod.rs
index fd1e49277..bc31ec42b 100644
--- a/compiler/rustc_mir_dataflow/src/impls/mod.rs
+++ b/compiler/rustc_mir_dataflow/src/impls/mod.rs
@@ -19,13 +19,11 @@ use crate::{drop_flag_effects, on_all_children_bits};
use crate::{lattice, AnalysisDomain, GenKill, GenKillAnalysis};
mod borrowed_locals;
-mod init_locals;
mod liveness;
mod storage_liveness;
pub use self::borrowed_locals::borrowed_locals;
pub use self::borrowed_locals::MaybeBorrowedLocals;
-pub use self::init_locals::MaybeInitializedLocals;
pub use self::liveness::MaybeLiveLocals;
pub use self::liveness::MaybeTransitiveLiveLocals;
pub use self::storage_liveness::{MaybeRequiresStorage, MaybeStorageLive};
diff --git a/compiler/rustc_mir_dataflow/src/lib.rs b/compiler/rustc_mir_dataflow/src/lib.rs
index b471d04fd..7f40cfca3 100644
--- a/compiler/rustc_mir_dataflow/src/lib.rs
+++ b/compiler/rustc_mir_dataflow/src/lib.rs
@@ -41,6 +41,7 @@ pub mod move_paths;
pub mod rustc_peek;
pub mod storage;
pub mod un_derefer;
+pub mod value_analysis;
pub(crate) mod indexes {
pub(crate) use super::move_paths::MovePathIndex;
diff --git a/compiler/rustc_mir_dataflow/src/value_analysis.rs b/compiler/rustc_mir_dataflow/src/value_analysis.rs
new file mode 100644
index 000000000..7df011422
--- /dev/null
+++ b/compiler/rustc_mir_dataflow/src/value_analysis.rs
@@ -0,0 +1,927 @@
+//! This module provides a framework on top of the normal MIR dataflow framework to simplify the
+//! implementation of analyses that track information about the values stored in certain places.
+//! We are using the term "place" here to refer to a `mir::Place` (a place expression) instead of
+//! an `interpret::Place` (a memory location).
+//!
+//! The default methods of [`ValueAnalysis`] (prefixed with `super_` instead of `handle_`)
+//! provide some behavior that should be valid for all abstract domains that are based only on the
+//! value stored in a certain place. On top of these default rules, an implementation should
+//! override some of the `handle_` methods. For an example, see `ConstAnalysis`.
+//!
+//! An implementation must also provide a [`Map`]. Before the analysis begins, all places that
+//! should be tracked during the analysis must be registered. During the analysis, no new places
+//! can be registered. The [`State`] can be queried to retrieve the abstract value stored for a
+//! certain place by passing the map.
+//!
+//! This framework is currently experimental. Originally, it supported shared references and enum
+//! variants. However, it was discovered that both of these were unsound, and especially references
+//! had subtle but serious issues. In the future, they could be added back in, but we should clarify
+//! the rules for optimizations that rely on the aliasing model first.
+//!
+//!
+//! # Notes
+//!
+//! - The bottom state denotes uninitialized memory. Because we are only doing a sound approximation
+//! of the actual execution, we can also use this state for places where access would be UB.
+//!
+//! - The assignment logic in `State::assign_place_idx` assumes that the places are non-overlapping,
+//! or identical. Note that this refers to place expressions, not memory locations.
+//!
+//! - Currently, places that have their reference taken cannot be tracked. Although this would be
+//! possible, it has to rely on some aliasing model, which we are not ready to commit to yet.
+//! Because of that, we can assume that the only way to change the value behind a tracked place is
+//! by direct assignment.
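+//!
+//! # Example
+//!
+//! A rough sketch of a minimal implementation (the `SimpleAnalysis` type is made up for
+//! illustration and is not part of this module; `tcx` and `body` are assumed to be in scope).
+//! It shows the required associated items plus the `wrap()` adapter that turns the value
+//! analysis into a regular dataflow [`Analysis`]:
+//!
+//! ```ignore (illustrative)
+//! struct SimpleAnalysis { map: Map }
+//!
+//! impl<'tcx> ValueAnalysis<'tcx> for SimpleAnalysis {
+//!     // `FlatSet` is the simplest suitable lattice: bottom, a single element, or top.
+//!     type Value = FlatSet<u128>;
+//!     const NAME: &'static str = "SimpleAnalysis";
+//!
+//!     fn map(&self) -> &Map {
+//!         &self.map
+//!     }
+//!
+//!     // Override `handle_*` methods here; anything not overridden falls back to the
+//!     // conservative `super_*` defaults.
+//! }
+//!
+//! let map = Map::from_filter(tcx, body, Ty::is_scalar);
+//! let results = SimpleAnalysis { map }
+//!     .wrap()
+//!     .into_engine(tcx, body)
+//!     .iterate_to_fixpoint();
+//! ```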
+
+use std::fmt::{Debug, Formatter};
+
+use rustc_data_structures::fx::FxHashMap;
+use rustc_index::vec::IndexVec;
+use rustc_middle::mir::visit::{MutatingUseContext, PlaceContext, Visitor};
+use rustc_middle::mir::*;
+use rustc_middle::ty::{self, Ty, TyCtxt};
+use rustc_target::abi::VariantIdx;
+
+use crate::lattice::{HasBottom, HasTop};
+use crate::{
+ fmt::DebugWithContext, Analysis, AnalysisDomain, CallReturnPlaces, JoinSemiLattice,
+ SwitchIntEdgeEffects,
+};
+
+pub trait ValueAnalysis<'tcx> {
+ /// For each place of interest, the analysis tracks a value of the given type.
+ type Value: Clone + JoinSemiLattice + HasBottom + HasTop;
+
+ const NAME: &'static str;
+
+ fn map(&self) -> &Map;
+
+ fn handle_statement(&self, statement: &Statement<'tcx>, state: &mut State<Self::Value>) {
+ self.super_statement(statement, state)
+ }
+
+ fn super_statement(&self, statement: &Statement<'tcx>, state: &mut State<Self::Value>) {
+ match &statement.kind {
+ StatementKind::Assign(box (place, rvalue)) => {
+ self.handle_assign(*place, rvalue, state);
+ }
+ StatementKind::SetDiscriminant { .. } => {
+ // Could treat this as writing a constant to a pseudo-place.
+ // But discriminants are currently not tracked, so we do nothing.
+ // Related: https://github.com/rust-lang/unsafe-code-guidelines/issues/84
+ }
+ StatementKind::Intrinsic(box intrinsic) => {
+ self.handle_intrinsic(intrinsic, state);
+ }
+ StatementKind::StorageLive(local) | StatementKind::StorageDead(local) => {
+ // StorageLive leaves the local in an uninitialized state.
+ // StorageDead makes it UB to access the local afterwards.
+ state.flood_with(Place::from(*local).as_ref(), self.map(), Self::Value::bottom());
+ }
+ StatementKind::Deinit(box place) => {
+ // Deinit makes the place uninitialized.
+ state.flood_with(place.as_ref(), self.map(), Self::Value::bottom());
+ }
+ StatementKind::Retag(..) => {
+ // We don't track references.
+ }
+ StatementKind::Nop
+ | StatementKind::FakeRead(..)
+ | StatementKind::Coverage(..)
+ | StatementKind::AscribeUserType(..) => (),
+ }
+ }
+
+ fn handle_intrinsic(
+ &self,
+ intrinsic: &NonDivergingIntrinsic<'tcx>,
+ state: &mut State<Self::Value>,
+ ) {
+ self.super_intrinsic(intrinsic, state);
+ }
+
+ fn super_intrinsic(
+ &self,
+ intrinsic: &NonDivergingIntrinsic<'tcx>,
+ state: &mut State<Self::Value>,
+ ) {
+ match intrinsic {
+ NonDivergingIntrinsic::Assume(..) => {
+ // Could use this, but ignoring it is sound.
+ }
+ NonDivergingIntrinsic::CopyNonOverlapping(CopyNonOverlapping { dst, .. }) => {
+ if let Some(place) = dst.place() {
+ state.flood(place.as_ref(), self.map());
+ }
+ }
+ }
+ }
+
+ fn handle_assign(
+ &self,
+ target: Place<'tcx>,
+ rvalue: &Rvalue<'tcx>,
+ state: &mut State<Self::Value>,
+ ) {
+ self.super_assign(target, rvalue, state)
+ }
+
+ fn super_assign(
+ &self,
+ target: Place<'tcx>,
+ rvalue: &Rvalue<'tcx>,
+ state: &mut State<Self::Value>,
+ ) {
+ let result = self.handle_rvalue(rvalue, state);
+ state.assign(target.as_ref(), result, self.map());
+ }
+
+ fn handle_rvalue(
+ &self,
+ rvalue: &Rvalue<'tcx>,
+ state: &mut State<Self::Value>,
+ ) -> ValueOrPlace<Self::Value> {
+ self.super_rvalue(rvalue, state)
+ }
+
+ fn super_rvalue(
+ &self,
+ rvalue: &Rvalue<'tcx>,
+ state: &mut State<Self::Value>,
+ ) -> ValueOrPlace<Self::Value> {
+ match rvalue {
+ Rvalue::Use(operand) => self.handle_operand(operand, state),
+ Rvalue::CopyForDeref(place) => self.handle_operand(&Operand::Copy(*place), state),
+ Rvalue::Ref(..) | Rvalue::AddressOf(..) => {
+ // We don't track such places.
+ ValueOrPlace::top()
+ }
+ Rvalue::Repeat(..)
+ | Rvalue::ThreadLocalRef(..)
+ | Rvalue::Len(..)
+ | Rvalue::Cast(..)
+ | Rvalue::BinaryOp(..)
+ | Rvalue::CheckedBinaryOp(..)
+ | Rvalue::NullaryOp(..)
+ | Rvalue::UnaryOp(..)
+ | Rvalue::Discriminant(..)
+ | Rvalue::Aggregate(..)
+ | Rvalue::ShallowInitBox(..) => {
+ // No modification is possible through these r-values.
+ ValueOrPlace::top()
+ }
+ }
+ }
+
+ fn handle_operand(
+ &self,
+ operand: &Operand<'tcx>,
+ state: &mut State<Self::Value>,
+ ) -> ValueOrPlace<Self::Value> {
+ self.super_operand(operand, state)
+ }
+
+ fn super_operand(
+ &self,
+ operand: &Operand<'tcx>,
+ state: &mut State<Self::Value>,
+ ) -> ValueOrPlace<Self::Value> {
+ match operand {
+ Operand::Constant(box constant) => {
+ ValueOrPlace::Value(self.handle_constant(constant, state))
+ }
+ Operand::Copy(place) | Operand::Move(place) => {
+ // On move, we would ideally flood the place with bottom. But with the current
+ // framework this is not possible (similar to `InterpCx::eval_operand`).
+ self.map()
+ .find(place.as_ref())
+ .map(ValueOrPlace::Place)
+ .unwrap_or(ValueOrPlace::top())
+ }
+ }
+ }
+
+ fn handle_constant(
+ &self,
+ constant: &Constant<'tcx>,
+ state: &mut State<Self::Value>,
+ ) -> Self::Value {
+ self.super_constant(constant, state)
+ }
+
+ fn super_constant(
+ &self,
+ _constant: &Constant<'tcx>,
+ _state: &mut State<Self::Value>,
+ ) -> Self::Value {
+ Self::Value::top()
+ }
+
+ /// The effect of a successful function call return should not be
+ /// applied here, see [`Analysis::apply_terminator_effect`].
+ fn handle_terminator(&self, terminator: &Terminator<'tcx>, state: &mut State<Self::Value>) {
+ self.super_terminator(terminator, state)
+ }
+
+ fn super_terminator(&self, terminator: &Terminator<'tcx>, _state: &mut State<Self::Value>) {
+ match &terminator.kind {
+ TerminatorKind::Call { .. } | TerminatorKind::InlineAsm { .. } => {
+ // Effect is applied by `handle_call_return`.
+ }
+ TerminatorKind::Drop { .. } => {
+ // We don't track dropped places.
+ }
+ TerminatorKind::DropAndReplace { .. } | TerminatorKind::Yield { .. } => {
+ // They would have an effect, but are not allowed in this phase.
+ bug!("encountered disallowed terminator");
+ }
+ TerminatorKind::Goto { .. }
+ | TerminatorKind::SwitchInt { .. }
+ | TerminatorKind::Resume
+ | TerminatorKind::Abort
+ | TerminatorKind::Return
+ | TerminatorKind::Unreachable
+ | TerminatorKind::Assert { .. }
+ | TerminatorKind::GeneratorDrop
+ | TerminatorKind::FalseEdge { .. }
+ | TerminatorKind::FalseUnwind { .. } => {
+ // These terminators have no effect on the analysis.
+ }
+ }
+ }
+
+ fn handle_call_return(
+ &self,
+ return_places: CallReturnPlaces<'_, 'tcx>,
+ state: &mut State<Self::Value>,
+ ) {
+ self.super_call_return(return_places, state)
+ }
+
+ fn super_call_return(
+ &self,
+ return_places: CallReturnPlaces<'_, 'tcx>,
+ state: &mut State<Self::Value>,
+ ) {
+ return_places.for_each(|place| {
+ state.flood(place.as_ref(), self.map());
+ })
+ }
+
+ fn handle_switch_int(
+ &self,
+ discr: &Operand<'tcx>,
+ apply_edge_effects: &mut impl SwitchIntEdgeEffects<State<Self::Value>>,
+ ) {
+ self.super_switch_int(discr, apply_edge_effects)
+ }
+
+ fn super_switch_int(
+ &self,
+ _discr: &Operand<'tcx>,
+ _apply_edge_effects: &mut impl SwitchIntEdgeEffects<State<Self::Value>>,
+ ) {
+ }
+
+ fn wrap(self) -> ValueAnalysisWrapper<Self>
+ where
+ Self: Sized,
+ {
+ ValueAnalysisWrapper(self)
+ }
+}
+
+pub struct ValueAnalysisWrapper<T>(pub T);
+
+impl<'tcx, T: ValueAnalysis<'tcx>> AnalysisDomain<'tcx> for ValueAnalysisWrapper<T> {
+ type Domain = State<T::Value>;
+
+ type Direction = crate::Forward;
+
+ const NAME: &'static str = T::NAME;
+
+ fn bottom_value(&self, _body: &Body<'tcx>) -> Self::Domain {
+ State(StateData::Unreachable)
+ }
+
+ fn initialize_start_block(&self, body: &Body<'tcx>, state: &mut Self::Domain) {
+ // The initial state maps all tracked places of argument projections to ⊤ and the rest to ⊥.
+ assert!(matches!(state.0, StateData::Unreachable));
+ let values = IndexVec::from_elem_n(T::Value::bottom(), self.0.map().value_count);
+ *state = State(StateData::Reachable(values));
+ for arg in body.args_iter() {
+ state.flood(PlaceRef { local: arg, projection: &[] }, self.0.map());
+ }
+ }
+}
+
+impl<'tcx, T> Analysis<'tcx> for ValueAnalysisWrapper<T>
+where
+ T: ValueAnalysis<'tcx>,
+{
+ fn apply_statement_effect(
+ &self,
+ state: &mut Self::Domain,
+ statement: &Statement<'tcx>,
+ _location: Location,
+ ) {
+ if state.is_reachable() {
+ self.0.handle_statement(statement, state);
+ }
+ }
+
+ fn apply_terminator_effect(
+ &self,
+ state: &mut Self::Domain,
+ terminator: &Terminator<'tcx>,
+ _location: Location,
+ ) {
+ if state.is_reachable() {
+ self.0.handle_terminator(terminator, state);
+ }
+ }
+
+ fn apply_call_return_effect(
+ &self,
+ state: &mut Self::Domain,
+ _block: BasicBlock,
+ return_places: crate::CallReturnPlaces<'_, 'tcx>,
+ ) {
+ if state.is_reachable() {
+ self.0.handle_call_return(return_places, state)
+ }
+ }
+
+ fn apply_switch_int_edge_effects(
+ &self,
+ _block: BasicBlock,
+ discr: &Operand<'tcx>,
+ apply_edge_effects: &mut impl SwitchIntEdgeEffects<Self::Domain>,
+ ) {
+ // FIXME: Dataflow framework provides no access to current state here.
+ self.0.handle_switch_int(discr, apply_edge_effects)
+ }
+}
+
+rustc_index::newtype_index!(
+ /// This index uniquely identifies a place.
+ ///
+    /// Not every place has a `PlaceIndex`, and not every `PlaceIndex` corresponds to a tracked
+ /// place. However, every tracked place and all places along its projection have a `PlaceIndex`.
+ pub struct PlaceIndex {}
+);
+
+rustc_index::newtype_index!(
+ /// This index uniquely identifies a tracked place and therefore a slot in [`State`].
+ ///
+ /// It is an implementation detail of this module.
+ struct ValueIndex {}
+);
+
+/// See [`State`].
+#[derive(PartialEq, Eq, Debug)]
+enum StateData<V> {
+ Reachable(IndexVec<ValueIndex, V>),
+ Unreachable,
+}
+
+impl<V: Clone> Clone for StateData<V> {
+ fn clone(&self) -> Self {
+ match self {
+ Self::Reachable(x) => Self::Reachable(x.clone()),
+ Self::Unreachable => Self::Unreachable,
+ }
+ }
+
+ fn clone_from(&mut self, source: &Self) {
+ match (&mut *self, source) {
+ (Self::Reachable(x), Self::Reachable(y)) => {
+ // We go through `raw` here, because `IndexVec` currently has a naive `clone_from`.
+ x.raw.clone_from(&y.raw);
+ }
+ _ => *self = source.clone(),
+ }
+ }
+}
+
+/// The dataflow state for an instance of [`ValueAnalysis`].
+///
+/// Every instance specifies a lattice that represents the possible values of a single tracked
+/// place. If we call this lattice `V` and set of tracked places `P`, then a [`State`] is an
+/// element of `{unreachable} ∪ (P -> V)`. This again forms a lattice, where the bottom element is
+/// `unreachable` and the top element is the mapping `p ↦ ⊤`. Note that the mapping `p ↦ ⊥` is not
+/// the bottom element (because joining an unreachable and any other reachable state yields a
+/// reachable state). All operations on unreachable states are ignored.
+///
+/// Flooding means assigning a value (by default `⊤`) to all tracked projections of a given place.
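+///
+/// A small illustration of the API (the `place` and `map` values are assumed to come from an
+/// analysis and are made up here):
+///
+/// ```ignore (illustrative)
+/// // Forget everything that is known about `place` and all of its tracked projections.
+/// state.flood(place.as_ref(), &map);
+/// // Record the result of an assignment to `place`.
+/// state.assign(place.as_ref(), ValueOrPlace::Value(FlatSet::Elem(42)), &map);
+/// // Query the tracked value; untracked places simply yield `⊤`.
+/// let value = state.get(place.as_ref(), &map);
+/// ```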
+#[derive(PartialEq, Eq, Debug)]
+pub struct State<V>(StateData<V>);
+
+impl<V: Clone> Clone for State<V> {
+ fn clone(&self) -> Self {
+ Self(self.0.clone())
+ }
+
+ fn clone_from(&mut self, source: &Self) {
+ self.0.clone_from(&source.0);
+ }
+}
+
+impl<V: Clone + HasTop + HasBottom> State<V> {
+ pub fn is_reachable(&self) -> bool {
+ matches!(&self.0, StateData::Reachable(_))
+ }
+
+ pub fn mark_unreachable(&mut self) {
+ self.0 = StateData::Unreachable;
+ }
+
+ pub fn flood_all(&mut self) {
+ self.flood_all_with(V::top())
+ }
+
+ pub fn flood_all_with(&mut self, value: V) {
+ let StateData::Reachable(values) = &mut self.0 else { return };
+ values.raw.fill(value);
+ }
+
+ pub fn flood_with(&mut self, place: PlaceRef<'_>, map: &Map, value: V) {
+ if let Some(root) = map.find(place) {
+ self.flood_idx_with(root, map, value);
+ }
+ }
+
+ pub fn flood(&mut self, place: PlaceRef<'_>, map: &Map) {
+ self.flood_with(place, map, V::top())
+ }
+
+ pub fn flood_idx_with(&mut self, place: PlaceIndex, map: &Map, value: V) {
+ let StateData::Reachable(values) = &mut self.0 else { return };
+ map.preorder_invoke(place, &mut |place| {
+ if let Some(vi) = map.places[place].value_index {
+ values[vi] = value.clone();
+ }
+ });
+ }
+
+ pub fn flood_idx(&mut self, place: PlaceIndex, map: &Map) {
+ self.flood_idx_with(place, map, V::top())
+ }
+
+ /// Copies `source` to `target`, including all tracked places beneath.
+ ///
+ /// If `target` contains a place that is not contained in `source`, it will be overwritten with
+ /// Top. Also, because this will copy all entries one after another, it may only be used for
+ /// places that are non-overlapping or identical.
+ pub fn assign_place_idx(&mut self, target: PlaceIndex, source: PlaceIndex, map: &Map) {
+ let StateData::Reachable(values) = &mut self.0 else { return };
+
+ // If both places are tracked, we copy the value to the target. If the target is tracked,
+ // but the source is not, we have to invalidate the value in target. If the target is not
+ // tracked, then we don't have to do anything.
+ if let Some(target_value) = map.places[target].value_index {
+ if let Some(source_value) = map.places[source].value_index {
+ values[target_value] = values[source_value].clone();
+ } else {
+ values[target_value] = V::top();
+ }
+ }
+ for target_child in map.children(target) {
+ // Try to find corresponding child and recurse. Reasoning is similar as above.
+ let projection = map.places[target_child].proj_elem.unwrap();
+ if let Some(source_child) = map.projections.get(&(source, projection)) {
+ self.assign_place_idx(target_child, *source_child, map);
+ } else {
+ self.flood_idx(target_child, map);
+ }
+ }
+ }
+
+ pub fn assign(&mut self, target: PlaceRef<'_>, result: ValueOrPlace<V>, map: &Map) {
+ if let Some(target) = map.find(target) {
+ self.assign_idx(target, result, map);
+ } else {
+            // We don't track this place or any of its projections, so the assignment can be ignored.
+ }
+ }
+
+ pub fn assign_idx(&mut self, target: PlaceIndex, result: ValueOrPlace<V>, map: &Map) {
+ match result {
+ ValueOrPlace::Value(value) => {
+ // First flood the target place in case we also track any projections (although
+ // this scenario is currently not well-supported by the API).
+ self.flood_idx(target, map);
+ let StateData::Reachable(values) = &mut self.0 else { return };
+ if let Some(value_index) = map.places[target].value_index {
+ values[value_index] = value;
+ }
+ }
+ ValueOrPlace::Place(source) => self.assign_place_idx(target, source, map),
+ }
+ }
+
+ /// Retrieve the value stored for a place, or ⊤ if it is not tracked.
+ pub fn get(&self, place: PlaceRef<'_>, map: &Map) -> V {
+ map.find(place).map(|place| self.get_idx(place, map)).unwrap_or(V::top())
+ }
+
+ /// Retrieve the value stored for a place index, or ⊤ if it is not tracked.
+ pub fn get_idx(&self, place: PlaceIndex, map: &Map) -> V {
+ match &self.0 {
+ StateData::Reachable(values) => {
+ map.places[place].value_index.map(|v| values[v].clone()).unwrap_or(V::top())
+ }
+ StateData::Unreachable => {
+ // Because this is unreachable, we can return any value we want.
+ V::bottom()
+ }
+ }
+ }
+}
+
+impl<V: JoinSemiLattice + Clone> JoinSemiLattice for State<V> {
+ fn join(&mut self, other: &Self) -> bool {
+ match (&mut self.0, &other.0) {
+ (_, StateData::Unreachable) => false,
+ (StateData::Unreachable, _) => {
+ *self = other.clone();
+ true
+ }
+ (StateData::Reachable(this), StateData::Reachable(other)) => this.join(other),
+ }
+ }
+}
+
+/// Partial mapping from [`Place`] to [`PlaceIndex`], where some places also have a [`ValueIndex`].
+///
+/// This data structure essentially maintains a tree of places and their projections. Some
+/// additional bookkeeping is done, to speed up traversal over this tree:
+/// - For iteration, every [`PlaceInfo`] contains an intrusive linked list of its children.
+/// - To directly get the child for a specific projection, there is a `projections` map.
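+///
+/// A short usage sketch (the filter mirrors the one used by the dataflow const-prop pass; the
+/// `place` value is assumed to be some `Place` from the body):
+///
+/// ```ignore (illustrative)
+/// // Build the tree of tracked places, restricted to scalars.
+/// let map = Map::from_filter(tcx, body, Ty::is_scalar);
+///
+/// // Resolve a place expression to its node, if it is part of the tree.
+/// if let Some(root) = map.find(place.as_ref()) {
+///     // Visit the node and all tracked places beneath it.
+///     map.preorder_invoke(root, &mut |node| {
+///         // ...
+///     });
+/// }
+/// ```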
+#[derive(Debug)]
+pub struct Map {
+ locals: IndexVec<Local, Option<PlaceIndex>>,
+ projections: FxHashMap<(PlaceIndex, TrackElem), PlaceIndex>,
+ places: IndexVec<PlaceIndex, PlaceInfo>,
+ value_count: usize,
+}
+
+impl Map {
+ fn new() -> Self {
+ Self {
+ locals: IndexVec::new(),
+ projections: FxHashMap::default(),
+ places: IndexVec::new(),
+ value_count: 0,
+ }
+ }
+
+ /// Returns a map that only tracks places whose type passes the filter.
+ ///
+ /// This is currently the only way to create a [`Map`]. The way in which the tracked places are
+ /// chosen is an implementation detail and may not be relied upon (other than that their type
+ /// passes the filter).
+ #[instrument(skip_all, level = "debug")]
+ pub fn from_filter<'tcx>(
+ tcx: TyCtxt<'tcx>,
+ body: &Body<'tcx>,
+ filter: impl FnMut(Ty<'tcx>) -> bool,
+ ) -> Self {
+ let mut map = Self::new();
+ let exclude = excluded_locals(body);
+ map.register_with_filter(tcx, body, filter, &exclude);
+ debug!("registered {} places ({} nodes in total)", map.value_count, map.places.len());
+ map
+ }
+
+ /// Register all non-excluded places that pass the filter.
+ fn register_with_filter<'tcx>(
+ &mut self,
+ tcx: TyCtxt<'tcx>,
+ body: &Body<'tcx>,
+ mut filter: impl FnMut(Ty<'tcx>) -> bool,
+ exclude: &IndexVec<Local, bool>,
+ ) {
+ // We use this vector as stack, pushing and popping projections.
+ let mut projection = Vec::new();
+ for (local, decl) in body.local_decls.iter_enumerated() {
+ if !exclude[local] {
+ self.register_with_filter_rec(tcx, local, &mut projection, decl.ty, &mut filter);
+ }
+ }
+ }
+
+ /// Potentially register the (local, projection) place and its fields, recursively.
+ ///
+ /// Invariant: The projection must only contain fields.
+ fn register_with_filter_rec<'tcx>(
+ &mut self,
+ tcx: TyCtxt<'tcx>,
+ local: Local,
+ projection: &mut Vec<PlaceElem<'tcx>>,
+ ty: Ty<'tcx>,
+ filter: &mut impl FnMut(Ty<'tcx>) -> bool,
+ ) {
+ // Note: The framework supports only scalars for now.
+ if filter(ty) && ty.is_scalar() {
+ // We know that the projection only contains trackable elements.
+ let place = self.make_place(local, projection).unwrap();
+
+ // Allocate a value slot if it doesn't have one.
+ if self.places[place].value_index.is_none() {
+ self.places[place].value_index = Some(self.value_count.into());
+ self.value_count += 1;
+ }
+ }
+
+ // Recurse with all fields of this place.
+ iter_fields(ty, tcx, |variant, field, ty| {
+ if variant.is_some() {
+ // Downcasts are currently not supported.
+ return;
+ }
+ projection.push(PlaceElem::Field(field, ty));
+ self.register_with_filter_rec(tcx, local, projection, ty, filter);
+ projection.pop();
+ });
+ }
+
+ /// Tries to add the place to the map, without allocating a value slot.
+ ///
+ /// Can fail if the projection contains non-trackable elements.
+ fn make_place<'tcx>(
+ &mut self,
+ local: Local,
+ projection: &[PlaceElem<'tcx>],
+ ) -> Result<PlaceIndex, ()> {
+ // Get the base index of the local.
+ let mut index =
+ *self.locals.get_or_insert_with(local, || self.places.push(PlaceInfo::new(None)));
+
+ // Apply the projection.
+ for &elem in projection {
+ let elem = elem.try_into()?;
+ index = *self.projections.entry((index, elem)).or_insert_with(|| {
+ // Prepend new child to the linked list.
+ let next = self.places.push(PlaceInfo::new(Some(elem)));
+ self.places[next].next_sibling = self.places[index].first_child;
+ self.places[index].first_child = Some(next);
+ next
+ });
+ }
+
+ Ok(index)
+ }
+
+ /// Returns the number of tracked places, i.e., those for which a value can be stored.
+ pub fn tracked_places(&self) -> usize {
+ self.value_count
+ }
+
+ /// Applies a single projection element, yielding the corresponding child.
+ pub fn apply(&self, place: PlaceIndex, elem: TrackElem) -> Option<PlaceIndex> {
+ self.projections.get(&(place, elem)).copied()
+ }
+
+ /// Locates the given place, if it exists in the tree.
+ pub fn find(&self, place: PlaceRef<'_>) -> Option<PlaceIndex> {
+ let mut index = *self.locals.get(place.local)?.as_ref()?;
+
+ for &elem in place.projection {
+ index = self.apply(index, elem.try_into().ok()?)?;
+ }
+
+ Some(index)
+ }
+
+ /// Iterate over all direct children.
+ pub fn children(&self, parent: PlaceIndex) -> impl Iterator<Item = PlaceIndex> + '_ {
+ Children::new(self, parent)
+ }
+
+ /// Invoke a function on the given place and all descendants.
+ pub fn preorder_invoke(&self, root: PlaceIndex, f: &mut impl FnMut(PlaceIndex)) {
+ f(root);
+ for child in self.children(root) {
+ self.preorder_invoke(child, f);
+ }
+ }
+}
+
+/// This is the information tracked for every [`PlaceIndex`] and is stored by [`Map`].
+///
+/// Together, `first_child` and `next_sibling` form an intrusive linked list, which is used to
+/// model a tree structure (a replacement for a member like `children: Vec<PlaceIndex>`).
+#[derive(Debug)]
+struct PlaceInfo {
+    /// We store a [`ValueIndex`] if and only if the place is tracked by the analysis.
+ value_index: Option<ValueIndex>,
+
+ /// The projection used to go from parent to this node (only None for root).
+ proj_elem: Option<TrackElem>,
+
+ /// The left-most child.
+ first_child: Option<PlaceIndex>,
+
+ /// Index of the sibling to the right of this node.
+ next_sibling: Option<PlaceIndex>,
+}
+
+impl PlaceInfo {
+ fn new(proj_elem: Option<TrackElem>) -> Self {
+ Self { next_sibling: None, first_child: None, proj_elem, value_index: None }
+ }
+}
+
+struct Children<'a> {
+ map: &'a Map,
+ next: Option<PlaceIndex>,
+}
+
+impl<'a> Children<'a> {
+ fn new(map: &'a Map, parent: PlaceIndex) -> Self {
+ Self { map, next: map.places[parent].first_child }
+ }
+}
+
+impl<'a> Iterator for Children<'a> {
+ type Item = PlaceIndex;
+
+ fn next(&mut self) -> Option<Self::Item> {
+ match self.next {
+ Some(child) => {
+ self.next = self.map.places[child].next_sibling;
+ Some(child)
+ }
+ None => None,
+ }
+ }
+}
+
+/// Used as the result of an operand or r-value.
+pub enum ValueOrPlace<V> {
+ Value(V),
+ Place(PlaceIndex),
+}
+
+impl<V: HasTop> ValueOrPlace<V> {
+ pub fn top() -> Self {
+ ValueOrPlace::Value(V::top())
+ }
+}
+
+/// The set of projection elements that can be used by a tracked place.
+///
+/// Although only field projections are currently allowed, this could change in the future.
+#[derive(Copy, Clone, Debug, PartialEq, Eq, Hash)]
+pub enum TrackElem {
+ Field(Field),
+}
+
+impl<V, T> TryFrom<ProjectionElem<V, T>> for TrackElem {
+ type Error = ();
+
+ fn try_from(value: ProjectionElem<V, T>) -> Result<Self, Self::Error> {
+ match value {
+ ProjectionElem::Field(field, _) => Ok(TrackElem::Field(field)),
+ _ => Err(()),
+ }
+ }
+}
+
+/// Invokes `f` on all direct fields of `ty`.
+fn iter_fields<'tcx>(
+ ty: Ty<'tcx>,
+ tcx: TyCtxt<'tcx>,
+ mut f: impl FnMut(Option<VariantIdx>, Field, Ty<'tcx>),
+) {
+ match ty.kind() {
+ ty::Tuple(list) => {
+ for (field, ty) in list.iter().enumerate() {
+ f(None, field.into(), ty);
+ }
+ }
+ ty::Adt(def, substs) => {
+ if def.is_union() {
+ return;
+ }
+ for (v_index, v_def) in def.variants().iter_enumerated() {
+ let variant = if def.is_struct() { None } else { Some(v_index) };
+ for (f_index, f_def) in v_def.fields.iter().enumerate() {
+ let field_ty = f_def.ty(tcx, substs);
+ let field_ty = tcx
+ .try_normalize_erasing_regions(ty::ParamEnv::reveal_all(), field_ty)
+ .unwrap_or(field_ty);
+ f(variant, f_index.into(), field_ty);
+ }
+ }
+ }
+ ty::Closure(_, substs) => {
+ iter_fields(substs.as_closure().tupled_upvars_ty(), tcx, f);
+ }
+ _ => (),
+ }
+}
+
+/// Returns all locals with projections that have their reference or address taken.
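+///
+/// For example, in an (illustrative) statement such as `_2 = &(_1.0)`, the borrow of a
+/// projection of `_1` excludes all of `_1`, because the resulting pointer could be used to
+/// reach other places based on the same local.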
+fn excluded_locals<'tcx>(body: &Body<'tcx>) -> IndexVec<Local, bool> {
+ struct Collector {
+ result: IndexVec<Local, bool>,
+ }
+
+ impl<'tcx> Visitor<'tcx> for Collector {
+ fn visit_place(&mut self, place: &Place<'tcx>, context: PlaceContext, _location: Location) {
+ if context.is_borrow()
+ || context.is_address_of()
+ || context.is_drop()
+ || context == PlaceContext::MutatingUse(MutatingUseContext::AsmOutput)
+ {
+ // A pointer to a place could be used to access other places with the same local,
+ // hence we have to exclude the local completely.
+ self.result[place.local] = true;
+ }
+ }
+ }
+
+ let mut collector = Collector { result: IndexVec::from_elem(false, &body.local_decls) };
+ collector.visit_body(body);
+ collector.result
+}
+
+/// This is used to visualize the dataflow analysis.
+impl<'tcx, T> DebugWithContext<ValueAnalysisWrapper<T>> for State<T::Value>
+where
+ T: ValueAnalysis<'tcx>,
+ T::Value: Debug,
+{
+ fn fmt_with(&self, ctxt: &ValueAnalysisWrapper<T>, f: &mut Formatter<'_>) -> std::fmt::Result {
+ match &self.0 {
+ StateData::Reachable(values) => debug_with_context(values, None, ctxt.0.map(), f),
+ StateData::Unreachable => write!(f, "unreachable"),
+ }
+ }
+
+ fn fmt_diff_with(
+ &self,
+ old: &Self,
+ ctxt: &ValueAnalysisWrapper<T>,
+ f: &mut Formatter<'_>,
+ ) -> std::fmt::Result {
+ match (&self.0, &old.0) {
+ (StateData::Reachable(this), StateData::Reachable(old)) => {
+ debug_with_context(this, Some(old), ctxt.0.map(), f)
+ }
+ _ => Ok(()), // Consider printing something here.
+ }
+ }
+}
+
+fn debug_with_context_rec<V: Debug + Eq>(
+ place: PlaceIndex,
+ place_str: &str,
+ new: &IndexVec<ValueIndex, V>,
+ old: Option<&IndexVec<ValueIndex, V>>,
+ map: &Map,
+ f: &mut Formatter<'_>,
+) -> std::fmt::Result {
+ if let Some(value) = map.places[place].value_index {
+ match old {
+ None => writeln!(f, "{}: {:?}", place_str, new[value])?,
+ Some(old) => {
+ if new[value] != old[value] {
+ writeln!(f, "\u{001f}-{}: {:?}", place_str, old[value])?;
+ writeln!(f, "\u{001f}+{}: {:?}", place_str, new[value])?;
+ }
+ }
+ }
+ }
+
+ for child in map.children(place) {
+ let info_elem = map.places[child].proj_elem.unwrap();
+ let child_place_str = match info_elem {
+ TrackElem::Field(field) => {
+ if place_str.starts_with('*') {
+ format!("({}).{}", place_str, field.index())
+ } else {
+ format!("{}.{}", place_str, field.index())
+ }
+ }
+ };
+ debug_with_context_rec(child, &child_place_str, new, old, map, f)?;
+ }
+
+ Ok(())
+}
+
+fn debug_with_context<V: Debug + Eq>(
+ new: &IndexVec<ValueIndex, V>,
+ old: Option<&IndexVec<ValueIndex, V>>,
+ map: &Map,
+ f: &mut Formatter<'_>,
+) -> std::fmt::Result {
+ for (local, place) in map.locals.iter_enumerated() {
+ if let Some(place) = place {
+ debug_with_context_rec(*place, &format!("{:?}", local), new, old, map, f)?;
+ }
+ }
+ Ok(())
+}
diff --git a/compiler/rustc_mir_transform/Cargo.toml b/compiler/rustc_mir_transform/Cargo.toml
index 53545cff0..962536669 100644
--- a/compiler/rustc_mir_transform/Cargo.toml
+++ b/compiler/rustc_mir_transform/Cargo.toml
@@ -9,6 +9,7 @@ edition = "2021"
itertools = "0.10.1"
smallvec = { version = "1.8.1", features = ["union", "may_dangle"] }
tracing = "0.1"
+either = "1"
rustc_ast = { path = "../rustc_ast" }
rustc_attr = { path = "../rustc_attr" }
rustc_data_structures = { path = "../rustc_data_structures" }
diff --git a/compiler/rustc_mir_transform/src/add_moves_for_packed_drops.rs b/compiler/rustc_mir_transform/src/add_moves_for_packed_drops.rs
index ffb5d8c6d..9b2260f68 100644
--- a/compiler/rustc_mir_transform/src/add_moves_for_packed_drops.rs
+++ b/compiler/rustc_mir_transform/src/add_moves_for_packed_drops.rs
@@ -5,37 +5,36 @@ use crate::util;
use crate::MirPass;
use rustc_middle::mir::patch::MirPatch;
-// This pass moves values being dropped that are within a packed
-// struct to a separate local before dropping them, to ensure that
-// they are dropped from an aligned address.
-//
-// For example, if we have something like
-// ```Rust
-// #[repr(packed)]
-// struct Foo {
-// dealign: u8,
-// data: Vec<u8>
-// }
-//
-// let foo = ...;
-// ```
-//
-// We want to call `drop_in_place::<Vec<u8>>` on `data` from an aligned
-// address. This means we can't simply drop `foo.data` directly, because
-// its address is not aligned.
-//
-// Instead, we move `foo.data` to a local and drop that:
-// ```
-// storage.live(drop_temp)
-// drop_temp = foo.data;
-// drop(drop_temp) -> next
-// next:
-// storage.dead(drop_temp)
-// ```
-//
-// The storage instructions are required to avoid stack space
-// blowup.
-
+/// This pass moves values being dropped that are within a packed
+/// struct to a separate local before dropping them, to ensure that
+/// they are dropped from an aligned address.
+///
+/// For example, if we have something like
+/// ```ignore (illustrative)
+/// #[repr(packed)]
+/// struct Foo {
+/// dealign: u8,
+/// data: Vec<u8>
+/// }
+///
+/// let foo = ...;
+/// ```
+///
+/// We want to call `drop_in_place::<Vec<u8>>` on `data` from an aligned
+/// address. This means we can't simply drop `foo.data` directly, because
+/// its address is not aligned.
+///
+/// Instead, we move `foo.data` to a local and drop that:
+/// ```ignore (illustrative)
+/// storage.live(drop_temp)
+/// drop_temp = foo.data;
+/// drop(drop_temp) -> next
+/// next:
+/// storage.dead(drop_temp)
+/// ```
+///
+/// The storage instructions are required to avoid stack space
+/// blowup.
pub struct AddMovesForPackedDrops;
impl<'tcx> MirPass<'tcx> for AddMovesForPackedDrops {
diff --git a/compiler/rustc_mir_transform/src/add_retag.rs b/compiler/rustc_mir_transform/src/add_retag.rs
index 036b55898..3d22035f0 100644
--- a/compiler/rustc_mir_transform/src/add_retag.rs
+++ b/compiler/rustc_mir_transform/src/add_retag.rs
@@ -10,16 +10,6 @@ use rustc_middle::ty::{self, Ty, TyCtxt};
pub struct AddRetag;
-/// Determines whether this place is "stable": Whether, if we evaluate it again
-/// after the assignment, we can be sure to obtain the same place value.
-/// (Concurrent accesses by other threads are no problem as these are anyway non-atomic
-/// copies. Data races are UB.)
-fn is_stable(place: PlaceRef<'_>) -> bool {
- // Which place this evaluates to can change with any memory write,
- // so cannot assume deref to be stable.
- !place.has_deref()
-}
-
/// Determine whether this type may contain a reference (or box), and thus needs retagging.
/// We will only recurse `depth` times into Tuples/ADTs to bound the cost of this.
fn may_contain_reference<'tcx>(ty: Ty<'tcx>, depth: u32, tcx: TyCtxt<'tcx>) -> bool {
@@ -69,22 +59,10 @@ impl<'tcx> MirPass<'tcx> for AddRetag {
let basic_blocks = body.basic_blocks.as_mut();
let local_decls = &body.local_decls;
let needs_retag = |place: &Place<'tcx>| {
- // FIXME: Instead of giving up for unstable places, we should introduce
- // a temporary and retag on that.
- is_stable(place.as_ref())
+            !place.has_deref() // we're not really interested in stores to "outside" locations, they are hard to keep track of anyway
&& may_contain_reference(place.ty(&*local_decls, tcx).ty, /*depth*/ 3, tcx)
&& !local_decls[place.local].is_deref_temp()
};
- let place_base_raw = |place: &Place<'tcx>| {
- // If this is a `Deref`, get the type of what we are deref'ing.
- if place.has_deref() {
- let ty = &local_decls[place.local].ty;
- ty.is_unsafe_ptr()
- } else {
- // Not a deref, and thus not raw.
- false
- }
- };
// PART 1
// Retag arguments at the beginning of the start block.
@@ -108,7 +86,7 @@ impl<'tcx> MirPass<'tcx> for AddRetag {
}
// PART 2
- // Retag return values of functions. Also escape-to-raw the argument of `drop`.
+ // Retag return values of functions.
// We collect the return destinations because we cannot mutate while iterating.
let returns = basic_blocks
.iter_mut()
@@ -140,30 +118,25 @@ impl<'tcx> MirPass<'tcx> for AddRetag {
}
// PART 3
- // Add retag after assignment.
+ // Add retag after assignments where data "enters" this function: the RHS is behind a deref and the LHS is not.
for block_data in basic_blocks {
// We want to insert statements as we iterate. To this end, we
// iterate backwards using indices.
for i in (0..block_data.statements.len()).rev() {
let (retag_kind, place) = match block_data.statements[i].kind {
- // Retag-as-raw after escaping to a raw pointer, if the referent
- // is not already a raw pointer.
- StatementKind::Assign(box (lplace, Rvalue::AddressOf(_, ref rplace)))
- if !place_base_raw(rplace) =>
- {
- (RetagKind::Raw, lplace)
- }
// Retag after assignments of reference type.
StatementKind::Assign(box (ref place, ref rvalue)) if needs_retag(place) => {
- let kind = match rvalue {
- Rvalue::Ref(_, borrow_kind, _)
- if borrow_kind.allows_two_phase_borrow() =>
- {
- RetagKind::TwoPhase
- }
- _ => RetagKind::Default,
+ let add_retag = match rvalue {
+ // Ptr-creating operations already do their own internal retagging, no
+ // need to also add a retag statement.
+ Rvalue::Ref(..) | Rvalue::AddressOf(..) => false,
+ _ => true,
};
- (kind, *place)
+ if add_retag {
+ (RetagKind::Default, *place)
+ } else {
+ continue;
+ }
}
// Do nothing for the rest
_ => continue,
diff --git a/compiler/rustc_mir_transform/src/check_unsafety.rs b/compiler/rustc_mir_transform/src/check_unsafety.rs
index f8f04214a..e783d1891 100644
--- a/compiler/rustc_mir_transform/src/check_unsafety.rs
+++ b/compiler/rustc_mir_transform/src/check_unsafety.rs
@@ -4,6 +4,7 @@ use rustc_hir as hir;
use rustc_hir::def_id::{DefId, LocalDefId};
use rustc_hir::hir_id::HirId;
use rustc_hir::intravisit;
+use rustc_hir::{BlockCheckMode, ExprKind, Node};
use rustc_middle::mir::visit::{MutatingUseContext, PlaceContext, Visitor};
use rustc_middle::mir::*;
use rustc_middle::ty::query::Providers;
@@ -101,12 +102,10 @@ impl<'tcx> Visitor<'tcx> for UnsafetyChecker<'_, 'tcx> {
| StatementKind::Retag { .. }
| StatementKind::AscribeUserType(..)
| StatementKind::Coverage(..)
+ | StatementKind::Intrinsic(..)
| StatementKind::Nop => {
// safe (at least as emitted during MIR construction)
}
-
- // Move to above list once mir construction uses it.
- StatementKind::Intrinsic(..) => unreachable!(),
}
self.super_statement(statement, location);
}
@@ -472,6 +471,14 @@ fn unsafety_check_result<'tcx>(
// `mir_built` force this.
let body = &tcx.mir_built(def).borrow();
+ if body.should_skip() {
+ return tcx.arena.alloc(UnsafetyCheckResult {
+ violations: Vec::new(),
+ used_unsafe_blocks: FxHashSet::default(),
+ unused_unsafes: Some(Vec::new()),
+ });
+ }
+
let param_env = tcx.param_env(def.did);
let mut checker = UnsafetyChecker::new(body, def.did, tcx, param_env);
@@ -519,24 +526,48 @@ pub fn check_unsafety(tcx: TyCtxt<'_>, def_id: LocalDefId) {
for &UnsafetyViolation { source_info, lint_root, kind, details } in violations.iter() {
let (description, note) = details.description_and_note();
- // Report an error.
- let unsafe_fn_msg =
- if unsafe_op_in_unsafe_fn_allowed(tcx, lint_root) { " function or" } else { "" };
-
match kind {
UnsafetyViolationKind::General => {
// once
- struct_span_err!(
+ let unsafe_fn_msg = if unsafe_op_in_unsafe_fn_allowed(tcx, lint_root) {
+ " function or"
+ } else {
+ ""
+ };
+
+ let mut err = struct_span_err!(
tcx.sess,
source_info.span,
E0133,
"{} is unsafe and requires unsafe{} block",
description,
unsafe_fn_msg,
- )
- .span_label(source_info.span, description)
- .note(note)
- .emit();
+ );
+ err.span_label(source_info.span, description).note(note);
+ let note_non_inherited = tcx.hir().parent_iter(lint_root).find(|(id, node)| {
+ if let Node::Expr(block) = node
+ && let ExprKind::Block(block, _) = block.kind
+ && let BlockCheckMode::UnsafeBlock(_) = block.rules
+ {
+ true
+ }
+ else if let Some(sig) = tcx.hir().fn_sig_by_hir_id(*id)
+ && sig.header.is_unsafe()
+ {
+ true
+ } else {
+ false
+ }
+ });
+ if let Some((id, _)) = note_non_inherited {
+ let span = tcx.hir().span(id);
+ err.span_label(
+ tcx.sess.source_map().guess_head_span(span),
+ "items do not inherit unsafety from separate enclosing items",
+ );
+ }
+
+ err.emit();
}
UnsafetyViolationKind::UnsafeFn => tcx.struct_span_lint_hir(
UNSAFE_OP_IN_UNSAFE_FN,
diff --git a/compiler/rustc_mir_transform/src/const_prop.rs b/compiler/rustc_mir_transform/src/const_prop.rs
index 4e4515888..b0514e033 100644
--- a/compiler/rustc_mir_transform/src/const_prop.rs
+++ b/compiler/rustc_mir_transform/src/const_prop.rs
@@ -3,6 +3,8 @@
use std::cell::Cell;
+use either::Right;
+
use rustc_ast::Mutability;
use rustc_data_structures::fx::FxHashSet;
use rustc_hir::def::DefKind;
@@ -266,7 +268,7 @@ impl<'mir, 'tcx> interpret::Machine<'mir, 'tcx> for ConstPropMachine<'mir, 'tcx>
_tcx: TyCtxt<'tcx>,
_machine: &Self,
_alloc_id: AllocId,
- alloc: ConstAllocation<'tcx, Self::Provenance, Self::AllocExtra>,
+ alloc: ConstAllocation<'tcx>,
_static_def_id: Option<DefId>,
is_write: bool,
) -> InterpResult<'tcx> {
@@ -385,7 +387,7 @@ impl<'mir, 'tcx> ConstPropagator<'mir, 'tcx> {
// I don't know how return types can seem to be unsized but this happens in the
// `type/type-unsatisfiable.rs` test.
.filter(|ret_layout| {
- !ret_layout.is_unsized() && ret_layout.size < Size::from_bytes(MAX_ALLOC_LIMIT)
+ ret_layout.is_sized() && ret_layout.size < Size::from_bytes(MAX_ALLOC_LIMIT)
})
.unwrap_or_else(|| ecx.layout_of(tcx.types.unit).unwrap());
@@ -429,7 +431,7 @@ impl<'mir, 'tcx> ConstPropagator<'mir, 'tcx> {
// Try to read the local as an immediate so that if it is representable as a scalar, we can
// handle it as such, but otherwise, just return the value as is.
Some(match self.ecx.read_immediate_raw(&op) {
- Ok(Ok(imm)) => imm.into(),
+ Ok(Right(imm)) => imm.into(),
_ => op,
})
}
@@ -471,7 +473,8 @@ impl<'mir, 'tcx> ConstPropagator<'mir, 'tcx> {
return None;
}
- self.ecx.const_to_op(&c.literal, None).ok()
+ // No span, we don't want errors to be shown.
+ self.ecx.eval_mir_constant(&c.literal, None, None).ok()
}
/// Returns the value, if any, of evaluating `place`.
@@ -742,7 +745,7 @@ impl<'mir, 'tcx> ConstPropagator<'mir, 'tcx> {
// FIXME> figure out what to do when read_immediate_raw fails
let imm = self.use_ecx(|this| this.ecx.read_immediate_raw(value));
- if let Some(Ok(imm)) = imm {
+ if let Some(Right(imm)) = imm {
match *imm {
interpret::Immediate::Scalar(scalar) => {
*rval = Rvalue::Use(self.operand_from_scalar(
diff --git a/compiler/rustc_mir_transform/src/const_prop_lint.rs b/compiler/rustc_mir_transform/src/const_prop_lint.rs
index 479c4e577..0ab67228f 100644
--- a/compiler/rustc_mir_transform/src/const_prop_lint.rs
+++ b/compiler/rustc_mir_transform/src/const_prop_lint.rs
@@ -1,11 +1,10 @@
//! Propagates constants for early reporting of statically known
//! assertion failures
-use crate::const_prop::CanConstProp;
-use crate::const_prop::ConstPropMachine;
-use crate::const_prop::ConstPropMode;
-use crate::MirLint;
-use rustc_const_eval::const_eval::ConstEvalErr;
+use std::cell::Cell;
+
+use either::{Left, Right};
+
use rustc_const_eval::interpret::Immediate;
use rustc_const_eval::interpret::{
self, InterpCx, InterpResult, LocalState, LocalValue, MemoryKind, OpTy, Scalar, StackPopCleanup,
@@ -27,12 +26,17 @@ use rustc_session::lint;
use rustc_span::Span;
use rustc_target::abi::{HasDataLayout, Size, TargetDataLayout};
use rustc_trait_selection::traits;
-use std::cell::Cell;
+
+use crate::const_prop::CanConstProp;
+use crate::const_prop::ConstPropMachine;
+use crate::const_prop::ConstPropMode;
+use crate::MirLint;
/// The maximum number of bytes that we'll allocate space for a local or the return value.
/// Needed for #66397, because otherwise we eval into large places and that can cause OOM or just
/// severely regress performance.
const MAX_ALLOC_LIMIT: u64 = 1024;
+
pub struct ConstProp;
impl<'tcx> MirLint<'tcx> for ConstProp {
@@ -199,7 +203,7 @@ impl<'mir, 'tcx> ConstPropagator<'mir, 'tcx> {
// I don't know how return types can seem to be unsized but this happens in the
// `type/type-unsatisfiable.rs` test.
.filter(|ret_layout| {
- !ret_layout.is_unsized() && ret_layout.size < Size::from_bytes(MAX_ALLOC_LIMIT)
+ ret_layout.is_sized() && ret_layout.size < Size::from_bytes(MAX_ALLOC_LIMIT)
})
.unwrap_or_else(|| ecx.layout_of(tcx.types.unit).unwrap());
@@ -244,7 +248,7 @@ impl<'mir, 'tcx> ConstPropagator<'mir, 'tcx> {
// Try to read the local as an immediate so that if it is representable as a scalar, we can
// handle it as such, but otherwise, just return the value as is.
Some(match self.ecx.read_immediate_raw(&op) {
- Ok(Ok(imm)) => imm.into(),
+ Ok(Left(imm)) => imm.into(),
_ => op,
})
}
@@ -267,7 +271,7 @@ impl<'mir, 'tcx> ConstPropagator<'mir, 'tcx> {
F: FnOnce(&mut Self) -> InterpResult<'tcx, T>,
{
// Overwrite the PC -- whatever the interpreter does to it does not make any sense anyway.
- self.ecx.frame_mut().loc = Err(source_info.span);
+ self.ecx.frame_mut().loc = Right(source_info.span);
match f(self) {
Ok(val) => Some(val),
Err(error) => {
@@ -286,25 +290,13 @@ impl<'mir, 'tcx> ConstPropagator<'mir, 'tcx> {
}
/// Returns the value, if any, of evaluating `c`.
- fn eval_constant(
- &mut self,
- c: &Constant<'tcx>,
- _source_info: SourceInfo,
- ) -> Option<OpTy<'tcx>> {
+ fn eval_constant(&mut self, c: &Constant<'tcx>, source_info: SourceInfo) -> Option<OpTy<'tcx>> {
// FIXME we need to revisit this for #67176
if c.needs_subst() {
return None;
}
- match self.ecx.const_to_op(&c.literal, None) {
- Ok(op) => Some(op),
- Err(error) => {
- let tcx = self.ecx.tcx.at(c.span);
- let err = ConstEvalErr::new(&self.ecx, error, Some(c.span));
- err.report_as_error(tcx, "erroneous constant used");
- None
- }
- }
+ self.use_ecx(source_info, |this| this.ecx.eval_mir_constant(&c.literal, Some(c.span), None))
}
/// Returns the value, if any, of evaluating `place`.
diff --git a/compiler/rustc_mir_transform/src/coverage/debug.rs b/compiler/rustc_mir_transform/src/coverage/debug.rs
index 0f8679b0b..d6a298fad 100644
--- a/compiler/rustc_mir_transform/src/coverage/debug.rs
+++ b/compiler/rustc_mir_transform/src/coverage/debug.rs
@@ -638,7 +638,7 @@ pub(super) fn dump_coverage_spanview<'tcx>(
let def_id = mir_source.def_id();
let span_viewables = span_viewables(tcx, mir_body, basic_coverage_blocks, &coverage_spans);
- let mut file = create_dump_file(tcx, "html", None, pass_name, &0, mir_source)
+ let mut file = create_dump_file(tcx, "html", false, pass_name, &0, mir_body)
.expect("Unexpected error creating MIR spanview HTML file");
let crate_name = tcx.crate_name(def_id.krate);
let item_name = tcx.def_path(def_id).to_filename_friendly_no_crate();
@@ -739,7 +739,7 @@ pub(super) fn dump_coverage_graphviz<'tcx>(
.join("\n ")
));
}
- let mut file = create_dump_file(tcx, "dot", None, pass_name, &0, mir_source)
+ let mut file = create_dump_file(tcx, "dot", false, pass_name, &0, mir_body)
.expect("Unexpected error creating BasicCoverageBlock graphviz DOT file");
graphviz_writer
.write_graphviz(tcx, &mut file)
diff --git a/compiler/rustc_mir_transform/src/dataflow_const_prop.rs b/compiler/rustc_mir_transform/src/dataflow_const_prop.rs
new file mode 100644
index 000000000..e90273874
--- /dev/null
+++ b/compiler/rustc_mir_transform/src/dataflow_const_prop.rs
@@ -0,0 +1,530 @@
+//! A constant propagation optimization pass based on dataflow analysis.
+//!
+//! Currently, this pass only propagates scalar values.
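+//!
+//! For illustration (the exact output depends on the MIR that is produced, so this is only a
+//! sketch of the intent): with `-Z mir-opt-level=3` or higher, code like
+//!
+//! ```ignore (illustrative)
+//! let a = 2;
+//! let b = a + a;
+//! foo(b);
+//! ```
+//!
+//! can be rewritten so that the call receives the constant `4` directly, because the analysis
+//! tracks `a` and `b` as scalar places and evaluates the addition at compile time.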
+
+use rustc_const_eval::interpret::{ConstValue, ImmTy, Immediate, InterpCx, Scalar};
+use rustc_data_structures::fx::FxHashMap;
+use rustc_middle::mir::visit::{MutVisitor, Visitor};
+use rustc_middle::mir::*;
+use rustc_middle::ty::{self, Ty, TyCtxt};
+use rustc_mir_dataflow::value_analysis::{Map, State, TrackElem, ValueAnalysis, ValueOrPlace};
+use rustc_mir_dataflow::{lattice::FlatSet, Analysis, ResultsVisitor, SwitchIntEdgeEffects};
+use rustc_span::DUMMY_SP;
+
+use crate::MirPass;
+
+// These constants are somewhat random guesses and have not been optimized.
+// If `tcx.sess.mir_opt_level() >= 4`, we ignore the limits (this can become very expensive).
+const BLOCK_LIMIT: usize = 100;
+const PLACE_LIMIT: usize = 100;
+
+pub struct DataflowConstProp;
+
+impl<'tcx> MirPass<'tcx> for DataflowConstProp {
+ fn is_enabled(&self, sess: &rustc_session::Session) -> bool {
+ sess.mir_opt_level() >= 3
+ }
+
+    #[instrument(skip_all, level = "debug")]
+ fn run_pass(&self, tcx: TyCtxt<'tcx>, body: &mut Body<'tcx>) {
+ if tcx.sess.mir_opt_level() < 4 && body.basic_blocks.len() > BLOCK_LIMIT {
+            debug!("aborted dataflow const prop due to too many basic blocks");
+ return;
+ }
+
+ // Decide which places to track during the analysis.
+ let map = Map::from_filter(tcx, body, Ty::is_scalar);
+
+ // We want to have a somewhat linear runtime w.r.t. the number of statements/terminators.
+ // Let's call this number `n`. Dataflow analysis has `O(h*n)` transfer function
+ // applications, where `h` is the height of the lattice. Because the height of our lattice
+ // is linear w.r.t. the number of tracked places, this is `O(tracked_places * n)`. However,
+ // because every transfer function application could traverse the whole map, this becomes
+ // `O(num_nodes * tracked_places * n)` in terms of time complexity. Since the number of
+ // map nodes is strongly correlated to the number of tracked places, this becomes more or
+ // less `O(n)` if we place a constant limit on the number of tracked places.
+ if tcx.sess.mir_opt_level() < 4 && map.tracked_places() > PLACE_LIMIT {
+ debug!("aborted dataflow const prop due to too many tracked places");
+ return;
+ }
+
+ // Perform the actual dataflow analysis.
+ let analysis = ConstAnalysis::new(tcx, body, map);
+ let results = debug_span!("analyze")
+ .in_scope(|| analysis.wrap().into_engine(tcx, body).iterate_to_fixpoint());
+
+ // Collect results and patch the body afterwards.
+ let mut visitor = CollectAndPatch::new(tcx, &results.analysis.0.map);
+ debug_span!("collect").in_scope(|| results.visit_reachable_with(body, &mut visitor));
+ debug_span!("patch").in_scope(|| visitor.visit_body(body));
+ }
+}
+
+struct ConstAnalysis<'tcx> {
+ map: Map,
+ tcx: TyCtxt<'tcx>,
+ ecx: InterpCx<'tcx, 'tcx, DummyMachine>,
+ param_env: ty::ParamEnv<'tcx>,
+}
+
+impl<'tcx> ValueAnalysis<'tcx> for ConstAnalysis<'tcx> {
+ type Value = FlatSet<ScalarTy<'tcx>>;
+
+ const NAME: &'static str = "ConstAnalysis";
+
+ fn map(&self) -> &Map {
+ &self.map
+ }
+
+ fn handle_assign(
+ &self,
+ target: Place<'tcx>,
+ rvalue: &Rvalue<'tcx>,
+ state: &mut State<Self::Value>,
+ ) {
+ match rvalue {
+ Rvalue::CheckedBinaryOp(op, box (left, right)) => {
+ let target = self.map().find(target.as_ref());
+ if let Some(target) = target {
+ // We should not track any projections other than
+ // what is overwritten below, but just in case...
+ state.flood_idx(target, self.map());
+ }
+
+ let value_target = target
+ .and_then(|target| self.map().apply(target, TrackElem::Field(0_u32.into())));
+ let overflow_target = target
+ .and_then(|target| self.map().apply(target, TrackElem::Field(1_u32.into())));
+
+ if value_target.is_some() || overflow_target.is_some() {
+ let (val, overflow) = self.binary_op(state, *op, left, right);
+
+ if let Some(value_target) = value_target {
+ state.assign_idx(value_target, ValueOrPlace::Value(val), self.map());
+ }
+ if let Some(overflow_target) = overflow_target {
+ let overflow = match overflow {
+ FlatSet::Top => FlatSet::Top,
+ FlatSet::Elem(overflow) => {
+ if overflow {
+ // Overflow cannot be reliably propagated. See: https://github.com/rust-lang/rust/pull/101168#issuecomment-1288091446
+ FlatSet::Top
+ } else {
+ self.wrap_scalar(Scalar::from_bool(false), self.tcx.types.bool)
+ }
+ }
+ FlatSet::Bottom => FlatSet::Bottom,
+ };
+ state.assign_idx(
+ overflow_target,
+ ValueOrPlace::Value(overflow),
+ self.map(),
+ );
+ }
+ }
+ }
+ _ => self.super_assign(target, rvalue, state),
+ }
+ }
+
+ fn handle_rvalue(
+ &self,
+ rvalue: &Rvalue<'tcx>,
+ state: &mut State<Self::Value>,
+ ) -> ValueOrPlace<Self::Value> {
+ match rvalue {
+ Rvalue::Cast(
+ kind @ (CastKind::IntToInt
+ | CastKind::FloatToInt
+ | CastKind::FloatToFloat
+ | CastKind::IntToFloat),
+ operand,
+ ty,
+ ) => match self.eval_operand(operand, state) {
+ FlatSet::Elem(op) => match kind {
+ CastKind::IntToInt | CastKind::IntToFloat => {
+ self.ecx.int_to_int_or_float(&op, *ty)
+ }
+ CastKind::FloatToInt | CastKind::FloatToFloat => {
+ self.ecx.float_to_float_or_int(&op, *ty)
+ }
+ _ => unreachable!(),
+ }
+ .map(|result| ValueOrPlace::Value(self.wrap_immediate(result, *ty)))
+ .unwrap_or(ValueOrPlace::top()),
+ _ => ValueOrPlace::top(),
+ },
+ Rvalue::BinaryOp(op, box (left, right)) => {
+ // Overflows must be ignored here.
+ let (val, _overflow) = self.binary_op(state, *op, left, right);
+ ValueOrPlace::Value(val)
+ }
+ Rvalue::UnaryOp(op, operand) => match self.eval_operand(operand, state) {
+ FlatSet::Elem(value) => self
+ .ecx
+ .unary_op(*op, &value)
+ .map(|val| ValueOrPlace::Value(self.wrap_immty(val)))
+ .unwrap_or(ValueOrPlace::Value(FlatSet::Top)),
+ FlatSet::Bottom => ValueOrPlace::Value(FlatSet::Bottom),
+ FlatSet::Top => ValueOrPlace::Value(FlatSet::Top),
+ },
+ _ => self.super_rvalue(rvalue, state),
+ }
+ }
+
+ fn handle_constant(
+ &self,
+ constant: &Constant<'tcx>,
+ _state: &mut State<Self::Value>,
+ ) -> Self::Value {
+ constant
+ .literal
+ .eval(self.tcx, self.param_env)
+ .try_to_scalar()
+ .map(|value| FlatSet::Elem(ScalarTy(value, constant.ty())))
+ .unwrap_or(FlatSet::Top)
+ }
+
+ fn handle_switch_int(
+ &self,
+ discr: &Operand<'tcx>,
+ apply_edge_effects: &mut impl SwitchIntEdgeEffects<State<Self::Value>>,
+ ) {
+ // FIXME: The dataflow framework only provides the state if we call `apply()`, which makes
+ // this more inefficient than it has to be.
+ let mut discr_value = None;
+ let mut handled = false;
+ apply_edge_effects.apply(|state, target| {
+ let discr_value = match discr_value {
+ Some(value) => value,
+ None => {
+ let value = match self.handle_operand(discr, state) {
+ ValueOrPlace::Value(value) => value,
+ ValueOrPlace::Place(place) => state.get_idx(place, self.map()),
+ };
+ let result = match value {
+ FlatSet::Top => FlatSet::Top,
+ FlatSet::Elem(ScalarTy(scalar, _)) => {
+ let int = scalar.assert_int();
+ FlatSet::Elem(int.assert_bits(int.size()))
+ }
+ FlatSet::Bottom => FlatSet::Bottom,
+ };
+ discr_value = Some(result);
+ result
+ }
+ };
+
+ let FlatSet::Elem(choice) = discr_value else {
+ // Do nothing if we don't know which branch will be taken.
+ return
+ };
+
+ if target.value.map(|n| n == choice).unwrap_or(!handled) {
+ // Branch is taken. Has no effect on state.
+ handled = true;
+ } else {
+ // Branch is not taken.
+ state.mark_unreachable();
+ }
+ })
+ }
+}
+
+#[derive(Clone, PartialEq, Eq)]
+struct ScalarTy<'tcx>(Scalar, Ty<'tcx>);
+
+impl<'tcx> std::fmt::Debug for ScalarTy<'tcx> {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ // This is used for dataflow visualization, so we return something more concise.
+ std::fmt::Display::fmt(&ConstantKind::Val(ConstValue::Scalar(self.0), self.1), f)
+ }
+}
+
+impl<'tcx> ConstAnalysis<'tcx> {
+ pub fn new(tcx: TyCtxt<'tcx>, body: &Body<'tcx>, map: Map) -> Self {
+ let param_env = tcx.param_env(body.source.def_id());
+ Self {
+ map,
+ tcx,
+ ecx: InterpCx::new(tcx, DUMMY_SP, param_env, DummyMachine),
+ param_env: param_env,
+ }
+ }
+
+ fn binary_op(
+ &self,
+ state: &mut State<FlatSet<ScalarTy<'tcx>>>,
+ op: BinOp,
+ left: &Operand<'tcx>,
+ right: &Operand<'tcx>,
+ ) -> (FlatSet<ScalarTy<'tcx>>, FlatSet<bool>) {
+ let left = self.eval_operand(left, state);
+ let right = self.eval_operand(right, state);
+ match (left, right) {
+ (FlatSet::Elem(left), FlatSet::Elem(right)) => {
+ match self.ecx.overflowing_binary_op(op, &left, &right) {
+ Ok((val, overflow, ty)) => (self.wrap_scalar(val, ty), FlatSet::Elem(overflow)),
+ _ => (FlatSet::Top, FlatSet::Top),
+ }
+ }
+ (FlatSet::Bottom, _) | (_, FlatSet::Bottom) => (FlatSet::Bottom, FlatSet::Bottom),
+ (_, _) => {
+                // Could attempt some algebraic simplifications here.
+ (FlatSet::Top, FlatSet::Top)
+ }
+ }
+ }
+
+ fn eval_operand(
+ &self,
+ op: &Operand<'tcx>,
+ state: &mut State<FlatSet<ScalarTy<'tcx>>>,
+ ) -> FlatSet<ImmTy<'tcx>> {
+ let value = match self.handle_operand(op, state) {
+ ValueOrPlace::Value(value) => value,
+ ValueOrPlace::Place(place) => state.get_idx(place, &self.map),
+ };
+ match value {
+ FlatSet::Top => FlatSet::Top,
+ FlatSet::Elem(ScalarTy(scalar, ty)) => self
+ .tcx
+ .layout_of(self.param_env.and(ty))
+ .map(|layout| FlatSet::Elem(ImmTy::from_scalar(scalar, layout)))
+ .unwrap_or(FlatSet::Top),
+ FlatSet::Bottom => FlatSet::Bottom,
+ }
+ }
+
+ fn wrap_scalar(&self, scalar: Scalar, ty: Ty<'tcx>) -> FlatSet<ScalarTy<'tcx>> {
+ FlatSet::Elem(ScalarTy(scalar, ty))
+ }
+
+ fn wrap_immediate(&self, imm: Immediate, ty: Ty<'tcx>) -> FlatSet<ScalarTy<'tcx>> {
+ match imm {
+ Immediate::Scalar(scalar) => self.wrap_scalar(scalar, ty),
+ _ => FlatSet::Top,
+ }
+ }
+
+ fn wrap_immty(&self, val: ImmTy<'tcx>) -> FlatSet<ScalarTy<'tcx>> {
+ self.wrap_immediate(*val, val.layout.ty)
+ }
+}
+
+struct CollectAndPatch<'tcx, 'map> {
+ tcx: TyCtxt<'tcx>,
+ map: &'map Map,
+
+ /// For a given MIR location, this stores the values of the operands used by that location. In
+ /// particular, this is before the effect, such that the operands of `_1 = _1 + _2` are
+ /// properly captured. (This may become UB soon, but it is currently emitted even by safe code.)
+ before_effect: FxHashMap<(Location, Place<'tcx>), ScalarTy<'tcx>>,
+
+ /// Stores the assigned values for assignments where the Rvalue is constant.
+ assignments: FxHashMap<Location, ScalarTy<'tcx>>,
+}
+
+impl<'tcx, 'map> CollectAndPatch<'tcx, 'map> {
+ fn new(tcx: TyCtxt<'tcx>, map: &'map Map) -> Self {
+ Self { tcx, map, before_effect: FxHashMap::default(), assignments: FxHashMap::default() }
+ }
+
+ fn make_operand(&self, scalar: ScalarTy<'tcx>) -> Operand<'tcx> {
+ Operand::Constant(Box::new(Constant {
+ span: DUMMY_SP,
+ user_ty: None,
+ literal: ConstantKind::Val(ConstValue::Scalar(scalar.0), scalar.1),
+ }))
+ }
+}
+
+impl<'mir, 'tcx, 'map> ResultsVisitor<'mir, 'tcx> for CollectAndPatch<'tcx, 'map> {
+ type FlowState = State<FlatSet<ScalarTy<'tcx>>>;
+
+ fn visit_statement_before_primary_effect(
+ &mut self,
+ state: &Self::FlowState,
+ statement: &'mir Statement<'tcx>,
+ location: Location,
+ ) {
+ match &statement.kind {
+ StatementKind::Assign(box (_, rvalue)) => {
+ OperandCollector { state, visitor: self }.visit_rvalue(rvalue, location);
+ }
+ _ => (),
+ }
+ }
+
+ fn visit_statement_after_primary_effect(
+ &mut self,
+ state: &Self::FlowState,
+ statement: &'mir Statement<'tcx>,
+ location: Location,
+ ) {
+ match statement.kind {
+ StatementKind::Assign(box (_, Rvalue::Use(Operand::Constant(_)))) => {
+ // Don't overwrite the assignment if it already uses a constant (to keep the span).
+ }
+ StatementKind::Assign(box (place, _)) => match state.get(place.as_ref(), self.map) {
+ FlatSet::Top => (),
+ FlatSet::Elem(value) => {
+ self.assignments.insert(location, value);
+ }
+ FlatSet::Bottom => {
+ // This assignment is either unreachable, or an uninitialized value is assigned.
+ }
+ },
+ _ => (),
+ }
+ }
+
+ fn visit_terminator_before_primary_effect(
+ &mut self,
+ state: &Self::FlowState,
+ terminator: &'mir Terminator<'tcx>,
+ location: Location,
+ ) {
+ OperandCollector { state, visitor: self }.visit_terminator(terminator, location);
+ }
+}
+
+impl<'tcx, 'map> MutVisitor<'tcx> for CollectAndPatch<'tcx, 'map> {
+ fn tcx<'a>(&'a self) -> TyCtxt<'tcx> {
+ self.tcx
+ }
+
+ fn visit_statement(&mut self, statement: &mut Statement<'tcx>, location: Location) {
+ if let Some(value) = self.assignments.get(&location) {
+ match &mut statement.kind {
+ StatementKind::Assign(box (_, rvalue)) => {
+ *rvalue = Rvalue::Use(self.make_operand(value.clone()));
+ }
+ _ => bug!("found assignment info for non-assign statement"),
+ }
+ } else {
+ self.super_statement(statement, location);
+ }
+ }
+
+ fn visit_operand(&mut self, operand: &mut Operand<'tcx>, location: Location) {
+ match operand {
+ Operand::Copy(place) | Operand::Move(place) => {
+ if let Some(value) = self.before_effect.get(&(location, *place)) {
+ *operand = self.make_operand(value.clone());
+ }
+ }
+ _ => (),
+ }
+ }
+}
+
+struct OperandCollector<'tcx, 'map, 'a> {
+ state: &'a State<FlatSet<ScalarTy<'tcx>>>,
+ visitor: &'a mut CollectAndPatch<'tcx, 'map>,
+}
+
+impl<'tcx, 'map, 'a> Visitor<'tcx> for OperandCollector<'tcx, 'map, 'a> {
+ fn visit_operand(&mut self, operand: &Operand<'tcx>, location: Location) {
+ match operand {
+ Operand::Copy(place) | Operand::Move(place) => {
+ match self.state.get(place.as_ref(), self.visitor.map) {
+ FlatSet::Top => (),
+ FlatSet::Elem(value) => {
+ self.visitor.before_effect.insert((location, *place), value);
+ }
+ FlatSet::Bottom => (),
+ }
+ }
+ _ => (),
+ }
+ }
+}
+
+struct DummyMachine;
+
+impl<'mir, 'tcx> rustc_const_eval::interpret::Machine<'mir, 'tcx> for DummyMachine {
+ rustc_const_eval::interpret::compile_time_machine!(<'mir, 'tcx>);
+ type MemoryKind = !;
+ const PANIC_ON_ALLOC_FAIL: bool = true;
+
+ fn enforce_alignment(_ecx: &InterpCx<'mir, 'tcx, Self>) -> bool {
+ unimplemented!()
+ }
+
+ fn enforce_validity(_ecx: &InterpCx<'mir, 'tcx, Self>) -> bool {
+ unimplemented!()
+ }
+
+ fn find_mir_or_eval_fn(
+ _ecx: &mut InterpCx<'mir, 'tcx, Self>,
+ _instance: ty::Instance<'tcx>,
+ _abi: rustc_target::spec::abi::Abi,
+ _args: &[rustc_const_eval::interpret::OpTy<'tcx, Self::Provenance>],
+ _destination: &rustc_const_eval::interpret::PlaceTy<'tcx, Self::Provenance>,
+ _target: Option<BasicBlock>,
+ _unwind: rustc_const_eval::interpret::StackPopUnwind,
+ ) -> interpret::InterpResult<'tcx, Option<(&'mir Body<'tcx>, ty::Instance<'tcx>)>> {
+ unimplemented!()
+ }
+
+ fn call_intrinsic(
+ _ecx: &mut InterpCx<'mir, 'tcx, Self>,
+ _instance: ty::Instance<'tcx>,
+ _args: &[rustc_const_eval::interpret::OpTy<'tcx, Self::Provenance>],
+ _destination: &rustc_const_eval::interpret::PlaceTy<'tcx, Self::Provenance>,
+ _target: Option<BasicBlock>,
+ _unwind: rustc_const_eval::interpret::StackPopUnwind,
+ ) -> interpret::InterpResult<'tcx> {
+ unimplemented!()
+ }
+
+ fn assert_panic(
+ _ecx: &mut InterpCx<'mir, 'tcx, Self>,
+ _msg: &rustc_middle::mir::AssertMessage<'tcx>,
+ _unwind: Option<BasicBlock>,
+ ) -> interpret::InterpResult<'tcx> {
+ unimplemented!()
+ }
+
+ fn binary_ptr_op(
+ _ecx: &InterpCx<'mir, 'tcx, Self>,
+ _bin_op: BinOp,
+ _left: &rustc_const_eval::interpret::ImmTy<'tcx, Self::Provenance>,
+ _right: &rustc_const_eval::interpret::ImmTy<'tcx, Self::Provenance>,
+ ) -> interpret::InterpResult<'tcx, (interpret::Scalar<Self::Provenance>, bool, Ty<'tcx>)> {
+ throw_unsup!(Unsupported("".into()))
+ }
+
+ fn expose_ptr(
+ _ecx: &mut InterpCx<'mir, 'tcx, Self>,
+ _ptr: interpret::Pointer<Self::Provenance>,
+ ) -> interpret::InterpResult<'tcx> {
+ unimplemented!()
+ }
+
+ fn init_frame_extra(
+ _ecx: &mut InterpCx<'mir, 'tcx, Self>,
+ _frame: rustc_const_eval::interpret::Frame<'mir, 'tcx, Self::Provenance>,
+ ) -> interpret::InterpResult<
+ 'tcx,
+ rustc_const_eval::interpret::Frame<'mir, 'tcx, Self::Provenance, Self::FrameExtra>,
+ > {
+ unimplemented!()
+ }
+
+ fn stack<'a>(
+ _ecx: &'a InterpCx<'mir, 'tcx, Self>,
+ ) -> &'a [rustc_const_eval::interpret::Frame<'mir, 'tcx, Self::Provenance, Self::FrameExtra>]
+ {
+ unimplemented!()
+ }
+
+ fn stack_mut<'a>(
+ _ecx: &'a mut InterpCx<'mir, 'tcx, Self>,
+ ) -> &'a mut Vec<
+ rustc_const_eval::interpret::Frame<'mir, 'tcx, Self::Provenance, Self::FrameExtra>,
+ > {
+ unimplemented!()
+ }
+}
diff --git a/compiler/rustc_mir_transform/src/dead_store_elimination.rs b/compiler/rustc_mir_transform/src/dead_store_elimination.rs
index 3f3870cc7..42c580c63 100644
--- a/compiler/rustc_mir_transform/src/dead_store_elimination.rs
+++ b/compiler/rustc_mir_transform/src/dead_store_elimination.rs
@@ -70,6 +70,8 @@ pub fn eliminate<'tcx>(tcx: TyCtxt<'tcx>, body: &mut Body<'tcx>, borrowed: &BitS
for Location { block, statement_index } in patch {
bbs[block].statements[statement_index].make_nop();
}
+
+ crate::simplify::SimplifyLocals.run_pass(tcx, body)
}
pub struct DeadStoreElimination;
diff --git a/compiler/rustc_mir_transform/src/deduce_param_attrs.rs b/compiler/rustc_mir_transform/src/deduce_param_attrs.rs
index 28b1c5a48..92f1fff6b 100644
--- a/compiler/rustc_mir_transform/src/deduce_param_attrs.rs
+++ b/compiler/rustc_mir_transform/src/deduce_param_attrs.rs
@@ -110,15 +110,16 @@ impl<'tcx> Visitor<'tcx> for DeduceReadOnly {
if let TerminatorKind::Call { ref args, .. } = terminator.kind {
for arg in args {
- if let Operand::Move(_) = *arg {
- // ArgumentChecker panics if a direct move of an argument from a caller to a
- // callee was detected.
- //
- // If, in the future, MIR optimizations cause arguments to be moved directly
- // from callers to callees, change the panic to instead add the argument in
- // question to `mutating_uses`.
- ArgumentChecker::new(self.mutable_args.domain_size())
- .visit_operand(arg, location)
+ if let Operand::Move(place) = *arg {
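+ // A direct move out of one of this function's arguments means the argument
+ // cannot be deduced as read-only, so record it in `mutable_args`. Indirect
+ // places, the return place, and locals that are not arguments are skipped.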
+ let local = place.local;
+ if place.is_indirect()
+ || local == RETURN_PLACE
+ || local.index() > self.mutable_args.domain_size()
+ {
+ continue;
+ }
+
+ self.mutable_args.insert(local.index() - 1);
}
}
};
@@ -127,35 +128,6 @@ impl<'tcx> Visitor<'tcx> for DeduceReadOnly {
}
}
-/// A visitor that simply panics if a direct move of an argument from a caller to a callee was
-/// detected.
-struct ArgumentChecker {
- /// The number of arguments to the calling function.
- arg_count: usize,
-}
-
-impl ArgumentChecker {
- /// Creates a new ArgumentChecker.
- fn new(arg_count: usize) -> Self {
- Self { arg_count }
- }
-}
-
-impl<'tcx> Visitor<'tcx> for ArgumentChecker {
- fn visit_local(&mut self, local: Local, context: PlaceContext, _: Location) {
- // Check to make sure that, if this local is an argument, we didn't move directly from it.
- if matches!(context, PlaceContext::NonMutatingUse(NonMutatingUseContext::Move))
- && local != RETURN_PLACE
- && local.index() <= self.arg_count
- {
- // If, in the future, MIR optimizations cause arguments to be moved directly from
- // callers to callees, change this panic to instead add the argument in question to
- // `mutating_uses`.
- panic!("Detected a direct move from a caller's argument to a callee's argument!")
- }
- }
-}
-
/// Returns true if values of a given type will never be passed indirectly, regardless of ABI.
fn type_will_always_be_passed_directly<'tcx>(ty: Ty<'tcx>) -> bool {
matches!(
diff --git a/compiler/rustc_mir_transform/src/dest_prop.rs b/compiler/rustc_mir_transform/src/dest_prop.rs
index 9bc47613e..97485c4f5 100644
--- a/compiler/rustc_mir_transform/src/dest_prop.rs
+++ b/compiler/rustc_mir_transform/src/dest_prop.rs
@@ -20,7 +20,8 @@
//! values or the return place `_0`. On a very high level, independent of the actual implementation
//! details, it does the following:
//!
-//! 1) Identify `dest = src;` statements that can be soundly eliminated.
+//! 1) Identify `dest = src;` statements with values for `dest` and `src` whose storage can soundly
+//! be merged.
//! 2) Replace all mentions of `src` with `dest` ("unifying" them and propagating the destination
//! backwards).
//! 3) Delete the `dest = src;` statement (by making it a `nop`).
@@ -29,44 +30,80 @@
//!
//! ## Soundness
//!
-//! Given an `Assign` statement `dest = src;`, where `dest` is a `Place` and `src` is an `Rvalue`,
-//! there are a few requirements that must hold for the optimization to be sound:
+//! We have a pair of places `p` and `q`, whose memory we would like to merge. In order for this to
+//! be sound, we need to check a number of conditions:
//!
-//! * `dest` must not contain any *indirection* through a pointer. It must access part of the base
-//! local. Otherwise it might point to arbitrary memory that is hard to track.
+//! * `p` and `q` must both be *constant* - it does not make much sense to talk about merging them
+//! if they do not consistently refer to the same place in memory. This is satisfied if they do
+//! not contain any indirection through a pointer or any indexing projections.
//!
-//! It must also not contain any indexing projections, since those take an arbitrary `Local` as
-//! the index, and that local might only be initialized shortly before `dest` is used.
+//! * We need to make sure that the goal of "merging the memory" is actually structurally possible
+//! in MIR. For example, even if all the other conditions are satisfied, there is no way to
+//! "merge" `_5.foo` and `_6.bar`. For now, we ensure this by requiring that both `p` and `q` are
+//! locals with no further projections. Future iterations of this pass should improve on this.
//!
-//! * `src` must be a bare `Local` without any indirections or field projections (FIXME: Is this a
-//! fundamental restriction or just current impl state?). It can be copied or moved by the
-//! assignment.
+//! * Finally, we want `p` and `q` to use the same memory - however, we still need to make sure that
+//! each of them has enough "ownership" of that memory to continue "doing its job." More
+//! precisely, what we will check is that whenever the program performs a write to `p`, then it
+//! does not currently care about what the value in `q` is (and vice versa). We formalize the
+//! notion of "does not care what the value in `q` is" by checking the *liveness* of `q`.
//!
-//! * The `dest` and `src` locals must never be [*live*][liveness] at the same time. If they are, it
-//! means that they both hold a (potentially different) value that is needed by a future use of
-//! the locals. Unifying them would overwrite one of the values.
+//! Because of the difficulty of computing liveness of places that have their address taken, we do
+//! not even attempt to do it. Any place that is based on a local that has its address taken is
+//! excluded from the optimization.
//!
-//! Note that computing liveness of locals that have had their address taken is more difficult:
-//! Short of doing full escape analysis on the address/pointer/reference, the pass would need to
-//! assume that any operation that can potentially involve opaque user code (such as function
-//! calls, destructors, and inline assembly) may access any local that had its address taken
-//! before that point.
+//! The first two conditions are simple structural requirements on the `Assign` statements that can
+//! be trivially checked. The third requirement however is more difficult and costly to check.
//!
-//! Here, the first two conditions are simple structural requirements on the `Assign` statements
-//! that can be trivially checked. The liveness requirement however is more difficult and costly to
-//! check.
+//! ## Future Improvements
+//!
+//! There are a number of ways in which this pass could be improved in the future:
+//!
+//! * Merging storage liveness ranges instead of removing storage statements completely. This may
+//! improve stack usage.
+//!
+//! * Allow merging locals into places with projections, eg `_5` into `_6.foo`.
+//!
+//! * Liveness analysis with more precision than whole locals at a time. The smaller benefit of this
+//! is that it would allow us to dest prop at "sub-local" levels in some cases. The bigger benefit
+//! of this is that such liveness analysis can report more accurate results about whole locals at
+//! a time. For example, consider:
+//!
+//! ```ignore (syntax-highlighting-only)
+//! _1 = u;
+//! // unrelated code
+//! _1.f1 = v;
+//! _2 = _1.f1;
+//! ```
+//!
+//! Because the current analysis only thinks in terms of locals, it does not have enough
+//! information to report that `_1` is dead in the "unrelated code" section.
+//!
+//! * Liveness analysis enabled by alias analysis. This would allow us to not just bail on locals
+//! that ever have their address taken. Of course that requires actually having alias analysis
+//! (and a model to build it on), so this might be a bit of a ways off.
+//!
+//! * Various perf improvements. There are a bunch of comments in here marked `PERF` with ideas for
+//! how to do things more efficiently. However, the complexity of the pass as a whole should be
+//! kept in mind.
//!
//! ## Previous Work
//!
-//! A [previous attempt] at implementing an optimization like this turned out to be a significant
-//! regression in compiler performance. Fixing the regressions introduced a lot of undesirable
-//! complexity to the implementation.
+//! A [previous attempt][attempt 1] at implementing an optimization like this turned out to be a
+//! significant regression in compiler performance. Fixing the regressions introduced a lot of
+//! undesirable complexity to the implementation.
+//!
+//! A [subsequent approach][attempt 2] tried to avoid the costly computation by limiting itself to
+//! acyclic CFGs, but still turned out to be far too costly to run due to suboptimal performance
+//! within individual basic blocks, requiring a walk across the entire block for every assignment
+//! found within the block. For the `tuple-stress` benchmark, which has 458745 statements in a
+//! single block, this proved to be far too costly.
//!
-//! A [subsequent approach] tried to avoid the costly computation by limiting itself to acyclic
-//! CFGs, but still turned out to be far too costly to run due to suboptimal performance within
-//! individual basic blocks, requiring a walk across the entire block for every assignment found
-//! within the block. For the `tuple-stress` benchmark, which has 458745 statements in a single
-//! block, this proved to be far too costly.
+//! [Another approach after that][attempt 3] was much closer to correct, but had some soundness
+//! issues - it was failing to consider stores outside live ranges, and failed to uphold some of the
+//! requirements that MIR has for non-overlapping places within statements. However, it also had
+//! performance issues caused by `O(l² * s)` runtime, where `l` is the number of locals and `s` is
+//! the number of statements and terminators.
//!
//! Since the first attempt at this, the compiler has improved dramatically, and new analysis
//! frameworks have been added that should make this approach viable without requiring a limited
@@ -74,8 +111,7 @@
//! - rustc now has a powerful dataflow analysis framework that can handle forwards and backwards
//! analyses efficiently.
//! - Layout optimizations for generators have been added to improve code generation for
-//! async/await, which are very similar in spirit to what this optimization does. Both walk the
-//! MIR and record conflicting uses of locals in a `BitMatrix`.
+//! async/await, which are very similar in spirit to what this optimization does.
//!
//! Also, rustc now has a simple NRVO pass (see `nrvo.rs`), which handles a subset of the cases that
//! this destination propagation pass handles, proving that similar optimizations can be performed
@@ -87,253 +123,205 @@
//! it replaces the eliminated assign statements with `nop`s and leaves unused locals behind.
//!
//! [liveness]: https://en.wikipedia.org/wiki/Live_variable_analysis
-//! [previous attempt]: https://github.com/rust-lang/rust/pull/47954
-//! [subsequent approach]: https://github.com/rust-lang/rust/pull/71003
+//! [attempt 1]: https://github.com/rust-lang/rust/pull/47954
+//! [attempt 2]: https://github.com/rust-lang/rust/pull/71003
+//! [attempt 3]: https://github.com/rust-lang/rust/pull/72632
+
+use std::collections::hash_map::{Entry, OccupiedEntry};
use crate::MirPass;
-use itertools::Itertools;
-use rustc_data_structures::unify::{InPlaceUnificationTable, UnifyKey};
-use rustc_index::{
- bit_set::{BitMatrix, BitSet},
- vec::IndexVec,
-};
-use rustc_middle::mir::visit::{MutVisitor, PlaceContext, Visitor};
+use rustc_data_structures::fx::FxHashMap;
+use rustc_index::bit_set::BitSet;
use rustc_middle::mir::{dump_mir, PassWhere};
use rustc_middle::mir::{
- traversal, Body, InlineAsmOperand, Local, LocalKind, Location, Operand, Place, PlaceElem,
- Rvalue, Statement, StatementKind, Terminator, TerminatorKind,
+ traversal, BasicBlock, Body, InlineAsmOperand, Local, LocalKind, Location, Operand, Place,
+ Rvalue, Statement, StatementKind, TerminatorKind,
+};
+use rustc_middle::mir::{
+ visit::{MutVisitor, PlaceContext, Visitor},
+ ProjectionElem,
};
use rustc_middle::ty::TyCtxt;
-use rustc_mir_dataflow::impls::{borrowed_locals, MaybeInitializedLocals, MaybeLiveLocals};
-use rustc_mir_dataflow::Analysis;
-
-// Empirical measurements have resulted in some observations:
-// - Running on a body with a single block and 500 locals takes barely any time
-// - Running on a body with ~400 blocks and ~300 relevant locals takes "too long"
-// ...so we just limit both to somewhat reasonable-ish looking values.
-const MAX_LOCALS: usize = 500;
-const MAX_BLOCKS: usize = 250;
+use rustc_mir_dataflow::impls::MaybeLiveLocals;
+use rustc_mir_dataflow::{Analysis, ResultsCursor};
pub struct DestinationPropagation;
impl<'tcx> MirPass<'tcx> for DestinationPropagation {
fn is_enabled(&self, sess: &rustc_session::Session) -> bool {
- // FIXME(#79191, #82678): This is unsound.
- //
- // Only run at mir-opt-level=3 or higher for now (we don't fix up debuginfo and remove
- // storage statements at the moment).
- sess.opts.unstable_opts.unsound_mir_opts && sess.mir_opt_level() >= 3
+ // For now, only run at MIR opt level 3. Two things need to be changed before this can be
+ // turned on by default:
+ // 1. Because of the overeager removal of storage statements, this can cause stack space
+ // regressions. This opt is not the place to fix this though; it's a more general
+ // problem in MIR.
+ // 2. Despite being an overall perf improvement, this still causes a 30% regression in
+ // keccak. We can temporarily fix this by bounding function size, but in the long term
+ // we should fix this by being smarter about invalidating analysis results.
+ sess.mir_opt_level() >= 3
}
fn run_pass(&self, tcx: TyCtxt<'tcx>, body: &mut Body<'tcx>) {
let def_id = body.source.def_id();
+ let mut allocations = Allocations::default();
+ trace!(func = ?tcx.def_path_str(def_id));
- let candidates = find_candidates(body);
- if candidates.is_empty() {
- debug!("{:?}: no dest prop candidates, done", def_id);
- return;
- }
+ let borrowed = rustc_mir_dataflow::impls::borrowed_locals(body);
- // Collect all locals we care about. We only compute conflicts for these to save time.
- let mut relevant_locals = BitSet::new_empty(body.local_decls.len());
- for CandidateAssignment { dest, src, loc: _ } in &candidates {
- relevant_locals.insert(dest.local);
- relevant_locals.insert(*src);
- }
-
- // This pass unfortunately has `O(l² * s)` performance, where `l` is the number of locals
- // and `s` is the number of statements and terminators in the function.
- // To prevent blowing up compile times too much, we bail out when there are too many locals.
- let relevant = relevant_locals.count();
- debug!(
- "{:?}: {} locals ({} relevant), {} blocks",
- def_id,
- body.local_decls.len(),
- relevant,
- body.basic_blocks.len()
- );
- if relevant > MAX_LOCALS {
- warn!(
- "too many candidate locals in {:?} ({}, max is {}), not optimizing",
- def_id, relevant, MAX_LOCALS
+ // In order to avoid having to collect data for every single pair of locals in the body, we
+ // do not allow doing more than one merge for places that are derived from the same local at
+ // once. To avoid missed opportunities, we instead iterate to a fixed point - we'll refer to
+ // each of these iterations as a "round."
+ //
+ // Reaching a fixed point could in theory take up to `min(l, s)` rounds - however, we do not
+ // expect to see MIR like that. To verify this, a test was run against `[rust-lang/regex]` -
+ // the average MIR body saw 1.32 full iterations of this loop. The most that was hit was 30
+ // for a single function. Only 80/2801 (2.9%) of functions saw at least 5.
+ //
+ // [rust-lang/regex]:
+ // https://github.com/rust-lang/regex/tree/b5372864e2df6a2f5e543a556a62197f50ca3650
+ let mut round_count = 0;
+ loop {
+ // PERF: Can we do something smarter than recalculating the candidates and liveness
+ // results?
+ let mut candidates = find_candidates(
+ body,
+ &borrowed,
+ &mut allocations.candidates,
+ &mut allocations.candidates_reverse,
);
- return;
- }
- if body.basic_blocks.len() > MAX_BLOCKS {
- warn!(
- "too many blocks in {:?} ({}, max is {}), not optimizing",
- def_id,
- body.basic_blocks.len(),
- MAX_BLOCKS
+ trace!(?candidates);
+ let mut live = MaybeLiveLocals
+ .into_engine(tcx, body)
+ .iterate_to_fixpoint()
+ .into_results_cursor(body);
+ dest_prop_mir_dump(tcx, body, &mut live, round_count);
+
+ FilterInformation::filter_liveness(
+ &mut candidates,
+ &mut live,
+ &mut allocations.write_info,
+ body,
);
- return;
- }
- let mut conflicts = Conflicts::build(tcx, body, &relevant_locals);
+ // Because we do not update liveness information, it is unsound to use a local for more
+ // than one merge operation within a single round of optimizations. We store here which
+ // ones we have already used.
+ let mut merged_locals: BitSet<Local> = BitSet::new_empty(body.local_decls.len());
- let mut replacements = Replacements::new(body.local_decls.len());
- for candidate @ CandidateAssignment { dest, src, loc } in candidates {
- // Merge locals that don't conflict.
- if !conflicts.can_unify(dest.local, src) {
- debug!("at assignment {:?}, conflict {:?} vs. {:?}", loc, dest.local, src);
- continue;
- }
+ // This is the set of merges we will apply this round. It is a subset of the candidates.
+ let mut merges = FxHashMap::default();
- if replacements.for_src(candidate.src).is_some() {
- debug!("src {:?} already has replacement", candidate.src);
- continue;
+ for (src, candidates) in candidates.c.iter() {
+ if merged_locals.contains(*src) {
+ continue;
+ }
+ let Some(dest) =
+ candidates.iter().find(|dest| !merged_locals.contains(**dest)) else {
+ continue;
+ };
+ if !tcx.consider_optimizing(|| {
+ format!("{} round {}", tcx.def_path_str(def_id), round_count)
+ }) {
+ break;
+ }
+ merges.insert(*src, *dest);
+ merged_locals.insert(*src);
+ merged_locals.insert(*dest);
}
+ trace!(merging = ?merges);
- if !tcx.consider_optimizing(|| {
- format!("DestinationPropagation {:?} {:?}", def_id, candidate)
- }) {
+ if merges.is_empty() {
break;
}
+ round_count += 1;
- replacements.push(candidate);
- conflicts.unify(candidate.src, candidate.dest.local);
+ apply_merges(body, tcx, &merges, &merged_locals);
}
- replacements.flatten(tcx);
-
- debug!("replacements {:?}", replacements.map);
-
- Replacer { tcx, replacements, place_elem_cache: Vec::new() }.visit_body(body);
-
- // FIXME fix debug info
+ trace!(round_count);
}
}
-#[derive(Debug, Eq, PartialEq, Copy, Clone)]
-struct UnifyLocal(Local);
-
-impl From<Local> for UnifyLocal {
- fn from(l: Local) -> Self {
- Self(l)
- }
-}
-
-impl UnifyKey for UnifyLocal {
- type Value = ();
- #[inline]
- fn index(&self) -> u32 {
- self.0.as_u32()
- }
- #[inline]
- fn from_index(u: u32) -> Self {
- Self(Local::from_u32(u))
- }
- fn tag() -> &'static str {
- "UnifyLocal"
- }
+/// Container for the various allocations that we need.
+///
+/// We store these here and hand out `&mut` access to them, instead of dropping and recreating them
+/// frequently. Everything with a `&'alloc` lifetime points into here.
+#[derive(Default)]
+struct Allocations {
+ candidates: FxHashMap<Local, Vec<Local>>,
+ candidates_reverse: FxHashMap<Local, Vec<Local>>,
+ write_info: WriteInfo,
+ // PERF: Do this for `MaybeLiveLocals` allocations too.
}
-struct Replacements<'tcx> {
- /// Maps locals to their replacement.
- map: IndexVec<Local, Option<Place<'tcx>>>,
-
- /// Whose locals' live ranges to kill.
- kill: BitSet<Local>,
+#[derive(Debug)]
+struct Candidates<'alloc> {
+ /// The set of candidates we are considering in this optimization.
+ ///
+ /// We will always merge the key into at most one of its values.
+ ///
+ /// Whether a place ends up in the key or the value does not correspond to whether it appears as
+ /// the lhs or rhs of any assignment. As a matter of fact, the places in here might never appear
+ /// in an assignment at all. This happens because if we see an assignment like this:
+ ///
+ /// ```ignore (syntax-highlighting-only)
+ /// _1.0 = _2.0
+ /// ```
+ ///
+ /// We will still report that we would like to merge `_1` and `_2` in an attempt to allow us to
+ /// remove that assignment.
+ c: &'alloc mut FxHashMap<Local, Vec<Local>>,
+ /// A reverse index of the `c` set; if the `c` set contains `a => b`, then this contains
+ /// `b => a`.
+ // PERF: Possibly these should be `SmallVec`s?
+ reverse: &'alloc mut FxHashMap<Local, Vec<Local>>,
}
-impl<'tcx> Replacements<'tcx> {
- fn new(locals: usize) -> Self {
- Self { map: IndexVec::from_elem_n(None, locals), kill: BitSet::new_empty(locals) }
- }
-
- fn push(&mut self, candidate: CandidateAssignment<'tcx>) {
- trace!("Replacements::push({:?})", candidate);
- let entry = &mut self.map[candidate.src];
- assert!(entry.is_none());
-
- *entry = Some(candidate.dest);
- self.kill.insert(candidate.src);
- self.kill.insert(candidate.dest.local);
- }
-
- /// Applies the stored replacements to all replacements, until no replacements would result in
- /// locals that need further replacements when applied.
- fn flatten(&mut self, tcx: TyCtxt<'tcx>) {
- // Note: This assumes that there are no cycles in the replacements, which is enforced via
- // `self.unified_locals`. Otherwise this can cause an infinite loop.
-
- for local in self.map.indices() {
- if let Some(replacement) = self.map[local] {
- // Substitute the base local of `replacement` until fixpoint.
- let mut base = replacement.local;
- let mut reversed_projection_slices = Vec::with_capacity(1);
- while let Some(replacement_for_replacement) = self.map[base] {
- base = replacement_for_replacement.local;
- reversed_projection_slices.push(replacement_for_replacement.projection);
- }
-
- let projection: Vec<_> = reversed_projection_slices
- .iter()
- .rev()
- .flat_map(|projs| projs.iter())
- .chain(replacement.projection.iter())
- .collect();
- let projection = tcx.intern_place_elems(&projection);
+//////////////////////////////////////////////////////////
+// Merging
+//
+// Applies the actual optimization
- // Replace with the final `Place`.
- self.map[local] = Some(Place { local: base, projection });
- }
- }
- }
-
- fn for_src(&self, src: Local) -> Option<Place<'tcx>> {
- self.map[src]
- }
+fn apply_merges<'tcx>(
+ body: &mut Body<'tcx>,
+ tcx: TyCtxt<'tcx>,
+ merges: &FxHashMap<Local, Local>,
+ merged_locals: &BitSet<Local>,
+) {
+ let mut merger = Merger { tcx, merges, merged_locals };
+ merger.visit_body_preserves_cfg(body);
}
-struct Replacer<'tcx> {
+struct Merger<'a, 'tcx> {
tcx: TyCtxt<'tcx>,
- replacements: Replacements<'tcx>,
- place_elem_cache: Vec<PlaceElem<'tcx>>,
+ merges: &'a FxHashMap<Local, Local>,
+ merged_locals: &'a BitSet<Local>,
}
-impl<'tcx> MutVisitor<'tcx> for Replacer<'tcx> {
+impl<'a, 'tcx> MutVisitor<'tcx> for Merger<'a, 'tcx> {
fn tcx(&self) -> TyCtxt<'tcx> {
self.tcx
}
- fn visit_local(&mut self, local: &mut Local, context: PlaceContext, location: Location) {
- if context.is_use() && self.replacements.for_src(*local).is_some() {
- bug!(
- "use of local {:?} should have been replaced by visit_place; context={:?}, loc={:?}",
- local,
- context,
- location,
- );
+ fn visit_local(&mut self, local: &mut Local, _: PlaceContext, _location: Location) {
+ if let Some(dest) = self.merges.get(local) {
+ *local = *dest;
}
}
- fn visit_place(&mut self, place: &mut Place<'tcx>, context: PlaceContext, location: Location) {
- if let Some(replacement) = self.replacements.for_src(place.local) {
- // Rebase `place`s projections onto `replacement`'s.
- self.place_elem_cache.clear();
- self.place_elem_cache.extend(replacement.projection.iter().chain(place.projection));
- let projection = self.tcx.intern_place_elems(&self.place_elem_cache);
- let new_place = Place { local: replacement.local, projection };
-
- debug!("Replacer: {:?} -> {:?}", place, new_place);
- *place = new_place;
- }
-
- self.super_place(place, context, location);
- }
-
fn visit_statement(&mut self, statement: &mut Statement<'tcx>, location: Location) {
- self.super_statement(statement, location);
-
match &statement.kind {
- // FIXME: Don't delete storage statements, merge the live ranges instead
+ // FIXME: Don't delete storage statements, but "merge" the storage ranges instead.
StatementKind::StorageDead(local) | StatementKind::StorageLive(local)
- if self.replacements.kill.contains(*local) =>
+ if self.merged_locals.contains(*local) =>
{
- statement.make_nop()
+ statement.make_nop();
+ return;
}
-
+ _ => (),
+ };
+ self.super_statement(statement, location);
+ match &statement.kind {
StatementKind::Assign(box (dest, rvalue)) => {
match rvalue {
Rvalue::Use(Operand::Copy(place) | Operand::Move(place)) => {
@@ -353,524 +341,427 @@ impl<'tcx> MutVisitor<'tcx> for Replacer<'tcx> {
}
}
-struct Conflicts<'a> {
- relevant_locals: &'a BitSet<Local>,
-
- /// The conflict matrix. It is always symmetric and the adjacency matrix of the corresponding
- /// conflict graph.
- matrix: BitMatrix<Local, Local>,
-
- /// Preallocated `BitSet` used by `unify`.
- unify_cache: BitSet<Local>,
-
- /// Tracks locals that have been merged together to prevent cycles and propagate conflicts.
- unified_locals: InPlaceUnificationTable<UnifyLocal>,
+//////////////////////////////////////////////////////////
+// Liveness filtering
+//
+// This section enforces the third bullet point (the liveness requirement)
+
+struct FilterInformation<'a, 'body, 'alloc, 'tcx> {
+ body: &'body Body<'tcx>,
+ live: &'a mut ResultsCursor<'body, 'tcx, MaybeLiveLocals>,
+ candidates: &'a mut Candidates<'alloc>,
+ write_info: &'alloc mut WriteInfo,
+ at: Location,
}
-impl<'a> Conflicts<'a> {
- fn build<'tcx>(
- tcx: TyCtxt<'tcx>,
- body: &'_ Body<'tcx>,
- relevant_locals: &'a BitSet<Local>,
- ) -> Self {
- // We don't have to look out for locals that have their address taken, since
- // `find_candidates` already takes care of that.
-
- let conflicts = BitMatrix::from_row_n(
- &BitSet::new_empty(body.local_decls.len()),
- body.local_decls.len(),
- );
-
- let mut init = MaybeInitializedLocals
- .into_engine(tcx, body)
- .iterate_to_fixpoint()
- .into_results_cursor(body);
- let mut live =
- MaybeLiveLocals.into_engine(tcx, body).iterate_to_fixpoint().into_results_cursor(body);
-
- let mut reachable = None;
- dump_mir(tcx, None, "DestinationPropagation-dataflow", &"", body, |pass_where, w| {
- let reachable = reachable.get_or_insert_with(|| traversal::reachable_as_bitset(body));
-
- match pass_where {
- PassWhere::BeforeLocation(loc) if reachable.contains(loc.block) => {
- init.seek_before_primary_effect(loc);
- live.seek_after_primary_effect(loc);
-
- writeln!(w, " // init: {:?}", init.get())?;
- writeln!(w, " // live: {:?}", live.get())?;
- }
- PassWhere::AfterTerminator(bb) if reachable.contains(bb) => {
- let loc = body.terminator_loc(bb);
- init.seek_after_primary_effect(loc);
- live.seek_before_primary_effect(loc);
-
- writeln!(w, " // init: {:?}", init.get())?;
- writeln!(w, " // live: {:?}", live.get())?;
- }
-
- PassWhere::BeforeBlock(bb) if reachable.contains(bb) => {
- init.seek_to_block_start(bb);
- live.seek_to_block_start(bb);
-
- writeln!(w, " // init: {:?}", init.get())?;
- writeln!(w, " // live: {:?}", live.get())?;
- }
-
- PassWhere::BeforeCFG | PassWhere::AfterCFG | PassWhere::AfterLocation(_) => {}
-
- PassWhere::BeforeLocation(_) | PassWhere::AfterTerminator(_) => {
- writeln!(w, " // init: <unreachable>")?;
- writeln!(w, " // live: <unreachable>")?;
- }
-
- PassWhere::BeforeBlock(_) => {
- writeln!(w, " // init: <unreachable>")?;
- writeln!(w, " // live: <unreachable>")?;
- }
+// We first implement some utility functions for removing candidates according to different
+// needs. Throughout the liveness filtering, the `candidates` are only ever accessed through
+// these methods, and not directly.
+impl<'alloc> Candidates<'alloc> {
+ /// Just `Vec::retain`, but the condition is inverted and we add debugging output
+ fn vec_remove_debug(
+ src: Local,
+ v: &mut Vec<Local>,
+ mut f: impl FnMut(Local) -> bool,
+ at: Location,
+ ) {
+ v.retain(|dest| {
+ let remove = f(*dest);
+ if remove {
+ trace!("eliminating {:?} => {:?} due to conflict at {:?}", src, dest, at);
}
-
- Ok(())
+ !remove
});
+ }
- let mut this = Self {
- relevant_locals,
- matrix: conflicts,
- unify_cache: BitSet::new_empty(body.local_decls.len()),
- unified_locals: {
- let mut table = InPlaceUnificationTable::new();
- // Pre-fill table with all locals (this creates N nodes / "connected" components,
- // "graph"-ically speaking).
- for local in 0..body.local_decls.len() {
- assert_eq!(table.new_key(()), UnifyLocal(Local::from_usize(local)));
- }
- table
- },
- };
-
- let mut live_and_init_locals = Vec::new();
-
- // Visit only reachable basic blocks. The exact order is not important.
- for (block, data) in traversal::preorder(body) {
- // We need to observe the dataflow state *before* all possible locations (statement or
- // terminator) in each basic block, and then observe the state *after* the terminator
- // effect is applied. As long as neither `init` nor `borrowed` has a "before" effect,
- // we will observe all possible dataflow states.
-
- // Since liveness is a backwards analysis, we need to walk the results backwards. To do
- // that, we first collect in the `MaybeInitializedLocals` results in a forwards
- // traversal.
-
- live_and_init_locals.resize_with(data.statements.len() + 1, || {
- BitSet::new_empty(body.local_decls.len())
- });
-
- // First, go forwards for `MaybeInitializedLocals` and apply intra-statement/terminator
- // conflicts.
- for (i, statement) in data.statements.iter().enumerate() {
- this.record_statement_conflicts(statement);
-
- let loc = Location { block, statement_index: i };
- init.seek_before_primary_effect(loc);
+ /// `vec_remove_debug` but for an `Entry`
+ fn entry_remove(
+ mut entry: OccupiedEntry<'_, Local, Vec<Local>>,
+ p: Local,
+ f: impl FnMut(Local) -> bool,
+ at: Location,
+ ) {
+ let candidates = entry.get_mut();
+ Self::vec_remove_debug(p, candidates, f, at);
+ if candidates.len() == 0 {
+ entry.remove();
+ }
+ }
- live_and_init_locals[i].clone_from(init.get());
+ /// Removes all candidates `(p, q)` or `(q, p)` where `p` is the indicated local and `f(q)` is true.
+ fn remove_candidates_if(&mut self, p: Local, mut f: impl FnMut(Local) -> bool, at: Location) {
+ // Cover the cases where `p` appears as a `src`
+ if let Entry::Occupied(entry) = self.c.entry(p) {
+ Self::entry_remove(entry, p, &mut f, at);
+ }
+ // And the cases where `p` appears as a `dest`
+ let Some(srcs) = self.reverse.get_mut(&p) else {
+ return;
+ };
+ // We use `retain` here to remove the elements from the reverse set if we've removed the
+ // matching candidate in the forward set.
+ srcs.retain(|src| {
+ if !f(*src) {
+ return true;
}
+ let Entry::Occupied(entry) = self.c.entry(*src) else {
+ return false;
+ };
+ Self::entry_remove(entry, *src, |dest| dest == p, at);
+ false
+ });
+ }
+}
- this.record_terminator_conflicts(data.terminator());
- let term_loc = Location { block, statement_index: data.statements.len() };
- init.seek_before_primary_effect(term_loc);
- live_and_init_locals[term_loc.statement_index].clone_from(init.get());
-
- // Now, go backwards and union with the liveness results.
- for statement_index in (0..=data.statements.len()).rev() {
- let loc = Location { block, statement_index };
- live.seek_after_primary_effect(loc);
-
- live_and_init_locals[statement_index].intersect(live.get());
-
- trace!("record conflicts at {:?}", loc);
+impl<'a, 'body, 'alloc, 'tcx> FilterInformation<'a, 'body, 'alloc, 'tcx> {
+ /// Filters the set of candidates to remove those that conflict.
+ ///
+ /// The steps we take are exactly those that are outlined at the top of the file. For each
+ /// statement/terminator, we collect the set of locals that are written to in that
+ /// statement/terminator, and then we remove all pairs of candidates that contain one such local
+ /// and another one that is live.
+ ///
+ /// We need to be careful about the ordering of operations within each statement/terminator
+ /// here. Many statements might write and read from more than one place, and we need to consider
+ /// them all. The strategy for doing this is as follows: We first gather all the places that are
+ /// written to within the statement/terminator via `WriteInfo`. Then, we use the liveness
+ /// analysis from *before* the statement/terminator (in the control flow sense) to eliminate
+ /// candidates - this is because we want to conservatively treat a pair of locals that is both
+ /// read and written in the statement/terminator as conflicting, and the liveness analysis
+ /// before the statement/terminator will correctly report locals that are read in the
+ /// statement/terminator to be live. We are additionally conservative by treating all written to
+ /// locals as also being read from.
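+ /// As an illustration (the locals here are invented, not real MIR output), consider a call
+ /// terminator:
+ ///
+ /// ```ignore (illustrative sketch)
+ /// _2 = f(move _1)
+ /// ```
+ ///
+ /// `_2` and `_1` are both collected as writes, and in the liveness state taken before the
+ /// call `_1` is still live (it is read by the call), so any candidate that pairs `_1` with
+ /// `_2` is removed at this location.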
+ fn filter_liveness<'b>(
+ candidates: &mut Candidates<'alloc>,
+ live: &mut ResultsCursor<'b, 'tcx, MaybeLiveLocals>,
+ write_info_alloc: &'alloc mut WriteInfo,
+ body: &'b Body<'tcx>,
+ ) {
+ let mut this = FilterInformation {
+ body,
+ live,
+ candidates,
+ // We don't actually store anything at this scope; we just keep things here to be able
+ // to reuse the allocation.
+ write_info: write_info_alloc,
+ // Doesn't matter what we put here; it will be overwritten before being used
+ at: Location { block: BasicBlock::from_u32(0), statement_index: 0 },
+ };
+ this.internal_filter_liveness();
+ }
- this.record_dataflow_conflicts(&mut live_and_init_locals[statement_index]);
+ fn internal_filter_liveness(&mut self) {
+ for (block, data) in traversal::preorder(self.body) {
+ self.at = Location { block, statement_index: data.statements.len() };
+ self.live.seek_after_primary_effect(self.at);
+ self.write_info.for_terminator(&data.terminator().kind);
+ self.apply_conflicts();
+
+ for (i, statement) in data.statements.iter().enumerate().rev() {
+ self.at = Location { block, statement_index: i };
+ self.live.seek_after_primary_effect(self.at);
+ self.get_statement_write_info(&statement.kind);
+ self.apply_conflicts();
}
-
- init.seek_to_block_end(block);
- live.seek_to_block_end(block);
- let mut conflicts = init.get().clone();
- conflicts.intersect(live.get());
- trace!("record conflicts at end of {:?}", block);
-
- this.record_dataflow_conflicts(&mut conflicts);
}
-
- this
}
- fn record_dataflow_conflicts(&mut self, new_conflicts: &mut BitSet<Local>) {
- // Remove all locals that are not candidates.
- new_conflicts.intersect(self.relevant_locals);
+ fn apply_conflicts(&mut self) {
+ let writes = &self.write_info.writes;
+ for p in writes {
+ self.candidates.remove_candidates_if(
+ *p,
+ // It is possible that a local may be live for less than the
+ // duration of a statement. This happens in the case of function
+ // calls or inline asm. Because of this, we also mark locals as
+ // conflicting when both of them are written to in the same
+ // statement.
+ |q| self.live.contains(q) || writes.contains(&q),
+ self.at,
+ );
+ }
+ }
- for local in new_conflicts.iter() {
- self.matrix.union_row_with(&new_conflicts, local);
+ /// Gets the write info for the `statement`.
+ fn get_statement_write_info(&mut self, statement: &StatementKind<'tcx>) {
+ self.write_info.writes.clear();
+ match statement {
+ StatementKind::Assign(box (lhs, rhs)) => match rhs {
+ Rvalue::Use(op) => {
+ if !lhs.is_indirect() {
+ self.get_assign_use_write_info(*lhs, op);
+ return;
+ }
+ }
+ _ => (),
+ },
+ _ => (),
}
+
+ self.write_info.for_statement(statement);
}
- fn record_local_conflict(&mut self, a: Local, b: Local, why: &str) {
- trace!("conflict {:?} <-> {:?} due to {}", a, b, why);
- self.matrix.insert(a, b);
- self.matrix.insert(b, a);
+ fn get_assign_use_write_info(&mut self, lhs: Place<'tcx>, rhs: &Operand<'tcx>) {
+ // We register the writes for the operand unconditionally
+ self.write_info.add_operand(rhs);
+ // However, we cannot do the same thing for the `lhs` as that would always block the
+ // optimization. Instead, we consider removing candidates manually.
+ let Some(rhs) = rhs.place() else {
+ self.write_info.add_place(lhs);
+ return;
+ };
+ // Find out which candidate pair we should skip, if any
+ let Some((src, dest)) = places_to_candidate_pair(lhs, rhs, self.body) else {
+ self.write_info.add_place(lhs);
+ return;
+ };
+ self.candidates.remove_candidates_if(
+ lhs.local,
+ |other| {
+ // Check if this is the candidate pair that should not be removed
+ if (lhs.local == src && other == dest) || (lhs.local == dest && other == src) {
+ return false;
+ }
+ // Otherwise, do the "standard" thing
+ self.live.contains(other)
+ },
+ self.at,
+ )
}
+}
- /// Records locals that must not overlap during the evaluation of `stmt`. These locals conflict
- /// and must not be merged.
- fn record_statement_conflicts(&mut self, stmt: &Statement<'_>) {
- match &stmt.kind {
- // While the left and right sides of an assignment must not overlap, we do not mark
- // conflicts here as that would make this optimization useless. When we optimize, we
- // eliminate the resulting self-assignments automatically.
- StatementKind::Assign(_) => {}
-
- StatementKind::SetDiscriminant { .. }
- | StatementKind::Deinit(..)
- | StatementKind::StorageLive(..)
- | StatementKind::StorageDead(..)
- | StatementKind::Retag(..)
- | StatementKind::FakeRead(..)
- | StatementKind::AscribeUserType(..)
- | StatementKind::Coverage(..)
- | StatementKind::Intrinsic(..)
- | StatementKind::Nop => {}
+/// Describes where a statement/terminator writes to
+#[derive(Default, Debug)]
+struct WriteInfo {
+ writes: Vec<Local>,
+}
+
+impl WriteInfo {
+ fn for_statement<'tcx>(&mut self, statement: &StatementKind<'tcx>) {
+ match statement {
+ StatementKind::Assign(box (lhs, rhs)) => {
+ self.add_place(*lhs);
+ match rhs {
+ Rvalue::Use(op) | Rvalue::Repeat(op, _) => {
+ self.add_operand(op);
+ }
+ Rvalue::Cast(_, op, _)
+ | Rvalue::UnaryOp(_, op)
+ | Rvalue::ShallowInitBox(op, _) => {
+ self.add_operand(op);
+ }
+ Rvalue::BinaryOp(_, ops) | Rvalue::CheckedBinaryOp(_, ops) => {
+ for op in [&ops.0, &ops.1] {
+ self.add_operand(op);
+ }
+ }
+ Rvalue::Aggregate(_, ops) => {
+ for op in ops {
+ self.add_operand(op);
+ }
+ }
+ Rvalue::ThreadLocalRef(_)
+ | Rvalue::NullaryOp(_, _)
+ | Rvalue::Ref(_, _, _)
+ | Rvalue::AddressOf(_, _)
+ | Rvalue::Len(_)
+ | Rvalue::Discriminant(_)
+ | Rvalue::CopyForDeref(_) => (),
+ }
+ }
+ // Retags are technically also reads, but reporting them as a write suffices
+ StatementKind::SetDiscriminant { place, .. }
+ | StatementKind::Deinit(place)
+ | StatementKind::Retag(_, place) => {
+ self.add_place(**place);
+ }
+ StatementKind::Intrinsic(_)
+ | StatementKind::Nop
+ | StatementKind::Coverage(_)
+ | StatementKind::StorageLive(_)
+ | StatementKind::StorageDead(_) => (),
+ StatementKind::FakeRead(_) | StatementKind::AscribeUserType(_, _) => {
+ bug!("{:?} not found in this MIR phase", statement)
+ }
}
}
- fn record_terminator_conflicts(&mut self, term: &Terminator<'_>) {
- match &term.kind {
- TerminatorKind::DropAndReplace {
- place: dropped_place,
- value,
- target: _,
- unwind: _,
- } => {
- if let Some(place) = value.place()
- && !place.is_indirect()
- && !dropped_place.is_indirect()
- {
- self.record_local_conflict(
- place.local,
- dropped_place.local,
- "DropAndReplace operand overlap",
- );
- }
+ fn for_terminator<'tcx>(&mut self, terminator: &TerminatorKind<'tcx>) {
+ self.writes.clear();
+ match terminator {
+ TerminatorKind::SwitchInt { discr: op, .. }
+ | TerminatorKind::Assert { cond: op, .. } => {
+ self.add_operand(op);
}
- TerminatorKind::Yield { value, resume: _, resume_arg, drop: _ } => {
- if let Some(place) = value.place() {
- if !place.is_indirect() && !resume_arg.is_indirect() {
- self.record_local_conflict(
- place.local,
- resume_arg.local,
- "Yield operand overlap",
- );
- }
+ TerminatorKind::Call { destination, func, args, .. } => {
+ self.add_place(*destination);
+ self.add_operand(func);
+ for arg in args {
+ self.add_operand(arg);
}
}
- TerminatorKind::Call {
- func,
- args,
- destination,
- target: _,
- cleanup: _,
- from_hir_call: _,
- fn_span: _,
- } => {
- // No arguments may overlap with the destination.
- for arg in args.iter().chain(Some(func)) {
- if let Some(place) = arg.place() {
- if !place.is_indirect() && !destination.is_indirect() {
- self.record_local_conflict(
- destination.local,
- place.local,
- "call dest/arg overlap",
- );
+ TerminatorKind::InlineAsm { operands, .. } => {
+ for asm_operand in operands {
+ match asm_operand {
+ InlineAsmOperand::In { value, .. } => {
+ self.add_operand(value);
}
- }
- }
- }
- TerminatorKind::InlineAsm {
- template: _,
- operands,
- options: _,
- line_spans: _,
- destination: _,
- cleanup: _,
- } => {
- // The intended semantics here aren't documented, we just assume that nothing that
- // could be written to by the assembly may overlap with any other operands.
- for op in operands {
- match op {
- InlineAsmOperand::Out { reg: _, late: _, place: Some(dest_place) }
- | InlineAsmOperand::InOut {
- reg: _,
- late: _,
- in_value: _,
- out_place: Some(dest_place),
- } => {
- // For output place `place`, add all places accessed by the inline asm.
- for op in operands {
- match op {
- InlineAsmOperand::In { reg: _, value } => {
- if let Some(p) = value.place()
- && !p.is_indirect()
- && !dest_place.is_indirect()
- {
- self.record_local_conflict(
- p.local,
- dest_place.local,
- "asm! operand overlap",
- );
- }
- }
- InlineAsmOperand::Out {
- reg: _,
- late: _,
- place: Some(place),
- } => {
- if !place.is_indirect() && !dest_place.is_indirect() {
- self.record_local_conflict(
- place.local,
- dest_place.local,
- "asm! operand overlap",
- );
- }
- }
- InlineAsmOperand::InOut {
- reg: _,
- late: _,
- in_value,
- out_place,
- } => {
- if let Some(place) = in_value.place()
- && !place.is_indirect()
- && !dest_place.is_indirect()
- {
- self.record_local_conflict(
- place.local,
- dest_place.local,
- "asm! operand overlap",
- );
- }
-
- if let Some(place) = out_place
- && !place.is_indirect()
- && !dest_place.is_indirect()
- {
- self.record_local_conflict(
- place.local,
- dest_place.local,
- "asm! operand overlap",
- );
- }
- }
- InlineAsmOperand::Out { reg: _, late: _, place: None }
- | InlineAsmOperand::Const { value: _ }
- | InlineAsmOperand::SymFn { value: _ }
- | InlineAsmOperand::SymStatic { def_id: _ } => {}
- }
+ InlineAsmOperand::Out { place, .. } => {
+ if let Some(place) = place {
+ self.add_place(*place);
}
}
- InlineAsmOperand::InOut {
- reg: _,
- late: _,
- in_value: _,
- out_place: None,
+ // Note that the `late` field in `InOut` is about whether the registers used
+ // for these things overlap, and is of absolutely no interest to us.
+ InlineAsmOperand::InOut { in_value, out_place, .. } => {
+ if let Some(place) = out_place {
+ self.add_place(*place);
+ }
+ self.add_operand(in_value);
}
- | InlineAsmOperand::In { reg: _, value: _ }
- | InlineAsmOperand::Out { reg: _, late: _, place: None }
- | InlineAsmOperand::Const { value: _ }
- | InlineAsmOperand::SymFn { value: _ }
- | InlineAsmOperand::SymStatic { def_id: _ } => {}
+ InlineAsmOperand::Const { .. }
+ | InlineAsmOperand::SymFn { .. }
+ | InlineAsmOperand::SymStatic { .. } => (),
}
}
}
-
TerminatorKind::Goto { .. }
- | TerminatorKind::SwitchInt { .. }
- | TerminatorKind::Resume
- | TerminatorKind::Abort
+ | TerminatorKind::Resume { .. }
+ | TerminatorKind::Abort { .. }
| TerminatorKind::Return
- | TerminatorKind::Unreachable
- | TerminatorKind::Drop { .. }
- | TerminatorKind::Assert { .. }
+ | TerminatorKind::Unreachable { .. } => (),
+ TerminatorKind::Drop { .. } => {
+ // `Drop`s create a `&mut` and so are not considered
+ }
+ TerminatorKind::DropAndReplace { .. }
+ | TerminatorKind::Yield { .. }
| TerminatorKind::GeneratorDrop
| TerminatorKind::FalseEdge { .. }
- | TerminatorKind::FalseUnwind { .. } => {}
+ | TerminatorKind::FalseUnwind { .. } => {
+ bug!("{:?} not found in this MIR phase", terminator)
+ }
}
}
- /// Checks whether `a` and `b` may be merged. Returns `false` if there's a conflict.
- fn can_unify(&mut self, a: Local, b: Local) -> bool {
- // After some locals have been unified, their conflicts are only tracked in the root key,
- // so look that up.
- let a = self.unified_locals.find(a).0;
- let b = self.unified_locals.find(b).0;
-
- if a == b {
- // Already merged (part of the same connected component).
- return false;
- }
+ fn add_place<'tcx>(&mut self, place: Place<'tcx>) {
+ self.writes.push(place.local);
+ }
- if self.matrix.contains(a, b) {
- // Conflict (derived via dataflow, intra-statement conflicts, or inherited from another
- // local during unification).
- return false;
+ fn add_operand<'tcx>(&mut self, op: &Operand<'tcx>) {
+ match op {
+ // FIXME(JakobDegen): In a previous version, the `Move` case was incorrectly treated as
+ // only being a read. This was unsound; however, we cannot add a regression test because
+ // it is not possible to trigger this with current MIR. Once we have that ability, a
+ // regression test should be added.
+ Operand::Move(p) => self.add_place(*p),
+ Operand::Copy(_) | Operand::Constant(_) => (),
}
-
- true
}
+}
- /// Merges the conflicts of `a` and `b`, so that each one inherits all conflicts of the other.
- ///
- /// `can_unify` must have returned `true` for the same locals, or this may panic or lead to
- /// miscompiles.
- ///
- /// This is called when the pass makes the decision to unify `a` and `b` (or parts of `a` and
- /// `b`) and is needed to ensure that future unification decisions take potentially newly
- /// introduced conflicts into account.
- ///
- /// For an example, assume we have locals `_0`, `_1`, `_2`, and `_3`. There are these conflicts:
- ///
- /// * `_0` <-> `_1`
- /// * `_1` <-> `_2`
- /// * `_3` <-> `_0`
- ///
- /// We then decide to merge `_2` with `_3` since they don't conflict. Then we decide to merge
- /// `_2` with `_0`, which also doesn't have a conflict in the above list. However `_2` is now
- /// `_3`, which does conflict with `_0`.
- fn unify(&mut self, a: Local, b: Local) {
- trace!("unify({:?}, {:?})", a, b);
-
- // Get the root local of the connected components. The root local stores the conflicts of
- // all locals in the connected component (and *is stored* as the conflicting local of other
- // locals).
- let a = self.unified_locals.find(a).0;
- let b = self.unified_locals.find(b).0;
- assert_ne!(a, b);
-
- trace!("roots: a={:?}, b={:?}", a, b);
- trace!("{:?} conflicts: {:?}", a, self.matrix.iter(a).format(", "));
- trace!("{:?} conflicts: {:?}", b, self.matrix.iter(b).format(", "));
-
- self.unified_locals.union(a, b);
-
- let root = self.unified_locals.find(a).0;
- assert!(root == a || root == b);
-
- // Make all locals that conflict with `a` also conflict with `b`, and vice versa.
- self.unify_cache.clear();
- for conflicts_with_a in self.matrix.iter(a) {
- self.unify_cache.insert(conflicts_with_a);
- }
- for conflicts_with_b in self.matrix.iter(b) {
- self.unify_cache.insert(conflicts_with_b);
- }
- for conflicts_with_a_or_b in self.unify_cache.iter() {
- // Set both `a` and `b` for this local's row.
- self.matrix.insert(conflicts_with_a_or_b, a);
- self.matrix.insert(conflicts_with_a_or_b, b);
- }
+/////////////////////////////////////////////////////
+// Candidate accumulation
- // Write the locals `a` conflicts with to `b`'s row.
- self.matrix.union_rows(a, b);
- // Write the locals `b` conflicts with to `a`'s row.
- self.matrix.union_rows(b, a);
- }
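+/// Whether a place is "constant" in the sense of the module documentation: it contains no
+/// indirection through a pointer (`Deref`) and no `Index` projections, so it always refers to the
+/// same memory.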
+fn is_constant<'tcx>(place: Place<'tcx>) -> bool {
+ place.projection.iter().all(|p| !matches!(p, ProjectionElem::Deref | ProjectionElem::Index(_)))
}
-/// A `dest = {move} src;` statement at `loc`.
+/// If the pair of places is being considered for merging, returns the candidate which would be
+/// merged in order to accomplish this.
+///
+/// The contract here is in one direction - there is a guarantee that merging the locals that are
+/// outputted by this function would result in an assignment between the inputs becoming a
+/// self-assignment. However, there is no guarantee that the returned pair is actually suitable for
+/// merging - candidate collection must still check this independently.
///
-/// We want to consider merging `dest` and `src` due to this assignment.
-#[derive(Debug, Copy, Clone)]
-struct CandidateAssignment<'tcx> {
- /// Does not contain indirection or indexing (so the only local it contains is the place base).
- dest: Place<'tcx>,
- src: Local,
- loc: Location,
+/// This output is unique for each unordered pair of input places.
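+///
+/// As an illustration (the locals are invented): for two plain temporaries `_4` and `_6`, both
+/// orderings of the inputs produce the same pair:
+///
+/// ```ignore (illustrative sketch)
+/// places_to_candidate_pair(_6, _4, body) == places_to_candidate_pair(_4, _6, body)
+///                                        == Some((_4, _6))
+/// ```
+///
+/// If either place carries a projection, `None` is returned; if the first local of the sorted
+/// pair cannot be removed (see `is_local_required`), the pair is swapped so that it ends up as
+/// the `dest` instead of the `src`.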
+fn places_to_candidate_pair<'tcx>(
+ a: Place<'tcx>,
+ b: Place<'tcx>,
+ body: &Body<'tcx>,
+) -> Option<(Local, Local)> {
+ let (mut a, mut b) = if a.projection.len() == 0 && b.projection.len() == 0 {
+ (a.local, b.local)
+ } else {
+ return None;
+ };
+
+ // By sorting, we make sure the result is independent of the input order
+ if a > b {
+ std::mem::swap(&mut a, &mut b);
+ }
+
+ // We could now return `(a, b)`, but then we miss some candidates in the case where `a` can't be
+ // used as a `src`.
+ if is_local_required(a, body) {
+ std::mem::swap(&mut a, &mut b);
+ }
+ // We could check `is_local_required` again here, but there's no need - after all, we make no
+ // promise that the candidate pair is actually valid
+ Some((a, b))
}
-/// Scans the MIR for assignments between locals that we might want to consider merging.
+/// Collects the candidates for merging
///
-/// This will filter out assignments that do not match the right form (as described in the top-level
-/// comment) and also throw out assignments that involve a local that has its address taken or is
-/// otherwise ineligible (eg. locals used as array indices are ignored because we cannot propagate
-/// arbitrary places into array indices).
-fn find_candidates<'tcx>(body: &Body<'tcx>) -> Vec<CandidateAssignment<'tcx>> {
- let mut visitor = FindAssignments {
- body,
- candidates: Vec::new(),
- ever_borrowed_locals: borrowed_locals(body),
- locals_used_as_array_index: locals_used_as_array_index(body),
- };
+/// This is responsible for enforcing the first and second bullet points.
+fn find_candidates<'alloc, 'tcx>(
+ body: &Body<'tcx>,
+ borrowed: &BitSet<Local>,
+ candidates: &'alloc mut FxHashMap<Local, Vec<Local>>,
+ candidates_reverse: &'alloc mut FxHashMap<Local, Vec<Local>>,
+) -> Candidates<'alloc> {
+ candidates.clear();
+ candidates_reverse.clear();
+ let mut visitor = FindAssignments { body, candidates, borrowed };
visitor.visit_body(body);
- visitor.candidates
+ // Deduplicate candidates
+ for (_, cands) in candidates.iter_mut() {
+ cands.sort();
+ cands.dedup();
+ }
+ // Generate the reverse map
+ for (src, cands) in candidates.iter() {
+ for dest in cands.iter().copied() {
+ candidates_reverse.entry(dest).or_default().push(*src);
+ }
+ }
+ Candidates { c: candidates, reverse: candidates_reverse }
}
-struct FindAssignments<'a, 'tcx> {
+struct FindAssignments<'a, 'alloc, 'tcx> {
body: &'a Body<'tcx>,
- candidates: Vec<CandidateAssignment<'tcx>>,
- ever_borrowed_locals: BitSet<Local>,
- locals_used_as_array_index: BitSet<Local>,
+ candidates: &'alloc mut FxHashMap<Local, Vec<Local>>,
+ borrowed: &'a BitSet<Local>,
}
-impl<'tcx> Visitor<'tcx> for FindAssignments<'_, 'tcx> {
- fn visit_statement(&mut self, statement: &Statement<'tcx>, location: Location) {
+impl<'tcx> Visitor<'tcx> for FindAssignments<'_, '_, 'tcx> {
+ fn visit_statement(&mut self, statement: &Statement<'tcx>, _: Location) {
if let StatementKind::Assign(box (
- dest,
- Rvalue::Use(Operand::Copy(src) | Operand::Move(src)),
+ lhs,
+ Rvalue::Use(Operand::Copy(rhs) | Operand::Move(rhs)),
)) = &statement.kind
{
- // `dest` must not have pointer indirection.
- if dest.is_indirect() {
- return;
- }
-
- // `src` must be a plain local.
- if !src.projection.is_empty() {
+ if !is_constant(*lhs) || !is_constant(*rhs) {
return;
}
- // Since we want to replace `src` with `dest`, `src` must not be required.
- if is_local_required(src.local, self.body) {
+ let Some((src, dest)) = places_to_candidate_pair(*lhs, *rhs, self.body) else {
return;
- }
+ };
- // Can't optimize if either local ever has their address taken. This optimization does
- // liveness analysis only based on assignments, and a local can be live even if its
- // never assigned to again, because a reference to it might be live.
- // FIXME: This can be smarter and take `StorageDead` into account (which invalidates
- // borrows).
- if self.ever_borrowed_locals.contains(dest.local)
- || self.ever_borrowed_locals.contains(src.local)
- {
+ // As described at the top of the file, we do not go near things that have their address
+ // taken.
+ if self.borrowed.contains(src) || self.borrowed.contains(dest) {
return;
}
- assert_ne!(dest.local, src.local, "self-assignments are UB");
-
- // We can't replace locals occurring in `PlaceElem::Index` for now.
- if self.locals_used_as_array_index.contains(src.local) {
+ // Also, we need to make sure that MIR actually allows the `src` to be removed
+ if is_local_required(src, self.body) {
return;
}
- for elem in dest.projection {
- if let PlaceElem::Index(_) = elem {
- // `dest` contains an indexing projection.
- return;
- }
- }
-
- self.candidates.push(CandidateAssignment {
- dest: *dest,
- src: src.local,
- loc: location,
- });
+ // We may insert duplicates here, but that's fine
+ self.candidates.entry(src).or_default().push(dest);
}
}
}
@@ -886,32 +777,46 @@ fn is_local_required(local: Local, body: &Body<'_>) -> bool {
}
}
-/// `PlaceElem::Index` only stores a `Local`, so we can't replace that with a full `Place`.
-///
-/// Collect locals used as indices so we don't generate candidates that are impossible to apply
-/// later.
-fn locals_used_as_array_index(body: &Body<'_>) -> BitSet<Local> {
- let mut visitor = IndexCollector { locals: BitSet::new_empty(body.local_decls.len()) };
- visitor.visit_body(body);
- visitor.locals
-}
+/////////////////////////////////////////////////////////
+// MIR Dump
-struct IndexCollector {
- locals: BitSet<Local>,
-}
+fn dest_prop_mir_dump<'body, 'tcx>(
+ tcx: TyCtxt<'tcx>,
+ body: &'body Body<'tcx>,
+ live: &mut ResultsCursor<'body, 'tcx, MaybeLiveLocals>,
+ round: usize,
+) {
+ let mut reachable = None;
+ dump_mir(tcx, false, "DestinationPropagation-dataflow", &round, body, |pass_where, w| {
+ let reachable = reachable.get_or_insert_with(|| traversal::reachable_as_bitset(body));
+
+ match pass_where {
+ PassWhere::BeforeLocation(loc) if reachable.contains(loc.block) => {
+ live.seek_after_primary_effect(loc);
+ writeln!(w, " // live: {:?}", live.get())?;
+ }
+ PassWhere::AfterTerminator(bb) if reachable.contains(bb) => {
+ let loc = body.terminator_loc(bb);
+ live.seek_before_primary_effect(loc);
+ writeln!(w, " // live: {:?}", live.get())?;
+ }
-impl<'tcx> Visitor<'tcx> for IndexCollector {
- fn visit_projection_elem(
- &mut self,
- local: Local,
- proj_base: &[PlaceElem<'tcx>],
- elem: PlaceElem<'tcx>,
- context: PlaceContext,
- location: Location,
- ) {
- if let PlaceElem::Index(i) = elem {
- self.locals.insert(i);
+ PassWhere::BeforeBlock(bb) if reachable.contains(bb) => {
+ live.seek_to_block_start(bb);
+ writeln!(w, " // live: {:?}", live.get())?;
+ }
+
+ PassWhere::BeforeCFG | PassWhere::AfterCFG | PassWhere::AfterLocation(_) => {}
+
+ PassWhere::BeforeLocation(_) | PassWhere::AfterTerminator(_) => {
+ writeln!(w, " // live: <unreachable>")?;
+ }
+
+ PassWhere::BeforeBlock(_) => {
+ writeln!(w, " // live: <unreachable>")?;
+ }
}
- self.super_projection_elem(local, proj_base, elem, context, location);
- }
+
+ Ok(())
+ });
}
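
The dest_prop hunk above replaces the old candidate vector with a forward map from source local to destination locals plus a reverse map. Below is a minimal standalone sketch of that deduplicate-then-invert step, using usize keys and std::collections::HashMap in place of rustc's Local and FxHashMap (the names and types here are stand-ins, not the compiler's):

use std::collections::HashMap;

/// Deduplicate the forward candidate map and build the reverse map,
/// mirroring the shape of the code in the hunk above.
fn finalize_candidates(
    candidates: &mut HashMap<usize, Vec<usize>>,
) -> HashMap<usize, Vec<usize>> {
    // Deduplicate candidates: the visitor may have pushed the same
    // (src, dest) pair more than once.
    for cands in candidates.values_mut() {
        cands.sort();
        cands.dedup();
    }
    // Generate the reverse map: dest -> all srcs that may merge into it.
    let mut reverse: HashMap<usize, Vec<usize>> = HashMap::new();
    for (&src, cands) in candidates.iter() {
        for &dest in cands {
            reverse.entry(dest).or_default().push(src);
        }
    }
    reverse
}

fn main() {
    let mut fwd: HashMap<usize, Vec<usize>> = HashMap::new();
    fwd.entry(3).or_default().extend([7, 7, 9]); // duplicates are fine here
    let rev = finalize_candidates(&mut fwd);
    assert_eq!(fwd[&3], vec![7, 9]);
    assert_eq!(rev[&7], vec![3]);
    println!("forward: {fwd:?}\nreverse: {rev:?}");
}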
diff --git a/compiler/rustc_mir_transform/src/dump_mir.rs b/compiler/rustc_mir_transform/src/dump_mir.rs
index 6b995141a..594cbd897 100644
--- a/compiler/rustc_mir_transform/src/dump_mir.rs
+++ b/compiler/rustc_mir_transform/src/dump_mir.rs
@@ -1,6 +1,5 @@
//! This pass just dumps MIR at a specified point.
-use std::borrow::Cow;
use std::fs::File;
use std::io;
@@ -8,20 +7,20 @@ use crate::MirPass;
use rustc_middle::mir::write_mir_pretty;
use rustc_middle::mir::Body;
use rustc_middle::ty::TyCtxt;
-use rustc_session::config::{OutputFilenames, OutputType};
+use rustc_session::config::OutputType;
pub struct Marker(pub &'static str);
impl<'tcx> MirPass<'tcx> for Marker {
- fn name(&self) -> Cow<'_, str> {
- Cow::Borrowed(self.0)
+ fn name(&self) -> &str {
+ self.0
}
fn run_pass(&self, _tcx: TyCtxt<'tcx>, _body: &mut Body<'tcx>) {}
}
-pub fn emit_mir(tcx: TyCtxt<'_>, outputs: &OutputFilenames) -> io::Result<()> {
- let path = outputs.path(OutputType::Mir);
+pub fn emit_mir(tcx: TyCtxt<'_>) -> io::Result<()> {
+ let path = tcx.output_filenames(()).path(OutputType::Mir);
let mut f = io::BufWriter::new(File::create(&path)?);
write_mir_pretty(tcx, None, &mut f)?;
Ok(())
diff --git a/compiler/rustc_mir_transform/src/elaborate_box_derefs.rs b/compiler/rustc_mir_transform/src/elaborate_box_derefs.rs
index ef8d6bb65..932134bd6 100644
--- a/compiler/rustc_mir_transform/src/elaborate_box_derefs.rs
+++ b/compiler/rustc_mir_transform/src/elaborate_box_derefs.rs
@@ -25,7 +25,7 @@ pub fn build_ptr_tys<'tcx>(
(unique_ty, nonnull_ty, ptr_ty)
}
-// Constructs the projection needed to access a Box's pointer
+/// Constructs the projection needed to access a Box's pointer
pub fn build_projection<'tcx>(
unique_ty: Ty<'tcx>,
nonnull_ty: Ty<'tcx>,
diff --git a/compiler/rustc_mir_transform/src/function_item_references.rs b/compiler/rustc_mir_transform/src/function_item_references.rs
index 469566694..b708d780b 100644
--- a/compiler/rustc_mir_transform/src/function_item_references.rs
+++ b/compiler/rustc_mir_transform/src/function_item_references.rs
@@ -110,7 +110,7 @@ impl<'tcx> FunctionItemRefChecker<'_, 'tcx> {
/// If the given predicate is the trait `fmt::Pointer`, returns the bound parameter type.
fn is_pointer_trait(&self, bound: &PredicateKind<'tcx>) -> Option<Ty<'tcx>> {
- if let ty::PredicateKind::Trait(predicate) = bound {
+ if let ty::PredicateKind::Clause(ty::Clause::Trait(predicate)) = bound {
if self.tcx.is_diagnostic_item(sym::Pointer, predicate.def_id()) {
Some(predicate.trait_ref.self_ty())
} else {
diff --git a/compiler/rustc_mir_transform/src/generator.rs b/compiler/rustc_mir_transform/src/generator.rs
index c833de3a8..69f96fe48 100644
--- a/compiler/rustc_mir_transform/src/generator.rs
+++ b/compiler/rustc_mir_transform/src/generator.rs
@@ -11,10 +11,10 @@
//! generator in the MIR, since it is used to create the drop glue for the generator. We'd get
//! infinite recursion otherwise.
//!
-//! This pass creates the implementation for the Generator::resume function and the drop shim
-//! for the generator based on the MIR input. It converts the generator argument from Self to
-//! &mut Self adding derefs in the MIR as needed. It computes the final layout of the generator
-//! struct which looks like this:
+//! This pass creates the implementation for either the `Generator::resume` or `Future::poll`
+//! function and the drop shim for the generator based on the MIR input.
+//! It converts the generator argument from Self to &mut Self adding derefs in the MIR as needed.
+//! It computes the final layout of the generator struct which looks like this:
//! First upvars are stored
//! It is followed by the generator state field.
//! Then finally the MIR locals which are live across a suspension point are stored.
@@ -32,14 +32,15 @@
//! 2 - Generator has been poisoned
//!
//! It also rewrites `return x` and `yield y` as setting a new generator state and returning
-//! GeneratorState::Complete(x) and GeneratorState::Yielded(y) respectively.
+//! `GeneratorState::Complete(x)` and `GeneratorState::Yielded(y)`,
+//! or `Poll::Ready(x)` and `Poll::Pending` respectively.
//! MIR locals which are live across a suspension point are moved to the generator struct
//! with references to them being updated with references to the generator struct.
//!
//! The pass creates two functions which have a switch on the generator state giving
//! the action to take.
//!
-//! One of them is the implementation of Generator::resume.
+//! One of them is the implementation of `Generator::resume` / `Future::poll`.
//! For generators with state 0 (unresumed) it starts the execution of the generator.
//! For generators with state 1 (returned) and state 2 (poisoned) it panics.
//! Otherwise it continues the execution from the last suspension point.
@@ -56,6 +57,7 @@ use crate::MirPass;
use rustc_data_structures::fx::FxHashMap;
use rustc_hir as hir;
use rustc_hir::lang_items::LangItem;
+use rustc_hir::GeneratorKind;
use rustc_index::bit_set::{BitMatrix, BitSet, GrowableBitSet};
use rustc_index::vec::{Idx, IndexVec};
use rustc_middle::mir::dump_mir;
@@ -215,6 +217,7 @@ struct SuspensionPoint<'tcx> {
struct TransformVisitor<'tcx> {
tcx: TyCtxt<'tcx>,
+ is_async_kind: bool,
state_adt_ref: AdtDef<'tcx>,
state_substs: SubstsRef<'tcx>,
@@ -239,28 +242,57 @@ struct TransformVisitor<'tcx> {
}
impl<'tcx> TransformVisitor<'tcx> {
- // Make a GeneratorState variant assignment. `core::ops::GeneratorState` only has single
- // element tuple variants, so we can just write to the downcasted first field and then set the
+ // Make a `GeneratorState` or `Poll` variant assignment.
+ //
+ // `core::ops::GeneratorState` only has single element tuple variants,
+ // so we can just write to the downcasted first field and then set the
// discriminant to the appropriate variant.
fn make_state(
&self,
- idx: VariantIdx,
val: Operand<'tcx>,
source_info: SourceInfo,
- ) -> impl Iterator<Item = Statement<'tcx>> {
+ is_return: bool,
+ statements: &mut Vec<Statement<'tcx>>,
+ ) {
+ let idx = VariantIdx::new(match (is_return, self.is_async_kind) {
+ (true, false) => 1, // GeneratorState::Complete
+ (false, false) => 0, // GeneratorState::Yielded
+ (true, true) => 0, // Poll::Ready
+ (false, true) => 1, // Poll::Pending
+ });
+
let kind = AggregateKind::Adt(self.state_adt_ref.did(), idx, self.state_substs, None, None);
+
+ // `Poll::Pending`
+ if self.is_async_kind && idx == VariantIdx::new(1) {
+ assert_eq!(self.state_adt_ref.variant(idx).fields.len(), 0);
+
+ // FIXME(swatinem): assert that `val` is indeed unit?
+ statements.extend(expand_aggregate(
+ Place::return_place(),
+ std::iter::empty(),
+ kind,
+ source_info,
+ self.tcx,
+ ));
+ return;
+ }
+
+ // else: `Poll::Ready(x)`, `GeneratorState::Yielded(x)` or `GeneratorState::Complete(x)`
assert_eq!(self.state_adt_ref.variant(idx).fields.len(), 1);
+
let ty = self
.tcx
.bound_type_of(self.state_adt_ref.variant(idx).fields[0].did)
.subst(self.tcx, self.state_substs);
- expand_aggregate(
+
+ statements.extend(expand_aggregate(
Place::return_place(),
std::iter::once((val, ty)),
kind,
source_info,
self.tcx,
- )
+ ));
}
// Create a Place referencing a generator struct field
@@ -331,22 +363,19 @@ impl<'tcx> MutVisitor<'tcx> for TransformVisitor<'tcx> {
});
let ret_val = match data.terminator().kind {
- TerminatorKind::Return => Some((
- VariantIdx::new(1),
- None,
- Operand::Move(Place::from(self.new_ret_local)),
- None,
- )),
+ TerminatorKind::Return => {
+ Some((true, None, Operand::Move(Place::from(self.new_ret_local)), None))
+ }
TerminatorKind::Yield { ref value, resume, resume_arg, drop } => {
- Some((VariantIdx::new(0), Some((resume, resume_arg)), value.clone(), drop))
+ Some((false, Some((resume, resume_arg)), value.clone(), drop))
}
_ => None,
};
- if let Some((state_idx, resume, v, drop)) = ret_val {
+ if let Some((is_return, resume, v, drop)) = ret_val {
let source_info = data.terminator().source_info;
// We must assign the value first in case it gets declared dead below
- data.statements.extend(self.make_state(state_idx, v, source_info));
+ self.make_state(v, source_info, is_return, &mut data.statements);
let state = if let Some((resume, mut resume_arg)) = resume {
// Yield
let state = RESERVED_VARIANTS + self.suspension_points.len();
@@ -956,22 +985,12 @@ fn create_generator_drop_shim<'tcx>(
tcx.mk_ptr(ty::TypeAndMut { ty: gen_ty, mutbl: hir::Mutability::Mut }),
source_info,
);
- if tcx.sess.opts.unstable_opts.mir_emit_retag {
- // Alias tracking must know we changed the type
- body.basic_blocks_mut()[START_BLOCK].statements.insert(
- 0,
- Statement {
- source_info,
- kind: StatementKind::Retag(RetagKind::Raw, Box::new(Place::from(SELF_ARG))),
- },
- )
- }
// Make sure we remove dead blocks to remove
// unrelated code from the resume part of the function
simplify::remove_dead_blocks(tcx, &mut body);
- dump_mir(tcx, None, "generator_drop", &0, &body, |_, _| Ok(()));
+ dump_mir(tcx, false, "generator_drop", &0, &body, |_, _| Ok(()));
body
}
@@ -1015,7 +1034,7 @@ fn insert_panic_block<'tcx>(
fn can_return<'tcx>(tcx: TyCtxt<'tcx>, body: &Body<'tcx>, param_env: ty::ParamEnv<'tcx>) -> bool {
// Returning from a function with an uninhabited return type is undefined behavior.
- if tcx.conservative_is_privately_uninhabited(param_env.and(body.return_ty())) {
+ if body.return_ty().is_privately_uninhabited(tcx, param_env) {
return false;
}
@@ -1142,7 +1161,7 @@ fn create_generator_resume_function<'tcx>(
// unrelated code from the drop part of the function
simplify::remove_dead_blocks(tcx, body);
- dump_mir(tcx, None, "generator_resume", &0, body, |_, _| Ok(()));
+ dump_mir(tcx, false, "generator_resume", &0, body, |_, _| Ok(()));
}
fn insert_clean_drop(body: &mut Body<'_>) -> BasicBlock {
@@ -1268,10 +1287,20 @@ impl<'tcx> MirPass<'tcx> for StateTransform {
}
};
- // Compute GeneratorState<yield_ty, return_ty>
- let state_did = tcx.require_lang_item(LangItem::GeneratorState, None);
- let state_adt_ref = tcx.adt_def(state_did);
- let state_substs = tcx.intern_substs(&[yield_ty.into(), body.return_ty().into()]);
+ let is_async_kind = body.generator_kind().unwrap() != GeneratorKind::Gen;
+ let (state_adt_ref, state_substs) = if is_async_kind {
+ // Compute Poll<return_ty>
+ let state_did = tcx.require_lang_item(LangItem::Poll, None);
+ let state_adt_ref = tcx.adt_def(state_did);
+ let state_substs = tcx.intern_substs(&[body.return_ty().into()]);
+ (state_adt_ref, state_substs)
+ } else {
+ // Compute GeneratorState<yield_ty, return_ty>
+ let state_did = tcx.require_lang_item(LangItem::GeneratorState, None);
+ let state_adt_ref = tcx.adt_def(state_did);
+ let state_substs = tcx.intern_substs(&[yield_ty.into(), body.return_ty().into()]);
+ (state_adt_ref, state_substs)
+ };
let ret_ty = tcx.mk_adt(state_adt_ref, state_substs);
// We rename RETURN_PLACE which has type mir.return_ty to new_ret_local
@@ -1327,9 +1356,11 @@ impl<'tcx> MirPass<'tcx> for StateTransform {
// Run the transformation which converts Places from Local to generator struct
// accesses for locals in `remap`.
// It also rewrites `return x` and `yield y` as writing a new generator state and returning
- // GeneratorState::Complete(x) and GeneratorState::Yielded(y) respectively.
+ // either GeneratorState::Complete(x) and GeneratorState::Yielded(y),
+    // or Poll::Ready(x) and Poll::Pending respectively, depending on `is_async_kind`.
let mut transform = TransformVisitor {
tcx,
+ is_async_kind,
state_adt_ref,
state_substs,
remap,
@@ -1353,21 +1384,21 @@ impl<'tcx> MirPass<'tcx> for StateTransform {
// This is expanded to a drop ladder in `elaborate_generator_drops`.
let drop_clean = insert_clean_drop(body);
- dump_mir(tcx, None, "generator_pre-elab", &0, body, |_, _| Ok(()));
+ dump_mir(tcx, false, "generator_pre-elab", &0, body, |_, _| Ok(()));
// Expand `drop(generator_struct)` to a drop ladder which destroys upvars.
// If any upvars are moved out of, drop elaboration will handle upvar destruction.
// However we need to also elaborate the code generated by `insert_clean_drop`.
elaborate_generator_drops(tcx, body);
- dump_mir(tcx, None, "generator_post-transform", &0, body, |_, _| Ok(()));
+ dump_mir(tcx, false, "generator_post-transform", &0, body, |_, _| Ok(()));
// Create a copy of our MIR and use it to create the drop shim for the generator
let drop_shim = create_generator_drop_shim(tcx, &transform, gen_ty, body, drop_clean);
body.generator.as_mut().unwrap().generator_drop = Some(drop_shim);
- // Create the Generator::resume function
+ // Create the Generator::resume / Future::poll function
create_generator_resume_function(tcx, transform, body, can_return);
// Run derefer to fix Derefs that are not in the first place
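
In the make_state hunk above, the variant to write is now derived from (is_return, is_async_kind) rather than passed in as a VariantIdx. A small sketch of just that mapping, with a local VariantIdx newtype standing in for rustc's index type:

/// Stand-in for rustc's `VariantIdx`; only here to make the sketch compile.
#[derive(Debug, PartialEq)]
struct VariantIdx(u32);

/// Pick the variant to write back, matching the table in the hunk above:
/// generators use `GeneratorState`, async bodies use `Poll`, and the two
/// enums order their variants differently.
fn state_variant(is_return: bool, is_async_kind: bool) -> VariantIdx {
    VariantIdx(match (is_return, is_async_kind) {
        (true, false) => 1,  // GeneratorState::Complete
        (false, false) => 0, // GeneratorState::Yielded
        (true, true) => 0,   // Poll::Ready
        (false, true) => 1,  // Poll::Pending
    })
}

fn main() {
    // `return x` in an async body becomes `Poll::Ready(x)` (variant 0).
    assert_eq!(state_variant(true, true), VariantIdx(0));
    // `yield y` in a plain generator becomes `GeneratorState::Yielded(y)` (variant 0).
    assert_eq!(state_variant(false, false), VariantIdx(0));
}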
diff --git a/compiler/rustc_mir_transform/src/inline.rs b/compiler/rustc_mir_transform/src/inline.rs
index 780b91d92..220cf7df9 100644
--- a/compiler/rustc_mir_transform/src/inline.rs
+++ b/compiler/rustc_mir_transform/src/inline.rs
@@ -1,7 +1,6 @@
//! Inlining pass for MIR functions
use crate::deref_separator::deref_finder;
use rustc_attr::InlineAttr;
-use rustc_const_eval::transform::validate::equal_up_to_regions;
use rustc_index::bit_set::BitSet;
use rustc_index::vec::Idx;
use rustc_middle::middle::codegen_fn_attrs::{CodegenFnAttrFlags, CodegenFnAttrs};
@@ -9,12 +8,12 @@ use rustc_middle::mir::visit::*;
use rustc_middle::mir::*;
use rustc_middle::ty::{self, Instance, InstanceDef, ParamEnv, Ty, TyCtxt};
use rustc_session::config::OptLevel;
-use rustc_span::def_id::DefId;
use rustc_span::{hygiene::ExpnKind, ExpnData, LocalExpnId, Span};
use rustc_target::abi::VariantIdx;
use rustc_target::spec::abi::Abi;
-use super::simplify::{remove_dead_blocks, CfgSimplifier};
+use crate::simplify::{remove_dead_blocks, CfgSimplifier};
+use crate::util;
use crate::MirPass;
use std::iter;
use std::ops::{Range, RangeFrom};
@@ -87,13 +86,8 @@ fn inline<'tcx>(tcx: TyCtxt<'tcx>, body: &mut Body<'tcx>) -> bool {
let param_env = tcx.param_env_reveal_all_normalized(def_id);
- let mut this = Inliner {
- tcx,
- param_env,
- codegen_fn_attrs: tcx.codegen_fn_attrs(def_id),
- history: Vec::new(),
- changed: false,
- };
+ let mut this =
+ Inliner { tcx, param_env, codegen_fn_attrs: tcx.codegen_fn_attrs(def_id), changed: false };
let blocks = BasicBlock::new(0)..body.basic_blocks.next_index();
this.process_blocks(body, blocks);
this.changed
@@ -104,12 +98,6 @@ struct Inliner<'tcx> {
param_env: ParamEnv<'tcx>,
/// Caller codegen attributes.
codegen_fn_attrs: &'tcx CodegenFnAttrs,
- /// Stack of inlined instances.
- /// We only check the `DefId` and not the substs because we want to
- /// avoid inlining cases of polymorphic recursion.
- /// The number of `DefId`s is finite, so checking history is enough
- /// to ensure that we do not loop endlessly while inlining.
- history: Vec<DefId>,
/// Indicates that the caller body has been modified.
changed: bool,
}
@@ -134,12 +122,12 @@ impl<'tcx> Inliner<'tcx> {
debug!("not-inlined {} [{}]", callsite.callee, reason);
continue;
}
- Ok(new_blocks) => {
+ Ok(_) => {
debug!("inlined {}", callsite.callee);
self.changed = true;
- self.history.push(callsite.callee.def_id());
- self.process_blocks(caller_body, new_blocks);
- self.history.pop();
+ // We could process the blocks returned by `try_inlining` here. However, that
+ // leads to exponential compile times due to the top-down nature of this kind
+ // of inlining.
}
}
}
@@ -180,7 +168,7 @@ impl<'tcx> Inliner<'tcx> {
let TerminatorKind::Call { args, destination, .. } = &terminator.kind else { bug!() };
let destination_ty = destination.ty(&caller_body.local_decls, self.tcx).ty;
let output_type = callee_body.return_ty();
- if !equal_up_to_regions(self.tcx, self.param_env, output_type, destination_ty) {
+ if !util::is_subtype(self.tcx, self.param_env, output_type, destination_ty) {
trace!(?output_type, ?destination_ty);
return Err("failed to normalize return type");
}
@@ -200,7 +188,7 @@ impl<'tcx> Inliner<'tcx> {
arg_tuple_tys.iter().zip(callee_body.args_iter().skip(skipped_args))
{
let input_type = callee_body.local_decls[input].ty;
- if !equal_up_to_regions(self.tcx, self.param_env, arg_ty, input_type) {
+ if !util::is_subtype(self.tcx, self.param_env, input_type, arg_ty) {
trace!(?arg_ty, ?input_type);
return Err("failed to normalize tuple argument type");
}
@@ -209,7 +197,7 @@ impl<'tcx> Inliner<'tcx> {
for (arg, input) in args.iter().zip(callee_body.args_iter()) {
let input_type = callee_body.local_decls[input].ty;
let arg_ty = arg.ty(&caller_body.local_decls, self.tcx);
- if !equal_up_to_regions(self.tcx, self.param_env, arg_ty, input_type) {
+ if !util::is_subtype(self.tcx, self.param_env, input_type, arg_ty) {
trace!(?arg_ty, ?input_type);
return Err("failed to normalize argument type");
}
@@ -313,10 +301,6 @@ impl<'tcx> Inliner<'tcx> {
return None;
}
- if self.history.contains(&callee.def_id()) {
- return None;
- }
-
let fn_sig = self.tcx.bound_fn_sig(def_id).subst(self.tcx, substs);
return Some(CallSite {
@@ -363,10 +347,6 @@ impl<'tcx> Inliner<'tcx> {
return Err("C variadic");
}
- if callee_attrs.flags.contains(CodegenFnAttrFlags::NAKED) {
- return Err("naked");
- }
-
if callee_attrs.flags.contains(CodegenFnAttrFlags::COLD) {
return Err("cold");
}
@@ -375,7 +355,12 @@ impl<'tcx> Inliner<'tcx> {
return Err("incompatible sanitizer set");
}
- if callee_attrs.instruction_set != self.codegen_fn_attrs.instruction_set {
+ // Two functions are compatible if the callee has no attribute (meaning
+ // that it's codegen agnostic), or sets an attribute that is identical
+ // to this function's attribute.
+ if callee_attrs.instruction_set.is_some()
+ && callee_attrs.instruction_set != self.codegen_fn_attrs.instruction_set
+ {
return Err("incompatible instruction set");
}
@@ -453,6 +438,15 @@ impl<'tcx> Inliner<'tcx> {
if ty.needs_drop(tcx, self.param_env) && let Some(unwind) = unwind {
work_list.push(unwind);
}
+ } else if callee_attrs.instruction_set != self.codegen_fn_attrs.instruction_set
+ && matches!(term.kind, TerminatorKind::InlineAsm { .. })
+ {
+ // During the attribute checking stage we allow a callee with no
+ // instruction_set assigned to count as compatible with a function that does
+ // assign one. However, during this stage we require an exact match when any
+ // inline-asm is detected. LLVM will still possibly do an inline later on
+ // if the no-attribute function ends up with the same instruction set anyway.
+ return Err("Cannot move inline-asm across instruction sets");
} else {
work_list.extend(term.successors())
}
@@ -847,7 +841,7 @@ impl<'tcx> Visitor<'tcx> for CostChecker<'_, 'tcx> {
let parent = Place { local, projection: self.tcx.intern_place_elems(proj_base) };
let parent_ty = parent.ty(&self.callee_body.local_decls, self.tcx);
let check_equal = |this: &mut Self, f_ty| {
- if !equal_up_to_regions(this.tcx, this.param_env, ty, f_ty) {
+ if !util::is_equal_up_to_subtyping(this.tcx, this.param_env, ty, f_ty) {
trace!(?ty, ?f_ty);
this.validation = Err("failed to normalize projection type");
return;
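
The inline.rs hunks above relax the instruction_set check: a callee without the attribute counts as compatible, but inline assembly still requires an exact match. A hedged sketch of that decision with a toy InstructionSet enum in place of the real codegen attribute:

#[derive(Clone, Copy, PartialEq, Debug)]
enum InstructionSet {
    A32,
    T32,
}

/// Returns an error string when inlining must be refused, mirroring the two
/// checks added in the hunks above (attribute compatibility first, then the
/// stricter rule once inline-asm is involved).
fn check_instruction_set(
    caller: Option<InstructionSet>,
    callee: Option<InstructionSet>,
    callee_has_inline_asm: bool,
) -> Result<(), &'static str> {
    // A callee with no attribute is codegen-agnostic and may be inlined anywhere.
    if callee.is_some() && callee != caller {
        return Err("incompatible instruction set");
    }
    // Inline-asm pins the body to a concrete instruction set, so require an
    // exact match even when the callee left the attribute unset.
    if callee_has_inline_asm && callee != caller {
        return Err("Cannot move inline-asm across instruction sets");
    }
    Ok(())
}

fn main() {
    assert!(check_instruction_set(Some(InstructionSet::A32), None, false).is_ok());
    assert!(check_instruction_set(Some(InstructionSet::A32), None, true).is_err());
    assert!(check_instruction_set(None, Some(InstructionSet::T32), false).is_err());
}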
diff --git a/compiler/rustc_mir_transform/src/lib.rs b/compiler/rustc_mir_transform/src/lib.rs
index 5be223254..93200b288 100644
--- a/compiler/rustc_mir_transform/src/lib.rs
+++ b/compiler/rustc_mir_transform/src/lib.rs
@@ -1,5 +1,6 @@
#![allow(rustc::potential_query_instability)]
#![feature(box_patterns)]
+#![feature(drain_filter)]
#![feature(let_chains)]
#![feature(map_try_insert)]
#![feature(min_specialization)]
@@ -54,6 +55,7 @@ mod const_goto;
mod const_prop;
mod const_prop_lint;
mod coverage;
+mod dataflow_const_prop;
mod dead_store_elimination;
mod deaggregator;
mod deduce_param_attrs;
@@ -91,6 +93,7 @@ pub mod simplify;
mod simplify_branches;
mod simplify_comparison_integral;
mod simplify_try;
+mod sroa;
mod uninhabited_enum_branching;
mod unreachable_prop;
@@ -217,19 +220,18 @@ fn mir_keys(tcx: TyCtxt<'_>, (): ()) -> FxIndexSet<LocalDefId> {
// Additionally, tuple struct/variant constructors have MIR, but
// they don't have a BodyId, so we need to build them separately.
- struct GatherCtors<'a, 'tcx> {
- tcx: TyCtxt<'tcx>,
+ struct GatherCtors<'a> {
set: &'a mut FxIndexSet<LocalDefId>,
}
- impl<'tcx> Visitor<'tcx> for GatherCtors<'_, 'tcx> {
+ impl<'tcx> Visitor<'tcx> for GatherCtors<'_> {
fn visit_variant_data(&mut self, v: &'tcx hir::VariantData<'tcx>) {
- if let hir::VariantData::Tuple(_, hir_id) = *v {
- self.set.insert(self.tcx.hir().local_def_id(hir_id));
+ if let hir::VariantData::Tuple(_, _, def_id) = *v {
+ self.set.insert(def_id);
}
intravisit::walk_struct_def(self, v)
}
}
- tcx.hir().visit_all_item_likes_in_crate(&mut GatherCtors { tcx, set: &mut set });
+ tcx.hir().visit_all_item_likes_in_crate(&mut GatherCtors { set: &mut set });
set
}
@@ -288,7 +290,7 @@ fn mir_const<'tcx>(
let mut body = tcx.mir_built(def).steal();
- rustc_middle::mir::dump_mir(tcx, None, "mir_map", &0, &body, |_, _| Ok(()));
+ pass_manager::dump_mir_for_phase_change(tcx, &body);
pm::run_passes(
tcx,
@@ -561,6 +563,7 @@ fn run_optimization_passes<'tcx>(tcx: TyCtxt<'tcx>, body: &mut Body<'tcx>) {
&remove_zsts::RemoveZsts,
&const_goto::ConstGoto,
&remove_unneeded_drops::RemoveUnneededDrops,
+ &sroa::ScalarReplacementOfAggregates,
&match_branches::MatchBranchSimplification,
// inst combine is after MatchBranchSimplification to clean up Ne(_1, false)
&multiple_return_terminators::MultipleReturnTerminators,
@@ -569,6 +572,7 @@ fn run_optimization_passes<'tcx>(tcx: TyCtxt<'tcx>, body: &mut Body<'tcx>) {
//
// FIXME(#70073): This pass is responsible for both optimization as well as some lints.
&const_prop::ConstProp,
+ &dataflow_const_prop::DataflowConstProp,
//
// Const-prop runs unconditionally, but doesn't mutate the MIR at mir-opt-level=0.
&const_debuginfo::ConstDebugInfo,
diff --git a/compiler/rustc_mir_transform/src/pass_manager.rs b/compiler/rustc_mir_transform/src/pass_manager.rs
index 230c6a7cb..e1b65823a 100644
--- a/compiler/rustc_mir_transform/src/pass_manager.rs
+++ b/compiler/rustc_mir_transform/src/pass_manager.rs
@@ -1,5 +1,3 @@
-use std::borrow::Cow;
-
use rustc_middle::mir::{self, Body, MirPhase, RuntimePhase};
use rustc_middle::ty::TyCtxt;
use rustc_session::Session;
@@ -8,13 +6,9 @@ use crate::{validate, MirPass};
/// Just like `MirPass`, except it cannot mutate `Body`.
pub trait MirLint<'tcx> {
- fn name(&self) -> Cow<'_, str> {
+ fn name(&self) -> &str {
let name = std::any::type_name::<Self>();
- if let Some(tail) = name.rfind(':') {
- Cow::from(&name[tail + 1..])
- } else {
- Cow::from(name)
- }
+ if let Some((_, tail)) = name.rsplit_once(':') { tail } else { name }
}
fn is_enabled(&self, _sess: &Session) -> bool {
@@ -32,7 +26,7 @@ impl<'tcx, T> MirPass<'tcx> for Lint<T>
where
T: MirLint<'tcx>,
{
- fn name(&self) -> Cow<'_, str> {
+ fn name(&self) -> &str {
self.0.name()
}
@@ -55,7 +49,7 @@ impl<'tcx, T> MirPass<'tcx> for WithMinOptLevel<T>
where
T: MirPass<'tcx>,
{
- fn name(&self) -> Cow<'_, str> {
+ fn name(&self) -> &str {
self.1.name()
}
@@ -96,45 +90,48 @@ fn run_passes_inner<'tcx>(
phase_change: Option<MirPhase>,
validate_each: bool,
) {
- let validate = validate_each & tcx.sess.opts.unstable_opts.validate_mir;
+ let validate = validate_each & tcx.sess.opts.unstable_opts.validate_mir & !body.should_skip();
let overridden_passes = &tcx.sess.opts.unstable_opts.mir_enable_passes;
trace!(?overridden_passes);
- for pass in passes {
- let name = pass.name();
-
- let overridden =
- overridden_passes.iter().rev().find(|(s, _)| s == &*name).map(|(_name, polarity)| {
- trace!(
- pass = %name,
- "{} as requested by flag",
- if *polarity { "Running" } else { "Not running" },
- );
- *polarity
- });
- if !overridden.unwrap_or_else(|| pass.is_enabled(&tcx.sess)) {
- continue;
- }
-
- let dump_enabled = pass.is_mir_dump_enabled();
-
- if dump_enabled {
- dump_mir_for_pass(tcx, body, &name, false);
- }
- if validate {
- validate_body(tcx, body, format!("before pass {}", name));
- }
-
- pass.run_pass(tcx, body);
-
- if dump_enabled {
- dump_mir_for_pass(tcx, body, &name, true);
+ if !body.should_skip() {
+ for pass in passes {
+ let name = pass.name();
+
+ let overridden = overridden_passes.iter().rev().find(|(s, _)| s == &*name).map(
+ |(_name, polarity)| {
+ trace!(
+ pass = %name,
+ "{} as requested by flag",
+ if *polarity { "Running" } else { "Not running" },
+ );
+ *polarity
+ },
+ );
+ if !overridden.unwrap_or_else(|| pass.is_enabled(&tcx.sess)) {
+ continue;
+ }
+
+ let dump_enabled = pass.is_mir_dump_enabled();
+
+ if dump_enabled {
+ dump_mir_for_pass(tcx, body, &name, false);
+ }
+ if validate {
+ validate_body(tcx, body, format!("before pass {}", name));
+ }
+
+ pass.run_pass(tcx, body);
+
+ if dump_enabled {
+ dump_mir_for_pass(tcx, body, &name, true);
+ }
+ if validate {
+ validate_body(tcx, body, format!("after pass {}", name));
+ }
+
+ body.pass_count += 1;
}
- if validate {
- validate_body(tcx, body, format!("after pass {}", name));
- }
-
- body.pass_count += 1;
}
if let Some(new_phase) = phase_change {
@@ -143,10 +140,11 @@ fn run_passes_inner<'tcx>(
}
body.phase = new_phase;
+ body.pass_count = 0;
dump_mir_for_phase_change(tcx, body);
if validate || new_phase == MirPhase::Runtime(RuntimePhase::Optimized) {
- validate_body(tcx, body, format!("after phase change to {}", new_phase));
+ validate_body(tcx, body, format!("after phase change to {}", new_phase.name()));
}
body.pass_count = 1;
@@ -163,11 +161,9 @@ pub fn dump_mir_for_pass<'tcx>(
pass_name: &str,
is_after: bool,
) {
- let phase_index = body.phase.phase_index();
-
mir::dump_mir(
tcx,
- Some(&format_args!("{:03}-{:03}", phase_index, body.pass_count)),
+ true,
pass_name,
if is_after { &"after" } else { &"before" },
body,
@@ -176,14 +172,6 @@ pub fn dump_mir_for_pass<'tcx>(
}
pub fn dump_mir_for_phase_change<'tcx>(tcx: TyCtxt<'tcx>, body: &Body<'tcx>) {
- let phase_index = body.phase.phase_index();
-
- mir::dump_mir(
- tcx,
- Some(&format_args!("{:03}-000", phase_index)),
- &format!("{}", body.phase),
- &"after",
- body,
- |_, _| Ok(()),
- )
+ assert_eq!(body.pass_count, 0);
+ mir::dump_mir(tcx, true, body.phase.name(), &"after", body, |_, _| Ok(()))
}
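
The new default name() above trims the module path off std::any::type_name with rsplit_once(':') instead of building a Cow. The same trick in isolation, assuming only the standard library:

/// Strip the module path from a type name, keeping only the final segment,
/// as the new default `name` methods above do.
fn short_type_name<T: ?Sized>() -> &'static str {
    let name = std::any::type_name::<T>();
    if let Some((_, tail)) = name.rsplit_once(':') { tail } else { name }
}

struct RemoveZsts;

fn main() {
    // e.g. "my_crate::RemoveZsts" -> "RemoveZsts"
    println!("{}", short_type_name::<RemoveZsts>());
    assert!(short_type_name::<RemoveZsts>().ends_with("RemoveZsts"));
}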
diff --git a/compiler/rustc_mir_transform/src/remove_zsts.rs b/compiler/rustc_mir_transform/src/remove_zsts.rs
index 40be4f146..569e783fe 100644
--- a/compiler/rustc_mir_transform/src/remove_zsts.rs
+++ b/compiler/rustc_mir_transform/src/remove_zsts.rs
@@ -1,8 +1,7 @@
//! Removes assignments to ZST places.
use crate::MirPass;
-use rustc_middle::mir::tcx::PlaceTy;
-use rustc_middle::mir::{Body, LocalDecls, Place, StatementKind};
+use rustc_middle::mir::{Body, StatementKind};
use rustc_middle::ty::{self, Ty, TyCtxt};
pub struct RemoveZsts;
@@ -35,9 +34,6 @@ impl<'tcx> MirPass<'tcx> for RemoveZsts {
if !layout.is_zst() {
continue;
}
- if involves_a_union(place, local_decls, tcx) {
- continue;
- }
if tcx.consider_optimizing(|| {
format!(
"RemoveZsts - Place: {:?} SourceInfo: {:?}",
@@ -63,24 +59,3 @@ fn maybe_zst(ty: Ty<'_>) -> bool {
_ => false,
}
}
-
-/// Miri lazily allocates memory for locals on assignment,
-/// so we must preserve writes to unions and union fields,
-/// or it will ICE on reads of those fields.
-fn involves_a_union<'tcx>(
- place: Place<'tcx>,
- local_decls: &LocalDecls<'tcx>,
- tcx: TyCtxt<'tcx>,
-) -> bool {
- let mut place_ty = PlaceTy::from_ty(local_decls[place.local].ty);
- if place_ty.ty.is_union() {
- return true;
- }
- for elem in place.projection {
- place_ty = place_ty.projection_ty(tcx, elem);
- if place_ty.ty.is_union() {
- return true;
- }
- }
- return false;
-}
diff --git a/compiler/rustc_mir_transform/src/required_consts.rs b/compiler/rustc_mir_transform/src/required_consts.rs
index cc75947d9..0ea8f2ba9 100644
--- a/compiler/rustc_mir_transform/src/required_consts.rs
+++ b/compiler/rustc_mir_transform/src/required_consts.rs
@@ -17,7 +17,7 @@ impl<'tcx> Visitor<'tcx> for RequiredConstsVisitor<'_, 'tcx> {
let literal = constant.literal;
match literal {
ConstantKind::Ty(c) => match c.kind() {
- ConstKind::Param(_) => {}
+ ConstKind::Param(_) | ConstKind::Error(_) => {}
_ => bug!("only ConstKind::Param should be encountered here, got {:#?}", c),
},
ConstantKind::Unevaluated(..) => self.required_consts.push(*constant),
diff --git a/compiler/rustc_mir_transform/src/shim.rs b/compiler/rustc_mir_transform/src/shim.rs
index 4e8798b7a..16b7dcad1 100644
--- a/compiler/rustc_mir_transform/src/shim.rs
+++ b/compiler/rustc_mir_transform/src/shim.rs
@@ -37,7 +37,7 @@ fn make_shim<'tcx>(tcx: TyCtxt<'tcx>, instance: ty::InstanceDef<'tcx>) -> Body<'
}
ty::InstanceDef::FnPtrShim(def_id, ty) => {
let trait_ = tcx.trait_of_item(def_id).unwrap();
- let adjustment = match tcx.fn_trait_kind_from_lang_item(trait_) {
+ let adjustment = match tcx.fn_trait_kind_from_def_id(trait_) {
Some(ty::ClosureKind::FnOnce) => Adjustment::Identity,
Some(ty::ClosureKind::FnMut | ty::ClosureKind::Fn) => Adjustment::Deref,
None => bug!("fn pointer {:?} is not an fn", ty),
@@ -177,16 +177,6 @@ fn build_drop_shim<'tcx>(tcx: TyCtxt<'tcx>, def_id: DefId, ty: Option<Ty<'tcx>>)
if ty.is_some() {
// The first argument (index 0), but add 1 for the return value.
let dropee_ptr = Place::from(Local::new(1 + 0));
- if tcx.sess.opts.unstable_opts.mir_emit_retag {
- // Function arguments should be retagged, and we make this one raw.
- body.basic_blocks_mut()[START_BLOCK].statements.insert(
- 0,
- Statement {
- source_info,
- kind: StatementKind::Retag(RetagKind::Raw, Box::new(dropee_ptr)),
- },
- );
- }
let patch = {
let param_env = tcx.param_env_reveal_all_normalized(def_id);
let mut elaborator =
@@ -346,7 +336,7 @@ impl<'tcx> CloneShimBuilder<'tcx> {
// we must subst the self_ty because it's
// otherwise going to be TySelf and we can't index
// or access fields of a Place of type TySelf.
- let substs = tcx.mk_substs_trait(self_ty, &[]);
+ let substs = tcx.mk_substs_trait(self_ty, []);
let sig = tcx.bound_fn_sig(def_id).subst(tcx, substs);
let sig = tcx.erase_late_bound_regions(sig);
let span = tcx.def_span(def_id);
@@ -427,7 +417,7 @@ impl<'tcx> CloneShimBuilder<'tcx> {
) {
let tcx = self.tcx;
- let substs = tcx.mk_substs_trait(ty, &[]);
+ let substs = tcx.mk_substs_trait(ty, []);
// `func == Clone::clone(&ty) -> ty`
let func_ty = tcx.mk_fn_def(self.def_id, substs);
@@ -569,17 +559,13 @@ impl<'tcx> CloneShimBuilder<'tcx> {
/// Builds a "call" shim for `instance`. The shim calls the function specified by `call_kind`,
/// first adjusting its first argument according to `rcvr_adjustment`.
+#[instrument(level = "debug", skip(tcx), ret)]
fn build_call_shim<'tcx>(
tcx: TyCtxt<'tcx>,
instance: ty::InstanceDef<'tcx>,
rcvr_adjustment: Option<Adjustment>,
call_kind: CallKind<'tcx>,
) -> Body<'tcx> {
- debug!(
- "build_call_shim(instance={:?}, rcvr_adjustment={:?}, call_kind={:?})",
- instance, rcvr_adjustment, call_kind
- );
-
// `FnPtrShim` contains the fn pointer type that a call shim is being built for - this is used
// to substitute into the signature of the shim. It is not necessary for users of this
// MIR body to perform further substitutions (see `InstanceDef::has_polymorphic_mir_body`).
@@ -590,7 +576,7 @@ fn build_call_shim<'tcx>(
// Create substitutions for the `Self` and `Args` generic parameters of the shim body.
let arg_tup = tcx.mk_tup(untuple_args.iter());
- let sig_substs = tcx.mk_substs_trait(ty, &[ty::subst::GenericArg::from(arg_tup)]);
+ let sig_substs = tcx.mk_substs_trait(ty, [ty::subst::GenericArg::from(arg_tup)]);
(Some(sig_substs), Some(untuple_args))
} else {
@@ -641,7 +627,7 @@ fn build_call_shim<'tcx>(
let span = tcx.def_span(def_id);
- debug!("build_call_shim: sig={:?}", sig);
+ debug!(?sig);
let mut local_decls = local_decls_for_sig(&sig, span);
let source_info = SourceInfo::outermost(span);
@@ -845,7 +831,7 @@ pub fn build_adt_ctor(tcx: TyCtxt<'_>, ctor_id: DefId) -> Body<'_> {
span,
);
- rustc_middle::mir::dump_mir(tcx, None, "mir_map", &0, &body, |_, _| Ok(()));
+ crate::pass_manager::dump_mir_for_phase_change(tcx, &body);
body
}
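
The build_call_shim hunk above replaces a hand-written debug! call with #[instrument(level = "debug", skip(tcx), ret)]. A minimal sketch of that pattern using the tracing and tracing-subscriber crates; the function and its arguments here are invented for illustration:

use tracing::{debug, instrument};

/// `instrument` records the arguments (except the skipped one) when the span
/// is entered and, with `ret`, the return value on exit, replacing the manual
/// `debug!("build_call_shim(...)")` line removed in the hunk above.
#[instrument(level = "debug", skip(big_context), ret)]
fn build_thing(big_context: &[u8], kind: &'static str, adjust: Option<u32>) -> usize {
    let sig = kind.len() + adjust.unwrap_or(0) as usize;
    debug!(?sig); // the field shorthand also used later in the same hunk
    sig
}

fn main() {
    tracing_subscriber::fmt().with_max_level(tracing::Level::DEBUG).init();
    let _ = build_thing(&[0; 1024], "fn-ptr-shim", Some(2));
}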
diff --git a/compiler/rustc_mir_transform/src/simplify.rs b/compiler/rustc_mir_transform/src/simplify.rs
index 57d372fda..475e2ec9a 100644
--- a/compiler/rustc_mir_transform/src/simplify.rs
+++ b/compiler/rustc_mir_transform/src/simplify.rs
@@ -35,7 +35,6 @@ use rustc_middle::mir::visit::{MutVisitor, MutatingUseContext, PlaceContext, Vis
use rustc_middle::mir::*;
use rustc_middle::ty::TyCtxt;
use smallvec::SmallVec;
-use std::borrow::Cow;
use std::convert::TryInto;
pub struct SimplifyCfg {
@@ -57,8 +56,8 @@ pub fn simplify_cfg<'tcx>(tcx: TyCtxt<'tcx>, body: &mut Body<'tcx>) {
}
impl<'tcx> MirPass<'tcx> for SimplifyCfg {
- fn name(&self) -> Cow<'_, str> {
- Cow::Borrowed(&self.label)
+ fn name(&self) -> &str {
+ &self.label
}
fn run_pass(&self, tcx: TyCtxt<'tcx>, body: &mut Body<'tcx>) {
diff --git a/compiler/rustc_mir_transform/src/simplify_branches.rs b/compiler/rustc_mir_transform/src/simplify_branches.rs
index 3bbae5b89..405ebce4d 100644
--- a/compiler/rustc_mir_transform/src/simplify_branches.rs
+++ b/compiler/rustc_mir_transform/src/simplify_branches.rs
@@ -2,8 +2,6 @@ use crate::MirPass;
use rustc_middle::mir::*;
use rustc_middle::ty::TyCtxt;
-use std::borrow::Cow;
-
/// A pass that replaces a branch with a goto when its condition is known.
pub struct SimplifyConstCondition {
label: String,
@@ -16,8 +14,8 @@ impl SimplifyConstCondition {
}
impl<'tcx> MirPass<'tcx> for SimplifyConstCondition {
- fn name(&self) -> Cow<'_, str> {
- Cow::Borrowed(&self.label)
+ fn name(&self) -> &str {
+ &self.label
}
fn run_pass(&self, tcx: TyCtxt<'tcx>, body: &mut Body<'tcx>) {
diff --git a/compiler/rustc_mir_transform/src/sroa.rs b/compiler/rustc_mir_transform/src/sroa.rs
new file mode 100644
index 000000000..558a372fb
--- /dev/null
+++ b/compiler/rustc_mir_transform/src/sroa.rs
@@ -0,0 +1,348 @@
+use crate::MirPass;
+use rustc_data_structures::fx::{FxIndexMap, IndexEntry};
+use rustc_index::bit_set::BitSet;
+use rustc_index::vec::IndexVec;
+use rustc_middle::mir::visit::*;
+use rustc_middle::mir::*;
+use rustc_middle::ty::TyCtxt;
+
+pub struct ScalarReplacementOfAggregates;
+
+impl<'tcx> MirPass<'tcx> for ScalarReplacementOfAggregates {
+ fn is_enabled(&self, sess: &rustc_session::Session) -> bool {
+ sess.mir_opt_level() >= 3
+ }
+
+ fn run_pass(&self, tcx: TyCtxt<'tcx>, body: &mut Body<'tcx>) {
+ let escaping = escaping_locals(&*body);
+ debug!(?escaping);
+ let replacements = compute_flattening(tcx, body, escaping);
+ debug!(?replacements);
+ replace_flattened_locals(tcx, body, replacements);
+ }
+}
+
+/// Identify all locals that are not eligible for SROA.
+///
+/// There are 3 cases:
+/// - the aggregated local is used or passed to other code (function parameters and arguments);
+/// - the local is a union or an enum;
+/// - the local's address is taken, and thus the relative addresses of the fields are observable to
+/// client code.
+fn escaping_locals(body: &Body<'_>) -> BitSet<Local> {
+ let mut set = BitSet::new_empty(body.local_decls.len());
+ set.insert_range(RETURN_PLACE..=Local::from_usize(body.arg_count));
+ for (local, decl) in body.local_decls().iter_enumerated() {
+ if decl.ty.is_union() || decl.ty.is_enum() {
+ set.insert(local);
+ }
+ }
+ let mut visitor = EscapeVisitor { set };
+ visitor.visit_body(body);
+ return visitor.set;
+
+ struct EscapeVisitor {
+ set: BitSet<Local>,
+ }
+
+ impl<'tcx> Visitor<'tcx> for EscapeVisitor {
+ fn visit_local(&mut self, local: Local, _: PlaceContext, _: Location) {
+ self.set.insert(local);
+ }
+
+ fn visit_place(&mut self, place: &Place<'tcx>, context: PlaceContext, location: Location) {
+ // Mirror the implementation in PreFlattenVisitor.
+ if let &[PlaceElem::Field(..), ..] = &place.projection[..] {
+ return;
+ }
+ self.super_place(place, context, location);
+ }
+
+ fn visit_rvalue(&mut self, rvalue: &Rvalue<'tcx>, location: Location) {
+ if let Rvalue::AddressOf(.., place) | Rvalue::Ref(.., place) = rvalue {
+ if !place.is_indirect() {
+ // Raw pointers may be used to access anything inside the enclosing place.
+ self.set.insert(place.local);
+ return;
+ }
+ }
+ self.super_rvalue(rvalue, location)
+ }
+
+ fn visit_statement(&mut self, statement: &Statement<'tcx>, location: Location) {
+ if let StatementKind::StorageLive(..)
+ | StatementKind::StorageDead(..)
+ | StatementKind::Deinit(..) = statement.kind
+ {
+ // Storage statements are expanded in run_pass.
+ return;
+ }
+ self.super_statement(statement, location)
+ }
+
+ fn visit_terminator(&mut self, terminator: &Terminator<'tcx>, location: Location) {
+ // Drop implicitly calls `drop_in_place`, which takes a `&mut`.
+ // This implies that `Drop` implicitly takes the address of the place.
+ if let TerminatorKind::Drop { place, .. }
+ | TerminatorKind::DropAndReplace { place, .. } = terminator.kind
+ {
+ if !place.is_indirect() {
+ // Raw pointers may be used to access anything inside the enclosing place.
+ self.set.insert(place.local);
+ return;
+ }
+ }
+ self.super_terminator(terminator, location);
+ }
+
+ // We ignore anything that happens in debuginfo, since we expand it using
+ // `VarDebugInfoContents::Composite`.
+ fn visit_var_debug_info(&mut self, _: &VarDebugInfo<'tcx>) {}
+ }
+}
+
+#[derive(Default, Debug)]
+struct ReplacementMap<'tcx> {
+ fields: FxIndexMap<PlaceRef<'tcx>, Local>,
+}
+
+/// Compute the replacement of flattened places into locals.
+///
+/// For each eligible place, we assign a new local to each accessed field.
+/// The replacement will be done later in `ReplacementVisitor`.
+fn compute_flattening<'tcx>(
+ tcx: TyCtxt<'tcx>,
+ body: &mut Body<'tcx>,
+ escaping: BitSet<Local>,
+) -> ReplacementMap<'tcx> {
+ let mut visitor = PreFlattenVisitor {
+ tcx,
+ escaping,
+ local_decls: &mut body.local_decls,
+ map: Default::default(),
+ };
+ for (block, bbdata) in body.basic_blocks.iter_enumerated() {
+ visitor.visit_basic_block_data(block, bbdata);
+ }
+ return visitor.map;
+
+ struct PreFlattenVisitor<'tcx, 'll> {
+ tcx: TyCtxt<'tcx>,
+ local_decls: &'ll mut LocalDecls<'tcx>,
+ escaping: BitSet<Local>,
+ map: ReplacementMap<'tcx>,
+ }
+
+ impl<'tcx, 'll> PreFlattenVisitor<'tcx, 'll> {
+ fn create_place(&mut self, place: PlaceRef<'tcx>) {
+ if self.escaping.contains(place.local) {
+ return;
+ }
+
+ match self.map.fields.entry(place) {
+ IndexEntry::Occupied(_) => {}
+ IndexEntry::Vacant(v) => {
+ let ty = place.ty(&*self.local_decls, self.tcx).ty;
+ let local = self.local_decls.push(LocalDecl {
+ ty,
+ user_ty: None,
+ ..self.local_decls[place.local].clone()
+ });
+ v.insert(local);
+ }
+ }
+ }
+ }
+
+ impl<'tcx, 'll> Visitor<'tcx> for PreFlattenVisitor<'tcx, 'll> {
+ fn visit_place(&mut self, place: &Place<'tcx>, _: PlaceContext, _: Location) {
+ if let &[PlaceElem::Field(..), ..] = &place.projection[..] {
+ let pr = PlaceRef { local: place.local, projection: &place.projection[..1] };
+ self.create_place(pr)
+ }
+ }
+ }
+}
+
+/// Perform the replacement computed by `compute_flattening`.
+fn replace_flattened_locals<'tcx>(
+ tcx: TyCtxt<'tcx>,
+ body: &mut Body<'tcx>,
+ replacements: ReplacementMap<'tcx>,
+) {
+ let mut all_dead_locals = BitSet::new_empty(body.local_decls.len());
+ for p in replacements.fields.keys() {
+ all_dead_locals.insert(p.local);
+ }
+ debug!(?all_dead_locals);
+ if all_dead_locals.is_empty() {
+ return;
+ }
+
+ let mut fragments = IndexVec::new();
+ for (k, v) in &replacements.fields {
+ fragments.ensure_contains_elem(k.local, || Vec::new());
+ fragments[k.local].push((&k.projection[..], *v));
+ }
+ debug!(?fragments);
+
+ let mut visitor = ReplacementVisitor {
+ tcx,
+ local_decls: &body.local_decls,
+ replacements,
+ all_dead_locals,
+ fragments,
+ };
+ for (bb, data) in body.basic_blocks.as_mut_preserves_cfg().iter_enumerated_mut() {
+ visitor.visit_basic_block_data(bb, data);
+ }
+ for scope in &mut body.source_scopes {
+ visitor.visit_source_scope_data(scope);
+ }
+ for (index, annotation) in body.user_type_annotations.iter_enumerated_mut() {
+ visitor.visit_user_type_annotation(index, annotation);
+ }
+ for var_debug_info in &mut body.var_debug_info {
+ visitor.visit_var_debug_info(var_debug_info);
+ }
+}
+
+struct ReplacementVisitor<'tcx, 'll> {
+ tcx: TyCtxt<'tcx>,
+ /// This is only used to compute the type for `VarDebugInfoContents::Composite`.
+ local_decls: &'ll LocalDecls<'tcx>,
+ /// Work to do.
+ replacements: ReplacementMap<'tcx>,
+ /// This is used to check that we are not leaving references to replaced locals behind.
+ all_dead_locals: BitSet<Local>,
+ /// Pre-computed list of all "new" locals for each "old" local. This is used to expand storage
+    /// and deinit statements and debuginfo.
+ fragments: IndexVec<Local, Vec<(&'tcx [PlaceElem<'tcx>], Local)>>,
+}
+
+impl<'tcx, 'll> ReplacementVisitor<'tcx, 'll> {
+ fn gather_debug_info_fragments(
+ &self,
+ place: PlaceRef<'tcx>,
+ ) -> Vec<VarDebugInfoFragment<'tcx>> {
+ let mut fragments = Vec::new();
+ let parts = &self.fragments[place.local];
+ for (proj, replacement_local) in parts {
+ if proj.starts_with(place.projection) {
+ fragments.push(VarDebugInfoFragment {
+ projection: proj[place.projection.len()..].to_vec(),
+ contents: Place::from(*replacement_local),
+ });
+ }
+ }
+ fragments
+ }
+
+ fn replace_place(&self, place: PlaceRef<'tcx>) -> Option<Place<'tcx>> {
+ if let &[PlaceElem::Field(..), ref rest @ ..] = place.projection {
+ let pr = PlaceRef { local: place.local, projection: &place.projection[..1] };
+ let local = self.replacements.fields.get(&pr)?;
+ Some(Place { local: *local, projection: self.tcx.intern_place_elems(&rest) })
+ } else {
+ None
+ }
+ }
+}
+
+impl<'tcx, 'll> MutVisitor<'tcx> for ReplacementVisitor<'tcx, 'll> {
+ fn tcx(&self) -> TyCtxt<'tcx> {
+ self.tcx
+ }
+
+ fn visit_statement(&mut self, statement: &mut Statement<'tcx>, location: Location) {
+ if let StatementKind::StorageLive(..)
+ | StatementKind::StorageDead(..)
+ | StatementKind::Deinit(..) = statement.kind
+ {
+ // Storage statements are expanded in run_pass.
+ return;
+ }
+ self.super_statement(statement, location)
+ }
+
+ fn visit_place(&mut self, place: &mut Place<'tcx>, context: PlaceContext, location: Location) {
+ if let Some(repl) = self.replace_place(place.as_ref()) {
+ *place = repl
+ } else {
+ self.super_place(place, context, location)
+ }
+ }
+
+ fn visit_var_debug_info(&mut self, var_debug_info: &mut VarDebugInfo<'tcx>) {
+ match &mut var_debug_info.value {
+ VarDebugInfoContents::Place(ref mut place) => {
+ if let Some(repl) = self.replace_place(place.as_ref()) {
+ *place = repl;
+ } else if self.all_dead_locals.contains(place.local) {
+ let ty = place.ty(self.local_decls, self.tcx).ty;
+ let fragments = self.gather_debug_info_fragments(place.as_ref());
+ var_debug_info.value = VarDebugInfoContents::Composite { ty, fragments };
+ }
+ }
+ VarDebugInfoContents::Composite { ty: _, ref mut fragments } => {
+ let mut new_fragments = Vec::new();
+ fragments
+ .drain_filter(|fragment| {
+ if let Some(repl) = self.replace_place(fragment.contents.as_ref()) {
+ fragment.contents = repl;
+ true
+ } else if self.all_dead_locals.contains(fragment.contents.local) {
+ let frg = self.gather_debug_info_fragments(fragment.contents.as_ref());
+ new_fragments.extend(frg.into_iter().map(|mut f| {
+ f.projection.splice(0..0, fragment.projection.iter().copied());
+ f
+ }));
+ false
+ } else {
+ true
+ }
+ })
+ .for_each(drop);
+ fragments.extend(new_fragments);
+ }
+ VarDebugInfoContents::Const(_) => {}
+ }
+ }
+
+ fn visit_basic_block_data(&mut self, bb: BasicBlock, bbdata: &mut BasicBlockData<'tcx>) {
+ self.super_basic_block_data(bb, bbdata);
+
+ #[derive(Debug)]
+ enum Stmt {
+ StorageLive,
+ StorageDead,
+ Deinit,
+ }
+
+ bbdata.expand_statements(|stmt| {
+ let source_info = stmt.source_info;
+ let (stmt, origin_local) = match &stmt.kind {
+ StatementKind::StorageLive(l) => (Stmt::StorageLive, *l),
+ StatementKind::StorageDead(l) => (Stmt::StorageDead, *l),
+ StatementKind::Deinit(p) if let Some(l) = p.as_local() => (Stmt::Deinit, l),
+ _ => return None,
+ };
+ if !self.all_dead_locals.contains(origin_local) {
+ return None;
+ }
+ let final_locals = self.fragments.get(origin_local)?;
+ Some(final_locals.iter().map(move |&(_, l)| {
+ let kind = match stmt {
+ Stmt::StorageLive => StatementKind::StorageLive(l),
+ Stmt::StorageDead => StatementKind::StorageDead(l),
+ Stmt::Deinit => StatementKind::Deinit(Box::new(l.into())),
+ };
+ Statement { source_info, kind }
+ }))
+ });
+ }
+
+ fn visit_local(&mut self, local: &mut Local, _: PlaceContext, _: Location) {
+ assert!(!self.all_dead_locals.contains(*local));
+ }
+}
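
The new SROA pass above first computes the set of escaping locals it must leave alone: the return place and arguments, unions and enums, and anything whose address is observed. A simplified escape computation over a toy IR; the statement types below are invented for the sketch and do not model real MIR:

use std::collections::HashSet;

/// Toy statements: a field-wise assignment, or taking a local's address.
enum Stmt {
    AssignField { base: usize, field: u32, from: usize },
    AddressOf { of: usize },
}

/// Mark locals that SROA must leave alone, in the spirit of `escaping_locals`
/// above: the return place and arguments always escape, as does anything used
/// as a whole value or whose address is taken.
fn escaping_locals(arg_count: usize, stmts: &[Stmt]) -> HashSet<usize> {
    let mut set: HashSet<usize> = (0..=arg_count).collect(); // RETURN_PLACE ..= args
    for stmt in stmts {
        match stmt {
            // The base is only reached through a field projection, so it stays
            // splittable; the right-hand side is used whole, so it escapes.
            Stmt::AssignField { from, .. } => {
                set.insert(*from);
            }
            // Taking the address makes field offsets observable: escape.
            Stmt::AddressOf { of } => {
                set.insert(*of);
            }
        }
    }
    set
}

fn main() {
    let stmts = [
        Stmt::AssignField { base: 2, field: 0, from: 3 },
        Stmt::AddressOf { of: 4 },
    ];
    let escaping = escaping_locals(1, &stmts);
    assert!(escaping.contains(&4)); // address taken
    assert!(!escaping.contains(&2)); // only accessed through fields: splittable
}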
diff --git a/compiler/rustc_mir_transform/src/uninhabited_enum_branching.rs b/compiler/rustc_mir_transform/src/uninhabited_enum_branching.rs
index 96ea15f1b..be0aa0fc4 100644
--- a/compiler/rustc_mir_transform/src/uninhabited_enum_branching.rs
+++ b/compiler/rustc_mir_transform/src/uninhabited_enum_branching.rs
@@ -65,7 +65,7 @@ fn variant_discriminants<'tcx>(
Variants::Multiple { variants, .. } => variants
.iter_enumerated()
.filter_map(|(idx, layout)| {
- (layout.abi() != Abi::Uninhabited)
+ (layout.abi != Abi::Uninhabited)
.then(|| ty.discriminant_for_variant(tcx, idx).unwrap().val)
})
.collect(),
diff --git a/compiler/rustc_monomorphize/src/collector.rs b/compiler/rustc_monomorphize/src/collector.rs
index a71218e69..cf7226a12 100644
--- a/compiler/rustc_monomorphize/src/collector.rs
+++ b/compiler/rustc_monomorphize/src/collector.rs
@@ -187,6 +187,7 @@ use rustc_middle::mir::visit::Visitor as MirVisitor;
use rustc_middle::mir::{self, Local, Location};
use rustc_middle::ty::adjustment::{CustomCoerceUnsized, PointerCast};
use rustc_middle::ty::print::with_no_trimmed_paths;
+use rustc_middle::ty::query::TyCtxtAt;
use rustc_middle::ty::subst::{GenericArgKind, InternalSubsts};
use rustc_middle::ty::{
self, GenericParamDefKind, Instance, Ty, TyCtxt, TypeFoldable, TypeVisitable, VtblEntry,
@@ -197,11 +198,10 @@ use rustc_session::lint::builtin::LARGE_ASSIGNMENTS;
use rustc_session::Limit;
use rustc_span::source_map::{dummy_spanned, respan, Span, Spanned, DUMMY_SP};
use rustc_target::abi::Size;
-use std::iter;
use std::ops::Range;
use std::path::PathBuf;
-use crate::errors::{LargeAssignmentsLint, RecursionLimit, RequiresLangItem, TypeLengthLimit};
+use crate::errors::{LargeAssignmentsLint, RecursionLimit, TypeLengthLimit};
#[derive(PartialEq)]
pub enum MonoItemCollectionMode {
@@ -295,8 +295,8 @@ impl<'tcx> InliningMap<'tcx> {
assert!(self.index.insert(source, start_index..end_index).is_none());
}
- // Internally iterate over all items referenced by `source` which will be
- // made available for inlining.
+ /// Internally iterate over all items referenced by `source` which will be
+ /// made available for inlining.
pub fn with_inlining_candidates<F>(&self, source: MonoItem<'tcx>, mut f: F)
where
F: FnMut(MonoItem<'tcx>),
@@ -310,7 +310,7 @@ impl<'tcx> InliningMap<'tcx> {
}
}
- // Internally iterate over all items and the things each accesses.
+ /// Internally iterate over all items and the things each accesses.
pub fn iter_accesses<F>(&self, mut f: F)
where
F: FnMut(MonoItem<'tcx>, &[MonoItem<'tcx>]),
@@ -456,7 +456,7 @@ fn collect_items_rec<'tcx>(
recursion_depth_reset = None;
if let Ok(alloc) = tcx.eval_static_initializer(def_id) {
- for &id in alloc.inner().provenance().values() {
+ for &id in alloc.inner().provenance().ptrs().values() {
collect_miri(tcx, id, &mut neighbors);
}
}
@@ -541,29 +541,23 @@ fn collect_items_rec<'tcx>(
}
/// Format instance name that is already known to be too long for rustc.
-/// Show only the first and last 32 characters to avoid blasting
+/// Show only the first 2 types if it is longer than 32 characters to avoid blasting
/// the user's terminal with thousands of lines of type-name.
///
/// If the type name is longer than before+after, it will be written to a file.
fn shrunk_instance_name<'tcx>(
tcx: TyCtxt<'tcx>,
instance: &Instance<'tcx>,
- before: usize,
- after: usize,
) -> (String, Option<PathBuf>) {
let s = instance.to_string();
// Only use the shrunk version if it's really shorter.
// This also avoids the case where before and after slices overlap.
- if s.chars().nth(before + after + 1).is_some() {
- // An iterator of all byte positions including the end of the string.
- let positions = || s.char_indices().map(|(i, _)| i).chain(iter::once(s.len()));
-
- let shrunk = format!(
- "{before}...{after}",
- before = &s[..positions().nth(before).unwrap_or(s.len())],
- after = &s[positions().rev().nth(after).unwrap_or(0)..],
- );
+ if s.chars().nth(33).is_some() {
+ let shrunk = format!("{}", ty::ShortInstance(instance, 4));
+ if shrunk == s {
+ return (s, None);
+ }
let path = tcx.output_filenames(()).temp_path_ext("long-type.txt", None);
let written_to_path = std::fs::write(&path, s).ok().map(|_| path);
@@ -599,7 +593,7 @@ fn check_recursion_limit<'tcx>(
if !recursion_limit.value_within_limit(adjusted_recursion_depth) {
let def_span = tcx.def_span(def_id);
let def_path_str = tcx.def_path_str(def_id);
- let (shrunk, written_to_path) = shrunk_instance_name(tcx, &instance, 32, 32);
+ let (shrunk, written_to_path) = shrunk_instance_name(tcx, &instance);
let mut path = PathBuf::new();
let was_written = if written_to_path.is_some() {
path = written_to_path.unwrap();
@@ -641,7 +635,7 @@ fn check_type_length_limit<'tcx>(tcx: TyCtxt<'tcx>, instance: Instance<'tcx>) {
//
// Bail out in these cases to avoid that bad user experience.
if !tcx.type_length_limit().value_within_limit(type_length) {
- let (shrunk, written_to_path) = shrunk_instance_name(tcx, &instance, 32, 32);
+ let (shrunk, written_to_path) = shrunk_instance_name(tcx, &instance);
let span = tcx.def_span(instance.def_id());
let mut path = PathBuf::new();
let was_written = if written_to_path.is_some() {
@@ -695,7 +689,7 @@ impl<'a, 'tcx> MirVisitor<'tcx> for MirNeighborCollector<'a, 'tcx> {
let source_ty = operand.ty(self.body, self.tcx);
let source_ty = self.monomorphize(source_ty);
let (source_ty, target_ty) =
- find_vtable_types_for_unsizing(self.tcx, source_ty, target_ty);
+ find_vtable_types_for_unsizing(self.tcx.at(span), source_ty, target_ty);
// This could also be a different Unsize instruction, like
// from a fixed sized array to a slice. But we are only
// interested in things that produce a vtable.
@@ -773,7 +767,7 @@ impl<'a, 'tcx> MirVisitor<'tcx> for MirNeighborCollector<'a, 'tcx> {
match self.tcx.const_eval_resolve(param_env, ct.expand(), None) {
// The `monomorphize` call should have evaluated that constant already.
Ok(val) => val,
- Err(ErrorHandled::Reported(_) | ErrorHandled::Linted) => return,
+ Err(ErrorHandled::Reported(_)) => return,
Err(ErrorHandled::TooGeneric) => span_bug!(
self.body.source_info(location).span,
"collection encountered polymorphic constant: {:?}",
@@ -788,7 +782,7 @@ impl<'a, 'tcx> MirVisitor<'tcx> for MirNeighborCollector<'a, 'tcx> {
match self.tcx.const_eval_resolve(param_env, uv, None) {
// The `monomorphize` call should have evaluated that constant already.
Ok(val) => val,
- Err(ErrorHandled::Reported(_) | ErrorHandled::Linted) => return,
+ Err(ErrorHandled::Reported(_)) => return,
Err(ErrorHandled::TooGeneric) => span_bug!(
self.body.source_info(location).span,
"collection encountered polymorphic constant: {:?}",
@@ -1060,14 +1054,14 @@ fn should_codegen_locally<'tcx>(tcx: TyCtxt<'tcx>, instance: &Instance<'tcx>) ->
/// Finally, there is also the case of custom unsizing coercions, e.g., for
/// smart pointers such as `Rc` and `Arc`.
fn find_vtable_types_for_unsizing<'tcx>(
- tcx: TyCtxt<'tcx>,
+ tcx: TyCtxtAt<'tcx>,
source_ty: Ty<'tcx>,
target_ty: Ty<'tcx>,
) -> (Ty<'tcx>, Ty<'tcx>) {
let ptr_vtable = |inner_source: Ty<'tcx>, inner_target: Ty<'tcx>| {
let param_env = ty::ParamEnv::reveal_all();
let type_has_metadata = |ty: Ty<'tcx>| -> bool {
- if ty.is_sized(tcx, param_env) {
+ if ty.is_sized(tcx.tcx, param_env) {
return false;
}
let tail = tcx.struct_tail_erasing_lifetimes(ty, param_env);
@@ -1111,8 +1105,8 @@ fn find_vtable_types_for_unsizing<'tcx>(
find_vtable_types_for_unsizing(
tcx,
- source_fields[coerce_index].ty(tcx, source_substs),
- target_fields[coerce_index].ty(tcx, target_substs),
+ source_fields[coerce_index].ty(*tcx, source_substs),
+ target_fields[coerce_index].ty(*tcx, target_substs),
)
}
_ => bug!(
@@ -1298,14 +1292,7 @@ impl<'v> RootCollector<'_, 'v> {
return;
};
- let start_def_id = match self.tcx.lang_items().require(LangItem::Start) {
- Ok(s) => s,
- Err(lang_item_err) => {
- self.tcx
- .sess
- .emit_fatal(RequiresLangItem { lang_item: lang_item_err.0.name().to_string() });
- }
- };
+ let start_def_id = self.tcx.require_lang_item(LangItem::Start, None);
let main_ret_ty = self.tcx.fn_sig(main_def_id).output();
// Given that `main()` has no arguments,
@@ -1343,6 +1330,10 @@ fn create_mono_items_for_default_impls<'tcx>(
) {
match item.kind {
hir::ItemKind::Impl(ref impl_) => {
+ if matches!(impl_.polarity, hir::ImplPolarity::Negative(_)) {
+ return;
+ }
+
for param in impl_.generics.params {
match param.kind {
hir::GenericParamKind::Lifetime { .. } => {}
@@ -1407,7 +1398,7 @@ fn collect_miri<'tcx>(tcx: TyCtxt<'tcx>, alloc_id: AllocId, output: &mut MonoIte
}
GlobalAlloc::Memory(alloc) => {
trace!("collecting {:?} with {:#?}", alloc_id, alloc);
- for &inner in alloc.inner().provenance().values() {
+ for &inner in alloc.inner().provenance().ptrs().values() {
rustc_data_structures::stack::ensure_sufficient_stack(|| {
collect_miri(tcx, inner, output);
});
@@ -1446,7 +1437,7 @@ fn collect_const_value<'tcx>(
match value {
ConstValue::Scalar(Scalar::Ptr(ptr, _size)) => collect_miri(tcx, ptr.provenance, output),
ConstValue::Slice { data: alloc, start: _, end: _ } | ConstValue::ByRef { alloc, .. } => {
- for &id in alloc.inner().provenance().values() {
+ for &id in alloc.inner().provenance().ptrs().values() {
collect_miri(tcx, id, output);
}
}
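
The shrunk_instance_name hunk above only shrinks names longer than 32 characters and writes the full name to a long-type.txt file. A standalone sketch of that control flow, with plain strings in place of Instance and a fixed path standing in for tcx.output_filenames (both assumptions of the sketch):

use std::path::PathBuf;

/// Return the string to display and, when the name had to be truncated, the
/// path the full name was written to -- mirroring the shape of the hunk above.
fn shrunk_name(full: &str, short: &str) -> (String, Option<PathBuf>) {
    // Only bother shrinking when the full name is actually long.
    if full.chars().nth(33).is_some() {
        // If the "short" form did not get any shorter, just use the original.
        if short == full {
            return (full.to_string(), None);
        }
        let path = PathBuf::from("long-type.txt"); // stand-in for temp_path_ext
        let written = std::fs::write(&path, full).ok().map(|_| path);
        (short.to_string(), written)
    } else {
        (full.to_string(), None)
    }
}

fn main() {
    let long = "very::deeply::nested::Type<with, lots, of, params>".repeat(4);
    let (shown, file) = shrunk_name(&long, "very::deeply::nested::Type<...>");
    println!("showing {shown:?}, full name written to {file:?}");
}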
diff --git a/compiler/rustc_monomorphize/src/errors.rs b/compiler/rustc_monomorphize/src/errors.rs
index ce097b8d8..f1ca72de8 100644
--- a/compiler/rustc_monomorphize/src/errors.rs
+++ b/compiler/rustc_monomorphize/src/errors.rs
@@ -32,12 +32,6 @@ pub struct TypeLengthLimit {
pub type_length: usize,
}
-#[derive(Diagnostic)]
-#[diag(monomorphize_requires_lang_item)]
-pub struct RequiresLangItem {
- pub lang_item: String,
-}
-
pub struct UnusedGenericParams {
pub span: Span,
pub param_spans: Vec<Span>,
@@ -45,6 +39,7 @@ pub struct UnusedGenericParams {
}
impl IntoDiagnostic<'_> for UnusedGenericParams {
+ #[track_caller]
fn into_diagnostic(
self,
handler: &'_ rustc_errors::Handler,
diff --git a/compiler/rustc_monomorphize/src/lib.rs b/compiler/rustc_monomorphize/src/lib.rs
index 42781bd25..b616ed35d 100644
--- a/compiler/rustc_monomorphize/src/lib.rs
+++ b/compiler/rustc_monomorphize/src/lib.rs
@@ -13,8 +13,8 @@ extern crate rustc_middle;
use rustc_hir::lang_items::LangItem;
use rustc_middle::traits;
use rustc_middle::ty::adjustment::CustomCoerceUnsized;
-use rustc_middle::ty::query::Providers;
-use rustc_middle::ty::{self, Ty, TyCtxt};
+use rustc_middle::ty::query::{Providers, TyCtxtAt};
+use rustc_middle::ty::{self, Ty};
mod collector;
mod errors;
@@ -23,16 +23,12 @@ mod polymorphize;
mod util;
fn custom_coerce_unsize_info<'tcx>(
- tcx: TyCtxt<'tcx>,
+ tcx: TyCtxtAt<'tcx>,
source_ty: Ty<'tcx>,
target_ty: Ty<'tcx>,
) -> CustomCoerceUnsized {
- let def_id = tcx.require_lang_item(LangItem::CoerceUnsized, None);
-
- let trait_ref = ty::Binder::dummy(ty::TraitRef {
- def_id,
- substs: tcx.mk_substs_trait(source_ty, &[target_ty.into()]),
- });
+ let trait_ref =
+ ty::Binder::dummy(tcx.mk_trait_ref(LangItem::CoerceUnsized, [source_ty, target_ty]));
match tcx.codegen_select_candidate((ty::ParamEnv::reveal_all(), trait_ref)) {
Ok(traits::ImplSource::UserDefined(traits::ImplSourceUserDefinedData {
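
The rustc_monomorphize hunks above replace hand-rolled lang-item handling with two tcx helpers: require_lang_item(LangItem::Start, None) emits its own fatal diagnostic when the lang item is missing (which is why the local RequiresLangItem struct is deleted from errors.rs), and TyCtxtAt::mk_trait_ref resolves the lang item and builds the substitutions in one call. A minimal sketch of the resulting shape, assuming it is compiled inside the rustc tree; the free function is invented for illustration:

    use rustc_hir::lang_items::LangItem;
    use rustc_middle::ty::query::TyCtxtAt;
    use rustc_middle::ty::{self, Ty};

    // Build the `CoerceUnsized` trait ref the way the patched
    // `custom_coerce_unsize_info` does, without explicit `require_lang_item`
    // and `mk_substs_trait` steps.
    fn coerce_unsized_trait_ref<'tcx>(
        tcx: TyCtxtAt<'tcx>,
        source_ty: Ty<'tcx>,
        target_ty: Ty<'tcx>,
    ) -> ty::PolyTraitRef<'tcx> {
        ty::Binder::dummy(tcx.mk_trait_ref(LangItem::CoerceUnsized, [source_ty, target_ty]))
    }
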
diff --git a/compiler/rustc_parse/Cargo.toml b/compiler/rustc_parse/Cargo.toml
index a5c94e164..dbcfb3903 100644
--- a/compiler/rustc_parse/Cargo.toml
+++ b/compiler/rustc_parse/Cargo.toml
@@ -7,15 +7,16 @@ edition = "2021"
[dependencies]
bitflags = "1.0"
-tracing = "0.1"
+rustc_ast = { path = "../rustc_ast" }
rustc_ast_pretty = { path = "../rustc_ast_pretty" }
rustc_data_structures = { path = "../rustc_data_structures" }
+rustc_errors = { path = "../rustc_errors" }
rustc_feature = { path = "../rustc_feature" }
rustc_lexer = { path = "../rustc_lexer" }
rustc_macros = { path = "../rustc_macros" }
-rustc_errors = { path = "../rustc_errors" }
rustc_session = { path = "../rustc_session" }
rustc_span = { path = "../rustc_span" }
-rustc_ast = { path = "../rustc_ast" }
+thin-vec = "0.2.8"
+tracing = "0.1"
unicode-normalization = "0.1.11"
unicode-width = "0.1.4"
diff --git a/compiler/rustc_parse/src/errors.rs b/compiler/rustc_parse/src/errors.rs
index 9b177c518..9875cde4a 100644
--- a/compiler/rustc_parse/src/errors.rs
+++ b/compiler/rustc_parse/src/errors.rs
@@ -9,7 +9,7 @@ use rustc_span::{Span, Symbol};
use crate::parser::TokenDescription;
#[derive(Diagnostic)]
-#[diag(parser_maybe_report_ambiguous_plus)]
+#[diag(parse_maybe_report_ambiguous_plus)]
pub(crate) struct AmbiguousPlus {
pub sum_ty: String,
#[primary_span]
@@ -18,7 +18,7 @@ pub(crate) struct AmbiguousPlus {
}
#[derive(Diagnostic)]
-#[diag(parser_maybe_recover_from_bad_type_plus, code = "E0178")]
+#[diag(parse_maybe_recover_from_bad_type_plus, code = "E0178")]
pub(crate) struct BadTypePlus {
pub ty: String,
#[primary_span]
@@ -30,7 +30,7 @@ pub(crate) struct BadTypePlus {
#[derive(Subdiagnostic)]
pub(crate) enum BadTypePlusSub {
#[suggestion(
- parser_add_paren,
+ parse_add_paren,
code = "{sum_with_parens}",
applicability = "machine-applicable"
)]
@@ -39,12 +39,12 @@ pub(crate) enum BadTypePlusSub {
#[primary_span]
span: Span,
},
- #[label(parser_forgot_paren)]
+ #[label(parse_forgot_paren)]
ForgotParen {
#[primary_span]
span: Span,
},
- #[label(parser_expect_path)]
+ #[label(parse_expect_path)]
ExpectPath {
#[primary_span]
span: Span,
@@ -52,7 +52,7 @@ pub(crate) enum BadTypePlusSub {
}
#[derive(Diagnostic)]
-#[diag(parser_maybe_recover_from_bad_qpath_stage_2)]
+#[diag(parse_maybe_recover_from_bad_qpath_stage_2)]
pub(crate) struct BadQPathStage2 {
#[primary_span]
#[suggestion(code = "", applicability = "maybe-incorrect")]
@@ -61,10 +61,10 @@ pub(crate) struct BadQPathStage2 {
}
#[derive(Diagnostic)]
-#[diag(parser_incorrect_semicolon)]
+#[diag(parse_incorrect_semicolon)]
pub(crate) struct IncorrectSemicolon<'a> {
#[primary_span]
- #[suggestion_short(code = "", applicability = "machine-applicable")]
+ #[suggestion(style = "short", code = "", applicability = "machine-applicable")]
pub span: Span,
#[help]
pub opt_help: Option<()>,
@@ -72,7 +72,7 @@ pub(crate) struct IncorrectSemicolon<'a> {
}
#[derive(Diagnostic)]
-#[diag(parser_incorrect_use_of_await)]
+#[diag(parse_incorrect_use_of_await)]
pub(crate) struct IncorrectUseOfAwait {
#[primary_span]
#[suggestion(parentheses_suggestion, code = "", applicability = "machine-applicable")]
@@ -80,7 +80,7 @@ pub(crate) struct IncorrectUseOfAwait {
}
#[derive(Diagnostic)]
-#[diag(parser_incorrect_use_of_await)]
+#[diag(parse_incorrect_use_of_await)]
pub(crate) struct IncorrectAwait {
#[primary_span]
pub span: Span,
@@ -91,7 +91,7 @@ pub(crate) struct IncorrectAwait {
}
#[derive(Diagnostic)]
-#[diag(parser_in_in_typo)]
+#[diag(parse_in_in_typo)]
pub(crate) struct InInTypo {
#[primary_span]
pub span: Span,
@@ -100,7 +100,7 @@ pub(crate) struct InInTypo {
}
#[derive(Diagnostic)]
-#[diag(parser_invalid_variable_declaration)]
+#[diag(parse_invalid_variable_declaration)]
pub(crate) struct InvalidVariableDeclaration {
#[primary_span]
pub span: Span,
@@ -110,22 +110,22 @@ pub(crate) struct InvalidVariableDeclaration {
#[derive(Subdiagnostic)]
pub(crate) enum InvalidVariableDeclarationSub {
- #[suggestion(parser_switch_mut_let_order, applicability = "maybe-incorrect", code = "let mut")]
+ #[suggestion(parse_switch_mut_let_order, applicability = "maybe-incorrect", code = "let mut")]
SwitchMutLetOrder(#[primary_span] Span),
#[suggestion(
- parser_missing_let_before_mut,
+ parse_missing_let_before_mut,
applicability = "machine-applicable",
code = "let mut"
)]
MissingLet(#[primary_span] Span),
- #[suggestion(parser_use_let_not_auto, applicability = "machine-applicable", code = "let")]
+ #[suggestion(parse_use_let_not_auto, applicability = "machine-applicable", code = "let")]
UseLetNotAuto(#[primary_span] Span),
- #[suggestion(parser_use_let_not_var, applicability = "machine-applicable", code = "let")]
+ #[suggestion(parse_use_let_not_var, applicability = "machine-applicable", code = "let")]
UseLetNotVar(#[primary_span] Span),
}
#[derive(Diagnostic)]
-#[diag(parser_invalid_comparison_operator)]
+#[diag(parse_invalid_comparison_operator)]
pub(crate) struct InvalidComparisonOperator {
#[primary_span]
pub span: Span,
@@ -136,7 +136,12 @@ pub(crate) struct InvalidComparisonOperator {
#[derive(Subdiagnostic)]
pub(crate) enum InvalidComparisonOperatorSub {
- #[suggestion_short(use_instead, applicability = "machine-applicable", code = "{correct}")]
+ #[suggestion(
+ use_instead,
+ style = "short",
+ applicability = "machine-applicable",
+ code = "{correct}"
+ )]
Correctable {
#[primary_span]
span: Span,
@@ -148,7 +153,7 @@ pub(crate) enum InvalidComparisonOperatorSub {
}
#[derive(Diagnostic)]
-#[diag(parser_invalid_logical_operator)]
+#[diag(parse_invalid_logical_operator)]
#[note]
pub(crate) struct InvalidLogicalOperator {
#[primary_span]
@@ -160,14 +165,16 @@ pub(crate) struct InvalidLogicalOperator {
#[derive(Subdiagnostic)]
pub(crate) enum InvalidLogicalOperatorSub {
- #[suggestion_short(
+ #[suggestion(
use_amp_amp_for_conjunction,
+ style = "short",
applicability = "machine-applicable",
code = "&&"
)]
Conjunction(#[primary_span] Span),
- #[suggestion_short(
+ #[suggestion(
use_pipe_pipe_for_disjunction,
+ style = "short",
applicability = "machine-applicable",
code = "||"
)]
@@ -175,15 +182,15 @@ pub(crate) enum InvalidLogicalOperatorSub {
}
#[derive(Diagnostic)]
-#[diag(parser_tilde_is_not_unary_operator)]
+#[diag(parse_tilde_is_not_unary_operator)]
pub(crate) struct TildeAsUnaryOperator(
#[primary_span]
- #[suggestion_short(applicability = "machine-applicable", code = "!")]
+ #[suggestion(style = "short", applicability = "machine-applicable", code = "!")]
pub Span,
);
#[derive(Diagnostic)]
-#[diag(parser_unexpected_token_after_not)]
+#[diag(parse_unexpected_token_after_not)]
pub(crate) struct NotAsNegationOperator {
#[primary_span]
pub negated: Span,
@@ -194,22 +201,25 @@ pub(crate) struct NotAsNegationOperator {
#[derive(Subdiagnostic)]
pub enum NotAsNegationOperatorSub {
- #[suggestion_short(
- parser_unexpected_token_after_not_default,
+ #[suggestion(
+ parse_unexpected_token_after_not_default,
+ style = "short",
applicability = "machine-applicable",
code = "!"
)]
SuggestNotDefault(#[primary_span] Span),
- #[suggestion_short(
- parser_unexpected_token_after_not_bitwise,
+ #[suggestion(
+ parse_unexpected_token_after_not_bitwise,
+ style = "short",
applicability = "machine-applicable",
code = "!"
)]
SuggestNotBitwise(#[primary_span] Span),
- #[suggestion_short(
- parser_unexpected_token_after_not_logical,
+ #[suggestion(
+ parse_unexpected_token_after_not_logical,
+ style = "short",
applicability = "machine-applicable",
code = "!"
)]
@@ -217,7 +227,7 @@ pub enum NotAsNegationOperatorSub {
}
#[derive(Diagnostic)]
-#[diag(parser_malformed_loop_label)]
+#[diag(parse_malformed_loop_label)]
pub(crate) struct MalformedLoopLabel {
#[primary_span]
#[suggestion(applicability = "machine-applicable", code = "{correct_label}")]
@@ -226,7 +236,7 @@ pub(crate) struct MalformedLoopLabel {
}
#[derive(Diagnostic)]
-#[diag(parser_lifetime_in_borrow_expression)]
+#[diag(parse_lifetime_in_borrow_expression)]
pub(crate) struct LifetimeInBorrowExpression {
#[primary_span]
pub span: Span,
@@ -236,20 +246,20 @@ pub(crate) struct LifetimeInBorrowExpression {
}
#[derive(Diagnostic)]
-#[diag(parser_field_expression_with_generic)]
+#[diag(parse_field_expression_with_generic)]
pub(crate) struct FieldExpressionWithGeneric(#[primary_span] pub Span);
#[derive(Diagnostic)]
-#[diag(parser_macro_invocation_with_qualified_path)]
+#[diag(parse_macro_invocation_with_qualified_path)]
pub(crate) struct MacroInvocationWithQualifiedPath(#[primary_span] pub Span);
#[derive(Diagnostic)]
-#[diag(parser_unexpected_token_after_label)]
+#[diag(parse_unexpected_token_after_label)]
pub(crate) struct UnexpectedTokenAfterLabel {
#[primary_span]
- #[label(parser_unexpected_token_after_label)]
+ #[label(parse_unexpected_token_after_label)]
pub span: Span,
- #[suggestion_verbose(suggestion_remove_label, code = "")]
+ #[suggestion(suggestion_remove_label, style = "verbose", code = "")]
pub remove_label: Option<Span>,
#[subdiagnostic]
pub enclose_in_block: Option<UnexpectedTokenAfterLabelSugg>,
@@ -265,19 +275,19 @@ pub(crate) struct UnexpectedTokenAfterLabelSugg {
}
#[derive(Diagnostic)]
-#[diag(parser_require_colon_after_labeled_expression)]
+#[diag(parse_require_colon_after_labeled_expression)]
#[note]
pub(crate) struct RequireColonAfterLabeledExpression {
#[primary_span]
pub span: Span,
#[label]
pub label: Span,
- #[suggestion_short(applicability = "machine-applicable", code = ": ")]
+ #[suggestion(style = "short", applicability = "machine-applicable", code = ": ")]
pub label_end: Span,
}
#[derive(Diagnostic)]
-#[diag(parser_do_catch_syntax_removed)]
+#[diag(parse_do_catch_syntax_removed)]
#[note]
pub(crate) struct DoCatchSyntaxRemoved {
#[primary_span]
@@ -286,7 +296,7 @@ pub(crate) struct DoCatchSyntaxRemoved {
}
#[derive(Diagnostic)]
-#[diag(parser_float_literal_requires_integer_part)]
+#[diag(parse_float_literal_requires_integer_part)]
pub(crate) struct FloatLiteralRequiresIntegerPart {
#[primary_span]
#[suggestion(applicability = "machine-applicable", code = "{correct}")]
@@ -295,71 +305,25 @@ pub(crate) struct FloatLiteralRequiresIntegerPart {
}
#[derive(Diagnostic)]
-#[diag(parser_invalid_int_literal_width)]
-#[help]
-pub(crate) struct InvalidIntLiteralWidth {
- #[primary_span]
- pub span: Span,
- pub width: String,
-}
-
-#[derive(Diagnostic)]
-#[diag(parser_invalid_num_literal_base_prefix)]
-#[note]
-pub(crate) struct InvalidNumLiteralBasePrefix {
- #[primary_span]
- #[suggestion(applicability = "maybe-incorrect", code = "{fixed}")]
- pub span: Span,
- pub fixed: String,
-}
-
-#[derive(Diagnostic)]
-#[diag(parser_invalid_num_literal_suffix)]
-#[help]
-pub(crate) struct InvalidNumLiteralSuffix {
- #[primary_span]
- #[label]
- pub span: Span,
- pub suffix: String,
-}
-
-#[derive(Diagnostic)]
-#[diag(parser_invalid_float_literal_width)]
-#[help]
-pub(crate) struct InvalidFloatLiteralWidth {
- #[primary_span]
- pub span: Span,
- pub width: String,
-}
-
-#[derive(Diagnostic)]
-#[diag(parser_invalid_float_literal_suffix)]
-#[help]
-pub(crate) struct InvalidFloatLiteralSuffix {
- #[primary_span]
- #[label]
- pub span: Span,
- pub suffix: String,
-}
-
-#[derive(Diagnostic)]
-#[diag(parser_int_literal_too_large)]
-pub(crate) struct IntLiteralTooLarge {
+#[diag(parse_missing_semicolon_before_array)]
+pub(crate) struct MissingSemicolonBeforeArray {
#[primary_span]
- pub span: Span,
+ pub open_delim: Span,
+ #[suggestion(style = "verbose", applicability = "maybe-incorrect", code = ";")]
+ pub semicolon: Span,
}
#[derive(Diagnostic)]
-#[diag(parser_missing_semicolon_before_array)]
-pub(crate) struct MissingSemicolonBeforeArray {
+#[diag(parse_expect_dotdot_not_dotdotdot)]
+pub(crate) struct MissingDotDot {
#[primary_span]
- pub open_delim: Span,
- #[suggestion_verbose(applicability = "maybe-incorrect", code = ";")]
- pub semicolon: Span,
+ pub token_span: Span,
+ #[suggestion(applicability = "maybe-incorrect", code = "..", style = "verbose")]
+ pub sugg_span: Span,
}
#[derive(Diagnostic)]
-#[diag(parser_invalid_block_macro_segment)]
+#[diag(parse_invalid_block_macro_segment)]
pub(crate) struct InvalidBlockMacroSegment {
#[primary_span]
pub span: Span,
@@ -368,7 +332,7 @@ pub(crate) struct InvalidBlockMacroSegment {
}
#[derive(Diagnostic)]
-#[diag(parser_if_expression_missing_then_block)]
+#[diag(parse_if_expression_missing_then_block)]
pub(crate) struct IfExpressionMissingThenBlock {
#[primary_span]
pub if_span: Span,
@@ -385,7 +349,7 @@ pub(crate) enum IfExpressionMissingThenBlockSub {
}
#[derive(Diagnostic)]
-#[diag(parser_if_expression_missing_condition)]
+#[diag(parse_if_expression_missing_condition)]
pub(crate) struct IfExpressionMissingCondition {
#[primary_span]
#[label(condition_label)]
@@ -395,14 +359,23 @@ pub(crate) struct IfExpressionMissingCondition {
}
#[derive(Diagnostic)]
-#[diag(parser_expected_expression_found_let)]
+#[diag(parse_expected_expression_found_let)]
pub(crate) struct ExpectedExpressionFoundLet {
#[primary_span]
pub span: Span,
}
#[derive(Diagnostic)]
-#[diag(parser_expected_else_block)]
+#[diag(parse_expect_eq_instead_of_eqeq)]
+pub(crate) struct ExpectedEqForLetExpr {
+ #[primary_span]
+ pub span: Span,
+ #[suggestion(applicability = "maybe-incorrect", code = "=", style = "verbose")]
+ pub sugg_span: Span,
+}
+
+#[derive(Diagnostic)]
+#[diag(parse_expected_else_block)]
pub(crate) struct ExpectedElseBlock {
#[primary_span]
pub first_tok_span: Span,
@@ -414,7 +387,7 @@ pub(crate) struct ExpectedElseBlock {
}
#[derive(Diagnostic)]
-#[diag(parser_outer_attribute_not_allowed_on_if_else)]
+#[diag(parse_outer_attribute_not_allowed_on_if_else)]
pub(crate) struct OuterAttributeNotAllowedOnIfElse {
#[primary_span]
pub last: Span,
@@ -431,7 +404,7 @@ pub(crate) struct OuterAttributeNotAllowedOnIfElse {
}
#[derive(Diagnostic)]
-#[diag(parser_missing_in_in_for_loop)]
+#[diag(parse_missing_in_in_for_loop)]
pub(crate) struct MissingInInForLoop {
#[primary_span]
pub span: Span,
@@ -442,14 +415,14 @@ pub(crate) struct MissingInInForLoop {
#[derive(Subdiagnostic)]
pub(crate) enum MissingInInForLoopSub {
// Has been misleading, at least in the past (closed Issue #48492), thus maybe-incorrect
- #[suggestion_short(use_in_not_of, applicability = "maybe-incorrect", code = "in")]
+ #[suggestion(use_in_not_of, style = "short", applicability = "maybe-incorrect", code = "in")]
InNotOf(#[primary_span] Span),
- #[suggestion_short(add_in, applicability = "maybe-incorrect", code = " in ")]
+ #[suggestion(add_in, style = "short", applicability = "maybe-incorrect", code = " in ")]
AddIn(#[primary_span] Span),
}
#[derive(Diagnostic)]
-#[diag(parser_missing_comma_after_match_arm)]
+#[diag(parse_missing_comma_after_match_arm)]
pub(crate) struct MissingCommaAfterMatchArm {
#[primary_span]
#[suggestion(applicability = "machine-applicable", code = ",")]
@@ -457,7 +430,7 @@ pub(crate) struct MissingCommaAfterMatchArm {
}
#[derive(Diagnostic)]
-#[diag(parser_catch_after_try)]
+#[diag(parse_catch_after_try)]
#[help]
pub(crate) struct CatchAfterTry {
#[primary_span]
@@ -465,17 +438,17 @@ pub(crate) struct CatchAfterTry {
}
#[derive(Diagnostic)]
-#[diag(parser_comma_after_base_struct)]
+#[diag(parse_comma_after_base_struct)]
#[note]
pub(crate) struct CommaAfterBaseStruct {
#[primary_span]
pub span: Span,
- #[suggestion_short(applicability = "machine-applicable", code = "")]
+ #[suggestion(style = "short", applicability = "machine-applicable", code = "")]
pub comma: Span,
}
#[derive(Diagnostic)]
-#[diag(parser_eq_field_init)]
+#[diag(parse_eq_field_init)]
pub(crate) struct EqFieldInit {
#[primary_span]
pub span: Span,
@@ -484,7 +457,7 @@ pub(crate) struct EqFieldInit {
}
#[derive(Diagnostic)]
-#[diag(parser_dotdotdot)]
+#[diag(parse_dotdotdot)]
pub(crate) struct DotDotDot {
#[primary_span]
#[suggestion(suggest_exclusive_range, applicability = "maybe-incorrect", code = "..")]
@@ -493,7 +466,7 @@ pub(crate) struct DotDotDot {
}
#[derive(Diagnostic)]
-#[diag(parser_left_arrow_operator)]
+#[diag(parse_left_arrow_operator)]
pub(crate) struct LeftArrowOperator {
#[primary_span]
#[suggestion(applicability = "maybe-incorrect", code = "< -")]
@@ -501,7 +474,7 @@ pub(crate) struct LeftArrowOperator {
}
#[derive(Diagnostic)]
-#[diag(parser_remove_let)]
+#[diag(parse_remove_let)]
pub(crate) struct RemoveLet {
#[primary_span]
#[suggestion(applicability = "machine-applicable", code = "")]
@@ -509,23 +482,23 @@ pub(crate) struct RemoveLet {
}
#[derive(Diagnostic)]
-#[diag(parser_use_eq_instead)]
+#[diag(parse_use_eq_instead)]
pub(crate) struct UseEqInstead {
#[primary_span]
- #[suggestion_short(applicability = "machine-applicable", code = "=")]
+ #[suggestion(style = "short", applicability = "machine-applicable", code = "=")]
pub span: Span,
}
#[derive(Diagnostic)]
-#[diag(parser_use_empty_block_not_semi)]
+#[diag(parse_use_empty_block_not_semi)]
pub(crate) struct UseEmptyBlockNotSemi {
#[primary_span]
- #[suggestion_hidden(applicability = "machine-applicable", code = "{{}}")]
+ #[suggestion(style = "hidden", applicability = "machine-applicable", code = "{{}}")]
pub span: Span,
}
#[derive(Diagnostic)]
-#[diag(parser_comparison_interpreted_as_generic)]
+#[diag(parse_comparison_interpreted_as_generic)]
pub(crate) struct ComparisonInterpretedAsGeneric {
#[primary_span]
#[label(label_comparison)]
@@ -538,7 +511,7 @@ pub(crate) struct ComparisonInterpretedAsGeneric {
}
#[derive(Diagnostic)]
-#[diag(parser_shift_interpreted_as_generic)]
+#[diag(parse_shift_interpreted_as_generic)]
pub(crate) struct ShiftInterpretedAsGeneric {
#[primary_span]
#[label(label_comparison)]
@@ -560,7 +533,7 @@ pub(crate) struct ComparisonOrShiftInterpretedAsGenericSugg {
}
#[derive(Diagnostic)]
-#[diag(parser_found_expr_would_be_stmt)]
+#[diag(parse_found_expr_would_be_stmt)]
pub(crate) struct FoundExprWouldBeStmt {
#[primary_span]
#[label]
@@ -571,19 +544,24 @@ pub(crate) struct FoundExprWouldBeStmt {
}
#[derive(Diagnostic)]
-#[diag(parser_leading_plus_not_supported)]
+#[diag(parse_leading_plus_not_supported)]
pub(crate) struct LeadingPlusNotSupported {
#[primary_span]
#[label]
pub span: Span,
- #[suggestion_verbose(suggestion_remove_plus, code = "", applicability = "machine-applicable")]
+ #[suggestion(
+ suggestion_remove_plus,
+ style = "verbose",
+ code = "",
+ applicability = "machine-applicable"
+ )]
pub remove_plus: Option<Span>,
#[subdiagnostic]
pub add_parentheses: Option<ExprParenthesesNeeded>,
}
#[derive(Diagnostic)]
-#[diag(parser_parentheses_with_struct_fields)]
+#[diag(parse_parentheses_with_struct_fields)]
pub(crate) struct ParenthesesWithStructFields {
#[primary_span]
pub span: Span,
@@ -611,7 +589,7 @@ pub(crate) struct NoFieldsForFnCall {
}
#[derive(Diagnostic)]
-#[diag(parser_labeled_loop_in_break)]
+#[diag(parse_labeled_loop_in_break)]
pub(crate) struct LabeledLoopInBreak {
#[primary_span]
pub span: Span,
@@ -621,7 +599,7 @@ pub(crate) struct LabeledLoopInBreak {
#[derive(Subdiagnostic)]
#[multipart_suggestion(
- parser_sugg_wrap_expression_in_parentheses,
+ parse_sugg_wrap_expression_in_parentheses,
applicability = "machine-applicable"
)]
pub(crate) struct WrapExpressionInParentheses {
@@ -632,7 +610,7 @@ pub(crate) struct WrapExpressionInParentheses {
}
#[derive(Diagnostic)]
-#[diag(parser_array_brackets_instead_of_braces)]
+#[diag(parse_array_brackets_instead_of_braces)]
pub(crate) struct ArrayBracketsInsteadOfSpaces {
#[primary_span]
pub span: Span,
@@ -650,7 +628,7 @@ pub(crate) struct ArrayBracketsInsteadOfSpacesSugg {
}
#[derive(Diagnostic)]
-#[diag(parser_match_arm_body_without_braces)]
+#[diag(parse_match_arm_body_without_braces)]
pub(crate) struct MatchArmBodyWithoutBraces {
#[primary_span]
#[label(label_statements)]
@@ -683,7 +661,7 @@ pub(crate) enum MatchArmBodyWithoutBracesSugg {
}
#[derive(Diagnostic)]
-#[diag(parser_struct_literal_not_allowed_here)]
+#[diag(parse_struct_literal_not_allowed_here)]
pub(crate) struct StructLiteralNotAllowedHere {
#[primary_span]
pub span: Span,
@@ -701,49 +679,14 @@ pub(crate) struct StructLiteralNotAllowedHereSugg {
}
#[derive(Diagnostic)]
-#[diag(parser_invalid_interpolated_expression)]
+#[diag(parse_invalid_interpolated_expression)]
pub(crate) struct InvalidInterpolatedExpression {
#[primary_span]
pub span: Span,
}
#[derive(Diagnostic)]
-#[diag(parser_hexadecimal_float_literal_not_supported)]
-pub(crate) struct HexadecimalFloatLiteralNotSupported {
- #[primary_span]
- #[label(parser_not_supported)]
- pub span: Span,
-}
-
-#[derive(Diagnostic)]
-#[diag(parser_octal_float_literal_not_supported)]
-pub(crate) struct OctalFloatLiteralNotSupported {
- #[primary_span]
- #[label(parser_not_supported)]
- pub span: Span,
-}
-
-#[derive(Diagnostic)]
-#[diag(parser_binary_float_literal_not_supported)]
-pub(crate) struct BinaryFloatLiteralNotSupported {
- #[primary_span]
- #[label(parser_not_supported)]
- pub span: Span,
-}
-
-#[derive(Diagnostic)]
-#[diag(parser_invalid_literal_suffix)]
-pub(crate) struct InvalidLiteralSuffix {
- #[primary_span]
- #[label]
- pub span: Span,
- // FIXME(#100717)
- pub kind: String,
- pub suffix: Symbol,
-}
-
-#[derive(Diagnostic)]
-#[diag(parser_invalid_literal_suffix_on_tuple_index)]
+#[diag(parse_invalid_literal_suffix_on_tuple_index)]
pub(crate) struct InvalidLiteralSuffixOnTupleIndex {
#[primary_span]
#[label]
@@ -756,7 +699,7 @@ pub(crate) struct InvalidLiteralSuffixOnTupleIndex {
}
#[derive(Diagnostic)]
-#[diag(parser_non_string_abi_literal)]
+#[diag(parse_non_string_abi_literal)]
pub(crate) struct NonStringAbiLiteral {
#[primary_span]
#[suggestion(code = "\"C\"", applicability = "maybe-incorrect")]
@@ -764,7 +707,7 @@ pub(crate) struct NonStringAbiLiteral {
}
#[derive(Diagnostic)]
-#[diag(parser_mismatched_closing_delimiter)]
+#[diag(parse_mismatched_closing_delimiter)]
pub(crate) struct MismatchedClosingDelimiter {
#[primary_span]
pub spans: Vec<Span>,
@@ -778,7 +721,7 @@ pub(crate) struct MismatchedClosingDelimiter {
}
#[derive(Diagnostic)]
-#[diag(parser_incorrect_visibility_restriction, code = "E0704")]
+#[diag(parse_incorrect_visibility_restriction, code = "E0704")]
#[help]
pub(crate) struct IncorrectVisibilityRestriction {
#[primary_span]
@@ -788,21 +731,21 @@ pub(crate) struct IncorrectVisibilityRestriction {
}
#[derive(Diagnostic)]
-#[diag(parser_assignment_else_not_allowed)]
+#[diag(parse_assignment_else_not_allowed)]
pub(crate) struct AssignmentElseNotAllowed {
#[primary_span]
pub span: Span,
}
#[derive(Diagnostic)]
-#[diag(parser_expected_statement_after_outer_attr)]
+#[diag(parse_expected_statement_after_outer_attr)]
pub(crate) struct ExpectedStatementAfterOuterAttr {
#[primary_span]
pub span: Span,
}
#[derive(Diagnostic)]
-#[diag(parser_doc_comment_does_not_document_anything, code = "E0585")]
+#[diag(parse_doc_comment_does_not_document_anything, code = "E0585")]
#[help]
pub(crate) struct DocCommentDoesNotDocumentAnything {
#[primary_span]
@@ -812,7 +755,7 @@ pub(crate) struct DocCommentDoesNotDocumentAnything {
}
#[derive(Diagnostic)]
-#[diag(parser_const_let_mutually_exclusive)]
+#[diag(parse_const_let_mutually_exclusive)]
pub(crate) struct ConstLetMutuallyExclusive {
#[primary_span]
#[suggestion(code = "const", applicability = "maybe-incorrect")]
@@ -820,7 +763,7 @@ pub(crate) struct ConstLetMutuallyExclusive {
}
#[derive(Diagnostic)]
-#[diag(parser_invalid_expression_in_let_else)]
+#[diag(parse_invalid_expression_in_let_else)]
pub(crate) struct InvalidExpressionInLetElse {
#[primary_span]
pub span: Span,
@@ -830,7 +773,7 @@ pub(crate) struct InvalidExpressionInLetElse {
}
#[derive(Diagnostic)]
-#[diag(parser_invalid_curly_in_let_else)]
+#[diag(parse_invalid_curly_in_let_else)]
pub(crate) struct InvalidCurlyInLetElse {
#[primary_span]
pub span: Span,
@@ -839,16 +782,16 @@ pub(crate) struct InvalidCurlyInLetElse {
}
#[derive(Diagnostic)]
-#[diag(parser_compound_assignment_expression_in_let)]
+#[diag(parse_compound_assignment_expression_in_let)]
#[help]
pub(crate) struct CompoundAssignmentExpressionInLet {
#[primary_span]
- #[suggestion_short(code = "=", applicability = "maybe-incorrect")]
+ #[suggestion(style = "short", code = "=", applicability = "maybe-incorrect")]
pub span: Span,
}
#[derive(Diagnostic)]
-#[diag(parser_suffixed_literal_in_attribute)]
+#[diag(parse_suffixed_literal_in_attribute)]
#[help]
pub(crate) struct SuffixedLiteralInAttribute {
#[primary_span]
@@ -856,7 +799,7 @@ pub(crate) struct SuffixedLiteralInAttribute {
}
#[derive(Diagnostic)]
-#[diag(parser_invalid_meta_item)]
+#[diag(parse_invalid_meta_item)]
pub(crate) struct InvalidMetaItem {
#[primary_span]
pub span: Span,
@@ -864,8 +807,9 @@ pub(crate) struct InvalidMetaItem {
}
#[derive(Subdiagnostic)]
-#[suggestion_verbose(
- parser_sugg_escape_to_use_as_identifier,
+#[suggestion(
+ parse_sugg_escape_to_use_as_identifier,
+ style = "verbose",
applicability = "maybe-incorrect",
code = "r#"
)]
@@ -876,7 +820,7 @@ pub(crate) struct SuggEscapeToUseAsIdentifier {
}
#[derive(Subdiagnostic)]
-#[suggestion(parser_sugg_remove_comma, applicability = "machine-applicable", code = "")]
+#[suggestion(parse_sugg_remove_comma, applicability = "machine-applicable", code = "")]
pub(crate) struct SuggRemoveComma {
#[primary_span]
pub span: Span,
@@ -884,15 +828,15 @@ pub(crate) struct SuggRemoveComma {
#[derive(Subdiagnostic)]
pub(crate) enum ExpectedIdentifierFound {
- #[label(parser_expected_identifier_found_reserved_identifier)]
+ #[label(parse_expected_identifier_found_reserved_identifier)]
ReservedIdentifier(#[primary_span] Span),
- #[label(parser_expected_identifier_found_keyword)]
+ #[label(parse_expected_identifier_found_keyword)]
Keyword(#[primary_span] Span),
- #[label(parser_expected_identifier_found_reserved_keyword)]
+ #[label(parse_expected_identifier_found_reserved_keyword)]
ReservedKeyword(#[primary_span] Span),
- #[label(parser_expected_identifier_found_doc_comment)]
+ #[label(parse_expected_identifier_found_doc_comment)]
DocComment(#[primary_span] Span),
- #[label(parser_expected_identifier)]
+ #[label(parse_expected_identifier)]
Other(#[primary_span] Span),
}
@@ -918,6 +862,7 @@ pub(crate) struct ExpectedIdentifier {
}
impl<'a, G: EmissionGuarantee> IntoDiagnostic<'a, G> for ExpectedIdentifier {
+ #[track_caller]
fn into_diagnostic(
self,
handler: &'a rustc_errors::Handler,
@@ -926,16 +871,16 @@ impl<'a, G: EmissionGuarantee> IntoDiagnostic<'a, G> for ExpectedIdentifier {
let mut diag = handler.struct_diagnostic(match token_descr {
Some(TokenDescription::ReservedIdentifier) => {
- fluent::parser_expected_identifier_found_reserved_identifier_str
+ fluent::parse_expected_identifier_found_reserved_identifier_str
}
- Some(TokenDescription::Keyword) => fluent::parser_expected_identifier_found_keyword_str,
+ Some(TokenDescription::Keyword) => fluent::parse_expected_identifier_found_keyword_str,
Some(TokenDescription::ReservedKeyword) => {
- fluent::parser_expected_identifier_found_reserved_keyword_str
+ fluent::parse_expected_identifier_found_reserved_keyword_str
}
Some(TokenDescription::DocComment) => {
- fluent::parser_expected_identifier_found_doc_comment_str
+ fluent::parse_expected_identifier_found_doc_comment_str
}
- None => fluent::parser_expected_identifier_found_str,
+ None => fluent::parse_expected_identifier_found_str,
});
diag.set_span(self.span);
diag.set_arg("token", self.token);
@@ -963,6 +908,7 @@ pub(crate) struct ExpectedSemi {
}
impl<'a, G: EmissionGuarantee> IntoDiagnostic<'a, G> for ExpectedSemi {
+ #[track_caller]
fn into_diagnostic(
self,
handler: &'a rustc_errors::Handler,
@@ -971,22 +917,20 @@ impl<'a, G: EmissionGuarantee> IntoDiagnostic<'a, G> for ExpectedSemi {
let mut diag = handler.struct_diagnostic(match token_descr {
Some(TokenDescription::ReservedIdentifier) => {
- fluent::parser_expected_semi_found_reserved_identifier_str
+ fluent::parse_expected_semi_found_reserved_identifier_str
}
- Some(TokenDescription::Keyword) => fluent::parser_expected_semi_found_keyword_str,
+ Some(TokenDescription::Keyword) => fluent::parse_expected_semi_found_keyword_str,
Some(TokenDescription::ReservedKeyword) => {
- fluent::parser_expected_semi_found_reserved_keyword_str
- }
- Some(TokenDescription::DocComment) => {
- fluent::parser_expected_semi_found_doc_comment_str
+ fluent::parse_expected_semi_found_reserved_keyword_str
}
- None => fluent::parser_expected_semi_found_str,
+ Some(TokenDescription::DocComment) => fluent::parse_expected_semi_found_doc_comment_str,
+ None => fluent::parse_expected_semi_found_str,
});
diag.set_span(self.span);
diag.set_arg("token", self.token);
if let Some(unexpected_token_label) = self.unexpected_token_label {
- diag.span_label(unexpected_token_label, fluent::parser_label_unexpected_token);
+ diag.span_label(unexpected_token_label, fluent::parse_label_unexpected_token);
}
self.sugg.add_to_diagnostic(&mut diag);
@@ -997,18 +941,19 @@ impl<'a, G: EmissionGuarantee> IntoDiagnostic<'a, G> for ExpectedSemi {
#[derive(Subdiagnostic)]
pub(crate) enum ExpectedSemiSugg {
+ #[suggestion(parse_sugg_change_this_to_semi, code = ";", applicability = "machine-applicable")]
+ ChangeToSemi(#[primary_span] Span),
#[suggestion(
- parser_sugg_change_this_to_semi,
+ parse_sugg_add_semi,
+ style = "short",
code = ";",
applicability = "machine-applicable"
)]
- ChangeToSemi(#[primary_span] Span),
- #[suggestion_short(parser_sugg_add_semi, code = ";", applicability = "machine-applicable")]
AddSemi(#[primary_span] Span),
}
#[derive(Diagnostic)]
-#[diag(parser_struct_literal_body_without_path)]
+#[diag(parse_struct_literal_body_without_path)]
pub(crate) struct StructLiteralBodyWithoutPath {
#[primary_span]
pub span: Span,
@@ -1026,7 +971,7 @@ pub(crate) struct StructLiteralBodyWithoutPathSugg {
}
#[derive(Diagnostic)]
-#[diag(parser_unmatched_angle_brackets)]
+#[diag(parse_unmatched_angle_brackets)]
pub(crate) struct UnmatchedAngleBrackets {
#[primary_span]
#[suggestion(code = "", applicability = "machine-applicable")]
@@ -1035,7 +980,7 @@ pub(crate) struct UnmatchedAngleBrackets {
}
#[derive(Diagnostic)]
-#[diag(parser_generic_parameters_without_angle_brackets)]
+#[diag(parse_generic_parameters_without_angle_brackets)]
pub(crate) struct GenericParamsWithoutAngleBrackets {
#[primary_span]
pub span: Span,
@@ -1053,17 +998,18 @@ pub(crate) struct GenericParamsWithoutAngleBracketsSugg {
}
#[derive(Diagnostic)]
-#[diag(parser_comparison_operators_cannot_be_chained)]
+#[diag(parse_comparison_operators_cannot_be_chained)]
pub(crate) struct ComparisonOperatorsCannotBeChained {
#[primary_span]
pub span: Vec<Span>,
- #[suggestion_verbose(
- parser_sugg_turbofish_syntax,
+ #[suggestion(
+ parse_sugg_turbofish_syntax,
+ style = "verbose",
code = "::",
applicability = "maybe-incorrect"
)]
pub suggest_turbofish: Option<Span>,
- #[help(parser_sugg_turbofish_syntax)]
+ #[help(parse_sugg_turbofish_syntax)]
#[help(sugg_parentheses_for_function_args)]
pub help_turbofish: Option<()>,
#[subdiagnostic]
@@ -1072,8 +1018,9 @@ pub(crate) struct ComparisonOperatorsCannotBeChained {
#[derive(Subdiagnostic)]
pub(crate) enum ComparisonOperatorsCannotBeChainedSugg {
- #[suggestion_verbose(
+ #[suggestion(
sugg_split_comparison,
+ style = "verbose",
code = " && {middle_term}",
applicability = "maybe-incorrect"
)]
@@ -1092,7 +1039,7 @@ pub(crate) enum ComparisonOperatorsCannotBeChainedSugg {
}
#[derive(Diagnostic)]
-#[diag(parser_question_mark_in_type)]
+#[diag(parse_question_mark_in_type)]
pub(crate) struct QuestionMarkInType {
#[primary_span]
#[label]
@@ -1111,7 +1058,7 @@ pub(crate) struct QuestionMarkInTypeSugg {
}
#[derive(Diagnostic)]
-#[diag(parser_unexpected_parentheses_in_for_head)]
+#[diag(parse_unexpected_parentheses_in_for_head)]
pub(crate) struct ParenthesesInForHead {
#[primary_span]
pub span: Vec<Span>,
@@ -1122,14 +1069,16 @@ pub(crate) struct ParenthesesInForHead {
#[derive(Subdiagnostic)]
#[multipart_suggestion(suggestion, applicability = "machine-applicable")]
pub(crate) struct ParenthesesInForHeadSugg {
- #[suggestion_part(code = "")]
+ #[suggestion_part(code = "{left_snippet}")]
pub left: Span,
- #[suggestion_part(code = "")]
+ pub left_snippet: String,
+ #[suggestion_part(code = "{right_snippet}")]
pub right: Span,
+ pub right_snippet: String,
}
#[derive(Diagnostic)]
-#[diag(parser_doc_comment_on_param_type)]
+#[diag(parse_doc_comment_on_param_type)]
pub(crate) struct DocCommentOnParamType {
#[primary_span]
#[label]
@@ -1137,7 +1086,7 @@ pub(crate) struct DocCommentOnParamType {
}
#[derive(Diagnostic)]
-#[diag(parser_attribute_on_param_type)]
+#[diag(parse_attribute_on_param_type)]
pub(crate) struct AttributeOnParamType {
#[primary_span]
#[label]
@@ -1145,7 +1094,7 @@ pub(crate) struct AttributeOnParamType {
}
#[derive(Diagnostic)]
-#[diag(parser_pattern_method_param_without_body, code = "E0642")]
+#[diag(parse_pattern_method_param_without_body, code = "E0642")]
pub(crate) struct PatternMethodParamWithoutBody {
#[primary_span]
#[suggestion(code = "_", applicability = "machine-applicable")]
@@ -1153,7 +1102,7 @@ pub(crate) struct PatternMethodParamWithoutBody {
}
#[derive(Diagnostic)]
-#[diag(parser_self_param_not_first)]
+#[diag(parse_self_param_not_first)]
pub(crate) struct SelfParamNotFirst {
#[primary_span]
#[label]
@@ -1161,7 +1110,15 @@ pub(crate) struct SelfParamNotFirst {
}
#[derive(Diagnostic)]
-#[diag(parser_const_generic_without_braces)]
+#[diag(parse_invalid_identifier_with_leading_number)]
+pub(crate) struct InvalidIdentiferStartsWithNumber {
+ #[primary_span]
+ #[label]
+ pub span: Span,
+}
+
+#[derive(Diagnostic)]
+#[diag(parse_const_generic_without_braces)]
pub(crate) struct ConstGenericWithoutBraces {
#[primary_span]
pub span: Span,
@@ -1179,7 +1136,7 @@ pub(crate) struct ConstGenericWithoutBracesSugg {
}
#[derive(Diagnostic)]
-#[diag(parser_unexpected_const_param_declaration)]
+#[diag(parse_unexpected_const_param_declaration)]
pub(crate) struct UnexpectedConstParamDeclaration {
#[primary_span]
#[label]
@@ -1211,27 +1168,56 @@ pub(crate) enum UnexpectedConstParamDeclarationSugg {
}
#[derive(Diagnostic)]
-#[diag(parser_unexpected_const_in_generic_param)]
+#[diag(parse_unexpected_const_in_generic_param)]
pub(crate) struct UnexpectedConstInGenericParam {
#[primary_span]
pub span: Span,
- #[suggestion_verbose(code = "", applicability = "maybe-incorrect")]
+ #[suggestion(style = "verbose", code = "", applicability = "maybe-incorrect")]
pub to_remove: Option<Span>,
}
#[derive(Diagnostic)]
-#[diag(parser_async_move_order_incorrect)]
+#[diag(parse_async_move_order_incorrect)]
pub(crate) struct AsyncMoveOrderIncorrect {
#[primary_span]
- #[suggestion_verbose(code = "async move", applicability = "maybe-incorrect")]
+ #[suggestion(style = "verbose", code = "async move", applicability = "maybe-incorrect")]
pub span: Span,
}
#[derive(Diagnostic)]
-#[diag(parser_double_colon_in_bound)]
+#[diag(parse_double_colon_in_bound)]
pub(crate) struct DoubleColonInBound {
#[primary_span]
pub span: Span,
#[suggestion(code = ": ", applicability = "machine-applicable")]
pub between: Span,
}
+
+#[derive(Diagnostic)]
+#[diag(parse_fn_ptr_with_generics)]
+pub(crate) struct FnPtrWithGenerics {
+ #[primary_span]
+ pub span: Span,
+ #[subdiagnostic]
+ pub sugg: Option<FnPtrWithGenericsSugg>,
+}
+
+#[derive(Subdiagnostic)]
+#[multipart_suggestion(suggestion, applicability = "maybe-incorrect")]
+pub(crate) struct FnPtrWithGenericsSugg {
+ #[suggestion_part(code = "{snippet}")]
+ pub left: Span,
+ pub snippet: String,
+ #[suggestion_part(code = "")]
+ pub right: Span,
+ pub arity: usize,
+ pub for_param_list_exists: bool,
+}
+
+#[derive(Diagnostic)]
+#[diag(parse_unexpected_if_with_if)]
+pub(crate) struct UnexpectedIfWithIf(
+ #[primary_span]
+ #[suggestion(applicability = "machine-applicable", code = " ", style = "verbose")]
+ pub Span,
+);
diff --git a/compiler/rustc_parse/src/lexer/mod.rs b/compiler/rustc_parse/src/lexer/mod.rs
index 462bce16a..f027843e6 100644
--- a/compiler/rustc_parse/src/lexer/mod.rs
+++ b/compiler/rustc_parse/src/lexer/mod.rs
@@ -175,20 +175,10 @@ impl<'a> StringReader<'a> {
if string == "_" {
self.sess
.span_diagnostic
- .struct_span_warn(
+ .struct_span_err(
self.mk_sp(suffix_start, self.pos),
"underscore literal suffix is not allowed",
)
- .warn(
- "this was previously accepted by the compiler but is \
- being phased out; it will become a hard error in \
- a future release!",
- )
- .note(
- "see issue #42326 \
- <https://github.com/rust-lang/rust/issues/42326> \
- for more information",
- )
.emit();
None
} else {
@@ -363,55 +353,55 @@ impl<'a> StringReader<'a> {
fn cook_lexer_literal(
&self,
start: BytePos,
- suffix_start: BytePos,
+ end: BytePos,
kind: rustc_lexer::LiteralKind,
) -> (token::LitKind, Symbol) {
- // prefix means `"` or `br"` or `r###"`, ...
- let (lit_kind, mode, prefix_len, postfix_len) = match kind {
+ match kind {
rustc_lexer::LiteralKind::Char { terminated } => {
if !terminated {
self.sess.span_diagnostic.span_fatal_with_code(
- self.mk_sp(start, suffix_start),
+ self.mk_sp(start, end),
"unterminated character literal",
error_code!(E0762),
)
}
- (token::Char, Mode::Char, 1, 1) // ' '
+ self.cook_quoted(token::Char, Mode::Char, start, end, 1, 1) // ' '
}
rustc_lexer::LiteralKind::Byte { terminated } => {
if !terminated {
self.sess.span_diagnostic.span_fatal_with_code(
- self.mk_sp(start + BytePos(1), suffix_start),
+ self.mk_sp(start + BytePos(1), end),
"unterminated byte constant",
error_code!(E0763),
)
}
- (token::Byte, Mode::Byte, 2, 1) // b' '
+ self.cook_quoted(token::Byte, Mode::Byte, start, end, 2, 1) // b' '
}
rustc_lexer::LiteralKind::Str { terminated } => {
if !terminated {
self.sess.span_diagnostic.span_fatal_with_code(
- self.mk_sp(start, suffix_start),
+ self.mk_sp(start, end),
"unterminated double quote string",
error_code!(E0765),
)
}
- (token::Str, Mode::Str, 1, 1) // " "
+ self.cook_quoted(token::Str, Mode::Str, start, end, 1, 1) // " "
}
rustc_lexer::LiteralKind::ByteStr { terminated } => {
if !terminated {
self.sess.span_diagnostic.span_fatal_with_code(
- self.mk_sp(start + BytePos(1), suffix_start),
+ self.mk_sp(start + BytePos(1), end),
"unterminated double quote byte string",
error_code!(E0766),
)
}
- (token::ByteStr, Mode::ByteStr, 2, 1) // b" "
+ self.cook_quoted(token::ByteStr, Mode::ByteStr, start, end, 2, 1) // b" "
}
rustc_lexer::LiteralKind::RawStr { n_hashes } => {
if let Some(n_hashes) = n_hashes {
let n = u32::from(n_hashes);
- (token::StrRaw(n_hashes), Mode::RawStr, 2 + n, 1 + n) // r##" "##
+ let kind = token::StrRaw(n_hashes);
+ self.cook_quoted(kind, Mode::RawStr, start, end, 2 + n, 1 + n) // r##" "##
} else {
self.report_raw_str_error(start, 1);
}
@@ -419,56 +409,59 @@ impl<'a> StringReader<'a> {
rustc_lexer::LiteralKind::RawByteStr { n_hashes } => {
if let Some(n_hashes) = n_hashes {
let n = u32::from(n_hashes);
- (token::ByteStrRaw(n_hashes), Mode::RawByteStr, 3 + n, 1 + n) // br##" "##
+ let kind = token::ByteStrRaw(n_hashes);
+ self.cook_quoted(kind, Mode::RawByteStr, start, end, 3 + n, 1 + n) // br##" "##
} else {
self.report_raw_str_error(start, 2);
}
}
rustc_lexer::LiteralKind::Int { base, empty_int } => {
- return if empty_int {
+ if empty_int {
self.sess
.span_diagnostic
.struct_span_err_with_code(
- self.mk_sp(start, suffix_start),
+ self.mk_sp(start, end),
"no valid digits found for number",
error_code!(E0768),
)
.emit();
(token::Integer, sym::integer(0))
} else {
- self.validate_int_literal(base, start, suffix_start);
- (token::Integer, self.symbol_from_to(start, suffix_start))
- };
+ if matches!(base, Base::Binary | Base::Octal) {
+ let base = base as u32;
+ let s = self.str_from_to(start + BytePos(2), end);
+ for (idx, c) in s.char_indices() {
+ if c != '_' && c.to_digit(base).is_none() {
+ self.err_span_(
+ start + BytePos::from_usize(2 + idx),
+ start + BytePos::from_usize(2 + idx + c.len_utf8()),
+ &format!("invalid digit for a base {} literal", base),
+ );
+ }
+ }
+ }
+ (token::Integer, self.symbol_from_to(start, end))
+ }
}
rustc_lexer::LiteralKind::Float { base, empty_exponent } => {
if empty_exponent {
self.err_span_(start, self.pos, "expected at least one digit in exponent");
}
-
match base {
- Base::Hexadecimal => self.err_span_(
- start,
- suffix_start,
- "hexadecimal float literal is not supported",
- ),
+ Base::Hexadecimal => {
+ self.err_span_(start, end, "hexadecimal float literal is not supported")
+ }
Base::Octal => {
- self.err_span_(start, suffix_start, "octal float literal is not supported")
+ self.err_span_(start, end, "octal float literal is not supported")
}
Base::Binary => {
- self.err_span_(start, suffix_start, "binary float literal is not supported")
+ self.err_span_(start, end, "binary float literal is not supported")
}
- _ => (),
+ _ => {}
}
-
- let id = self.symbol_from_to(start, suffix_start);
- return (token::Float, id);
+ (token::Float, self.symbol_from_to(start, end))
}
- };
- let content_start = start + BytePos(prefix_len);
- let content_end = suffix_start - BytePos(postfix_len);
- let id = self.symbol_from_to(content_start, content_end);
- self.validate_literal_escape(mode, content_start, content_end, prefix_len, postfix_len);
- (lit_kind, id)
+ }
}
#[inline]
@@ -659,24 +652,30 @@ impl<'a> StringReader<'a> {
)
}
- fn validate_literal_escape(
+ fn cook_quoted(
&self,
+ kind: token::LitKind,
mode: Mode,
- content_start: BytePos,
- content_end: BytePos,
+ start: BytePos,
+ end: BytePos,
prefix_len: u32,
postfix_len: u32,
- ) {
+ ) -> (token::LitKind, Symbol) {
+ let mut has_fatal_err = false;
+ let content_start = start + BytePos(prefix_len);
+ let content_end = end - BytePos(postfix_len);
let lit_content = self.str_from_to(content_start, content_end);
unescape::unescape_literal(lit_content, mode, &mut |range, result| {
// Here we only check for errors. The actual unescaping is done later.
if let Err(err) = result {
- let span_with_quotes = self
- .mk_sp(content_start - BytePos(prefix_len), content_end + BytePos(postfix_len));
+ let span_with_quotes = self.mk_sp(start, end);
let (start, end) = (range.start as u32, range.end as u32);
let lo = content_start + BytePos(start);
let hi = lo + BytePos(end - start);
let span = self.mk_sp(lo, hi);
+ if err.is_fatal() {
+ has_fatal_err = true;
+ }
emit_unescape_error(
&self.sess.span_diagnostic,
lit_content,
@@ -688,22 +687,13 @@ impl<'a> StringReader<'a> {
);
}
});
- }
- fn validate_int_literal(&self, base: Base, content_start: BytePos, content_end: BytePos) {
- let base = match base {
- Base::Binary => 2,
- Base::Octal => 8,
- _ => return,
- };
- let s = self.str_from_to(content_start + BytePos(2), content_end);
- for (idx, c) in s.char_indices() {
- let idx = idx as u32;
- if c != '_' && c.to_digit(base).is_none() {
- let lo = content_start + BytePos(2 + idx);
- let hi = content_start + BytePos(2 + idx + c.len_utf8() as u32);
- self.err_span_(lo, hi, &format!("invalid digit for a base {} literal", base));
- }
+ // We normally exclude the quotes for the symbol, but for errors we
+ // include it because it results in clearer error messages.
+ if !has_fatal_err {
+ (kind, Symbol::intern(lit_content))
+ } else {
+ (token::Err, self.symbol_from_to(start, end))
}
}
}
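
The lexer hunks above fold validate_literal_escape and validate_int_literal into the new cook_quoted, which returns the cooked (token::LitKind, Symbol) pair directly and falls back to token::Err (keeping the quotes in the symbol) when unescaping reports a fatal error. The range it unescapes comes from fixed prefix/postfix lengths per literal kind; a small self-contained sketch of that arithmetic, with plain integers standing in for BytePos:

    // prefix/postfix lengths as passed to `cook_quoted` above:
    //   'a'         -> (1, 1)          "..."       -> (1, 1)
    //   b'a'        -> (2, 1)          b"..."      -> (2, 1)
    //   r##"..."##  -> (2 + n, 1 + n)  br##"..."## -> (3 + n, 1 + n)
    fn content_range(start: u32, end: u32, prefix_len: u32, postfix_len: u32) -> (u32, u32) {
        // mirrors `content_start = start + BytePos(prefix_len)` and
        // `content_end = end - BytePos(postfix_len)` in `cook_quoted`
        (start + prefix_len, end - postfix_len)
    }

For example, br#"x"# occupies bytes 0..7, so content_range(0, 7, 3 + 1, 1 + 1) returns (4, 5): exactly the span of the single content byte x.
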
diff --git a/compiler/rustc_parse/src/lexer/unescape_error_reporting.rs b/compiler/rustc_parse/src/lexer/unescape_error_reporting.rs
index f075de714..6373f5b4f 100644
--- a/compiler/rustc_parse/src/lexer/unescape_error_reporting.rs
+++ b/compiler/rustc_parse/src/lexer/unescape_error_reporting.rs
@@ -108,7 +108,7 @@ pub(crate) fn emit_unescape_error(
}
if !has_help {
- let (prefix, msg) = if mode.is_bytes() {
+ let (prefix, msg) = if mode.is_byte() {
("b", "if you meant to write a byte string literal, use double quotes")
} else {
("", "if you meant to write a `str` literal, use double quotes")
@@ -142,7 +142,7 @@ pub(crate) fn emit_unescape_error(
EscapeError::EscapeOnlyChar => {
let (c, char_span) = last_char();
- let msg = if mode.is_bytes() {
+ let msg = if mode.is_byte() {
"byte constant must be escaped"
} else {
"character constant must be escaped"
@@ -182,11 +182,11 @@ pub(crate) fn emit_unescape_error(
let (c, span) = last_char();
let label =
- if mode.is_bytes() { "unknown byte escape" } else { "unknown character escape" };
+ if mode.is_byte() { "unknown byte escape" } else { "unknown character escape" };
let ec = escaped_char(c);
let mut diag = handler.struct_span_err(span, &format!("{}: `{}`", label, ec));
diag.span_label(span, label);
- if c == '{' || c == '}' && !mode.is_bytes() {
+ if c == '{' || c == '}' && !mode.is_byte() {
diag.help(
"if used in a formatting string, curly braces are escaped with `{{` and `}}`",
);
@@ -196,7 +196,7 @@ pub(crate) fn emit_unescape_error(
version control settings",
);
} else {
- if !mode.is_bytes() {
+ if !mode.is_byte() {
diag.span_suggestion(
span_with_quotes,
"if you meant to write a literal backslash (perhaps escaping in a regular expression), consider a raw string literal",
@@ -231,16 +231,23 @@ pub(crate) fn emit_unescape_error(
.emit();
}
EscapeError::NonAsciiCharInByte => {
- assert!(mode.is_bytes());
let (c, span) = last_char();
- let mut err = handler.struct_span_err(span, "non-ASCII character in byte constant");
+ let desc = match mode {
+ Mode::Byte => "byte literal",
+ Mode::ByteStr => "byte string literal",
+ Mode::RawByteStr => "raw byte string literal",
+ _ => panic!("non-is_byte literal paired with NonAsciiCharInByte"),
+ };
+ let mut err = handler.struct_span_err(span, format!("non-ASCII character in {}", desc));
let postfix = if unicode_width::UnicodeWidthChar::width(c).unwrap_or(1) == 0 {
format!(" but is {:?}", c)
} else {
String::new()
};
- err.span_label(span, &format!("byte constant must be ASCII{}", postfix));
- if (c as u32) <= 0xFF {
+ err.span_label(span, &format!("must be ASCII{}", postfix));
+            // Note: the \\xHH suggestions are not given for raw byte string
+            // literals, because they are raw and so cannot use any escapes.
+ if (c as u32) <= 0xFF && mode != Mode::RawByteStr {
err.span_suggestion(
span,
&format!(
@@ -250,9 +257,9 @@ pub(crate) fn emit_unescape_error(
format!("\\x{:X}", c as u32),
Applicability::MaybeIncorrect,
);
- } else if matches!(mode, Mode::Byte) {
+ } else if mode == Mode::Byte {
err.span_label(span, "this multibyte character does not fit into a single byte");
- } else if matches!(mode, Mode::ByteStr) {
+ } else if mode != Mode::RawByteStr {
let mut utf8 = String::new();
utf8.push(c);
err.span_suggestion(
@@ -270,19 +277,6 @@ pub(crate) fn emit_unescape_error(
}
err.emit();
}
- EscapeError::NonAsciiCharInByteString => {
- assert!(mode.is_bytes());
- let (c, span) = last_char();
- let postfix = if unicode_width::UnicodeWidthChar::width(c).unwrap_or(1) == 0 {
- format!(" but is {:?}", c)
- } else {
- String::new()
- };
- handler
- .struct_span_err(span, "raw byte string must be ASCII")
- .span_label(span, &format!("must be ASCII{}", postfix))
- .emit();
- }
EscapeError::OutOfRangeHexEscape => {
handler
.struct_span_err(span, "out of range hex escape")
diff --git a/compiler/rustc_parse/src/lexer/unicode_chars.rs b/compiler/rustc_parse/src/lexer/unicode_chars.rs
index 2c68cc589..f1b50296e 100644
--- a/compiler/rustc_parse/src/lexer/unicode_chars.rs
+++ b/compiler/rustc_parse/src/lexer/unicode_chars.rs
@@ -1,5 +1,5 @@
-// Characters and their corresponding confusables were collected from
-// https://www.unicode.org/Public/security/10.0.0/confusables.txt
+//! Characters and their corresponding confusables were collected from
+//! <https://www.unicode.org/Public/security/10.0.0/confusables.txt>
use super::StringReader;
use crate::token::{self, Delimiter};
diff --git a/compiler/rustc_parse/src/lib.rs b/compiler/rustc_parse/src/lib.rs
index 3dcadb4c9..b49a01d75 100644
--- a/compiler/rustc_parse/src/lib.rs
+++ b/compiler/rustc_parse/src/lib.rs
@@ -3,6 +3,7 @@
#![feature(array_windows)]
#![feature(box_patterns)]
#![feature(if_let_guard)]
+#![feature(iter_intersperse)]
#![feature(let_chains)]
#![feature(never_type)]
#![feature(rustc_attrs)]
@@ -14,8 +15,7 @@ extern crate tracing;
use rustc_ast as ast;
use rustc_ast::token;
use rustc_ast::tokenstream::TokenStream;
-use rustc_ast::Attribute;
-use rustc_ast::{AttrItem, MetaItem};
+use rustc_ast::{AttrItem, Attribute, MetaItem};
use rustc_ast_pretty::pprust;
use rustc_data_structures::sync::Lrc;
use rustc_errors::{Applicability, Diagnostic, FatalError, Level, PResult};
@@ -256,10 +256,12 @@ pub fn parse_cfg_attr(
parse_sess: &ParseSess,
) -> Option<(MetaItem, Vec<(AttrItem, Span)>)> {
match attr.get_normal_item().args {
- ast::MacArgs::Delimited(dspan, delim, ref tts) if !tts.is_empty() => {
+ ast::AttrArgs::Delimited(ast::DelimArgs { dspan, delim, ref tokens })
+ if !tokens.is_empty() =>
+ {
let msg = "wrong `cfg_attr` delimiters";
crate::validate_attr::check_meta_bad_delim(parse_sess, dspan, delim, msg);
- match parse_in(parse_sess, tts.clone(), "`cfg_attr` input", |p| p.parse_cfg_attr()) {
+ match parse_in(parse_sess, tokens.clone(), "`cfg_attr` input", |p| p.parse_cfg_attr()) {
Ok(r) => return Some(r),
Err(mut e) => {
e.help(&format!("the valid syntax is `{}`", CFG_ATTR_GRAMMAR_HELP))
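
The parse_cfg_attr hunk above tracks the attribute-argument rework in rustc_ast: the tuple variant MacArgs::Delimited(dspan, delim, tts) becomes AttrArgs::Delimited(DelimArgs { dspan, delim, tokens }). A minimal sketch of the new destructuring, assuming the in-tree rustc_ast types; the helper is invented for illustration:

    use rustc_ast as ast;
    use rustc_ast::tokenstream::TokenStream;

    // Return the non-empty delimited token stream of an attribute's arguments, if any.
    fn delimited_tokens(args: &ast::AttrArgs) -> Option<&TokenStream> {
        match args {
            ast::AttrArgs::Delimited(ast::DelimArgs { tokens, .. }) if !tokens.is_empty() => {
                Some(tokens)
            }
            _ => None,
        }
    }
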
diff --git a/compiler/rustc_parse/src/parser/attr.rs b/compiler/rustc_parse/src/parser/attr.rs
index 9e4565694..c7d239b64 100644
--- a/compiler/rustc_parse/src/parser/attr.rs
+++ b/compiler/rustc_parse/src/parser/attr.rs
@@ -55,7 +55,7 @@ impl<'a> Parser<'a> {
let span = self.token.span;
let mut err = self.sess.span_diagnostic.struct_span_err_with_code(
span,
- fluent::parser_inner_doc_comment_not_permitted,
+ fluent::parse_inner_doc_comment_not_permitted,
error_code!(E0753),
);
if let Some(replacement_span) = self.annotate_following_item_if_applicable(
@@ -200,7 +200,7 @@ impl<'a> Parser<'a> {
Some(InnerAttrForbiddenReason::AfterOuterDocComment { prev_doc_comment_span }) => {
let mut diag = self.struct_span_err(
attr_sp,
- fluent::parser_inner_attr_not_permitted_after_outer_doc_comment,
+ fluent::parse_inner_attr_not_permitted_after_outer_doc_comment,
);
diag.span_label(attr_sp, fluent::label_attr)
.span_label(prev_doc_comment_span, fluent::label_prev_doc_comment);
@@ -209,18 +209,18 @@ impl<'a> Parser<'a> {
Some(InnerAttrForbiddenReason::AfterOuterAttribute { prev_outer_attr_sp }) => {
let mut diag = self.struct_span_err(
attr_sp,
- fluent::parser_inner_attr_not_permitted_after_outer_attr,
+ fluent::parse_inner_attr_not_permitted_after_outer_attr,
);
diag.span_label(attr_sp, fluent::label_attr)
.span_label(prev_outer_attr_sp, fluent::label_prev_attr);
diag
}
Some(InnerAttrForbiddenReason::InCodeBlock) | None => {
- self.struct_span_err(attr_sp, fluent::parser_inner_attr_not_permitted)
+ self.struct_span_err(attr_sp, fluent::parse_inner_attr_not_permitted)
}
};
- diag.note(fluent::parser_inner_attr_explanation);
+ diag.note(fluent::parse_inner_attr_explanation);
if self
.annotate_following_item_if_applicable(
&mut diag,
@@ -229,7 +229,7 @@ impl<'a> Parser<'a> {
)
.is_some()
{
- diag.note(fluent::parser_outer_attr_explanation);
+ diag.note(fluent::parse_outer_attr_explanation);
};
diag.emit();
}
@@ -245,9 +245,9 @@ impl<'a> Parser<'a> {
/// PATH `=` UNSUFFIXED_LIT
/// The delimiters or `=` are still put into the resulting token stream.
pub fn parse_attr_item(&mut self, capture_tokens: bool) -> PResult<'a, ast::AttrItem> {
- let item = match self.token.kind {
- token::Interpolated(ref nt) => match **nt {
- Nonterminal::NtMeta(ref item) => Some(item.clone().into_inner()),
+ let item = match &self.token.kind {
+ token::Interpolated(nt) => match &**nt {
+ Nonterminal::NtMeta(item) => Some(item.clone().into_inner()),
_ => None,
},
_ => None,
@@ -315,9 +315,10 @@ impl<'a> Parser<'a> {
Ok(attrs)
}
- pub(crate) fn parse_unsuffixed_lit(&mut self) -> PResult<'a, ast::Lit> {
- let lit = self.parse_lit()?;
- debug!("checking if {:?} is unusuffixed", lit);
+ // Note: must be unsuffixed.
+ pub(crate) fn parse_unsuffixed_meta_item_lit(&mut self) -> PResult<'a, ast::MetaItemLit> {
+ let lit = self.parse_meta_item_lit()?;
+ debug!("checking if {:?} is unsuffixed", lit);
if !lit.kind.is_unsuffixed() {
self.sess.emit_err(SuffixedLiteralInAttribute { span: lit.span });
@@ -364,9 +365,9 @@ impl<'a> Parser<'a> {
/// meta_item_inner : (meta_item | UNSUFFIXED_LIT) (',' meta_item_inner)? ;
/// ```
pub fn parse_meta_item(&mut self) -> PResult<'a, ast::MetaItem> {
- let nt_meta = match self.token.kind {
- token::Interpolated(ref nt) => match **nt {
- token::NtMeta(ref e) => Some(e.clone()),
+ let nt_meta = match &self.token.kind {
+ token::Interpolated(nt) => match &**nt {
+ token::NtMeta(e) => Some(e.clone()),
_ => None,
},
_ => None,
@@ -391,7 +392,7 @@ impl<'a> Parser<'a> {
pub(crate) fn parse_meta_item_kind(&mut self) -> PResult<'a, ast::MetaItemKind> {
Ok(if self.eat(&token::Eq) {
- ast::MetaItemKind::NameValue(self.parse_unsuffixed_lit()?)
+ ast::MetaItemKind::NameValue(self.parse_unsuffixed_meta_item_lit()?)
} else if self.check(&token::OpenDelim(Delimiter::Parenthesis)) {
// Matches `meta_seq = ( COMMASEP(meta_item_inner) )`.
let (list, _) = self.parse_paren_comma_seq(|p| p.parse_meta_item_inner())?;
@@ -403,8 +404,8 @@ impl<'a> Parser<'a> {
/// Matches `meta_item_inner : (meta_item | UNSUFFIXED_LIT) ;`.
fn parse_meta_item_inner(&mut self) -> PResult<'a, ast::NestedMetaItem> {
- match self.parse_unsuffixed_lit() {
- Ok(lit) => return Ok(ast::NestedMetaItem::Literal(lit)),
+ match self.parse_unsuffixed_meta_item_lit() {
+ Ok(lit) => return Ok(ast::NestedMetaItem::Lit(lit)),
Err(err) => err.cancel(),
}
diff --git a/compiler/rustc_parse/src/parser/attr_wrapper.rs b/compiler/rustc_parse/src/parser/attr_wrapper.rs
index 1b16ecb5e..a084a7010 100644
--- a/compiler/rustc_parse/src/parser/attr_wrapper.rs
+++ b/compiler/rustc_parse/src/parser/attr_wrapper.rs
@@ -5,7 +5,8 @@ use rustc_ast::tokenstream::{AttrTokenTree, DelimSpan, LazyAttrTokenStream, Spac
use rustc_ast::{self as ast};
use rustc_ast::{AttrVec, Attribute, HasAttrs, HasTokens};
use rustc_errors::PResult;
-use rustc_span::{sym, Span};
+use rustc_session::parse::ParseSess;
+use rustc_span::{sym, Span, DUMMY_SP};
use std::convert::TryInto;
use std::ops::Range;
@@ -39,12 +40,17 @@ impl AttrWrapper {
pub fn empty() -> AttrWrapper {
AttrWrapper { attrs: AttrVec::new(), start_pos: usize::MAX }
}
- // FIXME: Delay span bug here?
- pub(crate) fn take_for_recovery(self) -> AttrVec {
+
+ pub(crate) fn take_for_recovery(self, sess: &ParseSess) -> AttrVec {
+ sess.span_diagnostic.delay_span_bug(
+ self.attrs.get(0).map(|attr| attr.span).unwrap_or(DUMMY_SP),
+ "AttrVec is taken for recovery but no error is produced",
+ );
+
self.attrs
}
- // Prepend `self.attrs` to `attrs`.
+ /// Prepend `self.attrs` to `attrs`.
// FIXME: require passing an NT to prevent misuse of this method
pub(crate) fn prepend_to_nt_inner(self, attrs: &mut AttrVec) {
let mut self_attrs = self.attrs;
diff --git a/compiler/rustc_parse/src/parser/diagnostics.rs b/compiler/rustc_parse/src/parser/diagnostics.rs
index 309717350..c316a4dd6 100644
--- a/compiler/rustc_parse/src/parser/diagnostics.rs
+++ b/compiler/rustc_parse/src/parser/diagnostics.rs
@@ -18,6 +18,7 @@ use crate::errors::{
};
use crate::lexer::UnmatchedBrace;
+use crate::parser;
use rustc_ast as ast;
use rustc_ast::ptr::P;
use rustc_ast::token::{self, Delimiter, Lit, LitKind, TokenKind};
@@ -37,11 +38,10 @@ use rustc_session::errors::ExprParenthesesNeeded;
use rustc_span::source_map::Spanned;
use rustc_span::symbol::{kw, sym, Ident};
use rustc_span::{Span, SpanSnippetError, DUMMY_SP};
-use std::ops::{Deref, DerefMut};
-
use std::mem::take;
-
-use crate::parser;
+use std::ops::{Deref, DerefMut};
+use thin_vec::{thin_vec, ThinVec};
+use tracing::{debug, trace};
/// Creates a placeholder argument.
pub(super) fn dummy_arg(ident: Ident) -> Param {
@@ -65,7 +65,7 @@ pub(super) fn dummy_arg(ident: Ident) -> Param {
pub(super) trait RecoverQPath: Sized + 'static {
const PATH_STYLE: PathStyle = PathStyle::Expr;
fn to_ty(&self) -> Option<P<Ty>>;
- fn recovered(qself: Option<QSelf>, path: ast::Path) -> Self;
+ fn recovered(qself: Option<P<QSelf>>, path: ast::Path) -> Self;
}
impl RecoverQPath for Ty {
@@ -73,7 +73,7 @@ impl RecoverQPath for Ty {
fn to_ty(&self) -> Option<P<Ty>> {
Some(P(self.clone()))
}
- fn recovered(qself: Option<QSelf>, path: ast::Path) -> Self {
+ fn recovered(qself: Option<P<QSelf>>, path: ast::Path) -> Self {
Self {
span: path.span,
kind: TyKind::Path(qself, path),
@@ -87,7 +87,7 @@ impl RecoverQPath for Pat {
fn to_ty(&self) -> Option<P<Ty>> {
self.to_ty()
}
- fn recovered(qself: Option<QSelf>, path: ast::Path) -> Self {
+ fn recovered(qself: Option<P<QSelf>>, path: ast::Path) -> Self {
Self {
span: path.span,
kind: PatKind::Path(qself, path),
@@ -101,7 +101,7 @@ impl RecoverQPath for Expr {
fn to_ty(&self) -> Option<P<Ty>> {
self.to_ty()
}
- fn recovered(qself: Option<QSelf>, path: ast::Path) -> Self {
+ fn recovered(qself: Option<P<QSelf>>, path: ast::Path) -> Self {
Self {
span: path.span,
kind: ExprKind::Path(qself, path),
@@ -224,9 +224,9 @@ impl MultiSugg {
}
}
-// SnapshotParser is used to create a snapshot of the parser
-// without causing duplicate errors being emitted when the `Parser`
-// is dropped.
+/// SnapshotParser is used to create a snapshot of the parser
+/// without causing duplicate errors to be emitted when the `Parser`
+/// is dropped.
pub struct SnapshotParser<'a> {
parser: Parser<'a>,
unclosed_delims: Vec<UnmatchedBrace>,
@@ -638,8 +638,11 @@ impl<'a> Parser<'a> {
// field: value,
// }
let mut snapshot = self.create_snapshot_for_diagnostic();
- let path =
- Path { segments: vec![], span: self.prev_token.span.shrink_to_lo(), tokens: None };
+ let path = Path {
+ segments: ThinVec::new(),
+ span: self.prev_token.span.shrink_to_lo(),
+ tokens: None,
+ };
let struct_expr = snapshot.parse_struct_expr(None, path, false);
let block_tail = self.parse_block_tail(lo, s, AttemptLocalParseRecovery::No);
return Some(match (struct_expr, block_tail) {
@@ -769,6 +772,10 @@ impl<'a> Parser<'a> {
segment: &PathSegment,
end: &[&TokenKind],
) -> bool {
+ if !self.may_recover() {
+ return false;
+ }
+
// This function is intended to be invoked after parsing a path segment where there are two
// cases:
//
@@ -863,6 +870,10 @@ impl<'a> Parser<'a> {
/// Check if a method call with an intended turbofish has been written without surrounding
/// angle brackets.
pub(super) fn check_turbofish_missing_angle_brackets(&mut self, segment: &mut PathSegment) {
+ if !self.may_recover() {
+ return;
+ }
+
if token::ModSep == self.token.kind && segment.args.is_none() {
let snapshot = self.create_snapshot_for_diagnostic();
self.bump();
@@ -926,7 +937,7 @@ impl<'a> Parser<'a> {
if self.eat(&token::Gt) {
e.span_suggestion_verbose(
binop.span.shrink_to_lo(),
- fluent::parser_sugg_turbofish_syntax,
+ fluent::parse_sugg_turbofish_syntax,
"::",
Applicability::MaybeIncorrect,
)
@@ -962,7 +973,7 @@ impl<'a> Parser<'a> {
inner_op: &Expr,
outer_op: &Spanned<AssocOp>,
) -> bool /* advanced the cursor */ {
- if let ExprKind::Binary(op, ref l1, ref r1) = inner_op.kind {
+ if let ExprKind::Binary(op, l1, r1) = &inner_op.kind {
if let ExprKind::Field(_, ident) = l1.kind
&& ident.as_str().parse::<i32>().is_err()
&& !matches!(r1.kind, ExprKind::Lit(_))
@@ -1068,8 +1079,8 @@ impl<'a> Parser<'a> {
let mk_err_expr = |this: &Self, span| Ok(Some(this.mk_expr(span, ExprKind::Err)));
- match inner_op.kind {
- ExprKind::Binary(op, ref l1, ref r1) if op.node.is_comparison() => {
+ match &inner_op.kind {
+ ExprKind::Binary(op, l1, r1) if op.node.is_comparison() => {
let mut err = ComparisonOperatorsCannotBeChained {
span: vec![op.span, self.prev_token.span],
suggest_turbofish: None,
@@ -1226,8 +1237,8 @@ impl<'a> Parser<'a> {
let bounds = self.parse_generic_bounds(None)?;
let sum_span = ty.span.to(self.prev_token.span);
- let sub = match ty.kind {
- TyKind::Rptr(ref lifetime, ref mut_ty) => {
+ let sub = match &ty.kind {
+ TyKind::Rptr(lifetime, mut_ty) => {
let sum_with_parens = pprust::to_string(|s| {
s.s.word("&");
s.print_opt_lifetime(lifetime);
@@ -1396,6 +1407,10 @@ impl<'a> Parser<'a> {
&mut self,
base: P<T>,
) -> PResult<'a, P<T>> {
+ if !self.may_recover() {
+ return Ok(base);
+ }
+
// Do not add `::` to expected tokens.
if self.token == token::ModSep {
if let Some(ty) = base.to_ty() {
@@ -1414,7 +1429,7 @@ impl<'a> Parser<'a> {
) -> PResult<'a, P<T>> {
self.expect(&token::ModSep)?;
- let mut path = ast::Path { segments: Vec::new(), span: DUMMY_SP, tokens: None };
+ let mut path = ast::Path { segments: ThinVec::new(), span: DUMMY_SP, tokens: None };
self.parse_path_segments(&mut path.segments, T::PATH_STYLE, None)?;
path.span = ty_span.to(self.prev_token.span);
@@ -1425,7 +1440,7 @@ impl<'a> Parser<'a> {
});
let path_span = ty_span.shrink_to_hi(); // Use an empty path since `position == 0`.
- Ok(P(T::recovered(Some(QSelf { ty, path_span, position: 0 }), path)))
+ Ok(P(T::recovered(Some(P(QSelf { ty, path_span, position: 0 })), path)))
}
pub fn maybe_consume_incorrect_semicolon(&mut self, items: &[P<Item>]) -> bool {
@@ -1641,15 +1656,29 @@ impl<'a> Parser<'a> {
(token::CloseDelim(Delimiter::Parenthesis), Some(begin_par_sp)) => {
self.bump();
+ let sm = self.sess.source_map();
+ let left = begin_par_sp;
+ let right = self.prev_token.span;
+ let left_snippet = if let Ok(snip) = sm.span_to_prev_source(left) &&
+ !snip.ends_with(' ') {
+ " ".to_string()
+ } else {
+ "".to_string()
+ };
+
+ let right_snippet = if let Ok(snip) = sm.span_to_next_source(right) &&
+ !snip.starts_with(' ') {
+ " ".to_string()
+ } else {
+ "".to_string()
+ };
+
self.sess.emit_err(ParenthesesInForHead {
- span: vec![begin_par_sp, self.prev_token.span],
+ span: vec![left, right],
// With e.g. `for (x) in y)` this would replace `(x) in y)`
// with `x) in y)` which is syntactically invalid.
// However, this is prevented before we get here.
- sugg: ParenthesesInForHeadSugg {
- left: begin_par_sp,
- right: self.prev_token.span,
- },
+ sugg: ParenthesesInForHeadSugg { left, right, left_snippet, right_snippet },
});
// Unwrap `(pat)` into `pat` to avoid the `unused_parens` lint.
@@ -2408,7 +2437,7 @@ impl<'a> Parser<'a> {
None,
Path {
span: new_span,
- segments: vec![
+ segments: thin_vec![
PathSegment::from_ident(*old_ident),
PathSegment::from_ident(*ident),
],
@@ -2536,7 +2565,7 @@ impl<'a> Parser<'a> {
if let [a, b] = segments {
let (a_span, b_span) = (a.span(), b.span());
let between_span = a_span.shrink_to_hi().to(b_span.shrink_to_lo());
- if self.span_to_snippet(between_span).as_ref().map(|a| &a[..]) == Ok(":: ") {
+ if self.span_to_snippet(between_span).as_deref() == Ok(":: ") {
return Err(DoubleColonInBound {
span: path.span.shrink_to_hi(),
between: between_span,
diff --git a/compiler/rustc_parse/src/parser/expr.rs b/compiler/rustc_parse/src/parser/expr.rs
index a781748ef..f6a6ed379 100644
--- a/compiler/rustc_parse/src/parser/expr.rs
+++ b/compiler/rustc_parse/src/parser/expr.rs
@@ -7,46 +7,42 @@ use super::{
};
use crate::errors::{
ArrayBracketsInsteadOfSpaces, ArrayBracketsInsteadOfSpacesSugg, AsyncMoveOrderIncorrect,
- BinaryFloatLiteralNotSupported, BracesForStructLiteral, CatchAfterTry, CommaAfterBaseStruct,
- ComparisonInterpretedAsGeneric, ComparisonOrShiftInterpretedAsGenericSugg,
- DoCatchSyntaxRemoved, DotDotDot, EqFieldInit, ExpectedElseBlock, ExpectedExpressionFoundLet,
+ BracesForStructLiteral, CatchAfterTry, CommaAfterBaseStruct, ComparisonInterpretedAsGeneric,
+ ComparisonOrShiftInterpretedAsGenericSugg, DoCatchSyntaxRemoved, DotDotDot, EqFieldInit,
+ ExpectedElseBlock, ExpectedEqForLetExpr, ExpectedExpressionFoundLet,
FieldExpressionWithGeneric, FloatLiteralRequiresIntegerPart, FoundExprWouldBeStmt,
- HexadecimalFloatLiteralNotSupported, IfExpressionMissingCondition,
- IfExpressionMissingThenBlock, IfExpressionMissingThenBlockSub, IntLiteralTooLarge,
+ IfExpressionMissingCondition, IfExpressionMissingThenBlock, IfExpressionMissingThenBlockSub,
InvalidBlockMacroSegment, InvalidComparisonOperator, InvalidComparisonOperatorSub,
- InvalidFloatLiteralSuffix, InvalidFloatLiteralWidth, InvalidIntLiteralWidth,
- InvalidInterpolatedExpression, InvalidLiteralSuffix, InvalidLiteralSuffixOnTupleIndex,
- InvalidLogicalOperator, InvalidLogicalOperatorSub, InvalidNumLiteralBasePrefix,
- InvalidNumLiteralSuffix, LabeledLoopInBreak, LeadingPlusNotSupported, LeftArrowOperator,
+ InvalidInterpolatedExpression, InvalidLiteralSuffixOnTupleIndex, InvalidLogicalOperator,
+ InvalidLogicalOperatorSub, LabeledLoopInBreak, LeadingPlusNotSupported, LeftArrowOperator,
LifetimeInBorrowExpression, MacroInvocationWithQualifiedPath, MalformedLoopLabel,
MatchArmBodyWithoutBraces, MatchArmBodyWithoutBracesSugg, MissingCommaAfterMatchArm,
- MissingInInForLoop, MissingInInForLoopSub, MissingSemicolonBeforeArray, NoFieldsForFnCall,
- NotAsNegationOperator, NotAsNegationOperatorSub, OctalFloatLiteralNotSupported,
+ MissingDotDot, MissingInInForLoop, MissingInInForLoopSub, MissingSemicolonBeforeArray,
+ NoFieldsForFnCall, NotAsNegationOperator, NotAsNegationOperatorSub,
OuterAttributeNotAllowedOnIfElse, ParenthesesWithStructFields,
RequireColonAfterLabeledExpression, ShiftInterpretedAsGeneric, StructLiteralNotAllowedHere,
- StructLiteralNotAllowedHereSugg, TildeAsUnaryOperator, UnexpectedTokenAfterLabel,
- UnexpectedTokenAfterLabelSugg, WrapExpressionInParentheses,
+ StructLiteralNotAllowedHereSugg, TildeAsUnaryOperator, UnexpectedIfWithIf,
+ UnexpectedTokenAfterLabel, UnexpectedTokenAfterLabelSugg, WrapExpressionInParentheses,
};
use crate::maybe_recover_from_interpolated_ty_qpath;
-
use core::mem;
use rustc_ast::ptr::P;
use rustc_ast::token::{self, Delimiter, Token, TokenKind};
use rustc_ast::tokenstream::Spacing;
+use rustc_ast::util::case::Case;
use rustc_ast::util::classify;
-use rustc_ast::util::literal::LitError;
use rustc_ast::util::parser::{prec_let_scrutinee_needs_par, AssocOp, Fixity};
use rustc_ast::visit::Visitor;
-use rustc_ast::{self as ast, AttrStyle, AttrVec, CaptureBy, ExprField, Lit, UnOp, DUMMY_NODE_ID};
+use rustc_ast::{self as ast, AttrStyle, AttrVec, CaptureBy, ExprField, UnOp, DUMMY_NODE_ID};
use rustc_ast::{AnonConst, BinOp, BinOpKind, FnDecl, FnRetTy, MacCall, Param, Ty, TyKind};
use rustc_ast::{Arm, Async, BlockCheckMode, Expr, ExprKind, Label, Movability, RangeLimits};
-use rustc_ast::{ClosureBinder, StmtKind};
+use rustc_ast::{ClosureBinder, MetaItemLit, StmtKind};
use rustc_ast_pretty::pprust;
use rustc_errors::{
Applicability, Diagnostic, DiagnosticBuilder, ErrorGuaranteed, IntoDiagnostic, PResult,
StashKey,
};
-use rustc_session::errors::ExprParenthesesNeeded;
+use rustc_session::errors::{report_lit_error, ExprParenthesesNeeded};
use rustc_session::lint::builtin::BREAK_WITH_LABEL_AND_LOOP;
use rustc_session::lint::BuiltinLintDiagnostics;
use rustc_span::source_map::{self, Span, Spanned};
@@ -132,7 +128,7 @@ impl<'a> Parser<'a> {
Ok(expr) => Ok(expr),
Err(mut err) => match self.token.ident() {
Some((Ident { name: kw::Underscore, .. }, false))
- if self.look_ahead(1, |t| t == &token::Comma) =>
+ if self.may_recover() && self.look_ahead(1, |t| t == &token::Comma) =>
{
// Special-case handling of `foo(_, _, _)`
err.emit();
@@ -394,20 +390,11 @@ impl<'a> Parser<'a> {
// want to keep their span info to improve diagnostics in these cases in a later stage.
(true, Some(AssocOp::Multiply)) | // `{ 42 } *foo = bar;` or `{ 42 } * 3`
(true, Some(AssocOp::Subtract)) | // `{ 42 } -5`
- (true, Some(AssocOp::Add)) // `{ 42 } + 42
- // If the next token is a keyword, then the tokens above *are* unambiguously incorrect:
- // `if x { a } else { b } && if y { c } else { d }`
- if !self.look_ahead(1, |t| t.is_used_keyword()) => {
- // These cases are ambiguous and can't be identified in the parser alone.
- let sp = self.sess.source_map().start_point(self.token.span);
- self.sess.ambiguous_block_expr_parse.borrow_mut().insert(sp, lhs.span);
- false
- }
- (true, Some(AssocOp::LAnd)) |
- (true, Some(AssocOp::LOr)) |
- (true, Some(AssocOp::BitOr)) => {
- // `{ 42 } &&x` (#61475) or `{ 42 } && if x { 1 } else { 0 }`. Separated from the
- // above due to #74233.
+ (true, Some(AssocOp::Add)) | // `{ 42 } + 42` (unary plus)
+ (true, Some(AssocOp::LAnd)) | // `{ 42 } &&x` (#61475) or `{ 42 } && if x { 1 } else { 0 }`
+ (true, Some(AssocOp::LOr)) | // `{ 42 } || 42` ("logical or" or closure)
+ (true, Some(AssocOp::BitOr)) // `{ 42 } | 42` or `{ 42 } |x| 42`
+ => {
// These cases are ambiguous and can't be identified in the parser alone.
//
// Bitwise AND is left out because guessing intent is hard. We can make
@@ -418,7 +405,7 @@ impl<'a> Parser<'a> {
self.sess.ambiguous_block_expr_parse.borrow_mut().insert(sp, lhs.span);
false
}
- (true, Some(ref op)) if !op.can_continue_expr_unambiguously() => false,
+ (true, Some(op)) if !op.can_continue_expr_unambiguously() => false,
(true, Some(_)) => {
self.error_found_expr_would_be_stmt(lhs);
true
@@ -456,7 +443,7 @@ impl<'a> Parser<'a> {
return None;
}
(Some(op), _) => (op, self.token.span),
- (None, Some((Ident { name: sym::and, span }, false))) => {
+ (None, Some((Ident { name: sym::and, span }, false))) if self.may_recover() => {
self.sess.emit_err(InvalidLogicalOperator {
span: self.token.span,
incorrect: "and".into(),
@@ -464,7 +451,7 @@ impl<'a> Parser<'a> {
});
(AssocOp::LAnd, span)
}
- (None, Some((Ident { name: sym::or, span }, false))) => {
+ (None, Some((Ident { name: sym::or, span }, false))) if self.may_recover() => {
self.sess.emit_err(InvalidLogicalOperator {
span: self.token.span,
incorrect: "or".into(),
@@ -615,7 +602,7 @@ impl<'a> Parser<'a> {
token::Ident(..) if this.token.is_keyword(kw::Box) => {
make_it!(this, attrs, |this, _| this.parse_box_expr(lo))
}
- token::Ident(..) if this.is_mistaken_not_ident_negation() => {
+ token::Ident(..) if this.may_recover() && this.is_mistaken_not_ident_negation() => {
make_it!(this, attrs, |this, _| this.recover_not_expr(lo))
}
_ => return this.parse_dot_or_call_expr(Some(attrs)),
@@ -718,6 +705,10 @@ impl<'a> Parser<'a> {
let cast_expr = match self.parse_as_cast_ty() {
Ok(rhs) => mk_expr(self, lhs, rhs),
Err(type_err) => {
+ if !self.may_recover() {
+ return Err(type_err);
+ }
+
// Rewind to before attempting to parse the type with generics, to recover
// from situations like `x as usize < y` in which we first tried to parse
// `usize < y` as a type with generic arguments.
@@ -829,23 +820,18 @@ impl<'a> Parser<'a> {
("cast", None)
};
- // Save the memory location of expr before parsing any following postfix operators.
- // This will be compared with the memory location of the output expression.
- // If they different we can assume we parsed another expression because the existing expression is not reallocated.
- let addr_before = &*cast_expr as *const _ as usize;
let with_postfix = self.parse_dot_or_call_expr_with_(cast_expr, span)?;
- let changed = addr_before != &*with_postfix as *const _ as usize;
// Check if an illegal postfix operator has been added after the cast.
- // If the resulting expression is not a cast, or has a different memory location, it is an illegal postfix operator.
- if !matches!(with_postfix.kind, ExprKind::Cast(_, _) | ExprKind::Type(_, _)) || changed {
+ // If the resulting expression is not a cast, it is an illegal postfix operator.
+ if !matches!(with_postfix.kind, ExprKind::Cast(_, _) | ExprKind::Type(_, _)) {
let msg = format!(
"{cast_kind} cannot be followed by {}",
match with_postfix.kind {
ExprKind::Index(_, _) => "indexing",
ExprKind::Try(_) => "`?`",
ExprKind::Field(_, _) => "a field access",
- ExprKind::MethodCall(_, _, _, _) => "a method call",
+ ExprKind::MethodCall(_) => "a method call",
ExprKind::Call(_, _) => "a function call",
ExprKind::Await(_) => "`.await`",
ExprKind::Err => return Ok(with_postfix),
@@ -1197,6 +1183,10 @@ impl<'a> Parser<'a> {
seq: &mut PResult<'a, P<Expr>>,
snapshot: Option<(SnapshotParser<'a>, ExprKind)>,
) -> Option<P<Expr>> {
+ if !self.may_recover() {
+ return None;
+ }
+
match (seq.as_mut(), snapshot) {
(Err(err), Some((mut snapshot, ExprKind::Path(None, path)))) => {
snapshot.bump(); // `(`
@@ -1263,24 +1253,32 @@ impl<'a> Parser<'a> {
}
let fn_span_lo = self.token.span;
- let mut segment = self.parse_path_segment(PathStyle::Expr, None)?;
- self.check_trailing_angle_brackets(&segment, &[&token::OpenDelim(Delimiter::Parenthesis)]);
- self.check_turbofish_missing_angle_brackets(&mut segment);
+ let mut seg = self.parse_path_segment(PathStyle::Expr, None)?;
+ self.check_trailing_angle_brackets(&seg, &[&token::OpenDelim(Delimiter::Parenthesis)]);
+ self.check_turbofish_missing_angle_brackets(&mut seg);
if self.check(&token::OpenDelim(Delimiter::Parenthesis)) {
// Method call `expr.f()`
let args = self.parse_paren_expr_seq()?;
let fn_span = fn_span_lo.to(self.prev_token.span);
let span = lo.to(self.prev_token.span);
- Ok(self.mk_expr(span, ExprKind::MethodCall(segment, self_arg, args, fn_span)))
+ Ok(self.mk_expr(
+ span,
+ ExprKind::MethodCall(Box::new(ast::MethodCall {
+ seg,
+ receiver: self_arg,
+ args,
+ span: fn_span,
+ })),
+ ))
} else {
// Field access `expr.f`
- if let Some(args) = segment.args {
+ if let Some(args) = seg.args {
self.sess.emit_err(FieldExpressionWithGeneric(args.span()));
}
let span = lo.to(self.prev_token.span);
- Ok(self.mk_expr(span, ExprKind::Field(self_arg, segment.ident)))
+ Ok(self.mk_expr(span, ExprKind::Field(self_arg, seg.ident)))
}
}
@@ -1360,7 +1358,7 @@ impl<'a> Parser<'a> {
)
} else if self.check_inline_const(0) {
self.parse_const_block(lo.to(self.token.span), false)
- } else if self.is_do_catch_block() {
+ } else if self.may_recover() && self.is_do_catch_block() {
self.recover_do_catch()
} else if self.is_try_block() {
self.expect_keyword(kw::Try)?;
@@ -1411,9 +1409,9 @@ impl<'a> Parser<'a> {
fn parse_lit_expr(&mut self) -> PResult<'a, P<Expr>> {
let lo = self.token.span;
- match self.parse_opt_lit() {
- Some(literal) => {
- let expr = self.mk_expr(lo.to(self.prev_token.span), ExprKind::Lit(literal));
+ match self.parse_opt_token_lit() {
+ Some((token_lit, _)) => {
+ let expr = self.mk_expr(lo.to(self.prev_token.span), ExprKind::Lit(token_lit));
self.maybe_recover_from_bad_qpath(expr)
}
None => self.try_macro_suggestion(),
@@ -1494,12 +1492,12 @@ impl<'a> Parser<'a> {
let lo = path.span;
let mac = P(MacCall {
path,
- args: self.parse_mac_args()?,
+ args: self.parse_delim_args()?,
prior_type_ascription: self.last_type_ascription,
});
(lo.to(self.prev_token.span), ExprKind::MacCall(mac))
} else if self.check(&token::OpenDelim(Delimiter::Brace)) &&
- let Some(expr) = self.maybe_parse_struct_expr(qself.as_ref(), &path) {
+ let Some(expr) = self.maybe_parse_struct_expr(&qself, &path) {
if qself.is_some() {
self.sess.gated_spans.gate(sym::more_qualified_paths, path.span);
}
@@ -1532,6 +1530,7 @@ impl<'a> Parser<'a> {
{
self.parse_block_expr(label, lo, BlockCheckMode::Default)
} else if !ate_colon
+ && self.may_recover()
&& (matches!(self.token.kind, token::CloseDelim(_) | token::Comma)
|| self.token.is_op())
{
@@ -1543,7 +1542,7 @@ impl<'a> Parser<'a> {
})
});
consume_colon = false;
- Ok(self.mk_expr(lo, ExprKind::Lit(lit)))
+ Ok(self.mk_expr(lo, ExprKind::Lit(lit.token_lit)))
} else if !ate_colon
&& (self.check_noexpect(&TokenKind::Comma) || self.check_noexpect(&TokenKind::Gt))
{
@@ -1620,10 +1619,10 @@ impl<'a> Parser<'a> {
/// Emit an error when a char is parsed as a lifetime because of a missing quote
pub(super) fn recover_unclosed_char(
- &mut self,
+ &self,
lifetime: Ident,
- err: impl FnOnce(&mut Self) -> DiagnosticBuilder<'a, ErrorGuaranteed>,
- ) -> ast::Lit {
+ err: impl FnOnce(&Self) -> DiagnosticBuilder<'a, ErrorGuaranteed>,
+ ) -> ast::MetaItemLit {
if let Some(mut diag) =
self.sess.span_diagnostic.steal_diagnostic(lifetime.span, StashKey::LifetimeIsChar)
{
@@ -1644,9 +1643,10 @@ impl<'a> Parser<'a> {
)
.emit();
}
- ast::Lit {
- token_lit: token::Lit::new(token::LitKind::Char, lifetime.name, None),
- kind: ast::LitKind::Char(lifetime.name.as_str().chars().next().unwrap_or('_')),
+ let name = lifetime.without_first_quote().name;
+ ast::MetaItemLit {
+ token_lit: token::Lit::new(token::LitKind::Char, name, None),
+ kind: ast::LitKind::Char(name.as_str().chars().next().unwrap_or('_')),
span: lifetime.span,
}
}
@@ -1719,13 +1719,13 @@ impl<'a> Parser<'a> {
|| !self.restrictions.contains(Restrictions::NO_STRUCT_LITERAL)
{
let expr = self.parse_expr_opt()?;
- if let Some(ref expr) = expr {
+ if let Some(expr) = &expr {
if label.is_some()
&& matches!(
expr.kind,
ExprKind::While(_, _, None)
| ExprKind::ForLoop(_, _, _, None)
- | ExprKind::Loop(_, None)
+ | ExprKind::Loop(_, None, _)
| ExprKind::Block(_, None)
)
{
@@ -1759,8 +1759,8 @@ impl<'a> Parser<'a> {
/// Returns a string literal if the next token is a string literal.
/// In case of error, returns `Some(lit)` if the next token is a literal with the wrong kind,
/// and returns `None` if the next token is not a literal at all.
- pub fn parse_str_lit(&mut self) -> Result<ast::StrLit, Option<Lit>> {
- match self.parse_opt_lit() {
+ pub fn parse_str_lit(&mut self) -> Result<ast::StrLit, Option<MetaItemLit>> {
+ match self.parse_opt_meta_item_lit() {
Some(lit) => match lit.kind {
ast::LitKind::Str(symbol_unescaped, style) => Ok(ast::StrLit {
style,
@@ -1775,41 +1775,47 @@ impl<'a> Parser<'a> {
}
}
- pub(super) fn parse_lit(&mut self) -> PResult<'a, Lit> {
- self.parse_opt_lit().ok_or(()).or_else(|()| {
- if let token::Interpolated(inner) = &self.token.kind {
- let expr = match inner.as_ref() {
- token::NtExpr(expr) => Some(expr),
- token::NtLiteral(expr) => Some(expr),
- _ => None,
- };
- if let Some(expr) = expr {
- if matches!(expr.kind, ExprKind::Err) {
- let mut err = InvalidInterpolatedExpression { span: self.token.span }
- .into_diagnostic(&self.sess.span_diagnostic);
- err.downgrade_to_delayed_bug();
- return Err(err);
- }
- }
- }
- let token = self.token.clone();
- let err = |self_: &mut Self| {
- let msg = format!("unexpected token: {}", super::token_descr(&token));
- self_.struct_span_err(token.span, &msg)
+ fn handle_missing_lit(&mut self) -> PResult<'a, MetaItemLit> {
+ if let token::Interpolated(inner) = &self.token.kind {
+ let expr = match inner.as_ref() {
+ token::NtExpr(expr) => Some(expr),
+ token::NtLiteral(expr) => Some(expr),
+ _ => None,
};
- // On an error path, eagerly consider a lifetime to be an unclosed character lit
- if self.token.is_lifetime() {
- let lt = self.expect_lifetime();
- Ok(self.recover_unclosed_char(lt.ident, err))
- } else {
- Err(err(self))
+ if let Some(expr) = expr {
+ if matches!(expr.kind, ExprKind::Err) {
+ let mut err = InvalidInterpolatedExpression { span: self.token.span }
+ .into_diagnostic(&self.sess.span_diagnostic);
+ err.downgrade_to_delayed_bug();
+ return Err(err);
+ }
}
- })
+ }
+ let token = self.token.clone();
+ let err = |self_: &Self| {
+ let msg = format!("unexpected token: {}", super::token_descr(&token));
+ self_.struct_span_err(token.span, &msg)
+ };
+ // On an error path, eagerly consider a lifetime to be an unclosed character lit
+ if self.token.is_lifetime() {
+ let lt = self.expect_lifetime();
+ Ok(self.recover_unclosed_char(lt.ident, err))
+ } else {
+ Err(err(self))
+ }
}
- /// Matches `lit = true | false | token_lit`.
- /// Returns `None` if the next token is not a literal.
- pub(super) fn parse_opt_lit(&mut self) -> Option<Lit> {
+ pub(super) fn parse_token_lit(&mut self) -> PResult<'a, (token::Lit, Span)> {
+ self.parse_opt_token_lit()
+ .ok_or(())
+ .or_else(|()| self.handle_missing_lit().map(|lit| (lit.token_lit, lit.span)))
+ }
+
+ pub(super) fn parse_meta_item_lit(&mut self) -> PResult<'a, MetaItemLit> {
+ self.parse_opt_meta_item_lit().ok_or(()).or_else(|()| self.handle_missing_lit())
+ }
+
+ fn recover_after_dot(&mut self) -> Option<Token> {
let mut recovered = None;
if self.token == token::Dot {
// Attempt to recover `.4` as `0.4`. We don't currently have any syntax where
@@ -1835,100 +1841,53 @@ impl<'a> Parser<'a> {
}
}
- let token = recovered.as_ref().unwrap_or(&self.token);
- match Lit::from_token(token) {
- Ok(lit) => {
- self.bump();
- Some(lit)
- }
- Err(LitError::NotLiteral) => None,
- Err(err) => {
- let span = token.span;
- let token::Literal(lit) = token.kind else {
- unreachable!();
- };
- self.bump();
- self.report_lit_error(err, lit, span);
- // Pack possible quotes and prefixes from the original literal into
- // the error literal's symbol so they can be pretty-printed faithfully.
- let suffixless_lit = token::Lit::new(lit.kind, lit.symbol, None);
- let symbol = Symbol::intern(&suffixless_lit.to_string());
- let lit = token::Lit::new(token::Err, symbol, lit.suffix);
- Some(Lit::from_token_lit(lit, span).unwrap_or_else(|_| unreachable!()))
- }
- }
+ recovered
}
- fn report_lit_error(&self, err: LitError, lit: token::Lit, span: Span) {
- // Checks if `s` looks like i32 or u1234 etc.
- fn looks_like_width_suffix(first_chars: &[char], s: &str) -> bool {
- s.len() > 1 && s.starts_with(first_chars) && s[1..].chars().all(|c| c.is_ascii_digit())
- }
-
- // Try to lowercase the prefix if it's a valid base prefix.
- fn fix_base_capitalisation(s: &str) -> Option<String> {
- if let Some(stripped) = s.strip_prefix('B') {
- Some(format!("0b{stripped}"))
- } else if let Some(stripped) = s.strip_prefix('O') {
- Some(format!("0o{stripped}"))
- } else if let Some(stripped) = s.strip_prefix('X') {
- Some(format!("0x{stripped}"))
- } else {
- None
- }
- }
+ /// Matches `lit = true | false | token_lit`.
+ /// Returns `None` if the next token is not a literal.
+ pub(super) fn parse_opt_token_lit(&mut self) -> Option<(token::Lit, Span)> {
+ let recovered = self.recover_after_dot();
+ let token = recovered.as_ref().unwrap_or(&self.token);
+ let span = token.span;
+ token::Lit::from_token(token).map(|token_lit| {
+ self.bump();
+ (token_lit, span)
+ })
+ }
- let token::Lit { kind, suffix, .. } = lit;
- match err {
- // `NotLiteral` is not an error by itself, so we don't report
- // it and give the parser opportunity to try something else.
- LitError::NotLiteral => {}
- // `LexerError` *is* an error, but it was already reported
- // by lexer, so here we don't report it the second time.
- LitError::LexerError => {}
- LitError::InvalidSuffix => {
- if let Some(suffix) = suffix {
- self.sess.emit_err(InvalidLiteralSuffix {
- span,
- kind: format!("{}", kind.descr()),
- suffix,
- });
- }
- }
- LitError::InvalidIntSuffix => {
- let suf = suffix.expect("suffix error with no suffix");
- let suf = suf.as_str();
- if looks_like_width_suffix(&['i', 'u'], &suf) {
- // If it looks like a width, try to be helpful.
- self.sess.emit_err(InvalidIntLiteralWidth { span, width: suf[1..].into() });
- } else if let Some(fixed) = fix_base_capitalisation(suf) {
- self.sess.emit_err(InvalidNumLiteralBasePrefix { span, fixed });
- } else {
- self.sess.emit_err(InvalidNumLiteralSuffix { span, suffix: suf.to_string() });
- }
- }
- LitError::InvalidFloatSuffix => {
- let suf = suffix.expect("suffix error with no suffix");
- let suf = suf.as_str();
- if looks_like_width_suffix(&['f'], suf) {
- // If it looks like a width, try to be helpful.
- self.sess
- .emit_err(InvalidFloatLiteralWidth { span, width: suf[1..].to_string() });
- } else {
- self.sess.emit_err(InvalidFloatLiteralSuffix { span, suffix: suf.to_string() });
+ /// Matches `lit = true | false | token_lit`.
+ /// Returns `None` if the next token is not a literal.
+ pub(super) fn parse_opt_meta_item_lit(&mut self) -> Option<MetaItemLit> {
+ let recovered = self.recover_after_dot();
+ let token = recovered.as_ref().unwrap_or(&self.token);
+ match token::Lit::from_token(token) {
+ Some(token_lit) => {
+ match MetaItemLit::from_token_lit(token_lit, token.span) {
+ Ok(lit) => {
+ self.bump();
+ Some(lit)
+ }
+ Err(err) => {
+ let span = token.span;
+ let token::Literal(lit) = token.kind else {
+ unreachable!();
+ };
+ self.bump();
+ report_lit_error(&self.sess, err, lit, span);
+ // Pack possible quotes and prefixes from the original literal into
+ // the error literal's symbol so they can be pretty-printed faithfully.
+ let suffixless_lit = token::Lit::new(lit.kind, lit.symbol, None);
+ let symbol = Symbol::intern(&suffixless_lit.to_string());
+ let lit = token::Lit::new(token::Err, symbol, lit.suffix);
+ Some(
+ MetaItemLit::from_token_lit(lit, span)
+ .unwrap_or_else(|_| unreachable!()),
+ )
+ }
}
}
- LitError::NonDecimalFloat(base) => {
- match base {
- 16 => self.sess.emit_err(HexadecimalFloatLiteralNotSupported { span }),
- 8 => self.sess.emit_err(OctalFloatLiteralNotSupported { span }),
- 2 => self.sess.emit_err(BinaryFloatLiteralNotSupported { span }),
- _ => unreachable!(),
- };
- }
- LitError::IntTooLarge => {
- self.sess.emit_err(IntLiteralTooLarge { span });
- }
+ None => None,
}
}
@@ -1953,8 +1912,8 @@ impl<'a> Parser<'a> {
let lo = self.token.span;
let minus_present = self.eat(&token::BinOp(token::Minus));
- let lit = self.parse_lit()?;
- let expr = self.mk_expr(lit.span, ExprKind::Lit(lit));
+ let (token_lit, span) = self.parse_token_lit()?;
+ let expr = self.mk_expr(span, ExprKind::Lit(token_lit));
if minus_present {
Ok(self.mk_expr(lo.to(self.prev_token.span), self.mk_unary(UnOp::Neg, expr)))
@@ -1999,6 +1958,10 @@ impl<'a> Parser<'a> {
prev_span: Span,
open_delim_span: Span,
) -> PResult<'a, ()> {
+ if !self.may_recover() {
+ return Ok(());
+ }
+
if self.token.kind == token::Comma {
if !self.sess.source_map().is_multiline(prev_span.until(self.token.span)) {
return Ok(());
@@ -2039,7 +2002,7 @@ impl<'a> Parser<'a> {
lo: Span,
blk_mode: BlockCheckMode,
) -> PResult<'a, P<Expr>> {
- if self.is_array_like_block() {
+ if self.may_recover() && self.is_array_like_block() {
if let Some(arr) = self.maybe_suggest_brackets_instead_of_braces(lo) {
return Ok(arr);
}
@@ -2082,15 +2045,15 @@ impl<'a> Parser<'a> {
if self.eat_keyword(kw::Static) { Movability::Static } else { Movability::Movable };
let asyncness = if self.token.uninterpolated_span().rust_2018() {
- self.parse_asyncness()
+ self.parse_asyncness(Case::Sensitive)
} else {
Async::No
};
let capture_clause = self.parse_capture_clause()?;
- let decl = self.parse_fn_block_decl()?;
+ let (fn_decl, fn_arg_span) = self.parse_fn_block_decl()?;
let decl_hi = self.prev_token.span;
- let mut body = match decl.output {
+ let mut body = match fn_decl.output {
FnRetTy::Default(_) => {
let restrictions = self.restrictions - Restrictions::STMT_EXPR;
self.parse_expr_res(restrictions, None)?
@@ -2109,12 +2072,7 @@ impl<'a> Parser<'a> {
if self.token.kind == TokenKind::Semi
&& matches!(self.token_cursor.frame.delim_sp, Some((Delimiter::Parenthesis, _)))
- // HACK: This is needed so we can detect whether we're inside a macro,
- // where regular assumptions about what tokens can follow other tokens
- // don't necessarily apply.
&& self.may_recover()
- // FIXME(Nilstrieb): Remove this check once `may_recover` actually stops recovery
- && self.subparser_name.is_none()
{
// It is likely that the closure body is a block but where the
// braces have been removed. We will recover and eat the next
@@ -2126,15 +2084,16 @@ impl<'a> Parser<'a> {
let closure = self.mk_expr(
lo.to(body.span),
- ExprKind::Closure(
+ ExprKind::Closure(Box::new(ast::Closure {
binder,
capture_clause,
asyncness,
movability,
- decl,
+ fn_decl,
body,
- lo.to(decl_hi),
- ),
+ fn_decl_span: lo.to(decl_hi),
+ fn_arg_span,
+ })),
);
// Disable recovery for closure body
@@ -2162,7 +2121,9 @@ impl<'a> Parser<'a> {
}
/// Parses the `|arg, arg|` header of a closure.
- fn parse_fn_block_decl(&mut self) -> PResult<'a, P<FnDecl>> {
+ fn parse_fn_block_decl(&mut self) -> PResult<'a, (P<FnDecl>, Span)> {
+ let arg_start = self.token.span.lo();
+
let inputs = if self.eat(&token::OrOr) {
Vec::new()
} else {
@@ -2178,10 +2139,11 @@ impl<'a> Parser<'a> {
self.expect_or()?;
args
};
+ let arg_span = self.prev_token.span.with_lo(arg_start);
let output =
self.parse_ret_ty(AllowPlus::Yes, RecoverQPath::Yes, RecoverReturnSign::Yes)?;
- Ok(P(FnDecl { inputs, output }))
+ Ok((P(FnDecl { inputs, output }), arg_span))
}
/// Parses a parameter in a closure header (e.g., `|arg, arg|`).
@@ -2263,13 +2225,14 @@ impl<'a> Parser<'a> {
self.mk_block_err(cond_span.shrink_to_hi())
}
} else {
- let attrs = self.parse_outer_attributes()?.take_for_recovery(); // For recovery.
+ let attrs = self.parse_outer_attributes()?; // For recovery.
let block = if self.check(&token::OpenDelim(Delimiter::Brace)) {
self.parse_block()?
} else {
if let Some(block) = recover_block_from_condition(self) {
block
} else {
+ self.error_on_extra_if(&cond)?;
// Parse block, which will always fail, but we can add a nice note to the error
self.parse_block().map_err(|mut err| {
err.span_note(
@@ -2280,7 +2243,7 @@ impl<'a> Parser<'a> {
})?
}
};
- self.error_on_if_block_attrs(lo, false, block.span, &attrs);
+ self.error_on_if_block_attrs(lo, false, block.span, attrs);
block
};
let els = if self.eat_keyword(kw::Else) { Some(self.parse_else_expr()?) } else { None };
@@ -2321,7 +2284,15 @@ impl<'a> Parser<'a> {
RecoverColon::Yes,
CommaRecoveryMode::LikelyTuple,
)?;
- self.expect(&token::Eq)?;
+ if self.token == token::EqEq {
+ self.sess.emit_err(ExpectedEqForLetExpr {
+ span: self.token.span,
+ sugg_span: self.token.span,
+ });
+ self.bump();
+ } else {
+ self.expect(&token::Eq)?;
+ }
let expr = self.with_res(self.restrictions | Restrictions::NO_STRUCT_LITERAL, |this| {
this.parse_assoc_expr_with(1 + prec_let_scrutinee_needs_par(), None.into())
})?;
@@ -2333,7 +2304,7 @@ impl<'a> Parser<'a> {
/// Parses an `else { ... }` expression (`else` token already eaten).
fn parse_else_expr(&mut self) -> PResult<'a, P<Expr>> {
let else_span = self.prev_token.span; // `else`
- let attrs = self.parse_outer_attributes()?.take_for_recovery(); // For recovery.
+ let attrs = self.parse_outer_attributes()?; // For recovery.
let expr = if self.eat_keyword(kw::If) {
self.parse_if_expr()?
} else if self.check(&TokenKind::OpenDelim(Delimiter::Brace)) {
@@ -2368,7 +2339,7 @@ impl<'a> Parser<'a> {
},
}
};
- self.error_on_if_block_attrs(else_span, true, expr.span, &attrs);
+ self.error_on_if_block_attrs(else_span, true, expr.span, attrs);
Ok(expr)
}
@@ -2377,8 +2348,13 @@ impl<'a> Parser<'a> {
ctx_span: Span,
is_ctx_else: bool,
branch_span: Span,
- attrs: &[ast::Attribute],
+ attrs: AttrWrapper,
) {
+ if attrs.is_empty() {
+ return;
+ }
+
+ let attrs: &[ast::Attribute] = &attrs.take_for_recovery(self.sess);
let (attributes, last) = match attrs {
[] => return,
[x0 @ xn] | [x0, .., xn] => (x0.span.to(xn.span), xn.span),
@@ -2393,6 +2369,16 @@ impl<'a> Parser<'a> {
});
}
+ fn error_on_extra_if(&mut self, cond: &P<Expr>) -> PResult<'a, ()> {
+ if let ExprKind::Binary(Spanned { span: binop_span, node: binop}, _, right) = &cond.kind &&
+ let BinOpKind::And = binop &&
+ let ExprKind::If(cond, ..) = &right.kind {
+ Err(self.sess.create_err(UnexpectedIfWithIf(binop_span.shrink_to_hi().to(cond.span.shrink_to_lo()))))
+ } else {
+ Ok(())
+ }
+ }
+
/// Parses `for <src_pat> in <src_expr> <src_loop_block>` (`for` token already eaten).
fn parse_for_expr(&mut self, opt_label: Option<Label>, lo: Span) -> PResult<'a, P<Expr>> {
// Record whether we are about to parse `for (`.
@@ -2456,10 +2442,11 @@ impl<'a> Parser<'a> {
/// Parses `loop { ... }` (`loop` token already eaten).
fn parse_loop_expr(&mut self, opt_label: Option<Label>, lo: Span) -> PResult<'a, P<Expr>> {
+ let loop_span = self.prev_token.span;
let (attrs, body) = self.parse_inner_attrs_and_block()?;
Ok(self.mk_expr_with_attrs(
lo.to(self.prev_token.span),
- ExprKind::Loop(body, opt_label),
+ ExprKind::Loop(body, opt_label, loop_span),
attrs,
))
}
@@ -2602,8 +2589,8 @@ impl<'a> Parser<'a> {
// Used to check the `let_chains` and `if_let_guard` features mostly by scanning
// `&&` tokens.
fn check_let_expr(expr: &Expr) -> (bool, bool) {
- match expr.kind {
- ExprKind::Binary(BinOp { node: BinOpKind::And, .. }, ref lhs, ref rhs) => {
+ match &expr.kind {
+ ExprKind::Binary(BinOp { node: BinOpKind::And, .. }, lhs, rhs) => {
let lhs_rslt = check_let_expr(lhs);
let rhs_rslt = check_let_expr(rhs);
(lhs_rslt.0 || rhs_rslt.0, false)
@@ -2826,7 +2813,7 @@ impl<'a> Parser<'a> {
fn maybe_parse_struct_expr(
&mut self,
- qself: Option<&ast::QSelf>,
+ qself: &Option<P<ast::QSelf>>,
path: &ast::Path,
) -> Option<PResult<'a, P<Expr>>> {
let struct_allowed = !self.restrictions.contains(Restrictions::NO_STRUCT_LITERAL);
@@ -2834,7 +2821,7 @@ impl<'a> Parser<'a> {
if let Err(err) = self.expect(&token::OpenDelim(Delimiter::Brace)) {
return Some(Err(err));
}
- let expr = self.parse_struct_expr(qself.cloned(), path.clone(), true);
+ let expr = self.parse_struct_expr(qself.clone(), path.clone(), true);
if let (Ok(expr), false) = (&expr, struct_allowed) {
// This is a struct literal, but we can't accept them here.
self.sess.emit_err(StructLiteralNotAllowedHere {
@@ -2867,7 +2854,7 @@ impl<'a> Parser<'a> {
};
while self.token != token::CloseDelim(close_delim) {
- if self.eat(&token::DotDot) {
+ if self.eat(&token::DotDot) || self.recover_struct_field_dots(close_delim) {
let exp_span = self.prev_token.span;
// We permit `.. }` on the left-hand side of a destructuring assignment.
if self.check(&token::CloseDelim(close_delim)) {
@@ -2965,7 +2952,7 @@ impl<'a> Parser<'a> {
/// Precondition: already parsed the '{'.
pub(super) fn parse_struct_expr(
&mut self,
- qself: Option<ast::QSelf>,
+ qself: Option<P<ast::QSelf>>,
pth: ast::Path,
recover: bool,
) -> PResult<'a, P<Expr>> {
@@ -3014,6 +3001,18 @@ impl<'a> Parser<'a> {
self.recover_stmt();
}
+ fn recover_struct_field_dots(&mut self, close_delim: Delimiter) -> bool {
+ if !self.look_ahead(1, |t| *t == token::CloseDelim(close_delim))
+ && self.eat(&token::DotDotDot)
+ {
+ // Recover from the typo `...` and suggest `..` instead
+ let span = self.prev_token.span;
+ self.sess.emit_err(MissingDotDot { token_span: span, sugg_span: span });
+ return true;
+ }
+ false
+ }
+
/// Parses `ident (COLON expr)?`.
fn parse_expr_field(&mut self) -> PResult<'a, ExprField> {
let attrs = self.parse_outer_attributes()?;
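The error-reporting arm removed from the old `parse_opt_lit` above now funnels through `report_lit_error` in `rustc_session` (see the new import and the call in `parse_opt_meta_item_lit`); the suffix heuristics themselves are unchanged. As a self-contained reference, this sketch reproduces the two helpers exactly as they appear in the deleted lines, plus an illustrative `main` exercising them:

// Standalone reproduction of the suffix heuristics that were moved out of the
// parser (now used via rustc_session's report_lit_error). Logic copied from
// the deleted lines above; only the wrapping main() is added for illustration.

// Checks if `s` looks like i32 or u1234 etc.
fn looks_like_width_suffix(first_chars: &[char], s: &str) -> bool {
    s.len() > 1 && s.starts_with(first_chars) && s[1..].chars().all(|c| c.is_ascii_digit())
}

// Try to lowercase the prefix if it's a valid base prefix.
fn fix_base_capitalisation(s: &str) -> Option<String> {
    if let Some(stripped) = s.strip_prefix('B') {
        Some(format!("0b{stripped}"))
    } else if let Some(stripped) = s.strip_prefix('O') {
        Some(format!("0o{stripped}"))
    } else if let Some(stripped) = s.strip_prefix('X') {
        Some(format!("0x{stripped}"))
    } else {
        None
    }
}

fn main() {
    assert!(looks_like_width_suffix(&['i', 'u'], "u128")); // looks like an int width
    assert!(!looks_like_width_suffix(&['f'], "foo"));      // not digits after the prefix
    assert_eq!(fix_base_capitalisation("X1f"), Some("0x1f".to_string()));
    assert_eq!(fix_base_capitalisation("x1f"), None);      // lowercase prefix is left alone
}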
diff --git a/compiler/rustc_parse/src/parser/item.rs b/compiler/rustc_parse/src/parser/item.rs
index bda301c52..03f25392a 100644
--- a/compiler/rustc_parse/src/parser/item.rs
+++ b/compiler/rustc_parse/src/parser/item.rs
@@ -3,17 +3,17 @@ use crate::errors::{DocCommentDoesNotDocumentAnything, UseEmptyBlockNotSemi};
use super::diagnostics::{dummy_arg, ConsumeClosingDelim};
use super::ty::{AllowPlus, RecoverQPath, RecoverReturnSign};
use super::{AttrWrapper, FollowedByType, ForceCollect, Parser, PathStyle, TrailingToken};
-
use rustc_ast::ast::*;
use rustc_ast::ptr::P;
use rustc_ast::token::{self, Delimiter, TokenKind};
use rustc_ast::tokenstream::{DelimSpan, TokenStream, TokenTree};
+use rustc_ast::util::case::Case;
use rustc_ast::{self as ast, AttrVec, Attribute, DUMMY_NODE_ID};
use rustc_ast::{Async, Const, Defaultness, IsAuto, Mutability, Unsafe, UseTree, UseTreeKind};
use rustc_ast::{BindingAnnotation, Block, FnDecl, FnSig, Param, SelfKind};
use rustc_ast::{EnumDef, FieldDef, Generics, TraitRef, Ty, TyKind, Variant, VariantData};
use rustc_ast::{FnHeader, ForeignItem, Path, PathSegment, Visibility, VisibilityKind};
-use rustc_ast::{MacArgs, MacCall, MacDelimiter};
+use rustc_ast::{MacCall, MacDelimiter};
use rustc_ast_pretty::pprust;
use rustc_errors::{struct_span_err, Applicability, IntoDiagnostic, PResult, StashKey};
use rustc_span::edition::Edition;
@@ -21,9 +21,10 @@ use rustc_span::lev_distance::lev_distance;
use rustc_span::source_map::{self, Span};
use rustc_span::symbol::{kw, sym, Ident, Symbol};
use rustc_span::DUMMY_SP;
-
use std::convert::TryFrom;
use std::mem;
+use thin_vec::ThinVec;
+use tracing::debug;
impl<'a> Parser<'a> {
/// Parses a source module as a crate. This is the main entry point for the parser.
@@ -34,7 +35,7 @@ impl<'a> Parser<'a> {
/// Parses a `mod <foo> { ... }` or `mod <foo>;` item.
fn parse_item_mod(&mut self, attrs: &mut AttrVec) -> PResult<'a, ItemInfo> {
- let unsafety = self.parse_unsafety();
+ let unsafety = self.parse_unsafety(Case::Sensitive);
self.expect_keyword(kw::Mod)?;
let id = self.parse_ident()?;
let mod_kind = if self.eat(&token::Semi) {
@@ -143,8 +144,15 @@ impl<'a> Parser<'a> {
let lo = self.token.span;
let vis = self.parse_visibility(FollowedByType::No)?;
let mut def = self.parse_defaultness();
- let kind =
- self.parse_item_kind(&mut attrs, mac_allowed, lo, &vis, &mut def, fn_parse_mode)?;
+ let kind = self.parse_item_kind(
+ &mut attrs,
+ mac_allowed,
+ lo,
+ &vis,
+ &mut def,
+ fn_parse_mode,
+ Case::Sensitive,
+ )?;
if let Some((ident, kind)) = kind {
self.error_on_unconsumed_default(def, &kind);
let span = lo.to(self.prev_token.span);
@@ -205,16 +213,18 @@ impl<'a> Parser<'a> {
vis: &Visibility,
def: &mut Defaultness,
fn_parse_mode: FnParseMode,
+ case: Case,
) -> PResult<'a, Option<ItemInfo>> {
let def_final = def == &Defaultness::Final;
- let mut def = || mem::replace(def, Defaultness::Final);
+ let mut def_ = || mem::replace(def, Defaultness::Final);
- let info = if self.eat_keyword(kw::Use) {
+ let info = if self.eat_keyword_case(kw::Use, case) {
self.parse_use_item()?
- } else if self.check_fn_front_matter(def_final) {
+ } else if self.check_fn_front_matter(def_final, case) {
// FUNCTION ITEM
- let (ident, sig, generics, body) = self.parse_fn(attrs, fn_parse_mode, lo, vis)?;
- (ident, ItemKind::Fn(Box::new(Fn { defaultness: def(), sig, generics, body })))
+ let (ident, sig, generics, body) =
+ self.parse_fn(attrs, fn_parse_mode, lo, vis, case)?;
+ (ident, ItemKind::Fn(Box::new(Fn { defaultness: def_(), sig, generics, body })))
} else if self.eat_keyword(kw::Extern) {
if self.eat_keyword(kw::Crate) {
// EXTERN CRATE
@@ -225,7 +235,7 @@ impl<'a> Parser<'a> {
}
} else if self.is_unsafe_foreign_mod() {
// EXTERN BLOCK
- let unsafety = self.parse_unsafety();
+ let unsafety = self.parse_unsafety(Case::Sensitive);
self.expect_keyword(kw::Extern)?;
self.parse_item_foreign_mod(attrs, unsafety)?
} else if self.is_static_global() {
@@ -234,15 +244,15 @@ impl<'a> Parser<'a> {
let m = self.parse_mutability();
let (ident, ty, expr) = self.parse_item_global(Some(m))?;
(ident, ItemKind::Static(ty, m, expr))
- } else if let Const::Yes(const_span) = self.parse_constness() {
+ } else if let Const::Yes(const_span) = self.parse_constness(Case::Sensitive) {
// CONST ITEM
if self.token.is_keyword(kw::Impl) {
// recover from `const impl`, suggest `impl const`
- self.recover_const_impl(const_span, attrs, def())?
+ self.recover_const_impl(const_span, attrs, def_())?
} else {
self.recover_const_mut(const_span);
let (ident, ty, expr) = self.parse_item_global(None)?;
- (ident, ItemKind::Const(def(), ty, expr))
+ (ident, ItemKind::Const(def_(), ty, expr))
}
} else if self.check_keyword(kw::Trait) || self.check_auto_or_unsafe_trait_item() {
// TRAIT ITEM
@@ -251,7 +261,7 @@ impl<'a> Parser<'a> {
|| self.check_keyword(kw::Unsafe) && self.is_keyword_ahead(1, &[kw::Impl])
{
// IMPL ITEM
- self.parse_item_impl(attrs, def())?
+ self.parse_item_impl(attrs, def_())?
} else if self.check_keyword(kw::Mod)
|| self.check_keyword(kw::Unsafe) && self.is_keyword_ahead(1, &[kw::Mod])
{
@@ -259,7 +269,7 @@ impl<'a> Parser<'a> {
self.parse_item_mod(attrs)?
} else if self.eat_keyword(kw::Type) {
// TYPE ITEM
- self.parse_type_alias(def())?
+ self.parse_type_alias(def_())?
} else if self.eat_keyword(kw::Enum) {
// ENUM ITEM
self.parse_item_enum()?
@@ -286,6 +296,19 @@ impl<'a> Parser<'a> {
} else if self.isnt_macro_invocation() && vis.kind.is_pub() {
self.recover_missing_kw_before_item()?;
return Ok(None);
+ } else if self.isnt_macro_invocation() && case == Case::Sensitive {
+ _ = def_;
+
+ // Recover wrong cased keywords
+ return self.parse_item_kind(
+ attrs,
+ macros_allowed,
+ lo,
+ vis,
+ def,
+ fn_parse_mode,
+ Case::Insensitive,
+ );
} else if macros_allowed && self.check_path() {
// MACRO INVOCATION ITEM
(Ident::empty(), ItemKind::MacCall(P(self.parse_item_macro(vis)?)))
@@ -449,7 +472,7 @@ impl<'a> Parser<'a> {
fn parse_item_macro(&mut self, vis: &Visibility) -> PResult<'a, MacCall> {
let path = self.parse_path(PathStyle::Mod)?; // `foo::bar`
self.expect(&token::Not)?; // `!`
- match self.parse_mac_args() {
+ match self.parse_delim_args() {
// `( .. )` or `[ .. ]` (followed by `;`), or `{ .. }`.
Ok(args) => {
self.eat_semi_for_macro_if_needed(&args);
@@ -538,7 +561,7 @@ impl<'a> Parser<'a> {
attrs: &mut AttrVec,
defaultness: Defaultness,
) -> PResult<'a, ItemInfo> {
- let unsafety = self.parse_unsafety();
+ let unsafety = self.parse_unsafety(Case::Sensitive);
self.expect_keyword(kw::Impl)?;
// First, parse generic parameters if necessary.
@@ -552,7 +575,7 @@ impl<'a> Parser<'a> {
generics
};
- let constness = self.parse_constness();
+ let constness = self.parse_constness(Case::Sensitive);
if let Const::Yes(span) = constness {
self.sess.gated_spans.gate(sym::const_trait_impl, span);
}
@@ -796,7 +819,7 @@ impl<'a> Parser<'a> {
/// Parses `unsafe? auto? trait Foo { ... }` or `trait Foo = Bar;`.
fn parse_item_trait(&mut self, attrs: &mut AttrVec, lo: Span) -> PResult<'a, ItemInfo> {
- let unsafety = self.parse_unsafety();
+ let unsafety = self.parse_unsafety(Case::Sensitive);
// Parse optional `auto` prefix.
let is_auto = if self.eat_keyword(kw::Auto) { IsAuto::Yes } else { IsAuto::No };
@@ -950,7 +973,8 @@ impl<'a> Parser<'a> {
fn parse_use_tree(&mut self) -> PResult<'a, UseTree> {
let lo = self.token.span;
- let mut prefix = ast::Path { segments: Vec::new(), span: lo.shrink_to_lo(), tokens: None };
+ let mut prefix =
+ ast::Path { segments: ThinVec::new(), span: lo.shrink_to_lo(), tokens: None };
let kind = if self.check(&token::OpenDelim(Delimiter::Brace))
|| self.check(&token::BinOp(token::Star))
|| self.is_import_coupler()
@@ -971,7 +995,24 @@ impl<'a> Parser<'a> {
if self.eat(&token::ModSep) {
self.parse_use_tree_glob_or_nested()?
} else {
- UseTreeKind::Simple(self.parse_rename()?, DUMMY_NODE_ID, DUMMY_NODE_ID)
+ // Recover from using a colon as path separator.
+ while self.eat_noexpect(&token::Colon) {
+ self.struct_span_err(self.prev_token.span, "expected `::`, found `:`")
+ .span_suggestion_short(
+ self.prev_token.span,
+ "use double colon",
+ "::",
+ Applicability::MachineApplicable,
+ )
+ .note_once("import paths are delimited using `::`")
+ .emit();
+
+ // We parse the rest of the path and append it to the original prefix.
+ self.parse_path_segments(&mut prefix.segments, PathStyle::Mod, None)?;
+ prefix.span = lo.to(self.prev_token.span);
+ }
+
+ UseTreeKind::Simple(self.parse_rename()?)
}
};
@@ -1215,8 +1256,8 @@ impl<'a> Parser<'a> {
}
};
- match impl_info.1 {
- ItemKind::Impl(box Impl { of_trait: Some(ref trai), ref mut constness, .. }) => {
+ match &mut impl_info.1 {
+ ItemKind::Impl(box Impl { of_trait: Some(trai), constness, .. }) => {
*constness = Const::Yes(const_span);
let before_trait = trai.path.span.shrink_to_lo();
@@ -1373,7 +1414,10 @@ impl<'a> Parser<'a> {
Ok((Some(vr), TrailingToken::MaybeComma))
},
- )
+ ).map_err(|mut err|{
+ err.help("enum variants can be `Variant`, `Variant = <integer>`, `Variant(Type, ..., TypeN)` or `Variant { fields: Types }`");
+ err
+ })
}
/// Parses `struct Foo { ... }`.
@@ -1745,7 +1789,7 @@ impl<'a> Parser<'a> {
let (ident, is_raw) = self.ident_or_err()?;
if !is_raw && ident.is_reserved() {
let snapshot = self.create_snapshot_for_diagnostic();
- let err = if self.check_fn_front_matter(false) {
+ let err = if self.check_fn_front_matter(false, Case::Sensitive) {
let inherited_vis = Visibility {
span: rustc_span::DUMMY_SP,
kind: VisibilityKind::Inherited,
@@ -1753,7 +1797,13 @@ impl<'a> Parser<'a> {
};
// We use `parse_fn` to get a span for the function
let fn_parse_mode = FnParseMode { req_name: |_| true, req_body: true };
- match self.parse_fn(&mut AttrVec::new(), fn_parse_mode, lo, &inherited_vis) {
+ match self.parse_fn(
+ &mut AttrVec::new(),
+ fn_parse_mode,
+ lo,
+ &inherited_vis,
+ Case::Insensitive,
+ ) {
Ok(_) => {
let mut err = self.struct_span_err(
lo.to(self.prev_token.span),
@@ -1827,7 +1877,7 @@ impl<'a> Parser<'a> {
fn parse_item_decl_macro(&mut self, lo: Span) -> PResult<'a, ItemInfo> {
let ident = self.parse_ident()?;
let body = if self.check(&token::OpenDelim(Delimiter::Brace)) {
- self.parse_mac_args()? // `MacBody`
+ self.parse_delim_args()? // `MacBody`
} else if self.check(&token::OpenDelim(Delimiter::Parenthesis)) {
let params = self.parse_token_tree(); // `MacParams`
let pspan = params.span();
@@ -1840,7 +1890,7 @@ impl<'a> Parser<'a> {
let arrow = TokenTree::token_alone(token::FatArrow, pspan.between(bspan)); // `=>`
let tokens = TokenStream::new(vec![params, arrow, body]);
let dspan = DelimSpan::from_pair(pspan.shrink_to_lo(), bspan.shrink_to_hi());
- P(MacArgs::Delimited(dspan, MacDelimiter::Brace, tokens))
+ P(DelimArgs { dspan, delim: MacDelimiter::Brace, tokens })
} else {
return self.unexpected();
};
@@ -1895,7 +1945,7 @@ impl<'a> Parser<'a> {
.emit();
}
- let body = self.parse_mac_args()?;
+ let body = self.parse_delim_args()?;
self.eat_semi_for_macro_if_needed(&body);
self.complain_if_pub_macro(vis, true);
@@ -1934,14 +1984,14 @@ impl<'a> Parser<'a> {
}
}
- fn eat_semi_for_macro_if_needed(&mut self, args: &MacArgs) {
+ fn eat_semi_for_macro_if_needed(&mut self, args: &DelimArgs) {
if args.need_semicolon() && !self.eat(&token::Semi) {
self.report_invalid_macro_expansion_item(args);
}
}
- fn report_invalid_macro_expansion_item(&self, args: &MacArgs) {
- let span = args.span().expect("undelimited macro call");
+ fn report_invalid_macro_expansion_item(&self, args: &DelimArgs) {
+ let span = args.dspan.entire();
let mut err = self.struct_span_err(
span,
"macros that expand to items must be delimited with braces or followed by a semicolon",
@@ -1950,10 +2000,7 @@ impl<'a> Parser<'a> {
// macros within the same crate (that we can fix), which is sad.
if !span.from_expansion() {
if self.unclosed_delims.is_empty() {
- let DelimSpan { open, close } = match args {
- MacArgs::Empty | MacArgs::Eq(..) => unreachable!(),
- MacArgs::Delimited(dspan, ..) => *dspan,
- };
+ let DelimSpan { open, close } = args.dspan;
err.multipart_suggestion(
"change the delimiters to curly braces",
vec![(open, "{".to_string()), (close, '}'.to_string())],
@@ -2077,8 +2124,9 @@ impl<'a> Parser<'a> {
fn_parse_mode: FnParseMode,
sig_lo: Span,
vis: &Visibility,
+ case: Case,
) -> PResult<'a, (Ident, FnSig, Generics, Option<P<Block>>)> {
- let header = self.parse_fn_front_matter(vis)?; // `const ... fn`
+ let header = self.parse_fn_front_matter(vis, case)?; // `const ... fn`
let ident = self.parse_ident()?; // `foo`
let mut generics = self.parse_generics()?; // `<'a, T, ...>`
let decl =
@@ -2155,7 +2203,7 @@ impl<'a> Parser<'a> {
///
/// `check_pub` adds additional `pub` to the checks in case users place it
/// wrongly, can be used to ensure `pub` never comes after `default`.
- pub(super) fn check_fn_front_matter(&mut self, check_pub: bool) -> bool {
+ pub(super) fn check_fn_front_matter(&mut self, check_pub: bool, case: Case) -> bool {
// We use an over-approximation here.
// `const const`, `fn const` won't parse, but we're not stepping over other syntax either.
// `pub` is added in case users got confused with the ordering like `async pub fn`,
@@ -2165,23 +2213,30 @@ impl<'a> Parser<'a> {
} else {
&[kw::Const, kw::Async, kw::Unsafe, kw::Extern]
};
- self.check_keyword(kw::Fn) // Definitely an `fn`.
+ self.check_keyword_case(kw::Fn, case) // Definitely an `fn`.
// `$qual fn` or `$qual $qual`:
- || quals.iter().any(|&kw| self.check_keyword(kw))
+ || quals.iter().any(|&kw| self.check_keyword_case(kw, case))
&& self.look_ahead(1, |t| {
// `$qual fn`, e.g. `const fn` or `async fn`.
- t.is_keyword(kw::Fn)
+ t.is_keyword_case(kw::Fn, case)
// Two qualifiers `$qual $qual` is enough, e.g. `async unsafe`.
- || t.is_non_raw_ident_where(|i| quals.contains(&i.name)
- // Rule out 2015 `const async: T = val`.
- && i.is_reserved()
+ || (
+ (
+ t.is_non_raw_ident_where(|i|
+ quals.contains(&i.name)
+ // Rule out 2015 `const async: T = val`.
+ && i.is_reserved()
+ )
+ || case == Case::Insensitive
+ && t.is_non_raw_ident_where(|i| quals.iter().any(|qual| qual.as_str() == i.name.as_str().to_lowercase()))
+ )
// Rule out unsafe extern block.
&& !self.is_unsafe_foreign_mod())
})
// `extern ABI fn`
- || self.check_keyword(kw::Extern)
+ || self.check_keyword_case(kw::Extern, case)
&& self.look_ahead(1, |t| t.can_begin_literal_maybe_minus())
- && self.look_ahead(2, |t| t.is_keyword(kw::Fn))
+ && self.look_ahead(2, |t| t.is_keyword_case(kw::Fn, case))
}
/// Parses all the "front matter" (or "qualifiers") for a `fn` declaration,
@@ -2195,24 +2250,28 @@ impl<'a> Parser<'a> {
///
/// `vis` represents the visibility that was already parsed, if any. Use
/// `Visibility::Inherited` when no visibility is known.
- pub(super) fn parse_fn_front_matter(&mut self, orig_vis: &Visibility) -> PResult<'a, FnHeader> {
+ pub(super) fn parse_fn_front_matter(
+ &mut self,
+ orig_vis: &Visibility,
+ case: Case,
+ ) -> PResult<'a, FnHeader> {
let sp_start = self.token.span;
- let constness = self.parse_constness();
+ let constness = self.parse_constness(case);
let async_start_sp = self.token.span;
- let asyncness = self.parse_asyncness();
+ let asyncness = self.parse_asyncness(case);
let unsafe_start_sp = self.token.span;
- let unsafety = self.parse_unsafety();
+ let unsafety = self.parse_unsafety(case);
let ext_start_sp = self.token.span;
- let ext = self.parse_extern();
+ let ext = self.parse_extern(case);
if let Async::Yes { span, .. } = asyncness {
self.ban_async_in_2015(span);
}
- if !self.eat_keyword(kw::Fn) {
+ if !self.eat_keyword_case(kw::Fn, case) {
// It is possible for `expect_one_of` to recover given the contents of
// `self.expected_tokens`, therefore, do not use `self.unexpected()` which doesn't
// account for this.
@@ -2541,8 +2600,8 @@ impl<'a> Parser<'a> {
}
fn is_named_param(&self) -> bool {
- let offset = match self.token.kind {
- token::Interpolated(ref nt) => match **nt {
+ let offset = match &self.token.kind {
+ token::Interpolated(nt) => match **nt {
token::NtPat(..) => return self.look_ahead(1, |t| t == &token::Colon),
_ => 0,
},
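The `Case` parameter threaded through `parse_item_kind` above enables a second, case-insensitive pass over item keywords, so a miswritten keyword such as `Impl` or `FN` is recovered with a casing suggestion instead of a hard parse error. A minimal sketch of the comparison this relies on, mirroring the `eat_keyword_case` logic in the next file's diff (diagnostics plumbing omitted):

// Minimal sketch of the case-insensitive keyword match used for recovery.
// `Case` mirrors rustc_ast::util::case::Case; the comparison follows
// eat_keyword_case below, without the suggestion machinery.
#[derive(Clone, Copy, PartialEq, Eq)]
enum Case {
    Sensitive,
    Insensitive,
}

fn keyword_matches(ident: &str, kw: &str, case: Case) -> bool {
    ident == kw || (case == Case::Insensitive && ident.to_lowercase() == kw.to_lowercase())
}

fn main() {
    assert!(keyword_matches("fn", "fn", Case::Sensitive));
    assert!(!keyword_matches("Fn", "fn", Case::Sensitive));
    // The insensitive pass accepts the miswritten keyword; the parser then
    // emits a machine-applicable suggestion to fix the casing.
    assert!(keyword_matches("Fn", "fn", Case::Insensitive));
}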
diff --git a/compiler/rustc_parse/src/parser/mod.rs b/compiler/rustc_parse/src/parser/mod.rs
index 5fe29062b..bebb01266 100644
--- a/compiler/rustc_parse/src/parser/mod.rs
+++ b/compiler/rustc_parse/src/parser/mod.rs
@@ -22,10 +22,11 @@ use rustc_ast::token::{self, Delimiter, Nonterminal, Token, TokenKind};
use rustc_ast::tokenstream::AttributesData;
use rustc_ast::tokenstream::{self, DelimSpan, Spacing};
use rustc_ast::tokenstream::{TokenStream, TokenTree};
+use rustc_ast::util::case::Case;
use rustc_ast::AttrId;
use rustc_ast::DUMMY_NODE_ID;
-use rustc_ast::{self as ast, AnonConst, AttrStyle, AttrVec, Const, Extern};
-use rustc_ast::{Async, Expr, ExprKind, MacArgs, MacArgsEq, MacDelimiter, Mutability, StrLit};
+use rustc_ast::{self as ast, AnonConst, AttrStyle, AttrVec, Const, DelimArgs, Extern};
+use rustc_ast::{Async, AttrArgs, AttrArgsEq, Expr, ExprKind, MacDelimiter, Mutability, StrLit};
use rustc_ast::{HasAttrs, HasTokens, Unsafe, Visibility, VisibilityKind};
use rustc_ast_pretty::pprust;
use rustc_data_structures::fx::FxHashMap;
@@ -104,6 +105,7 @@ macro_rules! maybe_whole {
macro_rules! maybe_recover_from_interpolated_ty_qpath {
($self: expr, $allow_qpath_recovery: expr) => {
if $allow_qpath_recovery
+ && $self.may_recover()
&& $self.look_ahead(1, |t| t == &token::ModSep)
&& let token::Interpolated(nt) = &$self.token.kind
&& let token::NtTy(ty) = &**nt
@@ -382,8 +384,8 @@ enum TokenType {
impl TokenType {
fn to_string(&self) -> String {
- match *self {
- TokenType::Token(ref t) => format!("`{}`", pprust::token_kind_to_string(t)),
+ match self {
+ TokenType::Token(t) => format!("`{}`", pprust::token_kind_to_string(t)),
TokenType::Keyword(kw) => format!("`{}`", kw),
TokenType::Operator => "an operator".to_string(),
TokenType::Lifetime => "lifetime".to_string(),
@@ -501,8 +503,8 @@ impl<'a> Parser<'a> {
parser
}
- pub fn forbid_recovery(mut self) -> Self {
- self.recovery = Recovery::Forbidden;
+ pub fn recovery(mut self, recovery: Recovery) -> Self {
+ self.recovery = recovery;
self
}
@@ -635,6 +637,20 @@ impl<'a> Parser<'a> {
self.token.is_keyword(kw)
}
+ fn check_keyword_case(&mut self, kw: Symbol, case: Case) -> bool {
+ if self.check_keyword(kw) {
+ return true;
+ }
+
+ if case == Case::Insensitive
+ && let Some((ident, /* is_raw */ false)) = self.token.ident()
+ && ident.as_str().to_lowercase() == kw.as_str().to_lowercase() {
+ true
+ } else {
+ false
+ }
+ }
+
/// If the next token is the given keyword, eats it and returns `true`.
/// Otherwise, returns `false`. An expectation is also added for diagnostics purposes.
// Public for rustfmt usage.
@@ -647,6 +663,33 @@ impl<'a> Parser<'a> {
}
}
+ /// Eats a keyword, optionally ignoring the case.
+ /// If the case differs (and is ignored), an error is issued.
+ /// This is useful for recovery.
+ fn eat_keyword_case(&mut self, kw: Symbol, case: Case) -> bool {
+ if self.eat_keyword(kw) {
+ return true;
+ }
+
+ if case == Case::Insensitive
+ && let Some((ident, /* is_raw */ false)) = self.token.ident()
+ && ident.as_str().to_lowercase() == kw.as_str().to_lowercase() {
+ self
+ .struct_span_err(ident.span, format!("keyword `{kw}` is written in the wrong case"))
+ .span_suggestion(
+ ident.span,
+ "write it in the correct case",
+ kw,
+ Applicability::MachineApplicable
+ ).emit();
+
+ self.bump();
+ return true;
+ }
+
+ false
+ }
+
fn eat_keyword_noexpect(&mut self, kw: Symbol) -> bool {
if self.token.is_keyword(kw) {
self.bump();
@@ -695,8 +738,8 @@ impl<'a> Parser<'a> {
fn check_inline_const(&self, dist: usize) -> bool {
self.is_keyword_ahead(dist, &[kw::Const])
- && self.look_ahead(dist + 1, |t| match t.kind {
- token::Interpolated(ref nt) => matches!(**nt, token::NtBlock(..)),
+ && self.look_ahead(dist + 1, |t| match &t.kind {
+ token::Interpolated(nt) => matches!(**nt, token::NtBlock(..)),
token::OpenDelim(Delimiter::Brace) => true,
_ => false,
})
@@ -817,7 +860,7 @@ impl<'a> Parser<'a> {
if let token::CloseDelim(..) | token::Eof = self.token.kind {
break;
}
- if let Some(ref t) = sep.sep {
+ if let Some(t) = &sep.sep {
if first {
first = false;
} else {
@@ -852,7 +895,7 @@ impl<'a> Parser<'a> {
_ => {
// Attempt to keep parsing if it was a similar separator.
- if let Some(ref tokens) = t.similar_tokens() {
+ if let Some(tokens) = t.similar_tokens() {
if tokens.contains(&self.token.kind) && !unclosed_delims {
self.bump();
}
@@ -900,6 +943,10 @@ impl<'a> Parser<'a> {
Err(e) => {
// Parsing failed, therefore it must be something more serious
// than just a missing separator.
+ for xx in &e.children {
+ // Propagate the help messages from the sub-error `e` to the main error `expect_err`.
+ expect_err.children.push(xx.clone());
+ }
expect_err.emit();
e.cancel();
@@ -1126,8 +1173,8 @@ impl<'a> Parser<'a> {
}
/// Parses asyncness: `async` or nothing.
- fn parse_asyncness(&mut self) -> Async {
- if self.eat_keyword(kw::Async) {
+ fn parse_asyncness(&mut self, case: Case) -> Async {
+ if self.eat_keyword_case(kw::Async, case) {
let span = self.prev_token.uninterpolated_span();
Async::Yes { span, closure_id: DUMMY_NODE_ID, return_impl_trait_id: DUMMY_NODE_ID }
} else {
@@ -1136,8 +1183,8 @@ impl<'a> Parser<'a> {
}
/// Parses unsafety: `unsafe` or nothing.
- fn parse_unsafety(&mut self) -> Unsafe {
- if self.eat_keyword(kw::Unsafe) {
+ fn parse_unsafety(&mut self, case: Case) -> Unsafe {
+ if self.eat_keyword_case(kw::Unsafe, case) {
Unsafe::Yes(self.prev_token.uninterpolated_span())
} else {
Unsafe::No
@@ -1145,10 +1192,10 @@ impl<'a> Parser<'a> {
}
/// Parses constness: `const` or nothing.
- fn parse_constness(&mut self) -> Const {
+ fn parse_constness(&mut self, case: Case) -> Const {
// Avoid `const` blocks being parsed as `const` items
if self.look_ahead(1, |t| t != &token::OpenDelim(Delimiter::Brace))
- && self.eat_keyword(kw::Const)
+ && self.eat_keyword_case(kw::Const, case)
{
Const::Yes(self.prev_token.uninterpolated_span())
} else {
@@ -1206,39 +1253,40 @@ impl<'a> Parser<'a> {
}
}
- fn parse_mac_args(&mut self) -> PResult<'a, P<MacArgs>> {
- self.parse_mac_args_common(true).map(P)
+ fn parse_delim_args(&mut self) -> PResult<'a, P<DelimArgs>> {
+ if let Some(args) = self.parse_delim_args_inner() { Ok(P(args)) } else { self.unexpected() }
}
- fn parse_attr_args(&mut self) -> PResult<'a, MacArgs> {
- self.parse_mac_args_common(false)
+ fn parse_attr_args(&mut self) -> PResult<'a, AttrArgs> {
+ Ok(if let Some(args) = self.parse_delim_args_inner() {
+ AttrArgs::Delimited(args)
+ } else {
+ if self.eat(&token::Eq) {
+ let eq_span = self.prev_token.span;
+ AttrArgs::Eq(eq_span, AttrArgsEq::Ast(self.parse_expr_force_collect()?))
+ } else {
+ AttrArgs::Empty
+ }
+ })
}
- fn parse_mac_args_common(&mut self, delimited_only: bool) -> PResult<'a, MacArgs> {
- Ok(
- if self.check(&token::OpenDelim(Delimiter::Parenthesis))
- || self.check(&token::OpenDelim(Delimiter::Bracket))
- || self.check(&token::OpenDelim(Delimiter::Brace))
- {
- match self.parse_token_tree() {
- TokenTree::Delimited(dspan, delim, tokens) =>
- // We've confirmed above that there is a delimiter so unwrapping is OK.
- {
- MacArgs::Delimited(dspan, MacDelimiter::from_token(delim).unwrap(), tokens)
- }
- _ => unreachable!(),
- }
- } else if !delimited_only {
- if self.eat(&token::Eq) {
- let eq_span = self.prev_token.span;
- MacArgs::Eq(eq_span, MacArgsEq::Ast(self.parse_expr_force_collect()?))
- } else {
- MacArgs::Empty
- }
- } else {
- return self.unexpected();
- },
- )
+ fn parse_delim_args_inner(&mut self) -> Option<DelimArgs> {
+ if self.check(&token::OpenDelim(Delimiter::Parenthesis))
+ || self.check(&token::OpenDelim(Delimiter::Bracket))
+ || self.check(&token::OpenDelim(Delimiter::Brace))
+ {
+ match self.parse_token_tree() {
+ // We've confirmed above that there is a delimiter so unwrapping is OK.
+ TokenTree::Delimited(dspan, delim, tokens) => Some(DelimArgs {
+ dspan,
+ delim: MacDelimiter::from_token(delim).unwrap(),
+ tokens,
+ }),
+ _ => unreachable!(),
+ }
+ } else {
+ None
+ }
}
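
    `parse_attr_args` now distinguishes exactly three shapes: delimited arguments, `= expr`, or
    nothing. A rough string-level illustration of that classification (a hypothetical helper that
    works on source text rather than on tokens, and simplified stand-ins for the `AttrArgs` cases):

    #[derive(Debug, PartialEq)]
    enum Args {
        Empty,             // #[test]
        Delimited(String), // #[derive(Clone)]
        Eq(String),        // #[doc = "..."]
    }

    fn classify(rest: &str) -> Args {
        let rest = rest.trim_start();
        if rest.starts_with(|c: char| matches!(c, '(' | '[' | '{')) {
            Args::Delimited(rest.to_string())
        } else if let Some(expr) = rest.strip_prefix('=') {
            Args::Eq(expr.trim().to_string())
        } else {
            Args::Empty
        }
    }

    fn main() {
        assert_eq!(classify("(Clone)"), Args::Delimited("(Clone)".into()));
        assert_eq!(classify(" = \"hi\""), Args::Eq("\"hi\"".into()));
        assert_eq!(classify(""), Args::Empty);
    }
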
fn parse_or_use_outer_attributes(
@@ -1403,8 +1451,8 @@ impl<'a> Parser<'a> {
}
/// Parses `extern string_literal?`.
- fn parse_extern(&mut self) -> Extern {
- if self.eat_keyword(kw::Extern) {
+ fn parse_extern(&mut self, case: Case) -> Extern {
+ if self.eat_keyword_case(kw::Extern, case) {
let mut extern_span = self.prev_token.span;
let abi = self.parse_abi();
if let Some(abi) = abi {
diff --git a/compiler/rustc_parse/src/parser/nonterminal.rs b/compiler/rustc_parse/src/parser/nonterminal.rs
index 103dd8012..239ed79ce 100644
--- a/compiler/rustc_parse/src/parser/nonterminal.rs
+++ b/compiler/rustc_parse/src/parser/nonterminal.rs
@@ -42,9 +42,9 @@ impl<'a> Parser<'a> {
token::Comma | token::Ident(..) | token::Interpolated(..) => true,
_ => token.can_begin_type(),
},
- NonterminalKind::Block => match token.kind {
+ NonterminalKind::Block => match &token.kind {
token::OpenDelim(Delimiter::Brace) => true,
- token::Interpolated(ref nt) => !matches!(
+ token::Interpolated(nt) => !matches!(
**nt,
token::NtItem(_)
| token::NtPat(_)
@@ -56,16 +56,16 @@ impl<'a> Parser<'a> {
),
_ => false,
},
- NonterminalKind::Path | NonterminalKind::Meta => match token.kind {
+ NonterminalKind::Path | NonterminalKind::Meta => match &token.kind {
token::ModSep | token::Ident(..) => true,
- token::Interpolated(ref nt) => match **nt {
+ token::Interpolated(nt) => match **nt {
token::NtPath(_) | token::NtMeta(_) => true,
_ => may_be_ident(&nt),
},
_ => false,
},
NonterminalKind::PatParam { .. } | NonterminalKind::PatWithOr { .. } => {
- match token.kind {
+ match &token.kind {
token::Ident(..) | // box, ref, mut, and other identifiers (can stricten)
token::OpenDelim(Delimiter::Parenthesis) | // tuple pattern
token::OpenDelim(Delimiter::Bracket) | // slice pattern
@@ -80,13 +80,13 @@ impl<'a> Parser<'a> {
token::BinOp(token::Shl) => true, // path (double UFCS)
// leading vert `|` or-pattern
token::BinOp(token::Or) => matches!(kind, NonterminalKind::PatWithOr {..}),
- token::Interpolated(ref nt) => may_be_ident(nt),
+ token::Interpolated(nt) => may_be_ident(nt),
_ => false,
}
}
- NonterminalKind::Lifetime => match token.kind {
+ NonterminalKind::Lifetime => match &token.kind {
token::Lifetime(_) => true,
- token::Interpolated(ref nt) => {
+ token::Interpolated(nt) => {
matches!(**nt, token::NtLifetime(_))
}
_ => false,
diff --git a/compiler/rustc_parse/src/parser/pat.rs b/compiler/rustc_parse/src/parser/pat.rs
index 52c11b4e3..cbeec951e 100644
--- a/compiler/rustc_parse/src/parser/pat.rs
+++ b/compiler/rustc_parse/src/parser/pat.rs
@@ -420,7 +420,7 @@ impl<'a> Parser<'a> {
err.span_label(self_.token.span, format!("expected {}", expected));
err
});
- PatKind::Lit(self.mk_expr(lo, ExprKind::Lit(lit)))
+ PatKind::Lit(self.mk_expr(lo, ExprKind::Lit(lit.token_lit)))
} else {
// Try to parse everything else as literal with optional minus
match self.parse_literal_maybe_minus() {
@@ -485,7 +485,7 @@ impl<'a> Parser<'a> {
let mut rhs = self.parse_pat_no_top_alt(None)?;
let sp = lhs.span.to(rhs.span);
- if let PatKind::Ident(_, _, ref mut sub @ None) = rhs.kind {
+ if let PatKind::Ident(_, _, sub @ None) = &mut rhs.kind {
// The user inverted the order, so help them fix that.
let mut applicability = Applicability::MachineApplicable;
// FIXME(bindings_after_at): Remove this code when stabilizing the feature.
@@ -595,7 +595,7 @@ impl<'a> Parser<'a> {
self.recover_additional_muts();
// Make sure we don't allow e.g. `let mut $p;` where `$p:pat`.
- if let token::Interpolated(ref nt) = self.token.kind {
+ if let token::Interpolated(nt) = &self.token.kind {
if let token::NtPat(_) = **nt {
self.expected_ident_found().emit();
}
@@ -693,7 +693,7 @@ impl<'a> Parser<'a> {
/// Parse macro invocation
fn parse_pat_mac_invoc(&mut self, path: Path) -> PResult<'a, PatKind> {
self.bump();
- let args = self.parse_mac_args()?;
+ let args = self.parse_delim_args()?;
let mac = P(MacCall { path, args, prior_type_ascription: self.last_type_ascription });
Ok(PatKind::MacCall(mac))
}
@@ -796,7 +796,7 @@ impl<'a> Parser<'a> {
/// expression syntax `...expr` for splatting in expressions.
fn parse_pat_range_to(&mut self, mut re: Spanned<RangeEnd>) -> PResult<'a, PatKind> {
let end = self.parse_pat_range_end()?;
- if let RangeEnd::Included(ref mut syn @ RangeSyntax::DotDotDot) = &mut re.node {
+ if let RangeEnd::Included(syn @ RangeSyntax::DotDotDot) = &mut re.node {
*syn = RangeSyntax::DotDotEq;
self.struct_span_err(re.span, "range-to patterns with `...` are not allowed")
.span_suggestion_short(
@@ -889,7 +889,7 @@ impl<'a> Parser<'a> {
}
/// Parse a struct ("record") pattern (e.g. `Foo { ... }` or `Foo::Bar { ... }`).
- fn parse_pat_struct(&mut self, qself: Option<QSelf>, path: Path) -> PResult<'a, PatKind> {
+ fn parse_pat_struct(&mut self, qself: Option<P<QSelf>>, path: Path) -> PResult<'a, PatKind> {
if qself.is_some() {
// Feature gate the use of qualified paths in patterns
self.sess.gated_spans.gate(sym::more_qualified_paths, path.span);
@@ -906,7 +906,11 @@ impl<'a> Parser<'a> {
}
/// Parse tuple struct or tuple variant pattern (e.g. `Foo(...)` or `Foo::Bar(...)`).
- fn parse_pat_tuple_struct(&mut self, qself: Option<QSelf>, path: Path) -> PResult<'a, PatKind> {
+ fn parse_pat_tuple_struct(
+ &mut self,
+ qself: Option<P<QSelf>>,
+ path: Path,
+ ) -> PResult<'a, PatKind> {
let (fields, _) = self.parse_paren_comma_seq(|p| {
p.parse_pat_allow_top_alt(
None,
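
    The qualified-self is now passed as `Option<P<QSelf>>` instead of `Option<QSelf>` by value. The
    payoff is the usual one for boxing a large, rarely-present field: the enclosing enums and call
    signatures stay small in the common case. A self-contained illustration with a made-up payload
    type (sizes here are only meant to show the relative effect, not the real `QSelf` layout):

    use std::mem::size_of;

    // Hypothetical stand-in for a large self-type payload.
    #[allow(dead_code)]
    struct BigQSelf {
        _ty: [usize; 6],
        _path_span: (u32, u32),
        _position: usize,
    }

    #[allow(dead_code)]
    enum ByValue { WithQSelf(Option<BigQSelf>), Plain }
    #[allow(dead_code)]
    enum Boxed   { WithQSelf(Option<Box<BigQSelf>>), Plain }

    fn main() {
        // Option<Box<_>> is a single, niche-optimized pointer, so the boxed
        // variant keeps the whole enum small.
        assert_eq!(size_of::<Option<Box<BigQSelf>>>(), size_of::<usize>());
        assert!(size_of::<Boxed>() < size_of::<ByValue>());
    }
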
diff --git a/compiler/rustc_parse/src/parser/path.rs b/compiler/rustc_parse/src/parser/path.rs
index fdc1af27f..2d432e3f5 100644
--- a/compiler/rustc_parse/src/parser/path.rs
+++ b/compiler/rustc_parse/src/parser/path.rs
@@ -11,8 +11,9 @@ use rustc_ast::{
use rustc_errors::{pluralize, Applicability, PResult};
use rustc_span::source_map::{BytePos, Span};
use rustc_span::symbol::{kw, sym, Ident};
-
use std::mem;
+use thin_vec::ThinVec;
+use tracing::debug;
/// Specifies how to parse a path.
#[derive(Copy, Clone, PartialEq)]
@@ -48,7 +49,7 @@ impl<'a> Parser<'a> {
/// `<T as U>::a`
/// `<T as U>::F::a<S>` (without disambiguator)
/// `<T as U>::F::a::<S>` (with disambiguator)
- pub(super) fn parse_qpath(&mut self, style: PathStyle) -> PResult<'a, (QSelf, Path)> {
+ pub(super) fn parse_qpath(&mut self, style: PathStyle) -> PResult<'a, (P<QSelf>, Path)> {
let lo = self.prev_token.span;
let ty = self.parse_ty()?;
@@ -63,7 +64,7 @@ impl<'a> Parser<'a> {
path_span = path_lo.to(self.prev_token.span);
} else {
path_span = self.token.span.to(self.token.span);
- path = ast::Path { segments: Vec::new(), span: path_span, tokens: None };
+ path = ast::Path { segments: ThinVec::new(), span: path_span, tokens: None };
}
// See doc comment for `unmatched_angle_bracket_count`.
@@ -77,7 +78,7 @@ impl<'a> Parser<'a> {
self.expect(&token::ModSep)?;
}
- let qself = QSelf { ty, path_span, position: path.segments.len() };
+ let qself = P(QSelf { ty, path_span, position: path.segments.len() });
self.parse_path_segments(&mut path.segments, style, None)?;
Ok((
@@ -179,7 +180,7 @@ impl<'a> Parser<'a> {
}
let lo = self.token.span;
- let mut segments = Vec::new();
+ let mut segments = ThinVec::new();
let mod_sep_ctxt = self.token.span.ctxt();
if self.eat(&token::ModSep) {
segments.push(PathSegment::path_root(lo.shrink_to_lo().with_ctxt(mod_sep_ctxt)));
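
    Path segments switch from `Vec<PathSegment>` to `ThinVec<PathSegment>`. `ThinVec` (from the
    `thin_vec` crate that `rustc_ast` already depends on) stores length and capacity in the heap
    allocation, so the handle itself is one pointer wide. A small sketch, assuming `thin_vec` is
    available as a dependency:

    use std::mem::size_of;
    use thin_vec::{thin_vec, ThinVec};

    fn main() {
        // The point of the change: a ThinVec handle is a single pointer, versus
        // three words for Vec, which matters for fields that are cloned and moved a lot.
        assert_eq!(size_of::<ThinVec<u32>>(), size_of::<usize>());
        assert_eq!(size_of::<Vec<u32>>(), 3 * size_of::<usize>());

        let mut segments: ThinVec<&str> = thin_vec!["std", "mem"];
        segments.push("size_of");
        assert_eq!(segments.join("::"), "std::mem::size_of");
    }
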
@@ -191,7 +192,7 @@ impl<'a> Parser<'a> {
pub(super) fn parse_path_segments(
&mut self,
- segments: &mut Vec<PathSegment>,
+ segments: &mut ThinVec<PathSegment>,
style: PathStyle,
ty_generics: Option<&Generics>,
) -> PResult<'a, ()> {
@@ -631,7 +632,9 @@ impl<'a> Parser<'a> {
/// - A single-segment path.
pub(super) fn expr_is_valid_const_arg(&self, expr: &P<rustc_ast::Expr>) -> bool {
match &expr.kind {
- ast::ExprKind::Block(_, _) | ast::ExprKind::Lit(_) => true,
+ ast::ExprKind::Block(_, _)
+ | ast::ExprKind::Lit(_)
+ | ast::ExprKind::IncludedBytes(..) => true,
ast::ExprKind::Unary(ast::UnOp::Neg, expr) => {
matches!(expr.kind, ast::ExprKind::Lit(_))
}
diff --git a/compiler/rustc_parse/src/parser/stmt.rs b/compiler/rustc_parse/src/parser/stmt.rs
index 12753c678..42197e637 100644
--- a/compiler/rustc_parse/src/parser/stmt.rs
+++ b/compiler/rustc_parse/src/parser/stmt.rs
@@ -10,8 +10,8 @@ use super::{
use crate::errors::{
AssignmentElseNotAllowed, CompoundAssignmentExpressionInLet, ConstLetMutuallyExclusive,
DocCommentDoesNotDocumentAnything, ExpectedStatementAfterOuterAttr, InvalidCurlyInLetElse,
- InvalidExpressionInLetElse, InvalidVariableDeclaration, InvalidVariableDeclarationSub,
- WrapExpressionInParentheses,
+ InvalidExpressionInLetElse, InvalidIdentiferStartsWithNumber, InvalidVariableDeclaration,
+ InvalidVariableDeclarationSub, WrapExpressionInParentheses,
};
use crate::maybe_whole;
@@ -19,7 +19,7 @@ use rustc_ast as ast;
use rustc_ast::ptr::P;
use rustc_ast::token::{self, Delimiter, TokenKind};
use rustc_ast::util::classify;
-use rustc_ast::{AttrStyle, AttrVec, Attribute, LocalKind, MacCall, MacCallStmt, MacStmtStyle};
+use rustc_ast::{AttrStyle, AttrVec, LocalKind, MacCall, MacCallStmt, MacStmtStyle};
use rustc_ast::{Block, BlockCheckMode, Expr, ExprKind, HasAttrs, Local, Stmt};
use rustc_ast::{StmtKind, DUMMY_NODE_ID};
use rustc_errors::{Applicability, DiagnosticBuilder, ErrorGuaranteed, PResult};
@@ -72,14 +72,22 @@ impl<'a> Parser<'a> {
Ok(Some(if self.token.is_keyword(kw::Let) {
self.parse_local_mk(lo, attrs, capture_semi, force_collect)?
- } else if self.is_kw_followed_by_ident(kw::Mut) {
- self.recover_stmt_local(lo, attrs, InvalidVariableDeclarationSub::MissingLet)?
- } else if self.is_kw_followed_by_ident(kw::Auto) {
+ } else if self.is_kw_followed_by_ident(kw::Mut) && self.may_recover() {
+ self.recover_stmt_local_after_let(lo, attrs, InvalidVariableDeclarationSub::MissingLet)?
+ } else if self.is_kw_followed_by_ident(kw::Auto) && self.may_recover() {
self.bump(); // `auto`
- self.recover_stmt_local(lo, attrs, InvalidVariableDeclarationSub::UseLetNotAuto)?
- } else if self.is_kw_followed_by_ident(sym::var) {
+ self.recover_stmt_local_after_let(
+ lo,
+ attrs,
+ InvalidVariableDeclarationSub::UseLetNotAuto,
+ )?
+ } else if self.is_kw_followed_by_ident(sym::var) && self.may_recover() {
self.bump(); // `var`
- self.recover_stmt_local(lo, attrs, InvalidVariableDeclarationSub::UseLetNotVar)?
+ self.recover_stmt_local_after_let(
+ lo,
+ attrs,
+ InvalidVariableDeclarationSub::UseLetNotVar,
+ )?
} else if self.check_path() && !self.token.is_qpath_start() && !self.is_path_start_item() {
// We have avoided contextual keywords like `union`, items with `crate` visibility,
// or `auto trait` items. We aim to parse an arbitrary path `a::b` but not something
@@ -101,7 +109,7 @@ impl<'a> Parser<'a> {
self.mk_stmt(lo.to(item.span), StmtKind::Item(P(item)))
} else if self.eat(&token::Semi) {
// Do not attempt to parse an expression if we're done here.
- self.error_outer_attrs(&attrs.take_for_recovery());
+ self.error_outer_attrs(attrs);
self.mk_stmt(lo, StmtKind::Empty)
} else if self.token != token::CloseDelim(Delimiter::Brace) {
// Remainder are line-expr stmts.
@@ -120,7 +128,7 @@ impl<'a> Parser<'a> {
}
self.mk_stmt(lo.to(e.span), StmtKind::Expr(e))
} else {
- self.error_outer_attrs(&attrs.take_for_recovery());
+ self.error_outer_attrs(attrs);
return Ok(None);
}))
}
@@ -167,14 +175,13 @@ impl<'a> Parser<'a> {
/// Parses a statement macro `mac!(args)` provided a `path` representing `mac`.
/// At this point, the `!` token after the path has already been eaten.
fn parse_stmt_mac(&mut self, lo: Span, attrs: AttrVec, path: ast::Path) -> PResult<'a, Stmt> {
- let args = self.parse_mac_args()?;
- let delim = args.delim();
+ let args = self.parse_delim_args()?;
+ let delim = args.delim.to_token();
let hi = self.prev_token.span;
let style = match delim {
- Some(Delimiter::Brace) => MacStmtStyle::Braces,
- Some(_) => MacStmtStyle::NoBraces,
- None => unreachable!(),
+ Delimiter::Brace => MacStmtStyle::Braces,
+ _ => MacStmtStyle::NoBraces,
};
let mac = P(MacCall { path, args, prior_type_ascription: self.last_type_ascription });
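
    With `DelimArgs` the delimiter is no longer optional, so the old `None => unreachable!()` arm
    disappears and the statement-style mapping becomes a total match. In miniature (hypothetical
    types, not the real `MacStmtStyle`):

    #[derive(Debug, PartialEq)]
    enum StmtStyle { Braces, NoBraces }

    fn style_for(delim: char) -> StmtStyle {
        // Total over all delimiters: only braces get the brace-style statement.
        match delim {
            '{' => StmtStyle::Braces,
            _ => StmtStyle::NoBraces,
        }
    }

    fn main() {
        assert_eq!(style_for('{'), StmtStyle::Braces);
        assert_eq!(style_for('('), StmtStyle::NoBraces);
    }
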
@@ -199,8 +206,10 @@ impl<'a> Parser<'a> {
/// Error on outer attributes in this context.
/// Also error if the previous token was a doc comment.
- fn error_outer_attrs(&self, attrs: &[Attribute]) {
- if let [.., last] = attrs {
+ fn error_outer_attrs(&self, attrs: AttrWrapper) {
+ if !attrs.is_empty()
+ && let attrs = attrs.take_for_recovery(self.sess)
+ && let attrs @ [.., last] = &*attrs {
if last.is_doc_comment() {
self.sess.emit_err(DocCommentDoesNotDocumentAnything {
span: last.span,
@@ -212,13 +221,21 @@ impl<'a> Parser<'a> {
}
}
- fn recover_stmt_local(
+ fn recover_stmt_local_after_let(
&mut self,
lo: Span,
attrs: AttrWrapper,
subdiagnostic: fn(Span) -> InvalidVariableDeclarationSub,
) -> PResult<'a, Stmt> {
- let stmt = self.recover_local_after_let(lo, attrs)?;
+ let stmt =
+ self.collect_tokens_trailing_token(attrs, ForceCollect::Yes, |this, attrs| {
+ let local = this.parse_local(attrs)?;
+ // FIXME - maybe capture semicolon in recovery?
+ Ok((
+ this.mk_stmt(lo.to(this.prev_token.span), StmtKind::Local(local)),
+ TrailingToken::None,
+ ))
+ })?;
self.sess.emit_err(InvalidVariableDeclaration { span: lo, sub: subdiagnostic(lo) });
Ok(stmt)
}
@@ -242,17 +259,6 @@ impl<'a> Parser<'a> {
})
}
- fn recover_local_after_let(&mut self, lo: Span, attrs: AttrWrapper) -> PResult<'a, Stmt> {
- self.collect_tokens_trailing_token(attrs, ForceCollect::No, |this, attrs| {
- let local = this.parse_local(attrs)?;
- // FIXME - maybe capture semicolon in recovery?
- Ok((
- this.mk_stmt(lo.to(this.prev_token.span), StmtKind::Local(local)),
- TrailingToken::None,
- ))
- })
- }
-
/// Parses a local variable declaration.
fn parse_local(&mut self, attrs: AttrVec) -> PResult<'a, P<Local>> {
let lo = self.prev_token.span;
@@ -262,6 +268,7 @@ impl<'a> Parser<'a> {
self.bump();
}
+ self.report_invalid_identifier_error()?;
let (pat, colon) = self.parse_pat_before_ty(None, RecoverComma::Yes, "`let` bindings")?;
let (err, ty) = if colon {
@@ -353,6 +360,17 @@ impl<'a> Parser<'a> {
Ok(P(ast::Local { ty, pat, kind, id: DUMMY_NODE_ID, span: lo.to(hi), attrs, tokens: None }))
}
+ /// Report an error for `let 1x = 123`.
+ pub fn report_invalid_identifier_error(&mut self) -> PResult<'a, ()> {
+ if let token::Literal(lit) = self.token.uninterpolate().kind &&
+ rustc_ast::MetaItemLit::from_token(&self.token).is_none() &&
+ (lit.kind == token::LitKind::Integer || lit.kind == token::LitKind::Float) &&
+ self.look_ahead(1, |t| matches!(t.kind, token::Eq | token::Colon)) {
+ return Err(self.sess.create_err(InvalidIdentiferStartsWithNumber { span: self.token.span }));
+ }
+ Ok(())
+ }
+
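
    The new `report_invalid_identifier_error` fires when a `let` binding starts with a numeric
    literal that is not a plain literal and is immediately followed by `=` or `:`, i.e. the
    `let 1x = 123;` shape. A character-level approximation of that check (illustrative only; the
    real check works on tokens and `MetaItemLit::from_token`):

    fn looks_like_digit_led_ident(pat: &str, next: char) -> bool {
        // Starts with a digit, has identifier characters after it, and the next
        // token is `=` or `:` -- close to what the token-level check tests.
        let mut chars = pat.chars();
        matches!(chars.next(), Some(c) if c.is_ascii_digit())
            && chars.any(|c| c.is_alphabetic() || c == '_')
            && matches!(next, '=' | ':')
    }

    fn main() {
        assert!(looks_like_digit_led_ident("1x", '='));
        assert!(!looks_like_digit_led_ident("123", '=')); // plain literal, not a bad identifier
        assert!(!looks_like_digit_led_ident("1x", '+'));  // not a binding position
    }
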
fn check_let_else_init_bool_expr(&self, init: &ast::Expr) {
if let ast::ExprKind::Binary(op, ..) = init.kind {
if op.node.lazy() {
@@ -550,9 +568,9 @@ impl<'a> Parser<'a> {
};
let mut eat_semi = true;
- match stmt.kind {
+ match &mut stmt.kind {
// Expression without semicolon.
- StmtKind::Expr(ref mut expr)
+ StmtKind::Expr(expr)
if self.token != token::Eof && classify::expr_requires_semi_to_be_stmt(expr) => {
// Just check for errors and recover; do not eat semicolon yet.
// `expect_one_of` returns PResult<'a, bool /* recovered */>
@@ -598,7 +616,7 @@ impl<'a> Parser<'a> {
}
}
StmtKind::Expr(_) | StmtKind::MacCall(_) => {}
- StmtKind::Local(ref mut local) if let Err(e) = self.expect_semi() => {
+ StmtKind::Local(local) if let Err(e) = self.expect_semi() => {
// We might be at the `,` in `let x = foo<bar, baz>;`. Try to recover.
match &mut local.kind {
LocalKind::Init(expr) | LocalKind::InitElse(expr, _) => {
diff --git a/compiler/rustc_parse/src/parser/ty.rs b/compiler/rustc_parse/src/parser/ty.rs
index 2a8512acf..b7206b576 100644
--- a/compiler/rustc_parse/src/parser/ty.rs
+++ b/compiler/rustc_parse/src/parser/ty.rs
@@ -1,9 +1,11 @@
use super::{Parser, PathStyle, TokenType};
+use crate::errors::{FnPtrWithGenerics, FnPtrWithGenericsSugg};
use crate::{maybe_recover_from_interpolated_ty_qpath, maybe_whole};
use rustc_ast::ptr::P;
use rustc_ast::token::{self, Delimiter, Token, TokenKind};
+use rustc_ast::util::case::Case;
use rustc_ast::{
self as ast, BareFnTy, FnRetTy, GenericBound, GenericBounds, GenericParam, Generics, Lifetime,
MacCall, MutTy, Mutability, PolyTraitRef, TraitBoundModifier, TraitObjectSyntax, Ty, TyKind,
@@ -267,16 +269,21 @@ impl<'a> Parser<'a> {
} else if self.eat_keyword(kw::Underscore) {
// A type to be inferred `_`
TyKind::Infer
- } else if self.check_fn_front_matter(false) {
+ } else if self.check_fn_front_matter(false, Case::Sensitive) {
// Function pointer type
- self.parse_ty_bare_fn(lo, Vec::new(), recover_return_sign)?
+ self.parse_ty_bare_fn(lo, Vec::new(), None, recover_return_sign)?
} else if self.check_keyword(kw::For) {
// Function pointer type or bound list (trait object type) starting with a poly-trait.
// `for<'lt> [unsafe] [extern "ABI"] fn (&'lt S) -> T`
// `for<'lt> Trait1<'lt> + Trait2 + 'a`
let lifetime_defs = self.parse_late_bound_lifetime_defs()?;
- if self.check_fn_front_matter(false) {
- self.parse_ty_bare_fn(lo, lifetime_defs, recover_return_sign)?
+ if self.check_fn_front_matter(false, Case::Sensitive) {
+ self.parse_ty_bare_fn(
+ lo,
+ lifetime_defs,
+ Some(self.prev_token.span.shrink_to_lo()),
+ recover_return_sign,
+ )?
} else {
let path = self.parse_path(PathStyle::Type)?;
let parse_plus = allow_plus == AllowPlus::Yes && self.check_plus();
@@ -401,7 +408,7 @@ impl<'a> Parser<'a> {
.span_suggestions(
span.shrink_to_hi(),
"add `mut` or `const` here",
- ["mut ".to_string(), "const ".to_string()].into_iter(),
+ ["mut ".to_string(), "const ".to_string()],
Applicability::HasPlaceholders,
)
.emit();
@@ -518,7 +525,8 @@ impl<'a> Parser<'a> {
fn parse_ty_bare_fn(
&mut self,
lo: Span,
- params: Vec<GenericParam>,
+ mut params: Vec<GenericParam>,
+ param_insertion_point: Option<Span>,
recover_return_sign: RecoverReturnSign,
) -> PResult<'a, TyKind> {
let inherited_vis = rustc_ast::Visibility {
@@ -528,7 +536,10 @@ impl<'a> Parser<'a> {
};
let span_start = self.token.span;
let ast::FnHeader { ext, unsafety, constness, asyncness } =
- self.parse_fn_front_matter(&inherited_vis)?;
+ self.parse_fn_front_matter(&inherited_vis, Case::Sensitive)?;
+ if self.may_recover() && self.token.kind == TokenKind::Lt {
+ self.recover_fn_ptr_with_generics(lo, &mut params, param_insertion_point)?;
+ }
let decl = self.parse_fn_decl(|_| false, AllowPlus::No, recover_return_sign)?;
let whole_span = lo.to(self.prev_token.span);
if let ast::Const::Yes(span) = constness {
@@ -544,6 +555,48 @@ impl<'a> Parser<'a> {
Ok(TyKind::BareFn(P(BareFnTy { ext, unsafety, generic_params: params, decl, decl_span })))
}
+ /// Recover from function pointer types with a generic parameter list (e.g. `fn<'a>(&'a str)`).
+ fn recover_fn_ptr_with_generics(
+ &mut self,
+ lo: Span,
+ params: &mut Vec<GenericParam>,
+ param_insertion_point: Option<Span>,
+ ) -> PResult<'a, ()> {
+ let generics = self.parse_generics()?;
+ let arity = generics.params.len();
+
+ let mut lifetimes: Vec<_> = generics
+ .params
+ .into_iter()
+ .filter(|param| matches!(param.kind, ast::GenericParamKind::Lifetime))
+ .collect();
+
+ let sugg = if !lifetimes.is_empty() {
+ let snippet =
+ lifetimes.iter().map(|param| param.ident.as_str()).intersperse(", ").collect();
+
+ let (left, snippet) = if let Some(span) = param_insertion_point {
+ (span, if params.is_empty() { snippet } else { format!(", {snippet}") })
+ } else {
+ (lo.shrink_to_lo(), format!("for<{snippet}> "))
+ };
+
+ Some(FnPtrWithGenericsSugg {
+ left,
+ snippet,
+ right: generics.span,
+ arity,
+ for_param_list_exists: param_insertion_point.is_some(),
+ })
+ } else {
+ None
+ };
+
+ self.sess.emit_err(FnPtrWithGenerics { span: generics.span, sugg });
+ params.append(&mut lifetimes);
+ Ok(())
+ }
+
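
    The recovery turns `fn<'a>(&'a str)` into a structured suggestion: either splice the lifetimes
    into an existing `for<...>` binder or synthesize a new binder in front of the `fn`. The snippet
    the suggestion inserts can be restated on its own (names here are illustrative, not the real
    `FnPtrWithGenericsSugg` fields):

    fn binder_snippet(lifetimes: &[&str], has_binder: bool, binder_is_empty: bool) -> String {
        let list = lifetimes.join(", ");
        if has_binder {
            // Splice into the existing `for<...>` parameter list.
            if binder_is_empty { list } else { format!(", {list}") }
        } else {
            // No binder yet: suggest writing one in front of the `fn`.
            format!("for<{list}> ")
        }
    }

    fn main() {
        assert_eq!(binder_snippet(&["'a", "'b"], false, true), "for<'a, 'b> ");
        assert_eq!(binder_snippet(&["'a"], true, false), ", 'a");
    }
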
/// Emit an error for the given bad function pointer qualifier.
fn error_fn_ptr_bad_qualifier(&self, span: Span, qual_span: Span, qual: &str) {
self.struct_span_err(span, &format!("an `fn` pointer type cannot be `{}`", qual))
@@ -612,7 +665,7 @@ impl<'a> Parser<'a> {
// Macro invocation in type position
Ok(TyKind::MacCall(P(MacCall {
path,
- args: self.parse_mac_args()?,
+ args: self.parse_delim_args()?,
prior_type_ascription: self.last_type_ascription,
})))
} else if allow_plus == AllowPlus::Yes && self.check_plus() {
diff --git a/compiler/rustc_parse/src/validate_attr.rs b/compiler/rustc_parse/src/validate_attr.rs
index 47477898b..72402a200 100644
--- a/compiler/rustc_parse/src/validate_attr.rs
+++ b/compiler/rustc_parse/src/validate_attr.rs
@@ -3,15 +3,16 @@
use crate::parse_in;
use rustc_ast::tokenstream::DelimSpan;
-use rustc_ast::{self as ast, Attribute, MacArgs, MacArgsEq, MacDelimiter, MetaItem, MetaItemKind};
+use rustc_ast::MetaItemKind;
+use rustc_ast::{self as ast, AttrArgs, AttrArgsEq, Attribute, DelimArgs, MacDelimiter, MetaItem};
use rustc_ast_pretty::pprust;
use rustc_errors::{Applicability, FatalError, PResult};
use rustc_feature::{AttributeTemplate, BuiltinAttribute, BUILTIN_ATTRIBUTE_MAP};
use rustc_session::lint::builtin::ILL_FORMED_ATTRIBUTE_INPUT;
use rustc_session::parse::ParseSess;
-use rustc_span::{sym, Symbol};
+use rustc_span::{sym, Span, Symbol};
-pub fn check_meta(sess: &ParseSess, attr: &Attribute) {
+pub fn check_attr(sess: &ParseSess, attr: &Attribute) {
if attr.is_doc_comment() {
return;
}
@@ -24,7 +25,7 @@ pub fn check_meta(sess: &ParseSess, attr: &Attribute) {
Some(BuiltinAttribute { name, template, .. }) if *name != sym::rustc_dummy => {
check_builtin_attribute(sess, attr, *name, *template)
}
- _ if let MacArgs::Eq(..) = attr.get_normal_item().args => {
+ _ if let AttrArgs::Eq(..) = attr.get_normal_item().args => {
// All key-value attributes are restricted to meta-item syntax.
parse_meta(sess, attr)
.map_err(|mut err| {
@@ -42,17 +43,19 @@ pub fn parse_meta<'a>(sess: &'a ParseSess, attr: &Attribute) -> PResult<'a, Meta
span: attr.span,
path: item.path.clone(),
kind: match &item.args {
- MacArgs::Empty => MetaItemKind::Word,
- MacArgs::Delimited(dspan, delim, t) => {
+ AttrArgs::Empty => MetaItemKind::Word,
+ AttrArgs::Delimited(DelimArgs { dspan, delim, tokens }) => {
check_meta_bad_delim(sess, *dspan, *delim, "wrong meta list delimiters");
- let nmis = parse_in(sess, t.clone(), "meta list", |p| p.parse_meta_seq_top())?;
+ let nmis = parse_in(sess, tokens.clone(), "meta list", |p| p.parse_meta_seq_top())?;
MetaItemKind::List(nmis)
}
- MacArgs::Eq(_, MacArgsEq::Ast(expr)) => {
- if let ast::ExprKind::Lit(lit) = &expr.kind {
- if !lit.kind.is_unsuffixed() {
+ AttrArgs::Eq(_, AttrArgsEq::Ast(expr)) => {
+ if let ast::ExprKind::Lit(token_lit) = expr.kind
+ && let Ok(lit) = ast::MetaItemLit::from_token_lit(token_lit, expr.span)
+ {
+ if token_lit.suffix.is_some() {
let mut err = sess.span_diagnostic.struct_span_err(
- lit.span,
+ expr.span,
"suffixed literals are not allowed in attributes",
);
err.help(
@@ -61,7 +64,7 @@ pub fn parse_meta<'a>(sess: &'a ParseSess, attr: &Attribute) -> PResult<'a, Meta
);
return Err(err);
} else {
- MetaItemKind::NameValue(lit.clone())
+ MetaItemKind::NameValue(lit)
}
} else {
// The non-error case can happen with e.g. `#[foo = 1+1]`. The error case can
@@ -76,7 +79,7 @@ pub fn parse_meta<'a>(sess: &'a ParseSess, attr: &Attribute) -> PResult<'a, Meta
return Err(err);
}
}
- MacArgs::Eq(_, MacArgsEq::Hir(lit)) => MetaItemKind::NameValue(lit.clone()),
+ AttrArgs::Eq(_, AttrArgsEq::Hir(lit)) => MetaItemKind::NameValue(lit.clone()),
},
})
}
@@ -112,25 +115,34 @@ pub fn check_builtin_attribute(
name: Symbol,
template: AttributeTemplate,
) {
- // Some special attributes like `cfg` must be checked
- // before the generic check, so we skip them here.
- let should_skip = |name| name == sym::cfg;
-
match parse_meta(sess, attr) {
- Ok(meta) => {
- if !should_skip(name) && !is_attr_template_compatible(&template, &meta.kind) {
- emit_malformed_attribute(sess, attr, name, template);
- }
- }
+ Ok(meta) => check_builtin_meta_item(sess, &meta, attr.style, name, template),
Err(mut err) => {
err.emit();
}
}
}
+pub fn check_builtin_meta_item(
+ sess: &ParseSess,
+ meta: &MetaItem,
+ style: ast::AttrStyle,
+ name: Symbol,
+ template: AttributeTemplate,
+) {
+ // Some special attributes like `cfg` must be checked
+ // before the generic check, so we skip them here.
+ let should_skip = |name| name == sym::cfg;
+
+ if !should_skip(name) && !is_attr_template_compatible(&template, &meta.kind) {
+ emit_malformed_attribute(sess, style, meta.span, name, template);
+ }
+}
+
fn emit_malformed_attribute(
sess: &ParseSess,
- attr: &Attribute,
+ style: ast::AttrStyle,
+ span: Span,
name: Symbol,
template: AttributeTemplate,
) {
@@ -144,7 +156,7 @@ fn emit_malformed_attribute(
let mut msg = "attribute must be of the form ".to_owned();
let mut suggestions = vec![];
let mut first = true;
- let inner = if attr.style == ast::AttrStyle::Inner { "!" } else { "" };
+ let inner = if style == ast::AttrStyle::Inner { "!" } else { "" };
if template.word {
first = false;
let code = format!("#{}[{}]", inner, name);
@@ -169,12 +181,12 @@ fn emit_malformed_attribute(
suggestions.push(code);
}
if should_warn(name) {
- sess.buffer_lint(&ILL_FORMED_ATTRIBUTE_INPUT, attr.span, ast::CRATE_NODE_ID, &msg);
+ sess.buffer_lint(&ILL_FORMED_ATTRIBUTE_INPUT, span, ast::CRATE_NODE_ID, &msg);
} else {
sess.span_diagnostic
- .struct_span_err(attr.span, &error_msg)
+ .struct_span_err(span, &error_msg)
.span_suggestions(
- attr.span,
+ span,
if suggestions.len() == 1 {
"must be of the form"
} else {
@@ -193,7 +205,7 @@ pub fn emit_fatal_malformed_builtin_attribute(
name: Symbol,
) -> ! {
let template = BUILTIN_ATTRIBUTE_MAP.get(&name).expect("builtin attr defined").template;
- emit_malformed_attribute(sess, attr, name, template);
+ emit_malformed_attribute(sess, attr.style, attr.span, name, template);
// This is fatal, otherwise it will likely cause a cascade of other errors
// (and an error here is expected to be very rare).
FatalError.raise()
diff --git a/compiler/rustc_parse_format/src/lib.rs b/compiler/rustc_parse_format/src/lib.rs
index 1394993ab..0113eb4e3 100644
--- a/compiler/rustc_parse_format/src/lib.rs
+++ b/compiler/rustc_parse_format/src/lib.rs
@@ -818,85 +818,94 @@ fn find_skips_from_snippet(
_ => return (vec![], false),
};
- fn find_skips(snippet: &str, is_raw: bool) -> Vec<usize> {
- let mut s = snippet.char_indices().peekable();
- let mut skips = vec![];
- while let Some((pos, c)) = s.next() {
- match (c, s.peek()) {
- // skip whitespace and empty lines ending in '\\'
- ('\\', Some((next_pos, '\n'))) if !is_raw => {
- skips.push(pos);
- skips.push(*next_pos);
- let _ = s.next();
+ if str_style.is_some() {
+ return (vec![], true);
+ }
- while let Some((pos, c)) = s.peek() {
- if matches!(c, ' ' | '\n' | '\t') {
- skips.push(*pos);
- let _ = s.next();
- } else {
- break;
- }
- }
- }
- ('\\', Some((next_pos, 'n' | 't' | 'r' | '0' | '\\' | '\'' | '\"'))) => {
- skips.push(*next_pos);
- let _ = s.next();
- }
- ('\\', Some((_, 'x'))) if !is_raw => {
- for _ in 0..3 {
- // consume `\xAB` literal
- if let Some((pos, _)) = s.next() {
- skips.push(pos);
- } else {
- break;
- }
+ let snippet = &snippet[1..snippet.len() - 1];
+
+ let mut s = snippet.char_indices();
+ let mut skips = vec![];
+ while let Some((pos, c)) = s.next() {
+ match (c, s.clone().next()) {
+ // skip whitespace and empty lines ending in '\\'
+ ('\\', Some((next_pos, '\n'))) => {
+ skips.push(pos);
+ skips.push(next_pos);
+ let _ = s.next();
+
+ while let Some((pos, c)) = s.clone().next() {
+ if matches!(c, ' ' | '\n' | '\t') {
+ skips.push(pos);
+ let _ = s.next();
+ } else {
+ break;
}
}
- ('\\', Some((_, 'u'))) if !is_raw => {
+ }
+ ('\\', Some((next_pos, 'n' | 't' | 'r' | '0' | '\\' | '\'' | '\"'))) => {
+ skips.push(next_pos);
+ let _ = s.next();
+ }
+ ('\\', Some((_, 'x'))) => {
+ for _ in 0..3 {
+ // consume `\xAB` literal
if let Some((pos, _)) = s.next() {
skips.push(pos);
+ } else {
+ break;
}
- if let Some((next_pos, next_c)) = s.next() {
- if next_c == '{' {
- skips.push(next_pos);
- let mut i = 0; // consume up to 6 hexanumeric chars + closing `}`
- while let (Some((next_pos, c)), true) = (s.next(), i < 7) {
- if c.is_digit(16) {
- skips.push(next_pos);
- } else if c == '}' {
- skips.push(next_pos);
- break;
- } else {
- break;
- }
- i += 1;
- }
- } else if next_c.is_digit(16) {
- skips.push(next_pos);
- // We suggest adding `{` and `}` when appropriate, accept it here as if
- // it were correct
- let mut i = 0; // consume up to 6 hexanumeric chars
- while let (Some((next_pos, c)), _) = (s.next(), i < 6) {
- if c.is_digit(16) {
- skips.push(next_pos);
- } else {
- break;
- }
- i += 1;
+ }
+ }
+ ('\\', Some((_, 'u'))) => {
+ if let Some((pos, _)) = s.next() {
+ skips.push(pos);
+ }
+ if let Some((next_pos, next_c)) = s.next() {
+ if next_c == '{' {
+ // consume up to 6 hexadecimal chars
+ let digits_len =
+ s.clone().take(6).take_while(|(_, c)| c.is_digit(16)).count();
+
+ let len_utf8 = s
+ .as_str()
+ .get(..digits_len)
+ .and_then(|digits| u32::from_str_radix(digits, 16).ok())
+ .and_then(char::from_u32)
+ .map_or(1, char::len_utf8);
+
+ // Skip the escape digits; for chars that encode to more than one UTF-8 byte,
+ // exclude as many digit positions from the skip list as the encoding exceeds one byte.
+ //
+ // So for a 3-byte character, exclude 2 digits.
+ let required_skips = digits_len.saturating_sub(len_utf8.saturating_sub(1));
+
+ // skip '{' and '}' also
+ for pos in (next_pos..).take(required_skips + 2) {
+ skips.push(pos)
+ }
+
+ s.nth(digits_len);
+ } else if next_c.is_digit(16) {
+ skips.push(next_pos);
+ // We suggest adding `{` and `}` when appropriate; accept it here as if
+ // it were correct
+ let mut i = 0; // consume up to 6 hexadecimal chars
+ while let (Some((next_pos, c)), _) = (s.next(), i < 6) {
+ if c.is_digit(16) {
+ skips.push(next_pos);
+ } else {
+ break;
}
+ i += 1;
}
}
}
- _ => {}
}
+ _ => {}
}
- skips
}
-
- let r_start = str_style.map_or(0, |r| r + 1);
- let r_end = str_style.unwrap_or(0);
- let s = &snippet[r_start + 1..snippet.len() - r_end - 1];
- (find_skips(s, str_style.is_some()), true)
+ (skips, true)
}
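
    The trickiest part of the rewritten skip computation is the `\u{...}` arm: the number of
    positions to skip is the digit count minus however many extra UTF-8 bytes the decoded
    character needs. Restated on its own, with the same saturating arithmetic:

    fn required_skips(digits_len: usize, decoded: char) -> usize {
        // For a char that encodes to N UTF-8 bytes, keep N - 1 of the escape's
        // digit positions so later span math still lines up.
        digits_len.saturating_sub(decoded.len_utf8().saturating_sub(1))
    }

    fn main() {
        // '\u{2713}' is 4 hex digits and 3 UTF-8 bytes: skip 2 positions.
        assert_eq!(required_skips(4, '\u{2713}'), 2);
        // '\u{41}' ('A') is 2 hex digits and 1 UTF-8 byte: skip both.
        assert_eq!(required_skips(2, 'A'), 2);
    }
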
#[cfg(test)]
diff --git a/compiler/rustc_passes/src/check_attr.rs b/compiler/rustc_passes/src/check_attr.rs
index 27a57adf9..2e2874dbc 100644
--- a/compiler/rustc_passes/src/check_attr.rs
+++ b/compiler/rustc_passes/src/check_attr.rs
@@ -8,7 +8,7 @@ use crate::errors::{
self, AttrApplication, DebugVisualizerUnreadable, InvalidAttrAtCrateLevel, ObjectLifetimeErr,
OnlyHasEffectOn, TransparentIncompatible, UnrecognizedReprHint,
};
-use rustc_ast::{ast, AttrStyle, Attribute, Lit, LitKind, MetaItemKind, NestedMetaItem};
+use rustc_ast::{ast, AttrStyle, Attribute, LitKind, MetaItemKind, MetaItemLit, NestedMetaItem};
use rustc_data_structures::fx::FxHashMap;
use rustc_errors::{fluent, Applicability, MultiSpan};
use rustc_expand::base::resolve_path;
@@ -119,13 +119,13 @@ impl CheckAttrVisitor<'_> {
}
sym::naked => self.check_naked(hir_id, attr, span, target),
sym::rustc_legacy_const_generics => {
- self.check_rustc_legacy_const_generics(&attr, span, target, item)
+ self.check_rustc_legacy_const_generics(hir_id, &attr, span, target, item)
}
sym::rustc_lint_query_instability => {
- self.check_rustc_lint_query_instability(&attr, span, target)
+ self.check_rustc_lint_query_instability(hir_id, &attr, span, target)
}
sym::rustc_lint_diagnostics => {
- self.check_rustc_lint_diagnostics(&attr, span, target)
+ self.check_rustc_lint_diagnostics(hir_id, &attr, span, target)
}
sym::rustc_lint_opt_ty => self.check_rustc_lint_opt_ty(&attr, span, target),
sym::rustc_lint_opt_deny_field_access => {
@@ -135,11 +135,13 @@ impl CheckAttrVisitor<'_> {
| sym::rustc_dirty
| sym::rustc_if_this_changed
| sym::rustc_then_this_would_need => self.check_rustc_dirty_clean(&attr),
- sym::cmse_nonsecure_entry => self.check_cmse_nonsecure_entry(attr, span, target),
+ sym::cmse_nonsecure_entry => {
+ self.check_cmse_nonsecure_entry(hir_id, attr, span, target)
+ }
sym::collapse_debuginfo => self.check_collapse_debuginfo(attr, span, target),
sym::const_trait => self.check_const_trait(attr, span, target),
sym::must_not_suspend => self.check_must_not_suspend(&attr, span, target),
- sym::must_use => self.check_must_use(hir_id, &attr, span, target),
+ sym::must_use => self.check_must_use(hir_id, &attr, target),
sym::rustc_pass_by_value => self.check_pass_by_value(&attr, span, target),
sym::rustc_allow_incoherent_impl => {
self.check_allow_incoherent_impl(&attr, span, target)
@@ -217,18 +219,6 @@ impl CheckAttrVisitor<'_> {
return;
}
- // FIXME(@lcnr): this doesn't belong here.
- if matches!(
- target,
- Target::Closure
- | Target::Fn
- | Target::Method(_)
- | Target::ForeignFn
- | Target::ForeignStatic
- ) {
- self.tcx.ensure().codegen_fn_attrs(self.tcx.hir().local_def_id(hir_id));
- }
-
self.check_repr(attrs, span, target, item, hir_id);
self.check_used(attrs, target);
}
@@ -363,7 +353,7 @@ impl CheckAttrVisitor<'_> {
attr.span,
OnlyHasEffectOn {
attr_name: attr.name_or_empty(),
- target_name: allowed_target.name().replace(" ", "_"),
+ target_name: allowed_target.name().replace(' ', "_"),
},
);
}
@@ -386,6 +376,7 @@ impl CheckAttrVisitor<'_> {
self.tcx.sess.emit_err(errors::AttrShouldBeAppliedToFn {
attr_span: attr.span,
defn_span: span,
+ on_crate: hir_id == CRATE_HIR_ID,
});
false
}
@@ -393,7 +384,13 @@ impl CheckAttrVisitor<'_> {
}
/// Checks if `#[cmse_nonsecure_entry]` is applied to a function definition.
- fn check_cmse_nonsecure_entry(&self, attr: &Attribute, span: Span, target: Target) -> bool {
+ fn check_cmse_nonsecure_entry(
+ &self,
+ hir_id: HirId,
+ attr: &Attribute,
+ span: Span,
+ target: Target,
+ ) -> bool {
match target {
Target::Fn
| Target::Method(MethodKind::Trait { body: true } | MethodKind::Inherent) => true,
@@ -401,6 +398,7 @@ impl CheckAttrVisitor<'_> {
self.tcx.sess.emit_err(errors::AttrShouldBeAppliedToFn {
attr_span: attr.span,
defn_span: span,
+ on_crate: hir_id == CRATE_HIR_ID,
});
false
}
@@ -413,8 +411,7 @@ impl CheckAttrVisitor<'_> {
if let Some(generics) = tcx.hir().get_generics(tcx.hir().local_def_id(hir_id)) {
for p in generics.params {
let hir::GenericParamKind::Type { .. } = p.kind else { continue };
- let param_id = tcx.hir().local_def_id(p.hir_id);
- let default = tcx.object_lifetime_default(param_id);
+ let default = tcx.object_lifetime_default(p.def_id);
let repr = match default {
ObjectLifetimeDefault::Empty => "BaseDefault".to_owned(),
ObjectLifetimeDefault::Static => "'static".to_owned(),
@@ -465,9 +462,11 @@ impl CheckAttrVisitor<'_> {
true
}
_ => {
- self.tcx
- .sess
- .emit_err(errors::TrackedCallerWrongLocation { attr_span, defn_span: span });
+ self.tcx.sess.emit_err(errors::TrackedCallerWrongLocation {
+ attr_span,
+ defn_span: span,
+ on_crate: hir_id == CRATE_HIR_ID,
+ });
false
}
}
@@ -576,6 +575,7 @@ impl CheckAttrVisitor<'_> {
self.tcx.sess.emit_err(errors::AttrShouldBeAppliedToFn {
attr_span: attr.span,
defn_span: span,
+ on_crate: hir_id == CRATE_HIR_ID,
});
false
}
@@ -715,7 +715,7 @@ impl CheckAttrVisitor<'_> {
if let Some(values) = meta.meta_item_list() {
let mut errors = 0;
for v in values {
- match v.literal() {
+ match v.lit() {
Some(l) => match l.kind {
LitKind::Str(s, _) => {
if !self.check_doc_alias_value(v, s, hir_id, target, true, aliases) {
@@ -1163,17 +1163,7 @@ impl CheckAttrVisitor<'_> {
}
/// Warns against some misuses of `#[must_use]`
- fn check_must_use(&self, hir_id: HirId, attr: &Attribute, span: Span, target: Target) -> bool {
- let node = self.tcx.hir().get(hir_id);
- if let Some(kind) = node.fn_kind() && let rustc_hir::IsAsync::Async = kind.asyncness() {
- self.tcx.emit_spanned_lint(
- UNUSED_ATTRIBUTES,
- hir_id,
- attr.span,
- errors::MustUseAsync { span }
- );
- }
-
+ fn check_must_use(&self, hir_id: HirId, attr: &Attribute, target: Target) -> bool {
if !matches!(
target,
Target::Fn
@@ -1240,7 +1230,7 @@ impl CheckAttrVisitor<'_> {
UNUSED_ATTRIBUTES,
hir_id,
attr.span,
- errors::Cold { span },
+ errors::Cold { span, on_crate: hir_id == CRATE_HIR_ID },
);
}
}
@@ -1365,7 +1355,7 @@ impl CheckAttrVisitor<'_> {
return false;
};
- if matches!(&list[..], &[NestedMetaItem::Literal(Lit { kind: LitKind::Int(..), .. })]) {
+ if matches!(&list[..], &[NestedMetaItem::Lit(MetaItemLit { kind: LitKind::Int(..), .. })]) {
true
} else {
self.tcx.sess.emit_err(errors::RustcLayoutScalarValidRangeArg { attr_span: attr.span });
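
    The `rustc_layout_scalar_valid_range_*` check now matches on `NestedMetaItem::Lit(MetaItemLit
    { .. })` instead of the removed `Lit` type; the shape of the check (exactly one
    integer-literal element) is easy to see with a plain slice pattern over stand-in types:

    #[allow(dead_code)]
    #[derive(Debug)]
    enum Meta {
        Int(u128),
        Str(&'static str),
    }

    fn is_single_int(list: &[Meta]) -> bool {
        // One element, and that element is an integer literal.
        matches!(list, [Meta::Int(..)])
    }

    fn main() {
        assert!(is_single_int(&[Meta::Int(42)]));
        assert!(!is_single_int(&[Meta::Int(1), Meta::Int(2)]));
        assert!(!is_single_int(&[Meta::Str("x")]));
    }
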
@@ -1376,6 +1366,7 @@ impl CheckAttrVisitor<'_> {
/// Checks if `#[rustc_legacy_const_generics]` is applied to a function and has a valid argument.
fn check_rustc_legacy_const_generics(
&self,
+ hir_id: HirId,
attr: &Attribute,
span: Span,
target: Target,
@@ -1386,6 +1377,7 @@ impl CheckAttrVisitor<'_> {
self.tcx.sess.emit_err(errors::AttrShouldBeAppliedToFn {
attr_span: attr.span,
defn_span: span,
+ on_crate: hir_id == CRATE_HIR_ID,
});
return false;
}
@@ -1426,7 +1418,7 @@ impl CheckAttrVisitor<'_> {
let arg_count = decl.inputs.len() as u128 + generics.params.len() as u128;
let mut invalid_args = vec![];
for meta in list {
- if let Some(LitKind::Int(val, _)) = meta.literal().map(|lit| &lit.kind) {
+ if let Some(LitKind::Int(val, _)) = meta.lit().map(|lit| &lit.kind) {
if *val >= arg_count {
let span = meta.span();
self.tcx.sess.emit_err(errors::RustcLegacyConstGenericsIndexExceed {
@@ -1450,12 +1442,19 @@ impl CheckAttrVisitor<'_> {
/// Helper function for checking that the provided attribute is only applied to a function or
/// method.
- fn check_applied_to_fn_or_method(&self, attr: &Attribute, span: Span, target: Target) -> bool {
+ fn check_applied_to_fn_or_method(
+ &self,
+ hir_id: HirId,
+ attr: &Attribute,
+ span: Span,
+ target: Target,
+ ) -> bool {
let is_function = matches!(target, Target::Fn | Target::Method(..));
if !is_function {
self.tcx.sess.emit_err(errors::AttrShouldBeAppliedToFn {
attr_span: attr.span,
defn_span: span,
+ on_crate: hir_id == CRATE_HIR_ID,
});
false
} else {
@@ -1467,17 +1466,24 @@ impl CheckAttrVisitor<'_> {
/// or method.
fn check_rustc_lint_query_instability(
&self,
+ hir_id: HirId,
attr: &Attribute,
span: Span,
target: Target,
) -> bool {
- self.check_applied_to_fn_or_method(attr, span, target)
+ self.check_applied_to_fn_or_method(hir_id, attr, span, target)
}
/// Checks that the `#[rustc_lint_diagnostics]` attribute is only applied to a function or
/// method.
- fn check_rustc_lint_diagnostics(&self, attr: &Attribute, span: Span, target: Target) -> bool {
- self.check_applied_to_fn_or_method(attr, span, target)
+ fn check_rustc_lint_diagnostics(
+ &self,
+ hir_id: HirId,
+ attr: &Attribute,
+ span: Span,
+ target: Target,
+ ) -> bool {
+ self.check_applied_to_fn_or_method(hir_id, attr, span, target)
}
/// Checks that the `#[rustc_lint_opt_ty]` attribute is only applied to a struct.
@@ -2131,7 +2137,7 @@ impl<'tcx> Visitor<'tcx> for CheckAttrVisitor<'tcx> {
}
fn visit_variant(&mut self, variant: &'tcx hir::Variant<'tcx>) {
- self.check_attributes(variant.id, variant.span, Target::Variant, None);
+ self.check_attributes(variant.hir_id, variant.span, Target::Variant, None);
intravisit::walk_variant(self, variant)
}
diff --git a/compiler/rustc_passes/src/dead.rs b/compiler/rustc_passes/src/dead.rs
index 753d01f46..a71ae717a 100644
--- a/compiler/rustc_passes/src/dead.rs
+++ b/compiler/rustc_passes/src/dead.rs
@@ -4,7 +4,7 @@
use itertools::Itertools;
use rustc_data_structures::fx::{FxHashMap, FxHashSet};
-use rustc_errors::{pluralize, Applicability, MultiSpan};
+use rustc_errors::MultiSpan;
use rustc_hir as hir;
use rustc_hir::def::{CtorOf, DefKind, Res};
use rustc_hir::def_id::{DefId, LocalDefId};
@@ -18,7 +18,10 @@ use rustc_session::lint;
use rustc_span::symbol::{sym, Symbol};
use std::mem;
-use crate::errors::UselessAssignment;
+use crate::errors::{
+ ChangeFieldsToBeOfUnitType, IgnoredDerivedImpls, MultipleDeadCodes, ParentInfo,
+ UselessAssignment,
+};
// Any local node that may call something in its body block should be
// explored. For example, if it's a live Node::Item that is a
@@ -121,7 +124,7 @@ impl<'tcx> MarkSymbolVisitor<'tcx> {
fn handle_field_access(&mut self, lhs: &hir::Expr<'_>, hir_id: hir::HirId) {
match self.typeck_results().expr_ty_adjusted(lhs).kind() {
ty::Adt(def, _) => {
- let index = self.tcx.field_index(hir_id, self.typeck_results());
+ let index = self.typeck_results().field_index(hir_id);
self.insert_def_id(def.non_enum_variant().fields[index].did);
}
ty::Tuple(..) => {}
@@ -205,7 +208,7 @@ impl<'tcx> MarkSymbolVisitor<'tcx> {
if let PatKind::Wild = pat.pat.kind {
continue;
}
- let index = self.tcx.field_index(pat.hir_id, self.typeck_results());
+ let index = self.typeck_results().field_index(pat.hir_id);
self.insert_def_id(variant.fields[index].did);
}
}
@@ -338,7 +341,7 @@ impl<'tcx> MarkSymbolVisitor<'tcx> {
fn mark_as_used_if_union(&mut self, adt: ty::AdtDef<'tcx>, fields: &[hir::ExprField<'_>]) {
if adt.is_union() && adt.non_enum_variant().fields.len() > 1 && adt.did().is_local() {
for field in fields {
- let index = self.tcx.field_index(field.hir_id, self.typeck_results());
+ let index = self.typeck_results().field_index(field.hir_id);
self.insert_def_id(adt.non_enum_variant().fields[index].did);
}
}
@@ -359,7 +362,7 @@ impl<'tcx> Visitor<'tcx> for MarkSymbolVisitor<'tcx> {
let has_repr_c = self.repr_has_repr_c;
let has_repr_simd = self.repr_has_repr_simd;
let live_fields = def.fields().iter().filter_map(|f| {
- let def_id = tcx.hir().local_def_id(f.hir_id);
+ let def_id = f.def_id;
if has_repr_c || (f.is_positional() && has_repr_simd) {
return Some(def_id);
}
@@ -430,7 +433,7 @@ impl<'tcx> Visitor<'tcx> for MarkSymbolVisitor<'tcx> {
self.in_pat = false;
}
- fn visit_path(&mut self, path: &'tcx hir::Path<'tcx>, _: hir::HirId) {
+ fn visit_path(&mut self, path: &hir::Path<'tcx>, _: hir::HirId) {
self.handle_res(path.res);
intravisit::walk_path(self, path);
}
@@ -470,11 +473,6 @@ fn has_allow_dead_code_or_lang_attr_helper(
return true;
}
- // (To be) stable attribute for #[lang = "oom"]
- if tcx.sess.contains_name(attrs, sym::alloc_error_handler) {
- return true;
- }
-
let def_id = tcx.hir().local_def_id(id);
if tcx.def_kind(def_id).has_codegen_attrs() {
let cg_attrs = tcx.codegen_fn_attrs(def_id);
@@ -524,17 +522,13 @@ fn check_item<'tcx>(
DefKind::Enum => {
let item = tcx.hir().item(id);
if let hir::ItemKind::Enum(ref enum_def, _) = item.kind {
- let hir = tcx.hir();
if allow_dead_code {
- worklist.extend(
- enum_def.variants.iter().map(|variant| hir.local_def_id(variant.id)),
- );
+ worklist.extend(enum_def.variants.iter().map(|variant| variant.def_id));
}
for variant in enum_def.variants {
- if let Some(ctor_hir_id) = variant.data.ctor_hir_id() {
- struct_constructors
- .insert(hir.local_def_id(ctor_hir_id), hir.local_def_id(variant.id));
+ if let Some(ctor_def_id) = variant.data.ctor_def_id() {
+ struct_constructors.insert(ctor_def_id, variant.def_id);
}
}
}
@@ -698,99 +692,89 @@ impl<'tcx> DeadVisitor<'tcx> {
parent_item: Option<LocalDefId>,
is_positional: bool,
) {
- if let Some(&first_id) = dead_codes.first() {
- let tcx = self.tcx;
- let names: Vec<_> = dead_codes
- .iter()
- .map(|&def_id| tcx.item_name(def_id.to_def_id()).to_string())
- .collect();
- let spans: Vec<_> = dead_codes
- .iter()
- .map(|&def_id| match tcx.def_ident_span(def_id) {
- Some(s) => s.with_ctxt(tcx.def_span(def_id).ctxt()),
- None => tcx.def_span(def_id),
+ let Some(&first_id) = dead_codes.first() else {
+ return;
+ };
+ let tcx = self.tcx;
+ let names: Vec<_> =
+ dead_codes.iter().map(|&def_id| tcx.item_name(def_id.to_def_id())).collect();
+ let spans: Vec<_> = dead_codes
+ .iter()
+ .map(|&def_id| match tcx.def_ident_span(def_id) {
+ Some(s) => s.with_ctxt(tcx.def_span(def_id).ctxt()),
+ None => tcx.def_span(def_id),
+ })
+ .collect();
+
+ let descr = tcx.def_kind(first_id).descr(first_id.to_def_id());
+ let num = dead_codes.len();
+ let multiple = num > 6;
+ let name_list = names.into();
+
+ let lint = if is_positional {
+ lint::builtin::UNUSED_TUPLE_STRUCT_FIELDS
+ } else {
+ lint::builtin::DEAD_CODE
+ };
+
+ let parent_info = if let Some(parent_item) = parent_item {
+ let parent_descr = tcx.def_kind(parent_item).descr(parent_item.to_def_id());
+ Some(ParentInfo {
+ num,
+ descr,
+ parent_descr,
+ span: tcx.def_ident_span(parent_item).unwrap(),
+ })
+ } else {
+ None
+ };
+
+ let encl_def_id = parent_item.unwrap_or(first_id);
+ let ignored_derived_impls =
+ if let Some(ign_traits) = self.ignored_derived_traits.get(&encl_def_id) {
+ let trait_list = ign_traits
+ .iter()
+ .map(|(trait_id, _)| self.tcx.item_name(*trait_id))
+ .collect::<Vec<_>>();
+ let trait_list_len = trait_list.len();
+ Some(IgnoredDerivedImpls {
+ name: self.tcx.item_name(encl_def_id.to_def_id()),
+ trait_list: trait_list.into(),
+ trait_list_len,
})
- .collect();
-
- let descr = tcx.def_kind(first_id).descr(first_id.to_def_id());
- let span_len = dead_codes.len();
- let names = match &names[..] {
- _ if span_len > 6 => String::new(),
- [name] => format!("`{name}` "),
- [names @ .., last] => {
- format!(
- "{} and `{last}` ",
- names.iter().map(|name| format!("`{name}`")).join(", ")
- )
- }
- [] => unreachable!(),
+ } else {
+ None
};
- let msg = format!(
- "{these}{descr}{s} {names}{are} never {participle}",
- these = if span_len > 6 { "multiple " } else { "" },
- s = pluralize!(span_len),
- are = pluralize!("is", span_len),
- );
-
- tcx.struct_span_lint_hir(
- if is_positional {
- lint::builtin::UNUSED_TUPLE_STRUCT_FIELDS
- } else {
- lint::builtin::DEAD_CODE
- },
- tcx.hir().local_def_id_to_hir_id(first_id),
- MultiSpan::from_spans(spans.clone()),
- msg,
- |err| {
- if is_positional {
- err.multipart_suggestion(
- &format!(
- "consider changing the field{s} to be of unit type to \
- suppress this warning while preserving the field \
- numbering, or remove the field{s}",
- s = pluralize!(span_len)
- ),
- spans.iter().map(|sp| (*sp, "()".to_string())).collect(),
- // "HasPlaceholders" because applying this fix by itself isn't
- // enough: All constructor calls have to be adjusted as well
- Applicability::HasPlaceholders,
- );
- }
- if let Some(parent_item) = parent_item {
- let parent_descr = tcx.def_kind(parent_item).descr(parent_item.to_def_id());
- err.span_label(
- tcx.def_ident_span(parent_item).unwrap(),
- format!("{descr}{s} in this {parent_descr}", s = pluralize!(span_len)),
- );
- }
+ let diag = if is_positional {
+ MultipleDeadCodes::UnusedTupleStructFields {
+ multiple,
+ num,
+ descr,
+ participle,
+ name_list,
+ change_fields_suggestion: ChangeFieldsToBeOfUnitType { num, spans: spans.clone() },
+ parent_info,
+ ignored_derived_impls,
+ }
+ } else {
+ MultipleDeadCodes::DeadCodes {
+ multiple,
+ num,
+ descr,
+ participle,
+ name_list,
+ parent_info,
+ ignored_derived_impls,
+ }
+ };
- let encl_def_id = parent_item.unwrap_or(first_id);
- if let Some(ign_traits) = self.ignored_derived_traits.get(&encl_def_id) {
- let traits_str = ign_traits
- .iter()
- .map(|(trait_id, _)| format!("`{}`", self.tcx.item_name(*trait_id)))
- .collect::<Vec<_>>()
- .join(" and ");
- let plural_s = pluralize!(ign_traits.len());
- let article = if ign_traits.len() > 1 { "" } else { "a " };
- let is_are = if ign_traits.len() > 1 { "these are" } else { "this is" };
- let msg = format!(
- "`{}` has {}derived impl{} for the trait{} {}, but {} \
- intentionally ignored during dead code analysis",
- self.tcx.item_name(encl_def_id.to_def_id()),
- article,
- plural_s,
- plural_s,
- traits_str,
- is_are
- );
- err.note(&msg);
- }
- err
- },
- );
- }
+ self.tcx.emit_spanned_lint(
+ lint,
+ tcx.hir().local_def_id_to_hir_id(first_id),
+ MultiSpan::from_spans(spans),
+ diag,
+ );
}
fn warn_dead_fields_and_variants(
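
    The hand-rolled name-list formatting and pluralization that used to live here moves into the
    structured `MultipleDeadCodes` diagnostic and `DiagnosticSymbolList`. For reference, the list
    formatting the removed code produced amounted to roughly this (a restatement, not the
    diagnostic framework's actual rendering):

    fn name_list(names: &[&str]) -> String {
        // What the removed inline formatting built, before the switch to
        // DiagnosticSymbolList handled it in the diagnostic machinery.
        match names {
            [] => String::new(),
            [one] => format!("`{one}`"),
            [init @ .., last] => {
                let init: Vec<String> = init.iter().map(|n| format!("`{n}`")).collect();
                format!("{} and `{last}`", init.join(", "))
            }
        }
    }

    fn main() {
        assert_eq!(name_list(&["a"]), "`a`");
        assert_eq!(name_list(&["a", "b", "c"]), "`a`, `b` and `c`");
    }
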
diff --git a/compiler/rustc_passes/src/errors.rs b/compiler/rustc_passes/src/errors.rs
index adaaf5392..c6cd69add 100644
--- a/compiler/rustc_passes/src/errors.rs
+++ b/compiler/rustc_passes/src/errors.rs
@@ -4,7 +4,9 @@ use std::{
};
use rustc_ast::Label;
-use rustc_errors::{error_code, Applicability, ErrorGuaranteed, IntoDiagnostic, MultiSpan};
+use rustc_errors::{
+ error_code, Applicability, DiagnosticSymbolList, ErrorGuaranteed, IntoDiagnostic, MultiSpan,
+};
use rustc_hir::{self as hir, ExprKind, Target};
use rustc_macros::{Diagnostic, LintDiagnostic, Subdiagnostic};
use rustc_middle::ty::{MainDefinition, Ty};
@@ -79,6 +81,7 @@ pub struct AttrShouldBeAppliedToFn {
pub attr_span: Span,
#[label]
pub defn_span: Span,
+ pub on_crate: bool,
}
#[derive(Diagnostic)]
@@ -95,6 +98,7 @@ pub struct TrackedCallerWrongLocation {
pub attr_span: Span,
#[label]
pub defn_span: Span,
+ pub on_crate: bool,
}
#[derive(Diagnostic)]
@@ -291,7 +295,7 @@ pub struct DocTestUnknownAny {
#[note(no_op_note)]
pub struct DocTestUnknownSpotlight {
pub path: String,
- #[suggestion_short(applicability = "machine-applicable", code = "notable_trait")]
+ #[suggestion(style = "short", applicability = "machine-applicable", code = "notable_trait")]
pub span: Span,
}
@@ -365,6 +369,7 @@ pub struct MustNotSuspend {
pub struct Cold {
#[label]
pub span: Span,
+ pub on_crate: bool,
}
#[derive(LintDiagnostic)]
@@ -702,14 +707,6 @@ pub struct UnknownExternLangItem {
pub struct MissingPanicHandler;
#[derive(Diagnostic)]
-#[diag(passes_alloc_func_required)]
-pub struct AllocFuncRequired;
-
-#[derive(Diagnostic)]
-#[diag(passes_missing_alloc_error_handler)]
-pub struct MissingAllocErrorHandler;
-
-#[derive(Diagnostic)]
#[diag(passes_missing_lang_item)]
#[note]
#[help]
@@ -744,6 +741,7 @@ pub struct InvalidAttrAtCrateLevel {
}
impl IntoDiagnostic<'_> for InvalidAttrAtCrateLevel {
+ #[track_caller]
fn into_diagnostic(
self,
handler: &'_ rustc_errors::Handler,
@@ -876,6 +874,7 @@ pub struct BreakNonLoop<'a> {
}
impl<'a> IntoDiagnostic<'_> for BreakNonLoop<'a> {
+ #[track_caller]
fn into_diagnostic(
self,
handler: &rustc_errors::Handler,
@@ -960,6 +959,7 @@ pub struct OutsideLoop<'a> {
#[label]
pub span: Span,
pub name: &'a str,
+ pub is_break: bool,
}
#[derive(Diagnostic)]
@@ -1013,6 +1013,7 @@ pub struct NakedFunctionsAsmBlock {
}
impl IntoDiagnostic<'_> for NakedFunctionsAsmBlock {
+ #[track_caller]
fn into_diagnostic(
self,
handler: &rustc_errors::Handler,
@@ -1136,6 +1137,7 @@ pub struct NoMainErr {
}
impl<'a> IntoDiagnostic<'a> for NoMainErr {
+ #[track_caller]
fn into_diagnostic(
self,
handler: &'a rustc_errors::Handler,
@@ -1196,6 +1198,7 @@ pub struct DuplicateLangItem {
}
impl IntoDiagnostic<'_> for DuplicateLangItem {
+ #[track_caller]
fn into_diagnostic(
self,
handler: &rustc_errors::Handler,
@@ -1449,3 +1452,59 @@ pub struct MissingConstErr {
#[label]
pub const_span: Span,
}
+
+#[derive(LintDiagnostic)]
+pub enum MultipleDeadCodes<'tcx> {
+ #[diag(passes_dead_codes)]
+ DeadCodes {
+ multiple: bool,
+ num: usize,
+ descr: &'tcx str,
+ participle: &'tcx str,
+ name_list: DiagnosticSymbolList,
+ #[subdiagnostic]
+ parent_info: Option<ParentInfo<'tcx>>,
+ #[subdiagnostic]
+ ignored_derived_impls: Option<IgnoredDerivedImpls>,
+ },
+ #[diag(passes_dead_codes)]
+ UnusedTupleStructFields {
+ multiple: bool,
+ num: usize,
+ descr: &'tcx str,
+ participle: &'tcx str,
+ name_list: DiagnosticSymbolList,
+ #[subdiagnostic]
+ change_fields_suggestion: ChangeFieldsToBeOfUnitType,
+ #[subdiagnostic]
+ parent_info: Option<ParentInfo<'tcx>>,
+ #[subdiagnostic]
+ ignored_derived_impls: Option<IgnoredDerivedImpls>,
+ },
+}
+
+#[derive(Subdiagnostic)]
+#[label(passes_parent_info)]
+pub struct ParentInfo<'tcx> {
+ pub num: usize,
+ pub descr: &'tcx str,
+ pub parent_descr: &'tcx str,
+ #[primary_span]
+ pub span: Span,
+}
+
+#[derive(Subdiagnostic)]
+#[note(passes_ignored_derived_impls)]
+pub struct IgnoredDerivedImpls {
+ pub name: Symbol,
+ pub trait_list: DiagnosticSymbolList,
+ pub trait_list_len: usize,
+}
+
+#[derive(Subdiagnostic)]
+#[multipart_suggestion(passes_change_fields_to_be_of_unit_type, applicability = "has-placeholders")]
+pub struct ChangeFieldsToBeOfUnitType {
+ pub num: usize,
+ #[suggestion_part(code = "()")]
+ pub spans: Vec<Span>,
+}
diff --git a/compiler/rustc_passes/src/hir_id_validator.rs b/compiler/rustc_passes/src/hir_id_validator.rs
index 88bb39deb..d143adb2e 100644
--- a/compiler/rustc_passes/src/hir_id_validator.rs
+++ b/compiler/rustc_passes/src/hir_id_validator.rs
@@ -1,11 +1,11 @@
use rustc_data_structures::sync::Lock;
use rustc_hir as hir;
+use rustc_hir::def_id::LocalDefId;
use rustc_hir::intravisit;
use rustc_hir::{HirId, ItemLocalId};
use rustc_index::bit_set::GrowableBitSet;
-use rustc_middle::hir::map::Map;
use rustc_middle::hir::nested_filter;
-use rustc_middle::ty::TyCtxt;
+use rustc_middle::ty::{DefIdTree, TyCtxt};
pub fn check_crate(tcx: TyCtxt<'_>) {
tcx.dep_graph.assert_ignored();
@@ -17,11 +17,10 @@ pub fn check_crate(tcx: TyCtxt<'_>) {
#[cfg(debug_assertions)]
{
let errors = Lock::new(Vec::new());
- let hir_map = tcx.hir();
- hir_map.par_for_each_module(|module_id| {
+ tcx.hir().par_for_each_module(|module_id| {
let mut v = HirIdValidator {
- hir_map,
+ tcx,
owner: None,
hir_ids_seen: Default::default(),
errors: &errors,
@@ -40,20 +39,15 @@ pub fn check_crate(tcx: TyCtxt<'_>) {
}
struct HirIdValidator<'a, 'hir> {
- hir_map: Map<'hir>,
+ tcx: TyCtxt<'hir>,
owner: Option<hir::OwnerId>,
hir_ids_seen: GrowableBitSet<ItemLocalId>,
errors: &'a Lock<Vec<String>>,
}
impl<'a, 'hir> HirIdValidator<'a, 'hir> {
- fn new_visitor(&self, hir_map: Map<'hir>) -> HirIdValidator<'a, 'hir> {
- HirIdValidator {
- hir_map,
- owner: None,
- hir_ids_seen: Default::default(),
- errors: self.errors,
- }
+ fn new_visitor(&self, tcx: TyCtxt<'hir>) -> HirIdValidator<'a, 'hir> {
+ HirIdValidator { tcx, owner: None, hir_ids_seen: Default::default(), errors: self.errors }
}
#[cold]
@@ -96,36 +90,69 @@ impl<'a, 'hir> HirIdValidator<'a, 'hir> {
missing_items.push(format!(
"[local_id: {}, owner: {}]",
local_id,
- self.hir_map.def_path(owner.def_id).to_string_no_crate_verbose()
+ self.tcx.hir().def_path(owner.def_id).to_string_no_crate_verbose()
));
}
self.error(|| {
format!(
"ItemLocalIds not assigned densely in {}. \
Max ItemLocalId = {}, missing IDs = {:#?}; seen IDs = {:#?}",
- self.hir_map.def_path(owner.def_id).to_string_no_crate_verbose(),
+ self.tcx.hir().def_path(owner.def_id).to_string_no_crate_verbose(),
max,
missing_items,
self.hir_ids_seen
.iter()
.map(|local_id| HirId { owner, local_id })
- .map(|h| format!("({:?} {})", h, self.hir_map.node_to_string(h)))
+ .map(|h| format!("({:?} {})", h, self.tcx.hir().node_to_string(h)))
.collect::<Vec<_>>()
)
});
}
}
+
+ fn check_nested_id(&mut self, id: LocalDefId) {
+ let Some(owner) = self.owner else { return };
+ let def_parent = self.tcx.local_parent(id);
+ let def_parent_hir_id = self.tcx.local_def_id_to_hir_id(def_parent);
+ if def_parent_hir_id.owner != owner {
+ self.error(|| {
+ format!(
+ "inconsistent Def parent at `{:?}` for `{:?}`:\nexpected={:?}\nfound={:?}",
+ self.tcx.def_span(id),
+ id,
+ owner,
+ def_parent_hir_id
+ )
+ });
+ }
+ }
}
impl<'a, 'hir> intravisit::Visitor<'hir> for HirIdValidator<'a, 'hir> {
type NestedFilter = nested_filter::OnlyBodies;
fn nested_visit_map(&mut self) -> Self::Map {
- self.hir_map
+ self.tcx.hir()
+ }
+
+ fn visit_nested_item(&mut self, id: hir::ItemId) {
+ self.check_nested_id(id.owner_id.def_id);
+ }
+
+ fn visit_nested_trait_item(&mut self, id: hir::TraitItemId) {
+ self.check_nested_id(id.owner_id.def_id);
+ }
+
+ fn visit_nested_impl_item(&mut self, id: hir::ImplItemId) {
+ self.check_nested_id(id.owner_id.def_id);
+ }
+
+ fn visit_nested_foreign_item(&mut self, id: hir::ForeignItemId) {
+ self.check_nested_id(id.owner_id.def_id);
}
fn visit_item(&mut self, i: &'hir hir::Item<'hir>) {
- let mut inner_visitor = self.new_visitor(self.hir_map);
+ let mut inner_visitor = self.new_visitor(self.tcx);
inner_visitor.check(i.owner_id, |this| intravisit::walk_item(this, i));
}
@@ -136,9 +163,9 @@ impl<'a, 'hir> intravisit::Visitor<'hir> for HirIdValidator<'a, 'hir> {
self.error(|| {
format!(
"HirIdValidator: The recorded owner of {} is {} instead of {}",
- self.hir_map.node_to_string(hir_id),
- self.hir_map.def_path(hir_id.owner.def_id).to_string_no_crate_verbose(),
- self.hir_map.def_path(owner.def_id).to_string_no_crate_verbose()
+ self.tcx.hir().node_to_string(hir_id),
+ self.tcx.hir().def_path(hir_id.owner.def_id).to_string_no_crate_verbose(),
+ self.tcx.hir().def_path(owner.def_id).to_string_no_crate_verbose()
)
});
}
@@ -147,17 +174,17 @@ impl<'a, 'hir> intravisit::Visitor<'hir> for HirIdValidator<'a, 'hir> {
}
fn visit_foreign_item(&mut self, i: &'hir hir::ForeignItem<'hir>) {
- let mut inner_visitor = self.new_visitor(self.hir_map);
+ let mut inner_visitor = self.new_visitor(self.tcx);
inner_visitor.check(i.owner_id, |this| intravisit::walk_foreign_item(this, i));
}
fn visit_trait_item(&mut self, i: &'hir hir::TraitItem<'hir>) {
- let mut inner_visitor = self.new_visitor(self.hir_map);
+ let mut inner_visitor = self.new_visitor(self.tcx);
inner_visitor.check(i.owner_id, |this| intravisit::walk_trait_item(this, i));
}
fn visit_impl_item(&mut self, i: &'hir hir::ImplItem<'hir>) {
- let mut inner_visitor = self.new_visitor(self.hir_map);
+ let mut inner_visitor = self.new_visitor(self.tcx);
inner_visitor.check(i.owner_id, |this| intravisit::walk_impl_item(this, i));
}
}
diff --git a/compiler/rustc_passes/src/hir_stats.rs b/compiler/rustc_passes/src/hir_stats.rs
index 33220fd2b..a7854cd49 100644
--- a/compiler/rustc_passes/src/hir_stats.rs
+++ b/compiler/rustc_passes/src/hir_stats.rs
@@ -369,7 +369,7 @@ impl<'v> hir_visit::Visitor<'v> for StatCollector<'v> {
hir_visit::walk_fn(self, fk, fd, b, id)
}
- fn visit_use(&mut self, p: &'v hir::Path<'v>, hir_id: hir::HirId) {
+ fn visit_use(&mut self, p: &'v hir::UsePath<'v>, hir_id: hir::HirId) {
// This is `visit_use`, but the type is `Path` so record it that way.
self.record("Path", Id::None, p);
hir_visit::walk_use(self, p, hir_id)
@@ -442,7 +442,7 @@ impl<'v> hir_visit::Visitor<'v> for StatCollector<'v> {
hir_visit::walk_lifetime(self, lifetime)
}
- fn visit_path(&mut self, path: &'v hir::Path<'v>, _id: hir::HirId) {
+ fn visit_path(&mut self, path: &hir::Path<'v>, _id: hir::HirId) {
self.record("Path", Id::None, path);
hir_visit::walk_path(self, path)
}
@@ -560,13 +560,14 @@ impl<'v> ast_visit::Visitor<'v> for StatCollector<'v> {
}
fn visit_expr(&mut self, e: &'v ast::Expr) {
+ #[rustfmt::skip]
record_variants!(
(self, e, e.kind, Id::None, ast, Expr, ExprKind),
[
Box, Array, ConstBlock, Call, MethodCall, Tup, Binary, Unary, Lit, Cast, Type, Let,
If, While, ForLoop, Loop, Match, Closure, Block, Async, Await, TryBlock, Assign,
AssignOp, Field, Index, Range, Underscore, Path, AddrOf, Break, Continue, Ret,
- InlineAsm, MacCall, Struct, Repeat, Paren, Try, Yield, Yeet, Err
+ InlineAsm, MacCall, Struct, Repeat, Paren, Try, Yield, Yeet, IncludedBytes, Err
]
);
ast_visit::walk_expr(self, e)
diff --git a/compiler/rustc_passes/src/lang_items.rs b/compiler/rustc_passes/src/lang_items.rs
index df811be2a..99efed0b7 100644
--- a/compiler/rustc_passes/src/lang_items.rs
+++ b/compiler/rustc_passes/src/lang_items.rs
@@ -16,7 +16,7 @@ use crate::weak_lang_items;
use rustc_hir as hir;
use rustc_hir::def::DefKind;
use rustc_hir::def_id::DefId;
-use rustc_hir::lang_items::{extract, GenericRequirement, ITEM_REFS};
+use rustc_hir::lang_items::{extract, GenericRequirement};
use rustc_hir::{HirId, LangItem, LanguageItems, Target};
use rustc_middle::ty::TyCtxt;
use rustc_session::cstore::ExternCrate;
@@ -43,17 +43,17 @@ impl<'tcx> LanguageItemCollector<'tcx> {
fn check_for_lang(&mut self, actual_target: Target, hir_id: HirId) {
let attrs = self.tcx.hir().attrs(hir_id);
if let Some((name, span)) = extract(&attrs) {
- match ITEM_REFS.get(&name).cloned() {
+ match LangItem::from_name(name) {
// Known lang item with attribute on correct target.
- Some((item_index, expected_target)) if actual_target == expected_target => {
- self.collect_item_extended(item_index, hir_id, span);
+ Some(lang_item) if actual_target == lang_item.target() => {
+ self.collect_item_extended(lang_item, hir_id, span);
}
// Known lang item with attribute on incorrect target.
- Some((_, expected_target)) => {
+ Some(lang_item) => {
self.tcx.sess.emit_err(LangItemOnIncorrectTarget {
span,
name,
- expected_target,
+ expected_target: lang_item.target(),
actual_target,
});
}
@@ -65,12 +65,12 @@ impl<'tcx> LanguageItemCollector<'tcx> {
}
}
- fn collect_item(&mut self, item_index: usize, item_def_id: DefId) {
+ fn collect_item(&mut self, lang_item: LangItem, item_def_id: DefId) {
// Check for duplicates.
- if let Some(original_def_id) = self.items.items[item_index] {
+ if let Some(original_def_id) = self.items.get(lang_item) {
if original_def_id != item_def_id {
let local_span = self.tcx.hir().span_if_local(item_def_id);
- let lang_item_name = LangItem::from_u32(item_index as u32).unwrap().name();
+ let lang_item_name = lang_item.name();
let crate_name = self.tcx.crate_name(item_def_id.krate);
let mut dependency_of = Empty;
let is_local = item_def_id.is_local();
@@ -139,17 +139,13 @@ impl<'tcx> LanguageItemCollector<'tcx> {
}
// Matched.
- self.items.items[item_index] = Some(item_def_id);
- if let Some(group) = LangItem::from_u32(item_index as u32).unwrap().group() {
- self.items.groups[group as usize].push(item_def_id);
- }
+ self.items.set(lang_item, item_def_id);
}
// Like collect_item() above, but also checks whether the lang item is declared
// with the right number of generic arguments.
- fn collect_item_extended(&mut self, item_index: usize, hir_id: HirId, span: Span) {
+ fn collect_item_extended(&mut self, lang_item: LangItem, hir_id: HirId, span: Span) {
let item_def_id = self.tcx.hir().local_def_id(hir_id).to_def_id();
- let lang_item = LangItem::from_u32(item_index as u32).unwrap();
let name = lang_item.name();
// Now check whether the lang_item has the expected number of generic
@@ -197,7 +193,7 @@ impl<'tcx> LanguageItemCollector<'tcx> {
}
}
- self.collect_item(item_index, item_def_id);
+ self.collect_item(lang_item, item_def_id);
}
}
@@ -208,8 +204,8 @@ fn get_lang_items(tcx: TyCtxt<'_>, (): ()) -> LanguageItems {
// Collect lang items in other crates.
for &cnum in tcx.crates(()).iter() {
- for &(def_id, item_index) in tcx.defined_lang_items(cnum).iter() {
- collector.collect_item(item_index, def_id);
+ for &(def_id, lang_item) in tcx.defined_lang_items(cnum).iter() {
+ collector.collect_item(lang_item, def_id);
}
}
@@ -223,7 +219,7 @@ fn get_lang_items(tcx: TyCtxt<'_>, (): ()) -> LanguageItems {
let item = tcx.hir().item(id);
if let hir::ItemKind::Enum(def, ..) = &item.kind {
for variant in def.variants {
- collector.check_for_lang(Target::Variant, variant.id);
+ collector.check_for_lang(Target::Variant, variant.hir_id);
}
}
}
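
The hunks above replace the `ITEM_REFS` name-to-index map with methods on `LangItem` itself (`from_name`, `target`, `name`). A toy stand-in for that shape, with a made-up two-variant enum instead of the generated one in `rustc_hir`, to show how the `check_for_lang` match reads after the change:

    #[derive(Clone, Copy, PartialEq, Eq, Debug)]
    enum Target { Fn, Struct }

    #[derive(Clone, Copy, PartialEq, Eq, Debug)]
    enum LangItem { PanicImpl, OwnedBox }

    impl LangItem {
        fn from_name(name: &str) -> Option<Self> {
            match name {
                "panic_impl" => Some(LangItem::PanicImpl),
                "owned_box" => Some(LangItem::OwnedBox),
                _ => None,
            }
        }
        fn target(self) -> Target {
            match self {
                LangItem::PanicImpl => Target::Fn,
                LangItem::OwnedBox => Target::Struct,
            }
        }
        fn name(self) -> &'static str {
            match self {
                LangItem::PanicImpl => "panic_impl",
                LangItem::OwnedBox => "owned_box",
            }
        }
    }

    fn check_for_lang(name: &str, actual_target: Target) {
        match LangItem::from_name(name) {
            // Known lang item on the correct target: collect it.
            Some(item) if actual_target == item.target() => println!("collect {:?}", item),
            // Known lang item on the wrong target: report the expected target.
            Some(item) => println!(
                "error: `{}` expects {:?}, found {:?}",
                item.name(), item.target(), actual_target
            ),
            // Unknown name: unknown lang item error.
            None => println!("error: unknown lang item `{name}`"),
        }
    }

    fn main() {
        check_for_lang("panic_impl", Target::Fn);
        check_for_lang("owned_box", Target::Fn);
        check_for_lang("frobnicate", Target::Struct);
    }
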
diff --git a/compiler/rustc_passes/src/lib.rs b/compiler/rustc_passes/src/lib.rs
index 15f60f626..6e621b7eb 100644
--- a/compiler/rustc_passes/src/lib.rs
+++ b/compiler/rustc_passes/src/lib.rs
@@ -5,8 +5,6 @@
//! This API is completely unstable and subject to change.
#![allow(rustc::potential_query_instability)]
-#![deny(rustc::untranslatable_diagnostic)]
-#![deny(rustc::diagnostic_outside_of_impl)]
#![doc(html_root_url = "https://doc.rust-lang.org/nightly/nightly-rustc/")]
#![feature(iter_intersperse)]
#![feature(let_chains)]
diff --git a/compiler/rustc_passes/src/liveness.rs b/compiler/rustc_passes/src/liveness.rs
index c6fe40f72..1f65cc8b6 100644
--- a/compiler/rustc_passes/src/liveness.rs
+++ b/compiler/rustc_passes/src/liveness.rs
@@ -87,6 +87,7 @@ use self::VarKind::*;
use rustc_ast::InlineAsmOptions;
use rustc_data_structures::fx::FxIndexMap;
use rustc_errors::Applicability;
+use rustc_errors::Diagnostic;
use rustc_hir as hir;
use rustc_hir::def::*;
use rustc_hir::def_id::{DefId, LocalDefId};
@@ -412,7 +413,7 @@ impl<'tcx> Visitor<'tcx> for IrMaps<'tcx> {
}
intravisit::walk_expr(self, expr);
}
- hir::ExprKind::Closure { .. } => {
+ hir::ExprKind::Closure(closure) => {
// Interesting control flow (for loops can contain labeled
// breaks or continues)
self.add_live_node_for_node(expr.hir_id, ExprNode(expr.span, expr.hir_id));
@@ -422,8 +423,7 @@ impl<'tcx> Visitor<'tcx> for IrMaps<'tcx> {
// in better error messages than just pointing at the closure
// construction site.
let mut call_caps = Vec::new();
- let closure_def_id = self.tcx.hir().local_def_id(expr.hir_id);
- if let Some(upvars) = self.tcx.upvars_mentioned(closure_def_id) {
+ if let Some(upvars) = self.tcx.upvars_mentioned(closure.def_id) {
call_caps.extend(upvars.keys().map(|var_id| {
let upvar = upvars[var_id];
let upvar_ln = self.add_live_node(UpvarNode(upvar.span));
@@ -922,8 +922,7 @@ impl<'a, 'tcx> Liveness<'a, 'tcx> {
// v v
// ( succ )
//
- let else_ln =
- self.propagate_through_opt_expr(else_opt.as_ref().map(|e| &**e), succ);
+ let else_ln = self.propagate_through_opt_expr(else_opt.as_deref(), succ);
let then_ln = self.propagate_through_expr(&then, succ);
let ln = self.live_node(expr.hir_id, expr.span);
self.init_from_succ(ln, else_ln);
@@ -966,7 +965,7 @@ impl<'a, 'tcx> Liveness<'a, 'tcx> {
hir::ExprKind::Ret(ref o_e) => {
// Ignore succ and subst exit_ln.
- self.propagate_through_opt_expr(o_e.as_ref().map(|e| &**e), self.exit_ln)
+ self.propagate_through_opt_expr(o_e.as_deref(), self.exit_ln)
}
hir::ExprKind::Break(label, ref opt_expr) => {
@@ -981,7 +980,7 @@ impl<'a, 'tcx> Liveness<'a, 'tcx> {
// look it up in the break loop nodes table
match target {
- Some(b) => self.propagate_through_opt_expr(opt_expr.as_ref().map(|e| &**e), b),
+ Some(b) => self.propagate_through_opt_expr(opt_expr.as_deref(), b),
None => span_bug!(expr.span, "`break` to unknown label"),
}
}
@@ -1026,7 +1025,7 @@ impl<'a, 'tcx> Liveness<'a, 'tcx> {
hir::ExprKind::Array(ref exprs) => self.propagate_through_exprs(exprs, succ),
hir::ExprKind::Struct(_, ref fields, ref with_expr) => {
- let succ = self.propagate_through_opt_expr(with_expr.as_ref().map(|e| &**e), succ);
+ let succ = self.propagate_through_opt_expr(with_expr.as_deref(), succ);
fields
.iter()
.rev()
@@ -1284,20 +1283,19 @@ impl<'a, 'tcx> Liveness<'a, 'tcx> {
fn check_is_ty_uninhabited(&mut self, expr: &Expr<'_>, succ: LiveNode) -> LiveNode {
let ty = self.typeck_results.expr_ty(expr);
let m = self.ir.tcx.parent_module(expr.hir_id).to_def_id();
- if self.ir.tcx.is_ty_uninhabited_from(m, ty, self.param_env) {
- match self.ir.lnks[succ] {
- LiveNodeKind::ExprNode(succ_span, succ_id) => {
- self.warn_about_unreachable(expr.span, ty, succ_span, succ_id, "expression");
- }
- LiveNodeKind::VarDefNode(succ_span, succ_id) => {
- self.warn_about_unreachable(expr.span, ty, succ_span, succ_id, "definition");
- }
- _ => {}
- };
- self.exit_ln
- } else {
- succ
+ if ty.is_inhabited_from(self.ir.tcx, m, self.param_env) {
+ return succ;
}
+ match self.ir.lnks[succ] {
+ LiveNodeKind::ExprNode(succ_span, succ_id) => {
+ self.warn_about_unreachable(expr.span, ty, succ_span, succ_id, "expression");
+ }
+ LiveNodeKind::VarDefNode(succ_span, succ_id) => {
+ self.warn_about_unreachable(expr.span, ty, succ_span, succ_id, "definition");
+ }
+ _ => {}
+ };
+ self.exit_ln
}
fn warn_about_unreachable(
@@ -1550,7 +1548,13 @@ impl<'tcx> Liveness<'_, 'tcx> {
.or_insert_with(|| (ln, var, vec![id_and_sp]));
});
- let can_remove = matches!(&pat.kind, hir::PatKind::Struct(_, _, true));
+ let can_remove = match pat.kind {
+ hir::PatKind::Struct(_, fields, true) => {
+ // If all fields are shorthand, suggest removing the struct fields entirely; otherwise, suggest prefixing the bindings with `_`.
+ fields.iter().all(|f| f.is_shorthand)
+ }
+ _ => false,
+ };
for (_, (ln, var, hir_ids_and_spans)) in vars {
if self.used_on_entry(ln, var) {
@@ -1690,7 +1694,7 @@ impl<'tcx> Liveness<'_, 'tcx> {
&self,
name: &str,
opt_body: Option<&hir::Body<'_>>,
- err: &mut rustc_errors::DiagnosticBuilder<'_, ()>,
+ err: &mut Diagnostic,
) -> bool {
let mut has_litstring = false;
let Some(opt_body) = opt_body else {return false;};
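
The new `can_remove` logic in the liveness hunk distinguishes struct patterns whose unused bindings can be deleted outright from those that can only be underscore-prefixed: it requires a rest pattern (the trailing `true` in `PatKind::Struct`, i.e. a `..`) and all-shorthand fields. A simplified mirror of that decision over a toy pattern type, not the real HIR:

    struct FieldPat {
        // `x` (shorthand) rather than `x: renamed`
        is_shorthand: bool,
    }

    #[allow(dead_code)]
    enum PatKind {
        // `Foo { <fields>, .. }`; the bool records whether `..` is present.
        Struct { fields: Vec<FieldPat>, has_rest: bool },
        Other,
    }

    fn can_remove(pat: &PatKind) -> bool {
        match pat {
            PatKind::Struct { fields, has_rest: true } => {
                fields.iter().all(|f| f.is_shorthand)
            }
            _ => false,
        }
    }

    fn main() {
        // `let Foo { x, y, .. } = v;` with `x` and `y` unused: the fields can go.
        let all_shorthand = PatKind::Struct {
            fields: vec![FieldPat { is_shorthand: true }, FieldPat { is_shorthand: true }],
            has_rest: true,
        };
        assert!(can_remove(&all_shorthand));

        // `let Foo { x: renamed, .. } = v;`: only `_renamed` can be suggested.
        let renamed = PatKind::Struct {
            fields: vec![FieldPat { is_shorthand: false }],
            has_rest: true,
        };
        assert!(!can_remove(&renamed));
    }
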
diff --git a/compiler/rustc_passes/src/loops.rs b/compiler/rustc_passes/src/loops.rs
index 077194ec6..b4cf19e4a 100644
--- a/compiler/rustc_passes/src/loops.rs
+++ b/compiler/rustc_passes/src/loops.rs
@@ -193,7 +193,7 @@ impl<'a, 'hir> CheckLoopVisitor<'a, 'hir> {
self.sess.emit_err(BreakInsideAsyncBlock { span, closure_span, name });
}
Normal | AnonConst => {
- self.sess.emit_err(OutsideLoop { span, name });
+ self.sess.emit_err(OutsideLoop { span, name, is_break: name == "break" });
}
}
}
diff --git a/compiler/rustc_passes/src/reachable.rs b/compiler/rustc_passes/src/reachable.rs
index 50070869a..e7c3c7128 100644
--- a/compiler/rustc_passes/src/reachable.rs
+++ b/compiler/rustc_passes/src/reachable.rs
@@ -391,11 +391,9 @@ fn reachable_set<'tcx>(tcx: TyCtxt<'tcx>, (): ()) -> FxHashSet<LocalDefId> {
})
.collect::<Vec<_>>();
- for item in tcx.lang_items().items().iter() {
- if let Some(def_id) = *item {
- if let Some(def_id) = def_id.as_local() {
- reachable_context.worklist.push(def_id);
- }
+ for (_, def_id) in tcx.lang_items().iter() {
+ if let Some(def_id) = def_id.as_local() {
+ reachable_context.worklist.push(def_id);
}
}
{
diff --git a/compiler/rustc_passes/src/stability.rs b/compiler/rustc_passes/src/stability.rs
index 78afa2f25..da7155234 100644
--- a/compiler/rustc_passes/src/stability.rs
+++ b/compiler/rustc_passes/src/stability.rs
@@ -358,9 +358,9 @@ impl<'a, 'tcx> Visitor<'tcx> for Annotator<'a, 'tcx> {
const_stab_inherit = InheritConstStability::Yes;
}
hir::ItemKind::Struct(ref sd, _) => {
- if let Some(ctor_hir_id) = sd.ctor_hir_id() {
+ if let Some(ctor_def_id) = sd.ctor_def_id() {
self.annotate(
- self.tcx.hir().local_def_id(ctor_hir_id),
+ ctor_def_id,
i.span,
None,
AnnotationKind::Required,
@@ -435,7 +435,7 @@ impl<'a, 'tcx> Visitor<'tcx> for Annotator<'a, 'tcx> {
fn visit_variant(&mut self, var: &'tcx Variant<'tcx>) {
self.annotate(
- self.tcx.hir().local_def_id(var.id),
+ var.def_id,
var.span,
None,
AnnotationKind::Required,
@@ -443,9 +443,9 @@ impl<'a, 'tcx> Visitor<'tcx> for Annotator<'a, 'tcx> {
InheritConstStability::No,
InheritStability::Yes,
|v| {
- if let Some(ctor_hir_id) = var.data.ctor_hir_id() {
+ if let Some(ctor_def_id) = var.data.ctor_def_id() {
v.annotate(
- v.tcx.hir().local_def_id(ctor_hir_id),
+ ctor_def_id,
var.span,
None,
AnnotationKind::Required,
@@ -463,7 +463,7 @@ impl<'a, 'tcx> Visitor<'tcx> for Annotator<'a, 'tcx> {
fn visit_field_def(&mut self, s: &'tcx FieldDef<'tcx>) {
self.annotate(
- self.tcx.hir().local_def_id(s.hir_id),
+ s.def_id,
s.span,
None,
AnnotationKind::Required,
@@ -500,7 +500,7 @@ impl<'a, 'tcx> Visitor<'tcx> for Annotator<'a, 'tcx> {
};
self.annotate(
- self.tcx.hir().local_def_id(p.hir_id),
+ p.def_id,
p.span,
None,
kind,
@@ -536,6 +536,14 @@ impl<'tcx> MissingStabilityAnnotations<'tcx> {
return;
}
+ // If the const impl is derived using the `derive_const` attribute,
+ // then it would be "stable" at least for the impl.
+ // Usages of it are gated by `feature(const_trait_impl)` anyway,
+ // so there is no unstable leakage.
+ if self.tcx.is_builtin_derive(def_id.to_def_id()) {
+ return;
+ }
+
let is_const = self.tcx.is_const_fn(def_id.to_def_id())
|| self.tcx.is_const_trait_impl_raw(def_id.to_def_id());
let is_stable = self
@@ -593,15 +601,15 @@ impl<'tcx> Visitor<'tcx> for MissingStabilityAnnotations<'tcx> {
}
fn visit_variant(&mut self, var: &'tcx Variant<'tcx>) {
- self.check_missing_stability(self.tcx.hir().local_def_id(var.id), var.span);
- if let Some(ctor_hir_id) = var.data.ctor_hir_id() {
- self.check_missing_stability(self.tcx.hir().local_def_id(ctor_hir_id), var.span);
+ self.check_missing_stability(var.def_id, var.span);
+ if let Some(ctor_def_id) = var.data.ctor_def_id() {
+ self.check_missing_stability(ctor_def_id, var.span);
}
intravisit::walk_variant(self, var);
}
fn visit_field_def(&mut self, s: &'tcx FieldDef<'tcx>) {
- self.check_missing_stability(self.tcx.hir().local_def_id(s.hir_id), s.span);
+ self.check_missing_stability(s.def_id, s.span);
intravisit::walk_field_def(self, s);
}
@@ -779,7 +787,7 @@ impl<'tcx> Visitor<'tcx> for Checker<'tcx> {
intravisit::walk_item(self, item);
}
- fn visit_path(&mut self, path: &'tcx hir::Path<'tcx>, id: hir::HirId) {
+ fn visit_path(&mut self, path: &hir::Path<'tcx>, id: hir::HirId) {
if let Some(def_id) = path.res.opt_def_id() {
let method_span = path.segments.last().map(|s| s.ident.span);
let item_is_allowed = self.tcx.check_stability_allow_unstable(
@@ -872,7 +880,7 @@ struct CheckTraitImplStable<'tcx> {
}
impl<'tcx> Visitor<'tcx> for CheckTraitImplStable<'tcx> {
- fn visit_path(&mut self, path: &'tcx hir::Path<'tcx>, _id: hir::HirId) {
+ fn visit_path(&mut self, path: &hir::Path<'tcx>, _id: hir::HirId) {
if let Some(def_id) = path.res.opt_def_id() {
if let Some(stab) = self.tcx.lookup_stability(def_id) {
self.fully_stable &= stab.level.is_stable();
diff --git a/compiler/rustc_passes/src/upvars.rs b/compiler/rustc_passes/src/upvars.rs
index 68d9bf22b..605cf0a93 100644
--- a/compiler/rustc_passes/src/upvars.rs
+++ b/compiler/rustc_passes/src/upvars.rs
@@ -66,7 +66,7 @@ impl CaptureCollector<'_, '_> {
}
impl<'tcx> Visitor<'tcx> for CaptureCollector<'_, 'tcx> {
- fn visit_path(&mut self, path: &'tcx hir::Path<'tcx>, _: hir::HirId) {
+ fn visit_path(&mut self, path: &hir::Path<'tcx>, _: hir::HirId) {
if let Res::Local(var_id) = path.res {
self.visit_local_use(var_id, path.span);
}
@@ -75,9 +75,8 @@ impl<'tcx> Visitor<'tcx> for CaptureCollector<'_, 'tcx> {
}
fn visit_expr(&mut self, expr: &'tcx hir::Expr<'tcx>) {
- if let hir::ExprKind::Closure { .. } = expr.kind {
- let closure_def_id = self.tcx.hir().local_def_id(expr.hir_id);
- if let Some(upvars) = self.tcx.upvars_mentioned(closure_def_id) {
+ if let hir::ExprKind::Closure(closure) = expr.kind {
+ if let Some(upvars) = self.tcx.upvars_mentioned(closure.def_id) {
// Every capture of a closure expression is a local in scope,
// that is moved/copied/borrowed into the closure value, and
// for this analysis they are like any other access to a local.
diff --git a/compiler/rustc_passes/src/weak_lang_items.rs b/compiler/rustc_passes/src/weak_lang_items.rs
index 959ee600c..f0815fcd8 100644
--- a/compiler/rustc_passes/src/weak_lang_items.rs
+++ b/compiler/rustc_passes/src/weak_lang_items.rs
@@ -2,15 +2,12 @@
use rustc_data_structures::fx::FxHashSet;
use rustc_hir::lang_items::{self, LangItem};
-use rustc_hir::weak_lang_items::WEAK_ITEMS_REFS;
+use rustc_hir::weak_lang_items::WEAK_LANG_ITEMS;
use rustc_middle::middle::lang_items::required;
use rustc_middle::ty::TyCtxt;
use rustc_session::config::CrateType;
-use crate::errors::{
- AllocFuncRequired, MissingAllocErrorHandler, MissingLangItem, MissingPanicHandler,
- UnknownExternLangItem,
-};
+use crate::errors::{MissingLangItem, MissingPanicHandler, UnknownExternLangItem};
/// Checks the crate for usage of weak lang items, returning a vector of all the
/// language items required by this crate, but not defined yet.
@@ -29,8 +26,8 @@ pub fn check_crate<'tcx>(tcx: TyCtxt<'tcx>, items: &mut lang_items::LanguageItem
for id in crate_items.foreign_items() {
let attrs = tcx.hir().attrs(id.hir_id());
if let Some((lang_item, _)) = lang_items::extract(attrs) {
- if let Some(&item) = WEAK_ITEMS_REFS.get(&lang_item) {
- if items.require(item).is_err() {
+ if let Some(item) = LangItem::from_name(lang_item) && item.is_weak() {
+ if items.get(item).is_none() {
items.missing.push(item);
}
} else {
@@ -65,17 +62,12 @@ fn verify<'tcx>(tcx: TyCtxt<'tcx>, items: &lang_items::LanguageItems) {
}
}
- for (name, &item) in WEAK_ITEMS_REFS.iter() {
- if missing.contains(&item) && required(tcx, item) && items.require(item).is_err() {
+ for &item in WEAK_LANG_ITEMS.iter() {
+ if missing.contains(&item) && required(tcx, item) && items.get(item).is_none() {
if item == LangItem::PanicImpl {
tcx.sess.emit_err(MissingPanicHandler);
- } else if item == LangItem::Oom {
- if !tcx.features().default_alloc_error_handler {
- tcx.sess.emit_err(AllocFuncRequired);
- tcx.sess.emit_note(MissingAllocErrorHandler);
- }
} else {
- tcx.sess.emit_err(MissingLangItem { name: *name });
+ tcx.sess.emit_err(MissingLangItem { name: item.name() });
}
}
}
diff --git a/compiler/rustc_privacy/src/lib.rs b/compiler/rustc_privacy/src/lib.rs
index 865d6306b..a254c8924 100644
--- a/compiler/rustc_privacy/src/lib.rs
+++ b/compiler/rustc_privacy/src/lib.rs
@@ -3,6 +3,7 @@
#![feature(control_flow_enum)]
#![feature(rustc_private)]
#![feature(try_blocks)]
+#![feature(let_chains)]
#![recursion_limit = "256"]
#![deny(rustc::untranslatable_diagnostic)]
#![deny(rustc::diagnostic_outside_of_impl)]
@@ -25,7 +26,6 @@ use rustc_middle::bug;
use rustc_middle::hir::nested_filter;
use rustc_middle::middle::privacy::{EffectiveVisibilities, Level};
use rustc_middle::span_bug;
-use rustc_middle::ty::abstract_const::{walk_abstract_const, AbstractConst, Node as ACNode};
use rustc_middle::ty::query::Providers;
use rustc_middle::ty::subst::InternalSubsts;
use rustc_middle::ty::{self, Const, DefIdTree, GenericParamDefKind};
@@ -146,19 +146,23 @@ where
fn visit_predicate(&mut self, predicate: ty::Predicate<'tcx>) -> ControlFlow<V::BreakTy> {
match predicate.kind().skip_binder() {
- ty::PredicateKind::Trait(ty::TraitPredicate {
+ ty::PredicateKind::Clause(ty::Clause::Trait(ty::TraitPredicate {
trait_ref,
constness: _,
polarity: _,
- }) => self.visit_trait(trait_ref),
- ty::PredicateKind::Projection(ty::ProjectionPredicate { projection_ty, term }) => {
+ })) => self.visit_trait(trait_ref),
+ ty::PredicateKind::Clause(ty::Clause::Projection(ty::ProjectionPredicate {
+ projection_ty,
+ term,
+ })) => {
term.visit_with(self)?;
self.visit_projection_ty(projection_ty)
}
- ty::PredicateKind::TypeOutlives(ty::OutlivesPredicate(ty, _region)) => {
- ty.visit_with(self)
- }
- ty::PredicateKind::RegionOutlives(..) => ControlFlow::CONTINUE,
+ ty::PredicateKind::Clause(ty::Clause::TypeOutlives(ty::OutlivesPredicate(
+ ty,
+ _region,
+ ))) => ty.visit_with(self),
+ ty::PredicateKind::Clause(ty::Clause::RegionOutlives(..)) => ControlFlow::CONTINUE,
ty::PredicateKind::ConstEvaluatable(ct) => ct.visit_with(self),
ty::PredicateKind::WellFormed(arg) => arg.visit_with(self),
_ => bug!("unexpected predicate: {:?}", predicate),
@@ -284,19 +288,8 @@ where
}
fn visit_const(&mut self, c: Const<'tcx>) -> ControlFlow<Self::BreakTy> {
- self.visit_ty(c.ty())?;
let tcx = self.def_id_visitor.tcx();
- if let Ok(Some(ct)) = AbstractConst::from_const(tcx, c) {
- walk_abstract_const(tcx, ct, |node| match node.root(tcx) {
- ACNode::Leaf(leaf) => self.visit_const(leaf),
- ACNode::Cast(_, _, ty) => self.visit_ty(ty),
- ACNode::Binop(..) | ACNode::UnaryOp(..) | ACNode::FunctionCall(_, _) => {
- ControlFlow::CONTINUE
- }
- })
- } else {
- ControlFlow::CONTINUE
- }
+ tcx.expand_abstract_consts(c).super_visit_with(self)
}
}
@@ -419,11 +412,6 @@ impl<'tcx> EmbargoVisitor<'tcx> {
self.effective_visibilities.public_at_level(def_id)
}
- fn update_with_hir_id(&mut self, hir_id: hir::HirId, level: Option<Level>) -> Option<Level> {
- let def_id = self.tcx.hir().local_def_id(hir_id);
- self.update(def_id, level)
- }
-
/// Updates node level and returns the updated level.
fn update(&mut self, def_id: LocalDefId, level: Option<Level>) -> Option<Level> {
let old_level = self.get(def_id);
@@ -573,10 +561,9 @@ impl<'tcx> EmbargoVisitor<'tcx> {
| hir::ItemKind::Union(ref struct_def, _) = item.kind
{
for field in struct_def.fields() {
- let def_id = self.tcx.hir().local_def_id(field.hir_id);
- let field_vis = self.tcx.local_visibility(def_id);
+ let field_vis = self.tcx.local_visibility(field.def_id);
if field_vis.is_accessible_from(module, self.tcx) {
- self.reach(def_id, level).ty();
+ self.reach(field.def_id, level).ty();
}
}
} else {
@@ -641,12 +628,12 @@ impl<'tcx> Visitor<'tcx> for EmbargoVisitor<'tcx> {
match item.kind {
hir::ItemKind::Enum(ref def, _) => {
for variant in def.variants {
- let variant_level = self.update_with_hir_id(variant.id, item_level);
- if let Some(ctor_hir_id) = variant.data.ctor_hir_id() {
- self.update_with_hir_id(ctor_hir_id, item_level);
+ let variant_level = self.update(variant.def_id, item_level);
+ if let Some(ctor_def_id) = variant.data.ctor_def_id() {
+ self.update(ctor_def_id, item_level);
}
for field in variant.data.fields() {
- self.update_with_hir_id(field.hir_id, variant_level);
+ self.update(field.def_id, variant_level);
}
}
}
@@ -665,14 +652,13 @@ impl<'tcx> Visitor<'tcx> for EmbargoVisitor<'tcx> {
}
}
hir::ItemKind::Struct(ref def, _) | hir::ItemKind::Union(ref def, _) => {
- if let Some(ctor_hir_id) = def.ctor_hir_id() {
- self.update_with_hir_id(ctor_hir_id, item_level);
+ if let Some(ctor_def_id) = def.ctor_def_id() {
+ self.update(ctor_def_id, item_level);
}
for field in def.fields() {
- let def_id = self.tcx.hir().local_def_id(field.hir_id);
- let vis = self.tcx.visibility(def_id);
+ let vis = self.tcx.visibility(field.def_id);
if vis.is_public() {
- self.update_with_hir_id(field.hir_id, item_level);
+ self.update(field.def_id, item_level);
}
}
}
@@ -782,18 +768,16 @@ impl<'tcx> Visitor<'tcx> for EmbargoVisitor<'tcx> {
self.reach(item.owner_id.def_id, item_level).generics().predicates();
}
for variant in def.variants {
- let variant_level = self.get(self.tcx.hir().local_def_id(variant.id));
+ let variant_level = self.get(variant.def_id);
if variant_level.is_some() {
for field in variant.data.fields() {
- self.reach(self.tcx.hir().local_def_id(field.hir_id), variant_level)
- .ty();
+ self.reach(field.def_id, variant_level).ty();
}
// Corner case: if the variant is reachable, but its
// enum is not, make the enum reachable as well.
self.reach(item.owner_id.def_id, variant_level).ty();
}
- if let Some(hir_id) = variant.data.ctor_hir_id() {
- let ctor_def_id = self.tcx.hir().local_def_id(hir_id);
+ if let Some(ctor_def_id) = variant.data.ctor_def_id() {
let ctor_level = self.get(ctor_def_id);
if ctor_level.is_some() {
self.reach(item.owner_id.def_id, ctor_level).ty();
@@ -818,15 +802,13 @@ impl<'tcx> Visitor<'tcx> for EmbargoVisitor<'tcx> {
if item_level.is_some() {
self.reach(item.owner_id.def_id, item_level).generics().predicates();
for field in struct_def.fields() {
- let def_id = self.tcx.hir().local_def_id(field.hir_id);
- let field_level = self.get(def_id);
+ let field_level = self.get(field.def_id);
if field_level.is_some() {
- self.reach(def_id, field_level).ty();
+ self.reach(field.def_id, field_level).ty();
}
}
}
- if let Some(hir_id) = struct_def.ctor_hir_id() {
- let ctor_def_id = self.tcx.hir().local_def_id(hir_id);
+ if let Some(ctor_def_id) = struct_def.ctor_def_id() {
let ctor_level = self.get(ctor_def_id);
if ctor_level.is_some() {
self.reach(item.owner_id.def_id, ctor_level).ty();
@@ -957,18 +939,21 @@ impl<'tcx, 'a> Visitor<'tcx> for TestReachabilityVisitor<'tcx, 'a> {
match item.kind {
hir::ItemKind::Enum(ref def, _) => {
for variant in def.variants.iter() {
- let variant_id = self.tcx.hir().local_def_id(variant.id);
- self.effective_visibility_diagnostic(variant_id);
+ self.effective_visibility_diagnostic(variant.def_id);
+ if let Some(ctor_def_id) = variant.data.ctor_def_id() {
+ self.effective_visibility_diagnostic(ctor_def_id);
+ }
for field in variant.data.fields() {
- let def_id = self.tcx.hir().local_def_id(field.hir_id);
- self.effective_visibility_diagnostic(def_id);
+ self.effective_visibility_diagnostic(field.def_id);
}
}
}
hir::ItemKind::Struct(ref def, _) | hir::ItemKind::Union(ref def, _) => {
+ if let Some(ctor_def_id) = def.ctor_def_id() {
+ self.effective_visibility_diagnostic(ctor_def_id);
+ }
for field in def.fields() {
- let def_id = self.tcx.hir().local_def_id(field.hir_id);
- self.effective_visibility_diagnostic(def_id);
+ self.effective_visibility_diagnostic(field.def_id);
}
}
_ => {}
@@ -1080,9 +1065,9 @@ impl<'tcx> Visitor<'tcx> for NamePrivacyVisitor<'tcx> {
// are checked for privacy (RFC 736). Rather than computing the set of
// unmentioned fields, just check them all.
for (vf_index, variant_field) in variant.fields.iter().enumerate() {
- let field = fields.iter().find(|f| {
- self.tcx.field_index(f.hir_id, self.typeck_results()) == vf_index
- });
+ let field = fields
+ .iter()
+ .find(|f| self.typeck_results().field_index(f.hir_id) == vf_index);
let (use_ctxt, span) = match field {
Some(field) => (field.ident.span, field.span),
None => (base.span, base.span),
@@ -1092,7 +1077,7 @@ impl<'tcx> Visitor<'tcx> for NamePrivacyVisitor<'tcx> {
} else {
for field in fields {
let use_ctxt = field.ident.span;
- let index = self.tcx.field_index(field.hir_id, self.typeck_results());
+ let index = self.typeck_results().field_index(field.hir_id);
self.check_field(use_ctxt, field.span, adt, &variant.fields[index], false);
}
}
@@ -1108,7 +1093,7 @@ impl<'tcx> Visitor<'tcx> for NamePrivacyVisitor<'tcx> {
let variant = adt.variant_of_res(res);
for field in fields {
let use_ctxt = field.ident.span;
- let index = self.tcx.field_index(field.hir_id, self.typeck_results());
+ let index = self.typeck_results().field_index(field.hir_id);
self.check_field(use_ctxt, field.span, adt, &variant.fields[index], false);
}
}
@@ -1711,7 +1696,7 @@ impl<'a, 'tcx> Visitor<'tcx> for ObsoleteVisiblePrivateTypesVisitor<'a, 'tcx> {
}
fn visit_variant(&mut self, v: &'tcx hir::Variant<'tcx>) {
- if self.effective_visibilities.is_reachable(self.tcx.hir().local_def_id(v.id)) {
+ if self.effective_visibilities.is_reachable(v.def_id) {
self.in_variant = true;
intravisit::walk_variant(self, v);
self.in_variant = false;
@@ -1719,8 +1704,7 @@ impl<'a, 'tcx> Visitor<'tcx> for ObsoleteVisiblePrivateTypesVisitor<'a, 'tcx> {
}
fn visit_field_def(&mut self, s: &'tcx hir::FieldDef<'tcx>) {
- let def_id = self.tcx.hir().local_def_id(s.hir_id);
- let vis = self.tcx.visibility(def_id);
+ let vis = self.tcx.visibility(s.def_id);
if vis.is_public() || self.in_variant {
intravisit::walk_field_def(self, s);
}
@@ -1974,8 +1958,7 @@ impl<'tcx> PrivateItemsInPublicInterfacesChecker<'tcx> {
for variant in def.variants {
for field in variant.data.fields() {
- self.check(self.tcx.hir().local_def_id(field.hir_id), item_visibility)
- .ty();
+ self.check(field.def_id, item_visibility).ty();
}
}
}
@@ -2002,9 +1985,8 @@ impl<'tcx> PrivateItemsInPublicInterfacesChecker<'tcx> {
self.check(item.owner_id.def_id, item_visibility).generics().predicates();
for field in struct_def.fields() {
- let def_id = tcx.hir().local_def_id(field.hir_id);
- let field_visibility = tcx.local_visibility(def_id);
- self.check(def_id, min(item_visibility, field_visibility, tcx)).ty();
+ let field_visibility = tcx.local_visibility(field.def_id);
+ self.check(field.def_id, min(item_visibility, field_visibility, tcx)).ty();
}
}
}
@@ -2131,6 +2113,7 @@ fn effective_visibilities(tcx: TyCtxt<'_>, (): ()) -> &EffectiveVisibilities {
changed: false,
};
+ visitor.effective_visibilities.check_invariants(tcx, true);
loop {
tcx.hir().walk_toplevel_module(&mut visitor);
if visitor.changed {
@@ -2139,6 +2122,7 @@ fn effective_visibilities(tcx: TyCtxt<'_>, (): ()) -> &EffectiveVisibilities {
break;
}
}
+ visitor.effective_visibilities.check_invariants(tcx, false);
let mut check_visitor =
TestReachabilityVisitor { tcx, effective_visibilities: &visitor.effective_visibilities };
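
The `effective_visibilities` hunks wrap the visitor in a fixed-point loop (walk the crate, repeat while a pass reported `changed`), now bracketed by `check_invariants` calls. The shape of that loop, reduced to a self-contained toy that propagates levels until nothing changes (the propagation rule is invented for illustration):

    struct Visitor {
        levels: Vec<u8>,
        changed: bool,
    }

    impl Visitor {
        // One pass: a toy propagation rule standing in for walk_toplevel_module.
        fn walk(&mut self) {
            for i in 0..self.levels.len().saturating_sub(1) {
                if self.levels[i + 1] < self.levels[i] {
                    self.levels[i + 1] = self.levels[i];
                    self.changed = true;
                }
            }
        }
    }

    fn main() {
        let mut visitor = Visitor { levels: vec![3, 0, 0, 1], changed: false };
        loop {
            visitor.walk();
            if visitor.changed {
                visitor.changed = false;
            } else {
                break;
            }
        }
        assert_eq!(visitor.levels, vec![3, 3, 3, 3]);
    }
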
diff --git a/compiler/rustc_query_impl/Cargo.toml b/compiler/rustc_query_impl/Cargo.toml
index e7f12caaf..b2111a126 100644
--- a/compiler/rustc_query_impl/Cargo.toml
+++ b/compiler/rustc_query_impl/Cargo.toml
@@ -21,7 +21,7 @@ rustc_serialize = { path = "../rustc_serialize" }
rustc_session = { path = "../rustc_session" }
rustc_span = { path = "../rustc_span" }
rustc_target = { path = "../rustc_target" }
-thin-vec = "0.2.8"
+thin-vec = "0.2.9"
tracing = "0.1"
[features]
diff --git a/compiler/rustc_query_impl/src/lib.rs b/compiler/rustc_query_impl/src/lib.rs
index 11d4c97e7..d426a2b6b 100644
--- a/compiler/rustc_query_impl/src/lib.rs
+++ b/compiler/rustc_query_impl/src/lib.rs
@@ -20,6 +20,7 @@ extern crate rustc_middle;
use rustc_data_structures::sync::AtomicU64;
use rustc_middle::arena::Arena;
use rustc_middle::dep_graph::{self, DepKindStruct};
+use rustc_middle::query::Key;
use rustc_middle::ty::query::{query_keys, query_storage, query_stored, query_values};
use rustc_middle::ty::query::{ExternProviders, Providers, QueryEngine};
use rustc_middle::ty::TyCtxt;
@@ -32,11 +33,8 @@ use rustc_query_system::query::*;
#[cfg(parallel_compiler)]
pub use rustc_query_system::query::{deadlock, QueryContext};
-mod keys;
-use keys::Key;
-
pub use rustc_query_system::query::QueryConfig;
-pub(crate) use rustc_query_system::query::{QueryDescription, QueryVTable};
+pub(crate) use rustc_query_system::query::QueryVTable;
mod on_disk_cache;
pub use on_disk_cache::OnDiskCache;
diff --git a/compiler/rustc_query_impl/src/on_disk_cache.rs b/compiler/rustc_query_impl/src/on_disk_cache.rs
index 9000f81d9..ac9653b90 100644
--- a/compiler/rustc_query_impl/src/on_disk_cache.rs
+++ b/compiler/rustc_query_impl/src/on_disk_cache.rs
@@ -812,13 +812,13 @@ impl<'a, 'tcx> Decodable<CacheDecoder<'a, 'tcx>>
}
}
-impl<'a, 'tcx> Decodable<CacheDecoder<'a, 'tcx>> for &'tcx [ty::abstract_const::Node<'tcx>] {
+impl<'a, 'tcx> Decodable<CacheDecoder<'a, 'tcx>> for &'tcx [(ty::Predicate<'tcx>, Span)] {
fn decode(d: &mut CacheDecoder<'a, 'tcx>) -> Self {
RefDecodable::decode(d)
}
}
-impl<'a, 'tcx> Decodable<CacheDecoder<'a, 'tcx>> for &'tcx [(ty::Predicate<'tcx>, Span)] {
+impl<'a, 'tcx> Decodable<CacheDecoder<'a, 'tcx>> for &'tcx [(ty::Clause<'tcx>, Span)] {
fn decode(d: &mut CacheDecoder<'a, 'tcx>) -> Self {
RefDecodable::decode(d)
}
@@ -1062,7 +1062,7 @@ pub fn encode_query_results<'a, 'tcx, CTX, Q>(
query_result_index: &mut EncodedDepNodeIndex,
) where
CTX: QueryContext + 'tcx,
- Q: super::QueryDescription<CTX>,
+ Q: super::QueryConfig<CTX>,
Q::Value: Encodable<CacheEncoder<'a, 'tcx>>,
{
let _timer = tcx
diff --git a/compiler/rustc_query_impl/src/plumbing.rs b/compiler/rustc_query_impl/src/plumbing.rs
index 1d17f4221..8d5d84c5d 100644
--- a/compiler/rustc_query_impl/src/plumbing.rs
+++ b/compiler/rustc_query_impl/src/plumbing.rs
@@ -2,7 +2,6 @@
//! generate the actual methods on tcx which find and execute the provider,
//! manage the caches, and so forth.
-use crate::keys::Key;
use crate::on_disk_cache::{CacheDecoder, CacheEncoder, EncodedDepNodeIndex};
use crate::profiling_support::QueryKeyStringCache;
use crate::{on_disk_cache, Queries};
@@ -12,13 +11,13 @@ use rustc_errors::{Diagnostic, Handler};
use rustc_middle::dep_graph::{
self, DepKind, DepKindStruct, DepNode, DepNodeIndex, SerializedDepNodeIndex,
};
+use rustc_middle::query::Key;
use rustc_middle::ty::tls::{self, ImplicitCtxt};
use rustc_middle::ty::{self, TyCtxt};
use rustc_query_system::dep_graph::{DepNodeParams, HasDepContext};
use rustc_query_system::ich::StableHashingContext;
use rustc_query_system::query::{
- force_query, QueryConfig, QueryContext, QueryDescription, QueryJobId, QueryMap,
- QuerySideEffects, QueryStackFrame,
+ force_query, QueryConfig, QueryContext, QueryJobId, QueryMap, QuerySideEffects, QueryStackFrame,
};
use rustc_query_system::{LayoutOfDepth, QueryOverflow, Value};
use rustc_serialize::Decodable;
@@ -253,6 +252,18 @@ macro_rules! depth_limit {
};
}
+macro_rules! feedable {
+ ([]) => {{
+ false
+ }};
+ ([(feedable) $($rest:tt)*]) => {{
+ true
+ }};
+ ([$other:tt $($modifiers:tt)*]) => {
+ feedable!([$($modifiers)*])
+ };
+}
+
macro_rules! hash_result {
([]) => {{
Some(dep_graph::hash_result)
@@ -310,7 +321,7 @@ pub(crate) fn create_query_frame<
ty::print::with_forced_impl_filename_line!(do_describe(tcx.tcx, key))
);
let description =
- if tcx.sess.verbose() { format!("{} [{}]", description, name) } else { description };
+ if tcx.sess.verbose() { format!("{} [{:?}]", description, name) } else { description };
let span = if kind == dep_graph::DepKind::def_span {
// The `def_span` query is used to calculate `default_span`,
// so exit to avoid infinite recursion.
@@ -340,7 +351,7 @@ pub(crate) fn create_query_frame<
fn try_load_from_on_disk_cache<'tcx, Q>(tcx: TyCtxt<'tcx>, dep_node: DepNode)
where
- Q: QueryDescription<QueryCtxt<'tcx>>,
+ Q: QueryConfig<QueryCtxt<'tcx>>,
Q::Key: DepNodeParams<TyCtxt<'tcx>>,
{
debug_assert!(tcx.dep_graph.is_green(&dep_node));
@@ -365,7 +376,7 @@ where
fn force_from_dep_node<'tcx, Q>(tcx: TyCtxt<'tcx>, dep_node: DepNode) -> bool
where
- Q: QueryDescription<QueryCtxt<'tcx>>,
+ Q: QueryConfig<QueryCtxt<'tcx>>,
Q::Key: DepNodeParams<TyCtxt<'tcx>>,
Q::Value: Value<TyCtxt<'tcx>>,
{
@@ -398,12 +409,9 @@ where
}
}
-pub(crate) fn query_callback<'tcx, Q: QueryConfig>(
- is_anon: bool,
- is_eval_always: bool,
-) -> DepKindStruct<'tcx>
+pub(crate) fn query_callback<'tcx, Q>(is_anon: bool, is_eval_always: bool) -> DepKindStruct<'tcx>
where
- Q: QueryDescription<QueryCtxt<'tcx>>,
+ Q: QueryConfig<QueryCtxt<'tcx>>,
Q::Key: DepNodeParams<TyCtxt<'tcx>>,
{
let fingerprint_style = Q::Key::fingerprint_style();
@@ -458,14 +466,12 @@ macro_rules! define_queries {
})*
}
- $(impl<'tcx> QueryConfig for queries::$name<'tcx> {
+ $(impl<'tcx> QueryConfig<QueryCtxt<'tcx>> for queries::$name<'tcx> {
type Key = query_keys::$name<'tcx>;
type Value = query_values::$name<'tcx>;
type Stored = query_stored::$name<'tcx>;
const NAME: &'static str = stringify!($name);
- }
- impl<'tcx> QueryDescription<QueryCtxt<'tcx>> for queries::$name<'tcx> {
#[inline]
fn cache_on_disk(tcx: TyCtxt<'tcx>, key: &Self::Key) -> bool {
::rustc_middle::query::cached::$name(tcx, key)
@@ -497,6 +503,7 @@ macro_rules! define_queries {
anon: is_anon!([$($modifiers)*]),
eval_always: is_eval_always!([$($modifiers)*]),
depth_limit: depth_limit!([$($modifiers)*]),
+ feedable: feedable!([$($modifiers)*]),
dep_kind: dep_graph::DepKind::$name,
hash_result: hash_result!([$($modifiers)*]),
handle_cycle_error: handle_cycle_error!([$($modifiers)*]),
@@ -662,12 +669,15 @@ macro_rules! define_queries_struct {
local_providers: Box<Providers>,
extern_providers: Box<ExternProviders>,
query_structs: Vec<$crate::plumbing::QueryStruct<'tcx>>,
-
pub on_disk_cache: Option<OnDiskCache<'tcx>>,
-
jobs: AtomicU64,
- $($(#[$attr])* $name: QueryState<<queries::$name<'tcx> as QueryConfig>::Key>,)*
+ $(
+ $(#[$attr])*
+ $name: QueryState<
+ <queries::$name<'tcx> as QueryConfig<QueryCtxt<'tcx>>>::Key
+ >,
+ )*
}
impl<'tcx> Queries<'tcx> {
@@ -704,7 +714,7 @@ macro_rules! define_queries_struct {
&'tcx self,
tcx: TyCtxt<'tcx>,
span: Span,
- key: <queries::$name<'tcx> as QueryConfig>::Key,
+ key: <queries::$name<'tcx> as QueryConfig<QueryCtxt<'tcx>>>::Key,
mode: QueryMode,
) -> Option<query_stored::$name<'tcx>> {
let qcx = QueryCtxt { tcx, queries: self };
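
The new `feedable!` matcher above follows the same pattern as the other modifier macros: scan a bracketed list of query modifiers and report whether `(feedable)` occurs anywhere in it. Copied verbatim into a standalone program to show how the three arms interact (the modifier tokens passed here are arbitrary):

    macro_rules! feedable {
        ([]) => {{
            false
        }};
        ([(feedable) $($rest:tt)*]) => {{
            true
        }};
        ([$other:tt $($modifiers:tt)*]) => {
            feedable!([$($modifiers)*])
        };
    }

    fn main() {
        assert_eq!(feedable!([]), false);
        assert_eq!(feedable!([(feedable)]), true);
        // The modifier may appear after others; the third arm skips over them.
        assert_eq!(feedable!([(anon) (feedable)]), true);
        assert_eq!(feedable!([(anon) (eval_always)]), false);
    }
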
diff --git a/compiler/rustc_query_impl/src/profiling_support.rs b/compiler/rustc_query_impl/src/profiling_support.rs
index 2cc311d48..81114f2cd 100644
--- a/compiler/rustc_query_impl/src/profiling_support.rs
+++ b/compiler/rustc_query_impl/src/profiling_support.rs
@@ -19,18 +19,18 @@ impl QueryKeyStringCache {
}
}
-struct QueryKeyStringBuilder<'p, 'c, 'tcx> {
+struct QueryKeyStringBuilder<'p, 'tcx> {
profiler: &'p SelfProfiler,
tcx: TyCtxt<'tcx>,
- string_cache: &'c mut QueryKeyStringCache,
+ string_cache: &'p mut QueryKeyStringCache,
}
-impl<'p, 'c, 'tcx> QueryKeyStringBuilder<'p, 'c, 'tcx> {
+impl<'p, 'tcx> QueryKeyStringBuilder<'p, 'tcx> {
fn new(
profiler: &'p SelfProfiler,
tcx: TyCtxt<'tcx>,
- string_cache: &'c mut QueryKeyStringCache,
- ) -> QueryKeyStringBuilder<'p, 'c, 'tcx> {
+ string_cache: &'p mut QueryKeyStringCache,
+ ) -> QueryKeyStringBuilder<'p, 'tcx> {
QueryKeyStringBuilder { profiler, tcx, string_cache }
}
@@ -99,7 +99,7 @@ impl<'p, 'c, 'tcx> QueryKeyStringBuilder<'p, 'c, 'tcx> {
}
trait IntoSelfProfilingString {
- fn to_self_profile_string(&self, builder: &mut QueryKeyStringBuilder<'_, '_, '_>) -> StringId;
+ fn to_self_profile_string(&self, builder: &mut QueryKeyStringBuilder<'_, '_>) -> StringId;
}
// The default implementation of `IntoSelfProfilingString` just uses `Debug`
@@ -109,7 +109,7 @@ trait IntoSelfProfilingString {
impl<T: Debug> IntoSelfProfilingString for T {
default fn to_self_profile_string(
&self,
- builder: &mut QueryKeyStringBuilder<'_, '_, '_>,
+ builder: &mut QueryKeyStringBuilder<'_, '_>,
) -> StringId {
let s = format!("{:?}", self);
builder.profiler.alloc_string(&s[..])
@@ -117,60 +117,42 @@ impl<T: Debug> IntoSelfProfilingString for T {
}
impl<T: SpecIntoSelfProfilingString> IntoSelfProfilingString for T {
- fn to_self_profile_string(&self, builder: &mut QueryKeyStringBuilder<'_, '_, '_>) -> StringId {
+ fn to_self_profile_string(&self, builder: &mut QueryKeyStringBuilder<'_, '_>) -> StringId {
self.spec_to_self_profile_string(builder)
}
}
#[rustc_specialization_trait]
trait SpecIntoSelfProfilingString: Debug {
- fn spec_to_self_profile_string(
- &self,
- builder: &mut QueryKeyStringBuilder<'_, '_, '_>,
- ) -> StringId;
+ fn spec_to_self_profile_string(&self, builder: &mut QueryKeyStringBuilder<'_, '_>) -> StringId;
}
impl SpecIntoSelfProfilingString for DefId {
- fn spec_to_self_profile_string(
- &self,
- builder: &mut QueryKeyStringBuilder<'_, '_, '_>,
- ) -> StringId {
+ fn spec_to_self_profile_string(&self, builder: &mut QueryKeyStringBuilder<'_, '_>) -> StringId {
builder.def_id_to_string_id(*self)
}
}
impl SpecIntoSelfProfilingString for CrateNum {
- fn spec_to_self_profile_string(
- &self,
- builder: &mut QueryKeyStringBuilder<'_, '_, '_>,
- ) -> StringId {
+ fn spec_to_self_profile_string(&self, builder: &mut QueryKeyStringBuilder<'_, '_>) -> StringId {
builder.def_id_to_string_id(self.as_def_id())
}
}
impl SpecIntoSelfProfilingString for DefIndex {
- fn spec_to_self_profile_string(
- &self,
- builder: &mut QueryKeyStringBuilder<'_, '_, '_>,
- ) -> StringId {
+ fn spec_to_self_profile_string(&self, builder: &mut QueryKeyStringBuilder<'_, '_>) -> StringId {
builder.def_id_to_string_id(DefId { krate: LOCAL_CRATE, index: *self })
}
}
impl SpecIntoSelfProfilingString for LocalDefId {
- fn spec_to_self_profile_string(
- &self,
- builder: &mut QueryKeyStringBuilder<'_, '_, '_>,
- ) -> StringId {
+ fn spec_to_self_profile_string(&self, builder: &mut QueryKeyStringBuilder<'_, '_>) -> StringId {
builder.def_id_to_string_id(DefId { krate: LOCAL_CRATE, index: self.local_def_index })
}
}
impl<T: SpecIntoSelfProfilingString> SpecIntoSelfProfilingString for WithOptConstParam<T> {
- fn spec_to_self_profile_string(
- &self,
- builder: &mut QueryKeyStringBuilder<'_, '_, '_>,
- ) -> StringId {
+ fn spec_to_self_profile_string(&self, builder: &mut QueryKeyStringBuilder<'_, '_>) -> StringId {
// We print `WithOptConstParam` values as tuples to make them shorter
// and more readable, without losing information:
//
@@ -205,10 +187,7 @@ where
T0: SpecIntoSelfProfilingString,
T1: SpecIntoSelfProfilingString,
{
- fn spec_to_self_profile_string(
- &self,
- builder: &mut QueryKeyStringBuilder<'_, '_, '_>,
- ) -> StringId {
+ fn spec_to_self_profile_string(&self, builder: &mut QueryKeyStringBuilder<'_, '_>) -> StringId {
let val0 = self.0.to_self_profile_string(builder);
let val1 = self.1.to_self_profile_string(builder);
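
The impls in this file rely on rustc's specialization setup: a blanket `default fn` that falls back to `Debug`, overridden for key types such as `DefId` through the internal `#[rustc_specialization_trait]` helper trait. A simplified sketch of that default-plus-override pattern, written against the nightly `specialization` feature directly and using a made-up key type rather than the compiler's attributes:

    #![feature(specialization)]
    #![allow(incomplete_features)]

    use std::fmt::Debug;

    trait ProfileString {
        fn profile_string(&self) -> String;
    }

    // Blanket fallback: anything Debug gets its Debug rendering.
    impl<T: Debug> ProfileString for T {
        default fn profile_string(&self) -> String {
            format!("{:?}", self)
        }
    }

    // A stand-in for DefId with a shorter, more readable rendering.
    #[derive(Debug)]
    struct FakeDefId(u32);

    impl ProfileString for FakeDefId {
        fn profile_string(&self) -> String {
            format!("def-id:{}", self.0)
        }
    }

    fn main() {
        assert_eq!(42u32.profile_string(), "42");
        assert_eq!(FakeDefId(7).profile_string(), "def-id:7");
    }
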
diff --git a/compiler/rustc_query_system/Cargo.toml b/compiler/rustc_query_system/Cargo.toml
index faddad741..028756b5a 100644
--- a/compiler/rustc_query_system/Cargo.toml
+++ b/compiler/rustc_query_system/Cargo.toml
@@ -22,7 +22,7 @@ rustc_span = { path = "../rustc_span" }
rustc_target = { path = "../rustc_target" }
rustc_type_ir = { path = "../rustc_type_ir" }
smallvec = { version = "1.8.1", features = ["union", "may_dangle"] }
-thin-vec = "0.2.8"
+thin-vec = "0.2.9"
tracing = "0.1"
[features]
diff --git a/compiler/rustc_query_system/src/cache.rs b/compiler/rustc_query_system/src/cache.rs
index d592812f7..7cc885be2 100644
--- a/compiler/rustc_query_system/src/cache.rs
+++ b/compiler/rustc_query_system/src/cache.rs
@@ -26,7 +26,7 @@ impl<Key, Value> Cache<Key, Value> {
}
impl<Key: Eq + Hash, Value: Clone> Cache<Key, Value> {
- pub fn get<CTX: DepContext>(&self, key: &Key, tcx: CTX) -> Option<Value> {
+ pub fn get<Tcx: DepContext>(&self, key: &Key, tcx: Tcx) -> Option<Value> {
Some(self.hashmap.borrow().get(key)?.get(tcx))
}
@@ -46,7 +46,7 @@ impl<T: Clone> WithDepNode<T> {
WithDepNode { dep_node, cached_value }
}
- pub fn get<CTX: DepContext>(&self, tcx: CTX) -> T {
+ pub fn get<Tcx: DepContext>(&self, tcx: Tcx) -> T {
tcx.dep_graph().read_index(self.dep_node);
self.cached_value.clone()
}
diff --git a/compiler/rustc_query_system/src/dep_graph/dep_node.rs b/compiler/rustc_query_system/src/dep_graph/dep_node.rs
index 5c6ce0556..d79c5816a 100644
--- a/compiler/rustc_query_system/src/dep_graph/dep_node.rs
+++ b/compiler/rustc_query_system/src/dep_graph/dep_node.rs
@@ -61,18 +61,18 @@ impl<K: DepKind> DepNode<K> {
/// Creates a new, parameterless DepNode. This method will assert
/// that the DepNode corresponding to the given DepKind actually
/// does not require any parameters.
- pub fn new_no_params<Ctxt>(tcx: Ctxt, kind: K) -> DepNode<K>
+ pub fn new_no_params<Tcx>(tcx: Tcx, kind: K) -> DepNode<K>
where
- Ctxt: super::DepContext<DepKind = K>,
+ Tcx: super::DepContext<DepKind = K>,
{
debug_assert_eq!(tcx.fingerprint_style(kind), FingerprintStyle::Unit);
DepNode { kind, hash: Fingerprint::ZERO.into() }
}
- pub fn construct<Ctxt, Key>(tcx: Ctxt, kind: K, arg: &Key) -> DepNode<K>
+ pub fn construct<Tcx, Key>(tcx: Tcx, kind: K, arg: &Key) -> DepNode<K>
where
- Ctxt: super::DepContext<DepKind = K>,
- Key: DepNodeParams<Ctxt>,
+ Tcx: super::DepContext<DepKind = K>,
+ Key: DepNodeParams<Tcx>,
{
let hash = arg.to_fingerprint(tcx);
let dep_node = DepNode { kind, hash: hash.into() };
@@ -93,9 +93,9 @@ impl<K: DepKind> DepNode<K> {
/// Construct a DepNode from the given DepKind and DefPathHash. This
/// method will assert that the given DepKind actually requires a
/// single DefId/DefPathHash parameter.
- pub fn from_def_path_hash<Ctxt>(tcx: Ctxt, def_path_hash: DefPathHash, kind: K) -> Self
+ pub fn from_def_path_hash<Tcx>(tcx: Tcx, def_path_hash: DefPathHash, kind: K) -> Self
where
- Ctxt: super::DepContext<DepKind = K>,
+ Tcx: super::DepContext<DepKind = K>,
{
debug_assert!(tcx.fingerprint_style(kind) == FingerprintStyle::DefPathHash);
DepNode { kind, hash: def_path_hash.0.into() }
@@ -108,18 +108,18 @@ impl<K: DepKind> fmt::Debug for DepNode<K> {
}
}
-pub trait DepNodeParams<Ctxt: DepContext>: fmt::Debug + Sized {
+pub trait DepNodeParams<Tcx: DepContext>: fmt::Debug + Sized {
fn fingerprint_style() -> FingerprintStyle;
/// This method turns the parameters of a DepNodeConstructor into an opaque
/// Fingerprint to be used in DepNode.
/// Not all DepNodeParams support being turned into a Fingerprint (they
/// don't need to if the corresponding DepNode is anonymous).
- fn to_fingerprint(&self, _: Ctxt) -> Fingerprint {
+ fn to_fingerprint(&self, _: Tcx) -> Fingerprint {
panic!("Not implemented. Accidentally called on anonymous node?")
}
- fn to_debug_str(&self, _: Ctxt) -> String {
+ fn to_debug_str(&self, _: Tcx) -> String {
format!("{:?}", self)
}
@@ -129,10 +129,10 @@ pub trait DepNodeParams<Ctxt: DepContext>: fmt::Debug + Sized {
/// `fingerprint_style()` is not `FingerprintStyle::Opaque`.
/// It is always valid to return `None` here, in which case incremental
/// compilation will treat the query as having changed instead of forcing it.
- fn recover(tcx: Ctxt, dep_node: &DepNode<Ctxt::DepKind>) -> Option<Self>;
+ fn recover(tcx: Tcx, dep_node: &DepNode<Tcx::DepKind>) -> Option<Self>;
}
-impl<Ctxt: DepContext, T> DepNodeParams<Ctxt> for T
+impl<Tcx: DepContext, T> DepNodeParams<Tcx> for T
where
T: for<'a> HashStable<StableHashingContext<'a>> + fmt::Debug,
{
@@ -142,7 +142,7 @@ where
}
#[inline(always)]
- default fn to_fingerprint(&self, tcx: Ctxt) -> Fingerprint {
+ default fn to_fingerprint(&self, tcx: Tcx) -> Fingerprint {
tcx.with_stable_hashing_context(|mut hcx| {
let mut hasher = StableHasher::new();
self.hash_stable(&mut hcx, &mut hasher);
@@ -151,12 +151,12 @@ where
}
#[inline(always)]
- default fn to_debug_str(&self, _: Ctxt) -> String {
+ default fn to_debug_str(&self, _: Tcx) -> String {
format!("{:?}", *self)
}
#[inline(always)]
- default fn recover(_: Ctxt, _: &DepNode<Ctxt::DepKind>) -> Option<Self> {
+ default fn recover(_: Tcx, _: &DepNode<Tcx::DepKind>) -> Option<Self> {
None
}
}
@@ -166,7 +166,7 @@ where
/// Information is retrieved by indexing the `DEP_KINDS` array using the integer value
/// of the `DepKind`. Overall, this allows implementing `DepContext` using this manual
/// jump table instead of large matches.
-pub struct DepKindStruct<CTX: DepContext> {
+pub struct DepKindStruct<Tcx: DepContext> {
/// Anonymous queries cannot be replayed from one compiler invocation to the next.
/// When their result is needed, it is recomputed. They are useful for fine-grained
/// dependency tracking, and caching within one compiler invocation.
@@ -216,10 +216,10 @@ pub struct DepKindStruct<CTX: DepContext> {
/// with kind `MirValidated`, we know that the GUID/fingerprint of the `DepNode`
/// is actually a `DefPathHash`, and can therefore just look up the corresponding
/// `DefId` in `tcx.def_path_hash_to_def_id`.
- pub force_from_dep_node: Option<fn(tcx: CTX, dep_node: DepNode<CTX::DepKind>) -> bool>,
+ pub force_from_dep_node: Option<fn(tcx: Tcx, dep_node: DepNode<Tcx::DepKind>) -> bool>,
/// Invoke a query to put the on-disk cached value in memory.
- pub try_load_from_on_disk_cache: Option<fn(CTX, DepNode<CTX::DepKind>)>,
+ pub try_load_from_on_disk_cache: Option<fn(Tcx, DepNode<Tcx::DepKind>)>,
}
/// A "work product" corresponds to a `.o` (or other) file that we
diff --git a/compiler/rustc_query_system/src/dep_graph/graph.rs b/compiler/rustc_query_system/src/dep_graph/graph.rs
index 8ff561327..38c7c6cce 100644
--- a/compiler/rustc_query_system/src/dep_graph/graph.rs
+++ b/compiler/rustc_query_system/src/dep_graph/graph.rs
@@ -377,9 +377,9 @@ impl<K: DepKind> DepGraph<K> {
/// Executes something within an "anonymous" task, that is, a task the
/// `DepNode` of which is determined by the list of inputs it read from.
- pub fn with_anon_task<Ctxt: DepContext<DepKind = K>, OP, R>(
+ pub fn with_anon_task<Tcx: DepContext<DepKind = K>, OP, R>(
&self,
- cx: Ctxt,
+ cx: Tcx,
dep_kind: K,
op: OP,
) -> (R, DepNodeIndex)
@@ -489,6 +489,95 @@ impl<K: DepKind> DepGraph<K> {
}
}
+ /// Create a node when we force-feed a value into the query cache.
+ /// This is used to remove cycles during type-checking const generic parameters.
+ ///
+ /// As usual in the query system, we consider that the current state of the calling query
+ /// depends only on the list of dependencies recorded up to now. As a consequence, the value
+ /// that this query gives us can only depend on those dependencies too. Therefore,
+ /// it is sound to use the current dependency set for the created node.
+ ///
+ /// During replay, the order of the nodes is relevant in the dependency graph.
+ /// So the unchanged replay will mark the caller query before trying to mark this one.
+ /// If there is a change to report, the caller query will be re-executed before this one.
+ ///
+ /// FIXME: If the code is changed enough for this node to be marked before requiring the
+ /// caller's node, we suppose that those changes will be enough to mark this node red and
+ /// force a recomputation using the "normal" way.
+ pub fn with_feed_task<Ctxt: DepContext<DepKind = K>, A: Debug, R: Debug>(
+ &self,
+ node: DepNode<K>,
+ cx: Ctxt,
+ key: A,
+ result: &R,
+ hash_result: Option<fn(&mut StableHashingContext<'_>, &R) -> Fingerprint>,
+ ) -> DepNodeIndex {
+ if let Some(data) = self.data.as_ref() {
+ // The caller query has more dependencies than the node we are creating. We may
+ // encounter a case where this created node is marked as green, but the caller query is
+ // subsequently marked as red or recomputed. In this case, we will end up feeding a
+ // value to an existing node.
+ //
+ // For sanity, we still check that the loaded stable hash and the new one match.
+ if let Some(dep_node_index) = self.dep_node_index_of_opt(&node) {
+ let _current_fingerprint =
+ crate::query::incremental_verify_ich(cx, result, &node, hash_result);
+
+ #[cfg(debug_assertions)]
+ if hash_result.is_some() {
+ data.current.record_edge(dep_node_index, node, _current_fingerprint);
+ }
+
+ return dep_node_index;
+ }
+
+ let mut edges = SmallVec::new();
+ K::read_deps(|task_deps| match task_deps {
+ TaskDepsRef::Allow(deps) => edges.extend(deps.lock().reads.iter().copied()),
+ TaskDepsRef::Ignore => {} // During HIR lowering, we have no dependencies.
+ TaskDepsRef::Forbid => {
+ panic!("Cannot summarize when dependencies are not recorded.")
+ }
+ });
+
+ let hashing_timer = cx.profiler().incr_result_hashing();
+ let current_fingerprint = hash_result.map(|hash_result| {
+ cx.with_stable_hashing_context(|mut hcx| hash_result(&mut hcx, result))
+ });
+
+ let print_status = cfg!(debug_assertions) && cx.sess().opts.unstable_opts.dep_tasks;
+
+ // Intern the new `DepNode` with the dependencies up-to-now.
+ let (dep_node_index, prev_and_color) = data.current.intern_node(
+ cx.profiler(),
+ &data.previous,
+ node,
+ edges,
+ current_fingerprint,
+ print_status,
+ );
+
+ hashing_timer.finish_with_query_invocation_id(dep_node_index.into());
+
+ if let Some((prev_index, color)) = prev_and_color {
+ debug_assert!(
+ data.colors.get(prev_index).is_none(),
+ "DepGraph::with_task() - Duplicate DepNodeColor insertion for {key:?}",
+ );
+
+ data.colors.insert(prev_index, color);
+ }
+
+ dep_node_index
+ } else {
+ // Incremental compilation is turned off. We just execute the task
+ // without tracking. We still provide a dep-node index that uniquely
+ // identifies the task so that we have a cheap way of referring to
+ // the query for self-profiling.
+ self.next_virtual_depnode_index()
+ }
+ }
+
#[inline]
pub fn dep_node_index_of(&self, dep_node: &DepNode<K>) -> DepNodeIndex {
self.dep_node_index_of_opt(dep_node).unwrap()
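
The doc comment on `with_feed_task` above argues that a fed value may soundly reuse the caller's dependency set, because the fed result cannot depend on anything the caller has not already read. A toy tracker, entirely separate from the real `DepGraph` API, that makes the "inherit the reads recorded so far" step concrete:

    use std::collections::HashMap;

    #[derive(Clone, Copy, PartialEq, Eq, Hash, Debug)]
    struct NodeIndex(usize);

    #[derive(Default)]
    struct ToyDepGraph {
        edges: Vec<Vec<NodeIndex>>,        // edges[i] = dependencies of node i
        index: HashMap<String, NodeIndex>, // name -> node
        current_reads: Vec<NodeIndex>,     // reads recorded for the running task
    }

    impl ToyDepGraph {
        fn intern(&mut self, name: &str, deps: Vec<NodeIndex>) -> NodeIndex {
            if let Some(&idx) = self.index.get(name) {
                return idx;
            }
            let idx = NodeIndex(self.edges.len());
            self.edges.push(deps);
            self.index.insert(name.to_string(), idx);
            idx
        }

        // Record that the currently running task read `node`.
        fn read(&mut self, node: NodeIndex) {
            self.current_reads.push(node);
        }

        // Feed a result for `name`: the new node inherits the dependencies
        // recorded so far, mirroring the argument in the doc comment.
        fn feed(&mut self, name: &str) -> NodeIndex {
            let deps = self.current_reads.clone();
            self.intern(name, deps)
        }
    }

    fn main() {
        let mut g = ToyDepGraph::default();
        let sig = g.intern("fn_sig(foo)", vec![]);
        let body = g.intern("hir_body(foo)", vec![]);

        // While "type-checking foo" we read its signature and body...
        g.read(sig);
        g.read(body);

        // ...then feed a value for a nested node with the same dependency set.
        let fed = g.feed("type_of(foo::constant)");
        assert_eq!(g.edges[fed.0], vec![sig, body]);
    }
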
@@ -571,12 +660,12 @@ impl<K: DepKind> DepGraph<K> {
/// A node will have an index, when it's already been marked green, or when we can mark it
/// green. This function will mark the current task as a reader of the specified node, when
/// a node index can be found for that node.
- pub fn try_mark_green<Ctxt: QueryContext<DepKind = K>>(
+ pub fn try_mark_green<Qcx: QueryContext<DepKind = K>>(
&self,
- tcx: Ctxt,
+ qcx: Qcx,
dep_node: &DepNode<K>,
) -> Option<(SerializedDepNodeIndex, DepNodeIndex)> {
- debug_assert!(!tcx.dep_context().is_eval_always(dep_node.kind));
+ debug_assert!(!qcx.dep_context().is_eval_always(dep_node.kind));
// Return None if the dep graph is disabled
let data = self.data.as_ref()?;
@@ -592,15 +681,16 @@ impl<K: DepKind> DepGraph<K> {
// in the previous compilation session too, so we can try to
// mark it as green by recursively marking all of its
// dependencies green.
- self.try_mark_previous_green(tcx, data, prev_index, &dep_node)
+ self.try_mark_previous_green(qcx, data, prev_index, &dep_node)
.map(|dep_node_index| (prev_index, dep_node_index))
}
}
}
- fn try_mark_parent_green<Ctxt: QueryContext<DepKind = K>>(
+ #[instrument(skip(self, qcx, data, parent_dep_node_index), level = "debug")]
+ fn try_mark_parent_green<Qcx: QueryContext<DepKind = K>>(
&self,
- tcx: Ctxt,
+ qcx: Qcx,
data: &DepGraphData<K>,
parent_dep_node_index: SerializedDepNodeIndex,
dep_node: &DepNode<K>,
@@ -613,11 +703,7 @@ impl<K: DepKind> DepGraph<K> {
// This dependency has been marked as green before, we are
// still fine and can continue with checking the other
// dependencies.
- debug!(
- "try_mark_previous_green({:?}) --- found dependency {:?} to \
- be immediately green",
- dep_node, dep_dep_node,
- );
+ debug!("dependency {dep_dep_node:?} was immediately green");
return Some(());
}
Some(DepNodeColor::Red) => {
@@ -625,10 +711,7 @@ impl<K: DepKind> DepGraph<K> {
// compared to the previous compilation session. We cannot
// mark the DepNode as green and also don't need to bother
// with checking any of the other dependencies.
- debug!(
- "try_mark_previous_green({:?}) - END - dependency {:?} was immediately red",
- dep_node, dep_dep_node,
- );
+ debug!("dependency {dep_dep_node:?} was immediately red");
return None;
}
None => {}
@@ -636,35 +719,26 @@ impl<K: DepKind> DepGraph<K> {
// We don't know the state of this dependency. If it isn't
// an eval_always node, let's try to mark it green recursively.
- if !tcx.dep_context().is_eval_always(dep_dep_node.kind) {
+ if !qcx.dep_context().is_eval_always(dep_dep_node.kind) {
debug!(
- "try_mark_previous_green({:?}) --- state of dependency {:?} ({}) \
- is unknown, trying to mark it green",
- dep_node, dep_dep_node, dep_dep_node.hash,
+ "state of dependency {:?} ({}) is unknown, trying to mark it green",
+ dep_dep_node, dep_dep_node.hash,
);
let node_index =
- self.try_mark_previous_green(tcx, data, parent_dep_node_index, dep_dep_node);
+ self.try_mark_previous_green(qcx, data, parent_dep_node_index, dep_dep_node);
+
if node_index.is_some() {
- debug!(
- "try_mark_previous_green({:?}) --- managed to MARK dependency {:?} as green",
- dep_node, dep_dep_node
- );
+ debug!("managed to MARK dependency {dep_dep_node:?} as green",);
return Some(());
}
}
// We failed to mark it green, so we try to force the query.
- debug!(
- "try_mark_previous_green({:?}) --- trying to force dependency {:?}",
- dep_node, dep_dep_node
- );
- if !tcx.dep_context().try_force_from_dep_node(*dep_dep_node) {
+ debug!("trying to force dependency {dep_dep_node:?}");
+ if !qcx.dep_context().try_force_from_dep_node(*dep_dep_node) {
// The DepNode could not be forced.
- debug!(
- "try_mark_previous_green({:?}) - END - dependency {:?} could not be forced",
- dep_node, dep_dep_node
- );
+ debug!("dependency {dep_dep_node:?} could not be forced");
return None;
}
@@ -672,23 +746,17 @@ impl<K: DepKind> DepGraph<K> {
match dep_dep_node_color {
Some(DepNodeColor::Green(_)) => {
- debug!(
- "try_mark_previous_green({:?}) --- managed to FORCE dependency {:?} to green",
- dep_node, dep_dep_node
- );
+ debug!("managed to FORCE dependency {dep_dep_node:?} to green");
return Some(());
}
Some(DepNodeColor::Red) => {
- debug!(
- "try_mark_previous_green({:?}) - END - dependency {:?} was red after forcing",
- dep_node, dep_dep_node
- );
+ debug!("dependency {dep_dep_node:?} was red after forcing",);
return None;
}
None => {}
}
- if !tcx.dep_context().sess().has_errors_or_delayed_span_bugs() {
+ if let None = qcx.dep_context().sess().has_errors_or_delayed_span_bugs() {
panic!("try_mark_previous_green() - Forcing the DepNode should have set its color")
}
@@ -702,23 +770,19 @@ impl<K: DepKind> DepGraph<K> {
// invalid state will not be persisted to the
// incremental compilation cache because of
// compilation errors being present.
- debug!(
- "try_mark_previous_green({:?}) - END - dependency {:?} resulted in compilation error",
- dep_node, dep_dep_node
- );
+ debug!("dependency {dep_dep_node:?} resulted in compilation error",);
return None;
}
/// Try to mark a dep-node which existed in the previous compilation session as green.
- fn try_mark_previous_green<Ctxt: QueryContext<DepKind = K>>(
+ #[instrument(skip(self, qcx, data, prev_dep_node_index), level = "debug")]
+ fn try_mark_previous_green<Qcx: QueryContext<DepKind = K>>(
&self,
- tcx: Ctxt,
+ qcx: Qcx,
data: &DepGraphData<K>,
prev_dep_node_index: SerializedDepNodeIndex,
dep_node: &DepNode<K>,
) -> Option<DepNodeIndex> {
- debug!("try_mark_previous_green({:?}) - BEGIN", dep_node);
-
#[cfg(not(parallel_compiler))]
{
debug_assert!(!self.dep_node_exists(dep_node));
@@ -726,14 +790,14 @@ impl<K: DepKind> DepGraph<K> {
}
// We never try to mark eval_always nodes as green
- debug_assert!(!tcx.dep_context().is_eval_always(dep_node.kind));
+ debug_assert!(!qcx.dep_context().is_eval_always(dep_node.kind));
debug_assert_eq!(data.previous.index_to_node(prev_dep_node_index), *dep_node);
let prev_deps = data.previous.edge_targets_from(prev_dep_node_index);
for &dep_dep_node_index in prev_deps {
- self.try_mark_parent_green(tcx, data, dep_dep_node_index, dep_node)?
+ self.try_mark_parent_green(qcx, data, dep_dep_node_index, dep_node)?
}
// If we got here without hitting a `return` that means that all
@@ -745,7 +809,7 @@ impl<K: DepKind> DepGraph<K> {
// We allocate an entry for the node in the current dependency graph and
// adding all the appropriate edges imported from the previous graph
let dep_node_index = data.current.promote_node_and_deps_to_current(
- tcx.dep_context().profiler(),
+ qcx.dep_context().profiler(),
&data.previous,
prev_dep_node_index,
);
@@ -754,7 +818,7 @@ impl<K: DepKind> DepGraph<K> {
// FIXME: Store the fact that a node has diagnostics in a bit in the dep graph somewhere
// Maybe store a list on disk and encode this fact in the DepNodeState
- let side_effects = tcx.load_side_effects(prev_dep_node_index);
+ let side_effects = qcx.load_side_effects(prev_dep_node_index);
#[cfg(not(parallel_compiler))]
debug_assert!(
@@ -765,14 +829,14 @@ impl<K: DepKind> DepGraph<K> {
);
if !side_effects.is_empty() {
- self.emit_side_effects(tcx, data, dep_node_index, side_effects);
+ self.emit_side_effects(qcx, data, dep_node_index, side_effects);
}
// ... and finally storing a "Green" entry in the color map.
// Multiple threads can all write the same color here
data.colors.insert(prev_dep_node_index, DepNodeColor::Green(dep_node_index));
- debug!("try_mark_previous_green({:?}) - END - successfully marked as green", dep_node);
+ debug!("successfully marked {dep_node:?} as green");
Some(dep_node_index)
}
@@ -780,9 +844,9 @@ impl<K: DepKind> DepGraph<K> {
/// This may be called concurrently on multiple threads for the same dep node.
#[cold]
#[inline(never)]
- fn emit_side_effects<Ctxt: QueryContext<DepKind = K>>(
+ fn emit_side_effects<Qcx: QueryContext<DepKind = K>>(
&self,
- tcx: Ctxt,
+ qcx: Qcx,
data: &DepGraphData<K>,
dep_node_index: DepNodeIndex,
side_effects: QuerySideEffects,
@@ -794,9 +858,9 @@ impl<K: DepKind> DepGraph<K> {
// must process side effects
// Promote the previous diagnostics to the current session.
- tcx.store_side_effects(dep_node_index, side_effects.clone());
+ qcx.store_side_effects(dep_node_index, side_effects.clone());
- let handle = tcx.dep_context().sess().diagnostic();
+ let handle = qcx.dep_context().sess().diagnostic();
for mut diagnostic in side_effects.diagnostics {
handle.emit_diagnostic(&mut diagnostic);
@@ -804,27 +868,27 @@ impl<K: DepKind> DepGraph<K> {
}
}
- // Returns true if the given node has been marked as red during the
- // current compilation session. Used in various assertions
+ /// Returns true if the given node has been marked as red during the
+ /// current compilation session. Used in various assertions
pub fn is_red(&self, dep_node: &DepNode<K>) -> bool {
self.node_color(dep_node) == Some(DepNodeColor::Red)
}
- // Returns true if the given node has been marked as green during the
- // current compilation session. Used in various assertions
+ /// Returns true if the given node has been marked as green during the
+ /// current compilation session. Used in various assertions
pub fn is_green(&self, dep_node: &DepNode<K>) -> bool {
self.node_color(dep_node).map_or(false, |c| c.is_green())
}
- // This method loads all on-disk cacheable query results into memory, so
- // they can be written out to the new cache file again. Most query results
- // will already be in memory but in the case where we marked something as
- // green but then did not need the value, that value will never have been
- // loaded from disk.
- //
- // This method will only load queries that will end up in the disk cache.
- // Other queries will not be executed.
- pub fn exec_cache_promotions<Ctxt: DepContext<DepKind = K>>(&self, tcx: Ctxt) {
+ /// This method loads all on-disk cacheable query results into memory, so
+ /// they can be written out to the new cache file again. Most query results
+ /// will already be in memory but in the case where we marked something as
+ /// green but then did not need the value, that value will never have been
+ /// loaded from disk.
+ ///
+ /// This method will only load queries that will end up in the disk cache.
+ /// Other queries will not be executed.
+ pub fn exec_cache_promotions<Tcx: DepContext<DepKind = K>>(&self, tcx: Tcx) {
let _prof_timer = tcx.profiler().generic_activity("incr_comp_query_cache_promotion");
let data = self.data.as_ref().unwrap();
@@ -941,6 +1005,11 @@ pub(super) struct CurrentDepGraph<K: DepKind> {
new_node_to_index: Sharded<FxHashMap<DepNode<K>, DepNodeIndex>>,
prev_index_to_index: Lock<IndexVec<SerializedDepNodeIndex, Option<DepNodeIndex>>>,
+ /// This is used to verify that fingerprints do not change between the creation of a node
+ /// and its recomputation.
+ #[cfg(debug_assertions)]
+ fingerprints: Lock<FxHashMap<DepNode<K>, Fingerprint>>,
+
/// Used to trap when a specific edge is added to the graph.
/// This is used for debug purposes and is only active with `debug_assertions`.
#[cfg(debug_assertions)]
@@ -1024,6 +1093,8 @@ impl<K: DepKind> CurrentDepGraph<K> {
anon_id_seed,
#[cfg(debug_assertions)]
forbidden_edge,
+ #[cfg(debug_assertions)]
+ fingerprints: Lock::new(Default::default()),
total_read_count: AtomicU64::new(0),
total_duplicate_read_count: AtomicU64::new(0),
node_intern_event_id,
@@ -1031,10 +1102,18 @@ impl<K: DepKind> CurrentDepGraph<K> {
}
#[cfg(debug_assertions)]
- fn record_edge(&self, dep_node_index: DepNodeIndex, key: DepNode<K>) {
+ fn record_edge(&self, dep_node_index: DepNodeIndex, key: DepNode<K>, fingerprint: Fingerprint) {
if let Some(forbidden_edge) = &self.forbidden_edge {
forbidden_edge.index_to_node.lock().insert(dep_node_index, key);
}
+ match self.fingerprints.lock().entry(key) {
+ Entry::Vacant(v) => {
+ v.insert(fingerprint);
+ }
+ Entry::Occupied(o) => {
+ assert_eq!(*o.get(), fingerprint, "Unstable fingerprints for {:?}", key);
+ }
+ }
}
/// Writes the node to the current dep-graph and allocates a `DepNodeIndex` for it.
@@ -1046,17 +1125,21 @@ impl<K: DepKind> CurrentDepGraph<K> {
edges: EdgesVec,
current_fingerprint: Fingerprint,
) -> DepNodeIndex {
- match self.new_node_to_index.get_shard_by_value(&key).lock().entry(key) {
+ let dep_node_index = match self.new_node_to_index.get_shard_by_value(&key).lock().entry(key)
+ {
Entry::Occupied(entry) => *entry.get(),
Entry::Vacant(entry) => {
let dep_node_index =
self.encoder.borrow().send(profiler, key, current_fingerprint, edges);
entry.insert(dep_node_index);
- #[cfg(debug_assertions)]
- self.record_edge(dep_node_index, key);
dep_node_index
}
- }
+ };
+
+ #[cfg(debug_assertions)]
+ self.record_edge(dep_node_index, key, current_fingerprint);
+
+ dep_node_index
}
fn intern_node(
@@ -1097,7 +1180,7 @@ impl<K: DepKind> CurrentDepGraph<K> {
};
#[cfg(debug_assertions)]
- self.record_edge(dep_node_index, key);
+ self.record_edge(dep_node_index, key, fingerprint);
(dep_node_index, Some((prev_index, DepNodeColor::Green(dep_node_index))))
} else {
if print_status {
@@ -1119,7 +1202,7 @@ impl<K: DepKind> CurrentDepGraph<K> {
};
#[cfg(debug_assertions)]
- self.record_edge(dep_node_index, key);
+ self.record_edge(dep_node_index, key, fingerprint);
(dep_node_index, Some((prev_index, DepNodeColor::Red)))
}
} else {
@@ -1144,7 +1227,7 @@ impl<K: DepKind> CurrentDepGraph<K> {
};
#[cfg(debug_assertions)]
- self.record_edge(dep_node_index, key);
+ self.record_edge(dep_node_index, key, Fingerprint::ZERO);
(dep_node_index, Some((prev_index, DepNodeColor::Red)))
}
} else {
@@ -1175,19 +1258,16 @@ impl<K: DepKind> CurrentDepGraph<K> {
Some(dep_node_index) => dep_node_index,
None => {
let key = prev_graph.index_to_node(prev_index);
- let dep_node_index = self.encoder.borrow().send(
- profiler,
- key,
- prev_graph.fingerprint_by_index(prev_index),
- prev_graph
- .edge_targets_from(prev_index)
- .iter()
- .map(|i| prev_index_to_index[*i].unwrap())
- .collect(),
- );
+ let edges = prev_graph
+ .edge_targets_from(prev_index)
+ .iter()
+ .map(|i| prev_index_to_index[*i].unwrap())
+ .collect();
+ let fingerprint = prev_graph.fingerprint_by_index(prev_index);
+ let dep_node_index = self.encoder.borrow().send(profiler, key, fingerprint, edges);
prev_index_to_index[prev_index] = Some(dep_node_index);
#[cfg(debug_assertions)]
- self.record_edge(dep_node_index, key);
+ self.record_edge(dep_node_index, key, fingerprint);
dep_node_index
}
}
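
The graph.rs hunks above add `with_feed_task`, which interns a result produced inside a caller query under its own `DepNode` while reusing the caller's currently recorded dependency set, and a debug-assertions-only `fingerprints` map that checks a node's fingerprint never changes between the time it is fed and a later recomputation. Below is a minimal, self-contained sketch of that invariant only; `FingerprintCheck`, `NodeKey`, and the `u64` fingerprint are simplified stand-ins, not rustc's types.

use std::collections::hash_map::Entry;
use std::collections::HashMap;

// Simplified stand-ins for rustc's DepNode key and Fingerprint types.
type NodeKey = &'static str;
type Fingerprint = u64;

/// Debug-only bookkeeping analogous to `CurrentDepGraph::fingerprints`:
/// the first time a node is recorded we remember its fingerprint, and any
/// later recording of the same node must produce the identical value.
struct FingerprintCheck {
    fingerprints: HashMap<NodeKey, Fingerprint>,
}

impl FingerprintCheck {
    fn new() -> Self {
        FingerprintCheck { fingerprints: HashMap::new() }
    }

    fn record_edge(&mut self, key: NodeKey, fingerprint: Fingerprint) {
        match self.fingerprints.entry(key) {
            Entry::Vacant(v) => {
                // First sighting: remember the fingerprint for later comparison.
                v.insert(fingerprint);
            }
            Entry::Occupied(o) => {
                // A node interned again (e.g. after the caller query was
                // re-executed) must hash to the same value.
                assert_eq!(*o.get(), fingerprint, "Unstable fingerprints for {key:?}");
            }
        }
    }
}

fn main() {
    let mut check = FingerprintCheck::new();
    check.record_edge("type_of(Foo)", 0xDEAD_BEEF);
    // Recording the same node again with the same hash is fine; a different
    // hash would trip the assertion in a debug build.
    check.record_edge("type_of(Foo)", 0xDEAD_BEEF);
    println!("fingerprints stayed stable");
}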
diff --git a/compiler/rustc_query_system/src/dep_graph/mod.rs b/compiler/rustc_query_system/src/dep_graph/mod.rs
index da2075fd5..e370c6990 100644
--- a/compiler/rustc_query_system/src/dep_graph/mod.rs
+++ b/compiler/rustc_query_system/src/dep_graph/mod.rs
@@ -52,9 +52,8 @@ pub trait DepContext: Copy {
}
/// Try to force a dep node to execute and see if it's green.
+ #[instrument(skip(self), level = "debug")]
fn try_force_from_dep_node(self, dep_node: DepNode<Self::DepKind>) -> bool {
- debug!("try_force_from_dep_node({:?}) --- trying to force", dep_node);
-
let cb = self.dep_kind_info(dep_node.kind);
if let Some(f) = cb.force_from_dep_node {
f(self, dep_node);
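
This hunk (and several in graph.rs above) replaces hand-written `debug!` prefixes that repeated the function name and arguments with the `#[instrument]` attribute from the `tracing` crate, which attaches that information to a span instead. The sketch below shows the pattern in isolation; it assumes `tracing` and `tracing-subscriber` as dependencies, and `try_force` is a made-up function, not the compiler's.

use tracing::{debug, instrument};

// Before: debug!("try_force_from_dep_node({:?}) --- trying to force", dep_node);
// After: `#[instrument]` opens a debug-level span named after the function
// that already carries the (non-skipped) arguments, so message bodies stay short.
#[instrument(level = "debug", skip(large_context))]
fn try_force(dep_node: u32, large_context: &[u8]) -> bool {
    debug!("trying to force dependency");
    !large_context.is_empty() && dep_node % 2 == 0
}

fn main() {
    // Print spans and events at debug level so the instrumentation is visible.
    tracing_subscriber::fmt().with_max_level(tracing::Level::DEBUG).init();
    let ctx = vec![1, 2, 3];
    let forced = try_force(4, &ctx);
    debug!(forced, "done");
}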
diff --git a/compiler/rustc_query_system/src/ich/hcx.rs b/compiler/rustc_query_system/src/ich/hcx.rs
index 148eabb38..6378ec108 100644
--- a/compiler/rustc_query_system/src/ich/hcx.rs
+++ b/compiler/rustc_query_system/src/ich/hcx.rs
@@ -49,15 +49,13 @@ pub(super) enum BodyResolver<'tcx> {
impl<'a> StableHashingContext<'a> {
#[inline]
- fn new_with_or_without_spans(
+ pub fn new(
sess: &'a Session,
definitions: &'a Definitions,
cstore: &'a dyn CrateStore,
source_span: &'a IndexVec<LocalDefId, Span>,
- always_ignore_spans: bool,
) -> Self {
- let hash_spans_initial =
- !always_ignore_spans && !sess.opts.unstable_opts.incremental_ignore_spans;
+ let hash_spans_initial = !sess.opts.unstable_opts.incremental_ignore_spans;
StableHashingContext {
body_resolver: BodyResolver::Forbidden,
@@ -72,33 +70,6 @@ impl<'a> StableHashingContext<'a> {
}
#[inline]
- pub fn new(
- sess: &'a Session,
- definitions: &'a Definitions,
- cstore: &'a dyn CrateStore,
- source_span: &'a IndexVec<LocalDefId, Span>,
- ) -> Self {
- Self::new_with_or_without_spans(
- sess,
- definitions,
- cstore,
- source_span,
- /*always_ignore_spans=*/ false,
- )
- }
-
- #[inline]
- pub fn ignore_spans(
- sess: &'a Session,
- definitions: &'a Definitions,
- cstore: &'a dyn CrateStore,
- source_span: &'a IndexVec<LocalDefId, Span>,
- ) -> Self {
- let always_ignore_spans = true;
- Self::new_with_or_without_spans(sess, definitions, cstore, source_span, always_ignore_spans)
- }
-
- #[inline]
pub fn without_hir_bodies(&mut self, f: impl FnOnce(&mut StableHashingContext<'_>)) {
f(&mut StableHashingContext { body_resolver: BodyResolver::Ignore, ..self.clone() });
}
@@ -202,10 +173,4 @@ impl<'a> rustc_span::HashStableContext for StableHashingContext<'a> {
}
}
-impl<'a> rustc_data_structures::intern::InternedHashingContext for StableHashingContext<'a> {
- fn with_def_path_and_no_spans(&mut self, f: impl FnOnce(&mut Self)) {
- self.while_hashing_spans(false, f);
- }
-}
-
impl<'a> rustc_session::HashStableContext for StableHashingContext<'a> {}
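
The hcx.rs hunk removes the `ignore_spans` constructor and the private `new_with_or_without_spans` helper, so whether spans are hashed now depends only on `-Z incremental-ignore-spans`. A much-simplified sketch of that consolidation, with hypothetical stand-in types rather than the real `Session` and `StableHashingContext`:

struct UnstableOptions {
    incremental_ignore_spans: bool,
}

struct Session {
    unstable_opts: UnstableOptions,
}

struct StableHashingContext {
    hash_spans: bool,
}

impl StableHashingContext {
    // Previously there were three constructors (`new`, `ignore_spans`, and the
    // private `new_with_or_without_spans` both delegated to). A single `new`
    // remains, and span hashing is driven solely by the session option.
    fn new(sess: &Session) -> Self {
        StableHashingContext { hash_spans: !sess.unstable_opts.incremental_ignore_spans }
    }
}

fn main() {
    let sess = Session { unstable_opts: UnstableOptions { incremental_ignore_spans: false } };
    let hcx = StableHashingContext::new(&sess);
    println!("hash spans: {}", hcx.hash_spans);
}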
diff --git a/compiler/rustc_query_system/src/query/caches.rs b/compiler/rustc_query_system/src/query/caches.rs
index 85c5af72e..4c4680b5d 100644
--- a/compiler/rustc_query_system/src/query/caches.rs
+++ b/compiler/rustc_query_system/src/query/caches.rs
@@ -8,13 +8,17 @@ use rustc_data_structures::sharded::Sharded;
#[cfg(not(parallel_compiler))]
use rustc_data_structures::sync::Lock;
use rustc_data_structures::sync::WorkerLocal;
+use rustc_index::vec::{Idx, IndexVec};
use std::default::Default;
use std::fmt::Debug;
use std::hash::Hash;
use std::marker::PhantomData;
-pub trait CacheSelector<K, V> {
- type Cache;
+pub trait CacheSelector<'tcx, V> {
+ type Cache
+ where
+ V: Clone;
+ type ArenaCache;
}
pub trait QueryStorage {
@@ -47,10 +51,13 @@ pub trait QueryCache: QueryStorage + Sized {
fn iter(&self, f: &mut dyn FnMut(&Self::Key, &Self::Value, DepNodeIndex));
}
-pub struct DefaultCacheSelector;
+pub struct DefaultCacheSelector<K>(PhantomData<K>);
-impl<K: Eq + Hash, V: Clone> CacheSelector<K, V> for DefaultCacheSelector {
- type Cache = DefaultCache<K, V>;
+impl<'tcx, K: Eq + Hash, V: 'tcx> CacheSelector<'tcx, V> for DefaultCacheSelector<K> {
+ type Cache = DefaultCache<K, V>
+ where
+ V: Clone;
+ type ArenaCache = ArenaCache<'tcx, K, V>;
}
pub struct DefaultCache<K, V> {
@@ -110,6 +117,8 @@ where
let mut lock = self.cache.get_shard_by_value(&key).lock();
#[cfg(not(parallel_compiler))]
let mut lock = self.cache.lock();
+ // We may be overwriting another value. This is all right, since the dep-graph
+ // will check that the fingerprint matches.
lock.insert(key, (value.clone(), index));
value
}
@@ -134,12 +143,6 @@ where
}
}
-pub struct ArenaCacheSelector<'tcx>(PhantomData<&'tcx ()>);
-
-impl<'tcx, K: Eq + Hash, V: 'tcx> CacheSelector<K, V> for ArenaCacheSelector<'tcx> {
- type Cache = ArenaCache<'tcx, K, V>;
-}
-
pub struct ArenaCache<'tcx, K, V> {
arena: WorkerLocal<TypedArena<(V, DepNodeIndex)>>,
#[cfg(parallel_compiler)]
@@ -201,6 +204,8 @@ where
let mut lock = self.cache.get_shard_by_value(&key).lock();
#[cfg(not(parallel_compiler))]
let mut lock = self.cache.lock();
+ // We may be overwriting another value. This is all right, since the dep-graph
+ // will check that the fingerprint matches.
lock.insert(key, value);
&value.0
}
@@ -224,3 +229,183 @@ where
}
}
}
+
+pub struct VecCacheSelector<K>(PhantomData<K>);
+
+impl<'tcx, K: Idx, V: 'tcx> CacheSelector<'tcx, V> for VecCacheSelector<K> {
+ type Cache = VecCache<K, V>
+ where
+ V: Clone;
+ type ArenaCache = VecArenaCache<'tcx, K, V>;
+}
+
+pub struct VecCache<K: Idx, V> {
+ #[cfg(parallel_compiler)]
+ cache: Sharded<IndexVec<K, Option<(V, DepNodeIndex)>>>,
+ #[cfg(not(parallel_compiler))]
+ cache: Lock<IndexVec<K, Option<(V, DepNodeIndex)>>>,
+}
+
+impl<K: Idx, V> Default for VecCache<K, V> {
+ fn default() -> Self {
+ VecCache { cache: Default::default() }
+ }
+}
+
+impl<K: Eq + Idx, V: Clone + Debug> QueryStorage for VecCache<K, V> {
+ type Value = V;
+ type Stored = V;
+
+ #[inline]
+ fn store_nocache(&self, value: Self::Value) -> Self::Stored {
+ // We have no dedicated storage
+ value
+ }
+}
+
+impl<K, V> QueryCache for VecCache<K, V>
+where
+ K: Eq + Idx + Clone + Debug,
+ V: Clone + Debug,
+{
+ type Key = K;
+
+ #[inline(always)]
+ fn lookup<R, OnHit>(&self, key: &K, on_hit: OnHit) -> Result<R, ()>
+ where
+ OnHit: FnOnce(&V, DepNodeIndex) -> R,
+ {
+ #[cfg(parallel_compiler)]
+ let lock = self.cache.get_shard_by_hash(key.index() as u64).lock();
+ #[cfg(not(parallel_compiler))]
+ let lock = self.cache.lock();
+ if let Some(Some(value)) = lock.get(*key) {
+ let hit_result = on_hit(&value.0, value.1);
+ Ok(hit_result)
+ } else {
+ Err(())
+ }
+ }
+
+ #[inline]
+ fn complete(&self, key: K, value: V, index: DepNodeIndex) -> Self::Stored {
+ #[cfg(parallel_compiler)]
+ let mut lock = self.cache.get_shard_by_hash(key.index() as u64).lock();
+ #[cfg(not(parallel_compiler))]
+ let mut lock = self.cache.lock();
+ lock.insert(key, (value.clone(), index));
+ value
+ }
+
+ fn iter(&self, f: &mut dyn FnMut(&Self::Key, &Self::Value, DepNodeIndex)) {
+ #[cfg(parallel_compiler)]
+ {
+ let shards = self.cache.lock_shards();
+ for shard in shards.iter() {
+ for (k, v) in shard.iter_enumerated() {
+ if let Some(v) = v {
+ f(&k, &v.0, v.1);
+ }
+ }
+ }
+ }
+ #[cfg(not(parallel_compiler))]
+ {
+ let map = self.cache.lock();
+ for (k, v) in map.iter_enumerated() {
+ if let Some(v) = v {
+ f(&k, &v.0, v.1);
+ }
+ }
+ }
+ }
+}
+
+pub struct VecArenaCache<'tcx, K: Idx, V> {
+ arena: WorkerLocal<TypedArena<(V, DepNodeIndex)>>,
+ #[cfg(parallel_compiler)]
+ cache: Sharded<IndexVec<K, Option<&'tcx (V, DepNodeIndex)>>>,
+ #[cfg(not(parallel_compiler))]
+ cache: Lock<IndexVec<K, Option<&'tcx (V, DepNodeIndex)>>>,
+}
+
+impl<'tcx, K: Idx, V> Default for VecArenaCache<'tcx, K, V> {
+ fn default() -> Self {
+ VecArenaCache {
+ arena: WorkerLocal::new(|_| TypedArena::default()),
+ cache: Default::default(),
+ }
+ }
+}
+
+impl<'tcx, K: Eq + Idx, V: Debug + 'tcx> QueryStorage for VecArenaCache<'tcx, K, V> {
+ type Value = V;
+ type Stored = &'tcx V;
+
+ #[inline]
+ fn store_nocache(&self, value: Self::Value) -> Self::Stored {
+ let value = self.arena.alloc((value, DepNodeIndex::INVALID));
+ let value = unsafe { &*(&value.0 as *const _) };
+ &value
+ }
+}
+
+impl<'tcx, K, V: 'tcx> QueryCache for VecArenaCache<'tcx, K, V>
+where
+ K: Eq + Idx + Clone + Debug,
+ V: Debug,
+{
+ type Key = K;
+
+ #[inline(always)]
+ fn lookup<R, OnHit>(&self, key: &K, on_hit: OnHit) -> Result<R, ()>
+ where
+ OnHit: FnOnce(&&'tcx V, DepNodeIndex) -> R,
+ {
+ #[cfg(parallel_compiler)]
+ let lock = self.cache.get_shard_by_hash(key.index() as u64).lock();
+ #[cfg(not(parallel_compiler))]
+ let lock = self.cache.lock();
+ if let Some(Some(value)) = lock.get(*key) {
+ let hit_result = on_hit(&&value.0, value.1);
+ Ok(hit_result)
+ } else {
+ Err(())
+ }
+ }
+
+ #[inline]
+ fn complete(&self, key: K, value: V, index: DepNodeIndex) -> Self::Stored {
+ let value = self.arena.alloc((value, index));
+ let value = unsafe { &*(value as *const _) };
+ #[cfg(parallel_compiler)]
+ let mut lock = self.cache.get_shard_by_hash(key.index() as u64).lock();
+ #[cfg(not(parallel_compiler))]
+ let mut lock = self.cache.lock();
+ lock.insert(key, value);
+ &value.0
+ }
+
+ fn iter(&self, f: &mut dyn FnMut(&Self::Key, &Self::Value, DepNodeIndex)) {
+ #[cfg(parallel_compiler)]
+ {
+ let shards = self.cache.lock_shards();
+ for shard in shards.iter() {
+ for (k, v) in shard.iter_enumerated() {
+ if let Some(v) = v {
+ f(&k, &v.0, v.1);
+ }
+ }
+ }
+ }
+ #[cfg(not(parallel_compiler))]
+ {
+ let map = self.cache.lock();
+ for (k, v) in map.iter_enumerated() {
+ if let Some(v) = v {
+ f(&k, &v.0, v.1);
+ }
+ }
+ }
+ }
+}
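
caches.rs gains `VecCache`/`VecArenaCache`, which store results for queries whose keys are dense indices in an `IndexVec` instead of a hash map, and `CacheSelector` becomes generic over `'tcx` with an `ArenaCache` associated type. The sketch below captures only the vector-backed idea, using `std` types and a plain `usize` key; the real cache is sharded, lock-protected, and keyed by `Idx` types.

/// A minimal, `std`-only sketch of the idea behind `VecCache`: dense keys
/// index directly into a growable vector of optional slots.
struct VecCache<V> {
    slots: Vec<Option<(V, u32)>>, // (value, dep_node_index)
}

impl<V: Clone> VecCache<V> {
    fn new() -> Self {
        VecCache { slots: Vec::new() }
    }

    /// Analogue of `QueryCache::lookup`: a hit hands the cached value and its
    /// dep-node index to the caller, a miss returns `Err(())`.
    fn lookup<R>(&self, key: usize, on_hit: impl FnOnce(&V, u32) -> R) -> Result<R, ()> {
        match self.slots.get(key) {
            Some(Some((value, index))) => Ok(on_hit(value, *index)),
            _ => Err(()),
        }
    }

    /// Analogue of `QueryCache::complete`: store the result, growing the
    /// vector so that `key` becomes a valid slot.
    fn complete(&mut self, key: usize, value: V, index: u32) -> V {
        if self.slots.len() <= key {
            self.slots.resize_with(key + 1, || None);
        }
        self.slots[key] = Some((value.clone(), index));
        value
    }
}

fn main() {
    let mut cache: VecCache<String> = VecCache::new();
    assert!(cache.lookup(3, |v, _| v.clone()).is_err());
    cache.complete(3, "crate `core`".to_string(), 17);
    let hit = cache.lookup(3, |v, idx| format!("{v} (dep node {idx})")).unwrap();
    println!("{hit}");
}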
diff --git a/compiler/rustc_query_system/src/query/config.rs b/compiler/rustc_query_system/src/query/config.rs
index 0a1cffa3b..7d1b62ab1 100644
--- a/compiler/rustc_query_system/src/query/config.rs
+++ b/compiler/rustc_query_system/src/query/config.rs
@@ -11,59 +11,58 @@ use rustc_data_structures::fingerprint::Fingerprint;
use std::fmt::Debug;
use std::hash::Hash;
-pub trait QueryConfig {
+pub trait QueryConfig<Qcx: QueryContext> {
const NAME: &'static str;
type Key: Eq + Hash + Clone + Debug;
- type Value;
- type Stored: Clone;
+ type Value: Debug;
+ type Stored: Debug + Clone + std::borrow::Borrow<Self::Value>;
+
+ type Cache: QueryCache<Key = Self::Key, Stored = Self::Stored, Value = Self::Value>;
+
+ // Don't use this method to access query results, instead use the methods on TyCtxt
+ fn query_state<'a>(tcx: Qcx) -> &'a QueryState<Self::Key>
+ where
+ Qcx: 'a;
+
+ // Don't use this method to access query results, instead use the methods on TyCtxt
+ fn query_cache<'a>(tcx: Qcx) -> &'a Self::Cache
+ where
+ Qcx: 'a;
+
+ // Don't use this method to compute query results, instead use the methods on TyCtxt
+ fn make_vtable(tcx: Qcx, key: &Self::Key) -> QueryVTable<Qcx, Self::Key, Self::Value>;
+
+ fn cache_on_disk(tcx: Qcx::DepContext, key: &Self::Key) -> bool;
+
+ // Don't use this method to compute query results, instead use the methods on TyCtxt
+ fn execute_query(tcx: Qcx::DepContext, k: Self::Key) -> Self::Stored;
}
#[derive(Copy, Clone)]
-pub struct QueryVTable<CTX: QueryContext, K, V> {
+pub struct QueryVTable<Qcx: QueryContext, K, V> {
pub anon: bool,
- pub dep_kind: CTX::DepKind,
+ pub dep_kind: Qcx::DepKind,
pub eval_always: bool,
pub depth_limit: bool,
+ pub feedable: bool,
- pub compute: fn(CTX::DepContext, K) -> V,
+ pub compute: fn(Qcx::DepContext, K) -> V,
pub hash_result: Option<fn(&mut StableHashingContext<'_>, &V) -> Fingerprint>,
pub handle_cycle_error: HandleCycleError,
// NOTE: this is also `None` if `cache_on_disk()` returns false, not just if it's unsupported by the query
- pub try_load_from_disk: Option<fn(CTX, SerializedDepNodeIndex) -> Option<V>>,
+ pub try_load_from_disk: Option<fn(Qcx, SerializedDepNodeIndex) -> Option<V>>,
}
-impl<CTX: QueryContext, K, V> QueryVTable<CTX, K, V> {
- pub(crate) fn to_dep_node(&self, tcx: CTX::DepContext, key: &K) -> DepNode<CTX::DepKind>
+impl<Qcx: QueryContext, K, V> QueryVTable<Qcx, K, V> {
+ pub(crate) fn to_dep_node(&self, tcx: Qcx::DepContext, key: &K) -> DepNode<Qcx::DepKind>
where
- K: crate::dep_graph::DepNodeParams<CTX::DepContext>,
+ K: crate::dep_graph::DepNodeParams<Qcx::DepContext>,
{
DepNode::construct(tcx, self.dep_kind, key)
}
- pub(crate) fn compute(&self, tcx: CTX::DepContext, key: K) -> V {
+ pub(crate) fn compute(&self, tcx: Qcx::DepContext, key: K) -> V {
(self.compute)(tcx, key)
}
}
-
-pub trait QueryDescription<CTX: QueryContext>: QueryConfig {
- type Cache: QueryCache<Key = Self::Key, Stored = Self::Stored, Value = Self::Value>;
-
- // Don't use this method to access query results, instead use the methods on TyCtxt
- fn query_state<'a>(tcx: CTX) -> &'a QueryState<Self::Key>
- where
- CTX: 'a;
-
- // Don't use this method to access query results, instead use the methods on TyCtxt
- fn query_cache<'a>(tcx: CTX) -> &'a Self::Cache
- where
- CTX: 'a;
-
- // Don't use this method to compute query results, instead use the methods on TyCtxt
- fn make_vtable(tcx: CTX, key: &Self::Key) -> QueryVTable<CTX, Self::Key, Self::Value>;
-
- fn cache_on_disk(tcx: CTX::DepContext, key: &Self::Key) -> bool;
-
- // Don't use this method to compute query results, instead use the methods on TyCtxt
- fn execute_query(tcx: CTX::DepContext, k: Self::Key) -> Self::Stored;
-}
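
config.rs folds the old `QueryDescription` trait into `QueryConfig`, which is now generic over the query context and carries the `Cache` associated type and accessor methods directly (the vtable also gains a `feedable` flag). A hypothetical, much-reduced sketch of the resulting trait shape, with stand-in types in place of the real query machinery:

use std::fmt::Debug;
use std::hash::Hash;
use std::marker::PhantomData;

trait QueryContext: Copy {}

trait QueryCache {
    type Key;
    type Value;
}

#[allow(dead_code)] // only used at the type level in this sketch
struct HashCache<K, V>(PhantomData<(K, V)>);

impl<K: Eq + Hash, V> QueryCache for HashCache<K, V> {
    type Key = K;
    type Value = V;
}

/// After the refactor a single trait, generic over the query context `Qcx`,
/// carries both the type-level description (`Key`, `Value`, `Cache`) and the
/// accessors that used to live on `QueryDescription`.
trait QueryConfig<Qcx: QueryContext> {
    const NAME: &'static str;

    type Key: Eq + Hash + Clone + Debug;
    type Value: Debug;
    type Cache: QueryCache<Key = Self::Key, Value = Self::Value>;

    // In rustc this goes through per-context state and caches; a direct
    // computation is enough for the sketch.
    fn execute_query(qcx: Qcx, key: Self::Key) -> Self::Value;
}

#[derive(Clone, Copy)]
struct DemoCtxt;
impl QueryContext for DemoCtxt {}

struct TypeOfQuery;

impl QueryConfig<DemoCtxt> for TypeOfQuery {
    const NAME: &'static str = "type_of";
    type Key = u32;
    type Value = String;
    type Cache = HashCache<u32, String>;

    fn execute_query(_qcx: DemoCtxt, key: u32) -> String {
        format!("Ty#{key}")
    }
}

fn main() {
    println!("{} -> {}", TypeOfQuery::NAME, TypeOfQuery::execute_query(DemoCtxt, 7));
}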
diff --git a/compiler/rustc_query_system/src/query/job.rs b/compiler/rustc_query_system/src/query/job.rs
index ed65393f5..49bbcf578 100644
--- a/compiler/rustc_query_system/src/query/job.rs
+++ b/compiler/rustc_query_system/src/query/job.rs
@@ -596,8 +596,8 @@ pub(crate) fn report_cycle<'a>(
cycle_diag.into_diagnostic(&sess.parse_sess.span_diagnostic)
}
-pub fn print_query_stack<CTX: QueryContext>(
- tcx: CTX,
+pub fn print_query_stack<Qcx: QueryContext>(
+ qcx: Qcx,
mut current_query: Option<QueryJobId>,
handler: &Handler,
num_frames: Option<usize>,
@@ -606,7 +606,7 @@ pub fn print_query_stack<CTX: QueryContext>(
// a panic hook, which means that the global `Handler` may be in a weird
// state if it was responsible for triggering the panic.
let mut i = 0;
- let query_map = tcx.try_collect_active_jobs();
+ let query_map = qcx.try_collect_active_jobs();
while let Some(query) = current_query {
if Some(i) == num_frames {
diff --git a/compiler/rustc_query_system/src/query/mod.rs b/compiler/rustc_query_system/src/query/mod.rs
index 118703fc0..7f3dc50d2 100644
--- a/compiler/rustc_query_system/src/query/mod.rs
+++ b/compiler/rustc_query_system/src/query/mod.rs
@@ -8,11 +8,11 @@ pub use self::job::{print_query_stack, QueryInfo, QueryJob, QueryJobId, QueryJob
mod caches;
pub use self::caches::{
- ArenaCacheSelector, CacheSelector, DefaultCacheSelector, QueryCache, QueryStorage,
+ CacheSelector, DefaultCacheSelector, QueryCache, QueryStorage, VecCacheSelector,
};
mod config;
-pub use self::config::{QueryConfig, QueryDescription, QueryVTable};
+pub use self::config::{QueryConfig, QueryVTable};
use crate::dep_graph::{DepNodeIndex, HasDepContext, SerializedDepNodeIndex};
use rustc_data_structures::sync::Lock;
diff --git a/compiler/rustc_query_system/src/query/plumbing.rs b/compiler/rustc_query_system/src/query/plumbing.rs
index 15b89daa6..848fa67e3 100644
--- a/compiler/rustc_query_system/src/query/plumbing.rs
+++ b/compiler/rustc_query_system/src/query/plumbing.rs
@@ -3,8 +3,9 @@
//! manage the caches, and so forth.
use crate::dep_graph::{DepContext, DepNode, DepNodeIndex, DepNodeParams};
+use crate::ich::StableHashingContext;
use crate::query::caches::QueryCache;
-use crate::query::config::{QueryDescription, QueryVTable};
+use crate::query::config::QueryVTable;
use crate::query::job::{report_cycle, QueryInfo, QueryJob, QueryJobId, QueryJobInfo};
use crate::query::{QueryContext, QueryMap, QuerySideEffects, QueryStackFrame};
use crate::values::Value;
@@ -19,6 +20,7 @@ use rustc_data_structures::sync::Lock;
use rustc_errors::{DiagnosticBuilder, ErrorGuaranteed, FatalError};
use rustc_session::Session;
use rustc_span::{Span, DUMMY_SP};
+use std::borrow::Borrow;
use std::cell::Cell;
use std::collections::hash_map::Entry;
use std::fmt::Debug;
@@ -27,6 +29,8 @@ use std::mem;
use std::ptr;
use thin_vec::ThinVec;
+use super::QueryConfig;
+
pub struct QueryState<K> {
#[cfg(parallel_compiler)]
active: Sharded<FxHashMap<K, QueryResult>>,
@@ -60,10 +64,10 @@ where
}
}
- pub fn try_collect_active_jobs<CTX: Copy>(
+ pub fn try_collect_active_jobs<Qcx: Copy>(
&self,
- tcx: CTX,
- make_query: fn(CTX, K) -> QueryStackFrame,
+ qcx: Qcx,
+ make_query: fn(Qcx, K) -> QueryStackFrame,
jobs: &mut QueryMap,
) -> Option<()> {
#[cfg(parallel_compiler)]
@@ -74,7 +78,7 @@ where
for shard in shards.iter() {
for (k, v) in shard.iter() {
if let QueryResult::Started(ref job) = *v {
- let query = make_query(tcx, k.clone());
+ let query = make_query(qcx, k.clone());
jobs.insert(job.id, QueryJobInfo { query, job: job.clone() });
}
}
@@ -88,7 +92,7 @@ where
// really hurt much.)
for (k, v) in self.active.try_lock()?.iter() {
if let QueryResult::Started(ref job) = *v {
- let query = make_query(tcx, k.clone());
+ let query = make_query(qcx, k.clone());
jobs.insert(job.id, QueryJobInfo { query, job: job.clone() });
}
}
@@ -117,31 +121,31 @@ where
#[cold]
#[inline(never)]
-fn mk_cycle<CTX, V, R>(
- tcx: CTX,
+fn mk_cycle<Qcx, V, R>(
+ qcx: Qcx,
cycle_error: CycleError,
handler: HandleCycleError,
cache: &dyn crate::query::QueryStorage<Value = V, Stored = R>,
) -> R
where
- CTX: QueryContext,
- V: std::fmt::Debug + Value<CTX::DepContext>,
+ Qcx: QueryContext,
+ V: std::fmt::Debug + Value<Qcx::DepContext>,
R: Clone,
{
- let error = report_cycle(tcx.dep_context().sess(), &cycle_error);
- let value = handle_cycle_error(*tcx.dep_context(), &cycle_error, error, handler);
+ let error = report_cycle(qcx.dep_context().sess(), &cycle_error);
+ let value = handle_cycle_error(*qcx.dep_context(), &cycle_error, error, handler);
cache.store_nocache(value)
}
-fn handle_cycle_error<CTX, V>(
- tcx: CTX,
+fn handle_cycle_error<Tcx, V>(
+ tcx: Tcx,
cycle_error: &CycleError,
mut error: DiagnosticBuilder<'_, ErrorGuaranteed>,
handler: HandleCycleError,
) -> V
where
- CTX: DepContext,
- V: Value<CTX>,
+ Tcx: DepContext,
+ V: Value<Tcx>,
{
use HandleCycleError::*;
match handler {
@@ -174,14 +178,14 @@ where
/// This function is inlined because that results in a noticeable speed-up
/// for some compile-time benchmarks.
#[inline(always)]
- fn try_start<'b, CTX>(
- tcx: &'b CTX,
+ fn try_start<'b, Qcx>(
+ qcx: &'b Qcx,
state: &'b QueryState<K>,
span: Span,
key: K,
) -> TryGetJob<'b, K>
where
- CTX: QueryContext,
+ Qcx: QueryContext,
{
#[cfg(parallel_compiler)]
let mut state_lock = state.active.get_shard_by_value(&key).lock();
@@ -191,8 +195,8 @@ where
match lock.entry(key) {
Entry::Vacant(entry) => {
- let id = tcx.next_job_id();
- let job = tcx.current_query_job();
+ let id = qcx.next_job_id();
+ let job = qcx.current_query_job();
let job = QueryJob::new(id, span, job);
let key = entry.key().clone();
@@ -211,8 +215,8 @@ where
// If we are single-threaded we know that we have cycle error,
// so we just return the error.
return TryGetJob::Cycle(id.find_cycle_in_stack(
- tcx.try_collect_active_jobs().unwrap(),
- &tcx.current_query_job(),
+ qcx.try_collect_active_jobs().unwrap(),
+ &qcx.current_query_job(),
span,
));
}
@@ -221,7 +225,7 @@ where
// For parallel queries, we'll block and wait until the query running
// in another thread has completed. Record how long we wait in the
// self-profiler.
- let query_blocked_prof_timer = tcx.dep_context().profiler().query_blocked();
+ let query_blocked_prof_timer = qcx.dep_context().profiler().query_blocked();
// Get the latch out
let latch = job.latch();
@@ -230,7 +234,7 @@ where
// With parallel queries we might just have to wait on some other
// thread.
- let result = latch.wait_on(tcx.current_query_job(), span);
+ let result = latch.wait_on(qcx.current_query_job(), span);
match result {
Ok(()) => TryGetJob::JobCompleted(query_blocked_prof_timer),
@@ -333,8 +337,8 @@ where
/// which will be used if the query is not in the cache and we need
/// to compute it.
#[inline]
-pub fn try_get_cached<'a, CTX, C, R, OnHit>(
- tcx: CTX,
+pub fn try_get_cached<'a, Tcx, C, R, OnHit>(
+ tcx: Tcx,
cache: &'a C,
key: &C::Key,
// `on_hit` can be called while holding a lock to the query cache
@@ -342,7 +346,7 @@ pub fn try_get_cached<'a, CTX, C, R, OnHit>(
) -> Result<R, ()>
where
C: QueryCache,
- CTX: DepContext,
+ Tcx: DepContext,
OnHit: FnOnce(&C::Stored) -> R,
{
cache.lookup(&key, |value, index| {
@@ -354,29 +358,44 @@ where
})
}
-fn try_execute_query<CTX, C>(
- tcx: CTX,
+fn try_execute_query<Qcx, C>(
+ qcx: Qcx,
state: &QueryState<C::Key>,
cache: &C,
span: Span,
key: C::Key,
- dep_node: Option<DepNode<CTX::DepKind>>,
- query: &QueryVTable<CTX, C::Key, C::Value>,
+ dep_node: Option<DepNode<Qcx::DepKind>>,
+ query: &QueryVTable<Qcx, C::Key, C::Value>,
) -> (C::Stored, Option<DepNodeIndex>)
where
C: QueryCache,
- C::Key: Clone + DepNodeParams<CTX::DepContext>,
- C::Value: Value<CTX::DepContext>,
- CTX: QueryContext,
+ C::Key: Clone + DepNodeParams<Qcx::DepContext>,
+ C::Value: Value<Qcx::DepContext>,
+ C::Stored: Debug + std::borrow::Borrow<C::Value>,
+ Qcx: QueryContext,
{
- match JobOwner::<'_, C::Key>::try_start(&tcx, state, span, key.clone()) {
+ match JobOwner::<'_, C::Key>::try_start(&qcx, state, span, key.clone()) {
TryGetJob::NotYetStarted(job) => {
- let (result, dep_node_index) = execute_job(tcx, key, dep_node, query, job.id);
+ let (result, dep_node_index) = execute_job(qcx, key.clone(), dep_node, query, job.id);
+ if query.feedable {
+ // We may have put a value inside the cache from inside the execution.
+ // Verify that it has the same hash as what we have now, to ensure consistency.
+ let _ = cache.lookup(&key, |cached_result, _| {
+ let hasher = query.hash_result.expect("feedable forbids no_hash");
+ let old_hash = qcx.dep_context().with_stable_hashing_context(|mut hcx| hasher(&mut hcx, cached_result.borrow()));
+ let new_hash = qcx.dep_context().with_stable_hashing_context(|mut hcx| hasher(&mut hcx, &result));
+ debug_assert_eq!(
+ old_hash, new_hash,
+ "Computed query value for {:?}({:?}) is inconsistent with fed value,\ncomputed={:#?}\nfed={:#?}",
+ query.dep_kind, key, result, cached_result,
+ );
+ });
+ }
let result = job.complete(cache, result, dep_node_index);
(result, Some(dep_node_index))
}
TryGetJob::Cycle(error) => {
- let result = mk_cycle(tcx, error, query.handle_cycle_error, cache);
+ let result = mk_cycle(qcx, error, query.handle_cycle_error, cache);
(result, None)
}
#[cfg(parallel_compiler)]
@@ -385,8 +404,8 @@ where
.lookup(&key, |value, index| (value.clone(), index))
.unwrap_or_else(|_| panic!("value must be in cache after waiting"));
- if std::intrinsics::unlikely(tcx.dep_context().profiler().enabled()) {
- tcx.dep_context().profiler().query_cache_hit(index.into());
+ if std::intrinsics::unlikely(qcx.dep_context().profiler().enabled()) {
+ qcx.dep_context().profiler().query_cache_hit(index.into());
}
query_blocked_prof_timer.finish_with_query_invocation_id(index.into());
@@ -395,25 +414,25 @@ where
}
}
-fn execute_job<CTX, K, V>(
- tcx: CTX,
+fn execute_job<Qcx, K, V>(
+ qcx: Qcx,
key: K,
- mut dep_node_opt: Option<DepNode<CTX::DepKind>>,
- query: &QueryVTable<CTX, K, V>,
+ mut dep_node_opt: Option<DepNode<Qcx::DepKind>>,
+ query: &QueryVTable<Qcx, K, V>,
job_id: QueryJobId,
) -> (V, DepNodeIndex)
where
- K: Clone + DepNodeParams<CTX::DepContext>,
+ K: Clone + DepNodeParams<Qcx::DepContext>,
V: Debug,
- CTX: QueryContext,
+ Qcx: QueryContext,
{
- let dep_graph = tcx.dep_context().dep_graph();
+ let dep_graph = qcx.dep_context().dep_graph();
// Fast path for when incr. comp. is off.
if !dep_graph.is_fully_enabled() {
- let prof_timer = tcx.dep_context().profiler().query_provider();
- let result = tcx.start_query(job_id, query.depth_limit, None, || {
- query.compute(*tcx.dep_context(), key)
+ let prof_timer = qcx.dep_context().profiler().query_provider();
+ let result = qcx.start_query(job_id, query.depth_limit, None, || {
+ query.compute(*qcx.dep_context(), key)
});
let dep_node_index = dep_graph.next_virtual_depnode_index();
prof_timer.finish_with_query_invocation_id(dep_node_index.into());
@@ -423,33 +442,33 @@ where
if !query.anon && !query.eval_always {
// `to_dep_node` is expensive for some `DepKind`s.
let dep_node =
- dep_node_opt.get_or_insert_with(|| query.to_dep_node(*tcx.dep_context(), &key));
+ dep_node_opt.get_or_insert_with(|| query.to_dep_node(*qcx.dep_context(), &key));
// The diagnostics for this query will be promoted to the current session during
// `try_mark_green()`, so we can ignore them here.
- if let Some(ret) = tcx.start_query(job_id, false, None, || {
- try_load_from_disk_and_cache_in_memory(tcx, &key, &dep_node, query)
+ if let Some(ret) = qcx.start_query(job_id, false, None, || {
+ try_load_from_disk_and_cache_in_memory(qcx, &key, &dep_node, query)
}) {
return ret;
}
}
- let prof_timer = tcx.dep_context().profiler().query_provider();
+ let prof_timer = qcx.dep_context().profiler().query_provider();
let diagnostics = Lock::new(ThinVec::new());
let (result, dep_node_index) =
- tcx.start_query(job_id, query.depth_limit, Some(&diagnostics), || {
+ qcx.start_query(job_id, query.depth_limit, Some(&diagnostics), || {
if query.anon {
- return dep_graph.with_anon_task(*tcx.dep_context(), query.dep_kind, || {
- query.compute(*tcx.dep_context(), key)
+ return dep_graph.with_anon_task(*qcx.dep_context(), query.dep_kind, || {
+ query.compute(*qcx.dep_context(), key)
});
}
// `to_dep_node` is expensive for some `DepKind`s.
let dep_node =
- dep_node_opt.unwrap_or_else(|| query.to_dep_node(*tcx.dep_context(), &key));
+ dep_node_opt.unwrap_or_else(|| query.to_dep_node(*qcx.dep_context(), &key));
- dep_graph.with_task(dep_node, *tcx.dep_context(), key, query.compute, query.hash_result)
+ dep_graph.with_task(dep_node, *qcx.dep_context(), key, query.compute, query.hash_result)
});
prof_timer.finish_with_query_invocation_id(dep_node_index.into());
@@ -459,55 +478,55 @@ where
if std::intrinsics::unlikely(!side_effects.is_empty()) {
if query.anon {
- tcx.store_side_effects_for_anon_node(dep_node_index, side_effects);
+ qcx.store_side_effects_for_anon_node(dep_node_index, side_effects);
} else {
- tcx.store_side_effects(dep_node_index, side_effects);
+ qcx.store_side_effects(dep_node_index, side_effects);
}
}
(result, dep_node_index)
}
-fn try_load_from_disk_and_cache_in_memory<CTX, K, V>(
- tcx: CTX,
+fn try_load_from_disk_and_cache_in_memory<Qcx, K, V>(
+ qcx: Qcx,
key: &K,
- dep_node: &DepNode<CTX::DepKind>,
- query: &QueryVTable<CTX, K, V>,
+ dep_node: &DepNode<Qcx::DepKind>,
+ query: &QueryVTable<Qcx, K, V>,
) -> Option<(V, DepNodeIndex)>
where
K: Clone,
- CTX: QueryContext,
+ Qcx: QueryContext,
V: Debug,
{
// Note this function can be called concurrently from the same query
// We must ensure that this is handled correctly.
- let dep_graph = tcx.dep_context().dep_graph();
- let (prev_dep_node_index, dep_node_index) = dep_graph.try_mark_green(tcx, &dep_node)?;
+ let dep_graph = qcx.dep_context().dep_graph();
+ let (prev_dep_node_index, dep_node_index) = dep_graph.try_mark_green(qcx, &dep_node)?;
debug_assert!(dep_graph.is_green(dep_node));
// First we try to load the result from the on-disk cache.
// Some things are never cached on disk.
if let Some(try_load_from_disk) = query.try_load_from_disk {
- let prof_timer = tcx.dep_context().profiler().incr_cache_loading();
+ let prof_timer = qcx.dep_context().profiler().incr_cache_loading();
// The call to `with_query_deserialization` enforces that no new `DepNodes`
// are created during deserialization. See the docs of that method for more
// details.
let result =
- dep_graph.with_query_deserialization(|| try_load_from_disk(tcx, prev_dep_node_index));
+ dep_graph.with_query_deserialization(|| try_load_from_disk(qcx, prev_dep_node_index));
prof_timer.finish_with_query_invocation_id(dep_node_index.into());
if let Some(result) = result {
if std::intrinsics::unlikely(
- tcx.dep_context().sess().opts.unstable_opts.query_dep_graph,
+ qcx.dep_context().sess().opts.unstable_opts.query_dep_graph,
) {
dep_graph.mark_debug_loaded_from_disk(*dep_node)
}
- let prev_fingerprint = tcx
+ let prev_fingerprint = qcx
.dep_context()
.dep_graph()
.prev_fingerprint_of(dep_node)
@@ -521,9 +540,9 @@ where
// give us some coverage of potential bugs though.
let try_verify = prev_fingerprint.as_value().1 % 32 == 0;
if std::intrinsics::unlikely(
- try_verify || tcx.dep_context().sess().opts.unstable_opts.incremental_verify_ich,
+ try_verify || qcx.dep_context().sess().opts.unstable_opts.incremental_verify_ich,
) {
- incremental_verify_ich(*tcx.dep_context(), &result, dep_node, query);
+ incremental_verify_ich(*qcx.dep_context(), &result, dep_node, query.hash_result);
}
return Some((result, dep_node_index));
@@ -532,7 +551,7 @@ where
// We always expect to find a cached result for things that
// can be forced from `DepNode`.
debug_assert!(
- !tcx.dep_context().fingerprint_style(dep_node.kind).reconstructible(),
+ !qcx.dep_context().fingerprint_style(dep_node.kind).reconstructible(),
"missing on-disk cache entry for {:?}",
dep_node
);
@@ -540,10 +559,10 @@ where
// We could not load a result from the on-disk cache, so
// recompute.
- let prof_timer = tcx.dep_context().profiler().query_provider();
+ let prof_timer = qcx.dep_context().profiler().query_provider();
// The dep-graph for this computation is already in-place.
- let result = dep_graph.with_ignore(|| query.compute(*tcx.dep_context(), key.clone()));
+ let result = dep_graph.with_ignore(|| query.compute(*qcx.dep_context(), key.clone()));
prof_timer.finish_with_query_invocation_id(dep_node_index.into());
@@ -556,18 +575,20 @@ where
//
// See issue #82920 for an example of a miscompilation that would get turned into
// an ICE by this check
- incremental_verify_ich(*tcx.dep_context(), &result, dep_node, query);
+ incremental_verify_ich(*qcx.dep_context(), &result, dep_node, query.hash_result);
Some((result, dep_node_index))
}
-fn incremental_verify_ich<CTX, K, V: Debug>(
- tcx: CTX::DepContext,
+#[instrument(skip(tcx, result, hash_result), level = "debug")]
+pub(crate) fn incremental_verify_ich<Tcx, V: Debug>(
+ tcx: Tcx,
result: &V,
- dep_node: &DepNode<CTX::DepKind>,
- query: &QueryVTable<CTX, K, V>,
-) where
- CTX: QueryContext,
+ dep_node: &DepNode<Tcx::DepKind>,
+ hash_result: Option<fn(&mut StableHashingContext<'_>, &V) -> Fingerprint>,
+) -> Fingerprint
+where
+ Tcx: DepContext,
{
assert!(
tcx.dep_graph().is_green(dep_node),
@@ -575,16 +596,21 @@ fn incremental_verify_ich<CTX, K, V: Debug>(
dep_node,
);
- debug!("BEGIN verify_ich({:?})", dep_node);
- let new_hash = query.hash_result.map_or(Fingerprint::ZERO, |f| {
+ let new_hash = hash_result.map_or(Fingerprint::ZERO, |f| {
tcx.with_stable_hashing_context(|mut hcx| f(&mut hcx, result))
});
+
let old_hash = tcx.dep_graph().prev_fingerprint_of(dep_node);
- debug!("END verify_ich({:?})", dep_node);
if Some(new_hash) != old_hash {
- incremental_verify_ich_cold(tcx.sess(), DebugArg::from(&dep_node), DebugArg::from(&result));
+ incremental_verify_ich_failed(
+ tcx.sess(),
+ DebugArg::from(&dep_node),
+ DebugArg::from(&result),
+ );
}
+
+ new_hash
}
// This DebugArg business is largely a mirror of std::fmt::ArgumentV1, which is
@@ -629,13 +655,7 @@ impl std::fmt::Debug for DebugArg<'_> {
// different implementations for LLVM to chew on (and filling up the final
// binary, too).
#[cold]
-fn incremental_verify_ich_cold(sess: &Session, dep_node: DebugArg<'_>, result: DebugArg<'_>) {
- let run_cmd = if let Some(crate_name) = &sess.opts.crate_name {
- format!("`cargo clean -p {}` or `cargo clean`", crate_name)
- } else {
- "`cargo clean`".to_string()
- };
-
+fn incremental_verify_ich_failed(sess: &Session, dep_node: DebugArg<'_>, result: DebugArg<'_>) {
// When we emit an error message and panic, we try to debug-print the `DepNode`
// and query result. Unfortunately, this can cause us to run additional queries,
// which may result in another fingerprint mismatch while we're in the middle
@@ -651,6 +671,12 @@ fn incremental_verify_ich_cold(sess: &Session, dep_node: DebugArg<'_>, result: D
if old_in_panic {
sess.emit_err(crate::error::Reentrant);
} else {
+ let run_cmd = if let Some(crate_name) = &sess.opts.crate_name {
+ format!("`cargo clean -p {}` or `cargo clean`", crate_name)
+ } else {
+ "`cargo clean`".to_string()
+ };
+
sess.emit_err(crate::error::IncrementCompilation {
run_cmd,
dep_node: format!("{:?}", dep_node),
@@ -670,14 +696,14 @@ fn incremental_verify_ich_cold(sess: &Session, dep_node: DebugArg<'_>, result: D
///
/// Note: The optimization is only available during incr. comp.
#[inline(never)]
-fn ensure_must_run<CTX, K, V>(
- tcx: CTX,
+fn ensure_must_run<Qcx, K, V>(
+ qcx: Qcx,
key: &K,
- query: &QueryVTable<CTX, K, V>,
-) -> (bool, Option<DepNode<CTX::DepKind>>)
+ query: &QueryVTable<Qcx, K, V>,
+) -> (bool, Option<DepNode<Qcx::DepKind>>)
where
- K: crate::dep_graph::DepNodeParams<CTX::DepContext>,
- CTX: QueryContext,
+ K: crate::dep_graph::DepNodeParams<Qcx::DepContext>,
+ Qcx: QueryContext,
{
if query.eval_always {
return (true, None);
@@ -686,10 +712,10 @@ where
// Ensuring an anonymous query makes no sense
assert!(!query.anon);
- let dep_node = query.to_dep_node(*tcx.dep_context(), key);
+ let dep_node = query.to_dep_node(*qcx.dep_context(), key);
- let dep_graph = tcx.dep_context().dep_graph();
- match dep_graph.try_mark_green(tcx, &dep_node) {
+ let dep_graph = qcx.dep_context().dep_graph();
+ match dep_graph.try_mark_green(qcx, &dep_node) {
None => {
// A None return from `try_mark_green` means that this is either
// a new dep node or that the dep node has already been marked red.
@@ -701,7 +727,7 @@ where
}
Some((_, dep_node_index)) => {
dep_graph.read_index(dep_node_index);
- tcx.dep_context().profiler().query_cache_hit(dep_node_index.into());
+ qcx.dep_context().profiler().query_cache_hit(dep_node_index.into());
(false, None)
}
}
@@ -713,16 +739,16 @@ pub enum QueryMode {
Ensure,
}
-pub fn get_query<Q, CTX>(tcx: CTX, span: Span, key: Q::Key, mode: QueryMode) -> Option<Q::Stored>
+pub fn get_query<Q, Qcx>(qcx: Qcx, span: Span, key: Q::Key, mode: QueryMode) -> Option<Q::Stored>
where
- Q: QueryDescription<CTX>,
- Q::Key: DepNodeParams<CTX::DepContext>,
- Q::Value: Value<CTX::DepContext>,
- CTX: QueryContext,
+ Q: QueryConfig<Qcx>,
+ Q::Key: DepNodeParams<Qcx::DepContext>,
+ Q::Value: Value<Qcx::DepContext>,
+ Qcx: QueryContext,
{
- let query = Q::make_vtable(tcx, &key);
+ let query = Q::make_vtable(qcx, &key);
let dep_node = if let QueryMode::Ensure = mode {
- let (must_run, dep_node) = ensure_must_run(tcx, &key, &query);
+ let (must_run, dep_node) = ensure_must_run(qcx, &key, &query);
if !must_run {
return None;
}
@@ -732,33 +758,33 @@ where
};
let (result, dep_node_index) = try_execute_query(
- tcx,
- Q::query_state(tcx),
- Q::query_cache(tcx),
+ qcx,
+ Q::query_state(qcx),
+ Q::query_cache(qcx),
span,
key,
dep_node,
&query,
);
if let Some(dep_node_index) = dep_node_index {
- tcx.dep_context().dep_graph().read_index(dep_node_index)
+ qcx.dep_context().dep_graph().read_index(dep_node_index)
}
Some(result)
}
-pub fn force_query<Q, CTX>(tcx: CTX, key: Q::Key, dep_node: DepNode<CTX::DepKind>)
+pub fn force_query<Q, Qcx>(qcx: Qcx, key: Q::Key, dep_node: DepNode<Qcx::DepKind>)
where
- Q: QueryDescription<CTX>,
- Q::Key: DepNodeParams<CTX::DepContext>,
- Q::Value: Value<CTX::DepContext>,
- CTX: QueryContext,
+ Q: QueryConfig<Qcx>,
+ Q::Key: DepNodeParams<Qcx::DepContext>,
+ Q::Value: Value<Qcx::DepContext>,
+ Qcx: QueryContext,
{
// We may be concurrently trying both execute and force a query.
// Ensure that only one of them runs the query.
- let cache = Q::query_cache(tcx);
+ let cache = Q::query_cache(qcx);
let cached = cache.lookup(&key, |_, index| {
- if std::intrinsics::unlikely(tcx.dep_context().profiler().enabled()) {
- tcx.dep_context().profiler().query_cache_hit(index.into());
+ if std::intrinsics::unlikely(qcx.dep_context().profiler().enabled()) {
+ qcx.dep_context().profiler().query_cache_hit(index.into());
}
});
@@ -767,9 +793,9 @@ where
Err(()) => {}
}
- let query = Q::make_vtable(tcx, &key);
- let state = Q::query_state(tcx);
+ let query = Q::make_vtable(qcx, &key);
+ let state = Q::query_state(qcx);
debug_assert!(!query.anon);
- try_execute_query(tcx, state, cache, DUMMY_SP, key, Some(dep_node), &query);
+ try_execute_query(qcx, state, cache, DUMMY_SP, key, Some(dep_node), &query);
}
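
plumbing.rs teaches `try_execute_query` to cross-check feedable queries: if a value was fed into the cache while the query executed, the freshly computed result must produce the same stable hash, and `incremental_verify_ich` now returns the fingerprint it computed. The sketch below shows only the shape of that consistency check; it uses `std`'s `DefaultHasher` as a stand-in for rustc's stable `Fingerprint`, so the hashes are not stable across runs, and `check_fed_consistency` is a hypothetical helper.

use std::collections::hash_map::DefaultHasher;
use std::hash::{Hash, Hasher};

// Stand-in fingerprint: the u64 from `DefaultHasher` is enough to show the
// shape of the check, even though it is not a stable hash.
fn fingerprint<T: Hash>(value: &T) -> u64 {
    let mut hasher = DefaultHasher::new();
    value.hash(&mut hasher);
    hasher.finish()
}

/// Mirrors the new check for feedable queries: a value fed into the cache
/// during execution must hash to the same fingerprint as the computed result.
fn check_fed_consistency<T: Hash + std::fmt::Debug>(computed: &T, fed: Option<&T>) {
    if let Some(fed) = fed {
        let old_hash = fingerprint(fed);
        let new_hash = fingerprint(computed);
        debug_assert_eq!(
            old_hash, new_hash,
            "computed query value {computed:?} is inconsistent with fed value {fed:?}"
        );
    }
}

fn main() {
    let fed = String::from("layout of Foo");
    let computed = String::from("layout of Foo");
    check_fed_consistency(&computed, Some(&fed));
    println!("fed and computed values agree");
}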
diff --git a/compiler/rustc_query_system/src/values.rs b/compiler/rustc_query_system/src/values.rs
index 67fbf14e6..214656abe 100644
--- a/compiler/rustc_query_system/src/values.rs
+++ b/compiler/rustc_query_system/src/values.rs
@@ -1,12 +1,12 @@
use crate::dep_graph::DepContext;
use crate::query::QueryInfo;
-pub trait Value<CTX: DepContext>: Sized {
- fn from_cycle_error(tcx: CTX, cycle: &[QueryInfo]) -> Self;
+pub trait Value<Tcx: DepContext>: Sized {
+ fn from_cycle_error(tcx: Tcx, cycle: &[QueryInfo]) -> Self;
}
-impl<CTX: DepContext, T> Value<CTX> for T {
- default fn from_cycle_error(tcx: CTX, _: &[QueryInfo]) -> T {
+impl<Tcx: DepContext, T> Value<Tcx> for T {
+ default fn from_cycle_error(tcx: Tcx, _: &[QueryInfo]) -> T {
tcx.sess().abort_if_errors();
// Ideally we would use `bug!` here. But bug! is only defined in rustc_middle, and it's
// non-trivial to define it earlier.
diff --git a/compiler/rustc_resolve/Cargo.toml b/compiler/rustc_resolve/Cargo.toml
index d66db1d7a..7c3a0f8f2 100644
--- a/compiler/rustc_resolve/Cargo.toml
+++ b/compiler/rustc_resolve/Cargo.toml
@@ -7,10 +7,8 @@ edition = "2021"
[dependencies]
bitflags = "1.2.1"
-tracing = "0.1"
-rustc_ast = { path = "../rustc_ast" }
rustc_arena = { path = "../rustc_arena" }
-rustc_middle = { path = "../rustc_middle" }
+rustc_ast = { path = "../rustc_ast" }
rustc_ast_pretty = { path = "../rustc_ast_pretty" }
rustc_attr = { path = "../rustc_attr" }
rustc_data_structures = { path = "../rustc_data_structures" }
@@ -19,8 +17,12 @@ rustc_expand = { path = "../rustc_expand" }
rustc_feature = { path = "../rustc_feature" }
rustc_hir = { path = "../rustc_hir" }
rustc_index = { path = "../rustc_index" }
+rustc_macros = { path = "../rustc_macros" }
rustc_metadata = { path = "../rustc_metadata" }
+rustc_middle = { path = "../rustc_middle" }
rustc_query_system = { path = "../rustc_query_system" }
rustc_session = { path = "../rustc_session" }
rustc_span = { path = "../rustc_span" }
smallvec = { version = "1.8.1", features = ["union", "may_dangle"] }
+thin-vec = "0.2.8"
+tracing = "0.1"
diff --git a/compiler/rustc_resolve/src/build_reduced_graph.rs b/compiler/rustc_resolve/src/build_reduced_graph.rs
index a17793ecd..9c90d67aa 100644
--- a/compiler/rustc_resolve/src/build_reduced_graph.rs
+++ b/compiler/rustc_resolve/src/build_reduced_graph.rs
@@ -30,7 +30,7 @@ use rustc_middle::metadata::ModChild;
use rustc_middle::ty::{self, DefIdTree};
use rustc_session::cstore::CrateStore;
use rustc_span::hygiene::{ExpnId, LocalExpnId, MacroKind};
-use rustc_span::source_map::{respan, Spanned};
+use rustc_span::source_map::respan;
use rustc_span::symbol::{kw, sym, Ident, Symbol};
use rustc_span::Span;
@@ -56,21 +56,7 @@ impl<'a, Id: Into<DefId>> ToNameBinding<'a>
impl<'a, Id: Into<DefId>> ToNameBinding<'a> for (Res, ty::Visibility<Id>, Span, LocalExpnId) {
fn to_name_binding(self, arenas: &'a ResolverArenas<'a>) -> &'a NameBinding<'a> {
arenas.alloc_name_binding(NameBinding {
- kind: NameBindingKind::Res(self.0, false),
- ambiguity: None,
- vis: self.1.to_def_id(),
- span: self.2,
- expansion: self.3,
- })
- }
-}
-
-struct IsMacroExport;
-
-impl<'a> ToNameBinding<'a> for (Res, ty::Visibility, Span, LocalExpnId, IsMacroExport) {
- fn to_name_binding(self, arenas: &'a ResolverArenas<'a>) -> &'a NameBinding<'a> {
- arenas.alloc_name_binding(NameBinding {
- kind: NameBindingKind::Res(self.0, true),
+ kind: NameBindingKind::Res(self.0),
ambiguity: None,
vis: self.1.to_def_id(),
span: self.2,
@@ -218,7 +204,9 @@ impl<'a> Resolver<'a> {
}
pub(crate) fn build_reduced_graph_external(&mut self, module: Module<'a>) {
- for child in self.cstore().module_children_untracked(module.def_id(), self.session) {
+ for child in
+ Vec::from_iter(self.cstore().module_children_untracked(module.def_id(), self.session))
+ {
let parent_scope = ParentScope::module(module, self);
BuildReducedGraphVisitor { r: self, parent_scope }
.build_reduced_graph_for_external_crate_res(child);
@@ -343,10 +331,12 @@ impl<'a, 'b> BuildReducedGraphVisitor<'a, 'b> {
.iter()
.map(|field| respan(field.span, field.ident.map_or(kw::Empty, |ident| ident.name)))
.collect();
- self.insert_field_names(def_id, field_names);
+ self.r.field_names.insert(def_id, field_names);
}
- fn insert_field_names(&mut self, def_id: DefId, field_names: Vec<Spanned<Symbol>>) {
+ fn insert_field_names_extern(&mut self, def_id: DefId) {
+ let field_names =
+ self.r.cstore().struct_field_names_untracked(def_id, self.r.session).collect();
self.r.field_names.insert(def_id, field_names);
}
@@ -364,7 +354,6 @@ impl<'a, 'b> BuildReducedGraphVisitor<'a, 'b> {
module_path: Vec<Segment>,
kind: ImportKind<'a>,
span: Span,
- id: NodeId,
item: &ast::Item,
root_span: Span,
root_id: NodeId,
@@ -377,7 +366,6 @@ impl<'a, 'b> BuildReducedGraphVisitor<'a, 'b> {
module_path,
imported_module: Cell::new(None),
span,
- id,
use_span: item.span,
use_span_with_attributes: item.span_with_attributes(),
has_attributes: !item.attrs.is_empty(),
@@ -457,19 +445,13 @@ impl<'a, 'b> BuildReducedGraphVisitor<'a, 'b> {
prefix.is_empty() || prefix.len() == 1 && prefix[0].ident.name == kw::PathRoot
};
match use_tree.kind {
- ast::UseTreeKind::Simple(rename, id1, id2) => {
+ ast::UseTreeKind::Simple(rename) => {
let mut ident = use_tree.ident();
let mut module_path = prefix;
let mut source = module_path.pop().unwrap();
let mut type_ns_only = false;
self.r.visibilities.insert(self.r.local_def_id(id), vis);
- if id1 != ast::DUMMY_NODE_ID {
- self.r.visibilities.insert(self.r.local_def_id(id1), vis);
- }
- if id2 != ast::DUMMY_NODE_ID {
- self.r.visibilities.insert(self.r.local_def_id(id2), vis);
- }
if nested {
// Correctly handle `self`
@@ -485,9 +467,11 @@ impl<'a, 'b> BuildReducedGraphVisitor<'a, 'b> {
}
// Replace `use foo::{ self };` with `use foo;`
+ let self_span = source.ident.span;
source = module_path.pop().unwrap();
if rename.is_none() {
- ident = source.ident;
+ // Keep the span of `self`, but the name of `foo`
+ ident = Ident { name: source.ident.name, span: self_span };
}
}
} else {
@@ -574,27 +558,19 @@ impl<'a, 'b> BuildReducedGraphVisitor<'a, 'b> {
},
type_ns_only,
nested,
- additional_ids: (id1, id2),
+ id,
};
- self.add_import(
- module_path,
- kind,
- use_tree.span,
- id,
- item,
- root_span,
- item.id,
- vis,
- );
+ self.add_import(module_path, kind, use_tree.span, item, root_span, item.id, vis);
}
ast::UseTreeKind::Glob => {
let kind = ImportKind::Glob {
is_prelude: self.r.session.contains_name(&item.attrs, sym::prelude_import),
max_vis: Cell::new(None),
+ id,
};
self.r.visibilities.insert(self.r.local_def_id(id), vis);
- self.add_import(prefix, kind, use_tree.span, id, item, root_span, item.id, vis);
+ self.add_import(prefix, kind, use_tree.span, item, root_span, item.id, vis);
}
ast::UseTreeKind::Nested(ref items) => {
// Ensure there is at most one `self` in the list
@@ -638,11 +614,7 @@ impl<'a, 'b> BuildReducedGraphVisitor<'a, 'b> {
let new_span = prefix[prefix.len() - 1].ident.span;
let tree = ast::UseTree {
prefix: ast::Path::from_ident(Ident::new(kw::SelfLower, new_span)),
- kind: ast::UseTreeKind::Simple(
- Some(Ident::new(kw::Underscore, new_span)),
- ast::DUMMY_NODE_ID,
- ast::DUMMY_NODE_ID,
- ),
+ kind: ast::UseTreeKind::Simple(Some(Ident::new(kw::Underscore, new_span))),
span: use_tree.span,
};
self.build_reduced_graph_for_use_tree(
@@ -768,7 +740,7 @@ impl<'a, 'b> BuildReducedGraphVisitor<'a, 'b> {
// If this is a tuple or unit struct, define a name
// in the value namespace as well.
- if let Some(ctor_node_id) = vdata.ctor_id() {
+ if let Some((ctor_kind, ctor_node_id)) = CtorKind::from_ast(vdata) {
// If the structure is marked as non_exhaustive then lower the visibility
// to within the crate.
let mut ctor_vis = if vis.is_public()
@@ -794,10 +766,8 @@ impl<'a, 'b> BuildReducedGraphVisitor<'a, 'b> {
ret_fields.push(field_vis.to_def_id());
}
let ctor_def_id = self.r.local_def_id(ctor_node_id);
- let ctor_res = Res::Def(
- DefKind::Ctor(CtorOf::Struct, CtorKind::from_ast(vdata)),
- ctor_def_id.to_def_id(),
- );
+ let ctor_res =
+ Res::Def(DefKind::Ctor(CtorOf::Struct, ctor_kind), ctor_def_id.to_def_id());
self.r.define(parent, ident, ValueNS, (ctor_res, ctor_vis, sp, expansion));
self.r.visibilities.insert(ctor_def_id, ctor_vis);
@@ -881,9 +851,8 @@ impl<'a, 'b> BuildReducedGraphVisitor<'a, 'b> {
})
.unwrap_or((true, None, self.r.dummy_binding));
let import = self.r.arenas.alloc_import(Import {
- kind: ImportKind::ExternCrate { source: orig_name, target: ident },
+ kind: ImportKind::ExternCrate { source: orig_name, target: ident, id: item.id },
root_id: item.id,
- id: item.id,
parent_scope: self.parent_scope,
imported_module: Cell::new(module),
has_attributes: !item.attrs.is_empty(),
@@ -1019,10 +988,7 @@ impl<'a, 'b> BuildReducedGraphVisitor<'a, 'b> {
let cstore = self.r.cstore();
match res {
Res::Def(DefKind::Struct, def_id) => {
- let field_names =
- cstore.struct_field_names_untracked(def_id, self.r.session).collect();
- let ctor = cstore.ctor_def_id_and_kind_untracked(def_id);
- if let Some((ctor_def_id, ctor_kind)) = ctor {
+ if let Some((ctor_kind, ctor_def_id)) = cstore.ctor_untracked(def_id) {
let ctor_res = Res::Def(DefKind::Ctor(CtorOf::Struct, ctor_kind), ctor_def_id);
let ctor_vis = cstore.visibility_untracked(ctor_def_id);
let field_visibilities =
@@ -1031,13 +997,9 @@ impl<'a, 'b> BuildReducedGraphVisitor<'a, 'b> {
.struct_constructors
.insert(def_id, (ctor_res, ctor_vis, field_visibilities));
}
- self.insert_field_names(def_id, field_names);
- }
- Res::Def(DefKind::Union, def_id) => {
- let field_names =
- cstore.struct_field_names_untracked(def_id, self.r.session).collect();
- self.insert_field_names(def_id, field_names);
+ self.insert_field_names_extern(def_id)
}
+ Res::Def(DefKind::Union, def_id) => self.insert_field_names_extern(def_id),
Res::Def(DefKind::AssocFn, def_id) => {
if cstore.fn_has_self_parameter_untracked(def_id, self.r.session) {
self.r.has_self.insert(def_id);
@@ -1118,7 +1080,6 @@ impl<'a, 'b> BuildReducedGraphVisitor<'a, 'b> {
this.r.arenas.alloc_import(Import {
kind: ImportKind::MacroUse,
root_id: item.id,
- id: item.id,
parent_scope: this.parent_scope,
imported_module: Cell::new(Some(ModuleOrUniformRoot::Module(module))),
use_span_with_attributes: item.span_with_attributes(),
@@ -1278,8 +1239,22 @@ impl<'a, 'b> BuildReducedGraphVisitor<'a, 'b> {
let binding = (res, vis, span, expansion).to_name_binding(self.r.arenas);
self.r.set_binding_parent_module(binding, parent_scope.module);
if is_macro_export {
- let module = self.r.graph_root;
- self.r.define(module, ident, MacroNS, (res, vis, span, expansion, IsMacroExport));
+ let import = self.r.arenas.alloc_import(Import {
+ kind: ImportKind::MacroExport,
+ root_id: item.id,
+ parent_scope: self.parent_scope,
+ imported_module: Cell::new(None),
+ has_attributes: false,
+ use_span_with_attributes: span,
+ use_span: span,
+ root_span: span,
+ span: span,
+ module_path: Vec::new(),
+ vis: Cell::new(Some(vis)),
+ used: Cell::new(true),
+ });
+ let import_binding = self.r.import(binding, import);
+ self.r.define(self.r.graph_root, ident, MacroNS, import_binding);
} else {
self.r.check_reserved_macro_name(ident, res);
self.insert_unused_macro(ident, def_id, item.id, &rule_spans);
@@ -1526,20 +1501,16 @@ impl<'a, 'b> Visitor<'b> for BuildReducedGraphVisitor<'a, 'b> {
};
// Define a constructor name in the value namespace.
- // Braced variants, unlike structs, generate unusable names in
- // value namespace, they are reserved for possible future use.
- // It's ok to use the variant's id as a ctor id since an
- // error will be reported on any use of such resolution anyway.
- let ctor_node_id = variant.data.ctor_id().unwrap_or(variant.id);
- let ctor_def_id = self.r.local_def_id(ctor_node_id);
- let ctor_kind = CtorKind::from_ast(&variant.data);
- let ctor_res = Res::Def(DefKind::Ctor(CtorOf::Variant, ctor_kind), ctor_def_id.to_def_id());
- self.r.define(parent, ident, ValueNS, (ctor_res, ctor_vis, variant.span, expn_id));
- if ctor_def_id != def_id {
+ if let Some((ctor_kind, ctor_node_id)) = CtorKind::from_ast(&variant.data) {
+ let ctor_def_id = self.r.local_def_id(ctor_node_id);
+ let ctor_res =
+ Res::Def(DefKind::Ctor(CtorOf::Variant, ctor_kind), ctor_def_id.to_def_id());
+ self.r.define(parent, ident, ValueNS, (ctor_res, ctor_vis, variant.span, expn_id));
self.r.visibilities.insert(ctor_def_id, ctor_vis);
}
+
// Record field names for error reporting.
- self.insert_field_names_local(ctor_def_id.to_def_id(), &variant.data);
+ self.insert_field_names_local(def_id.to_def_id(), &variant.data);
visit::walk_variant(self, variant);
}
diff --git a/compiler/rustc_resolve/src/check_unused.rs b/compiler/rustc_resolve/src/check_unused.rs
index 01c3801f2..32fb5e182 100644
--- a/compiler/rustc_resolve/src/check_unused.rs
+++ b/compiler/rustc_resolve/src/check_unused.rs
@@ -234,7 +234,7 @@ impl Resolver<'_> {
if !import.span.is_dummy() {
self.lint_buffer.buffer_lint(
MACRO_USE_EXTERN_CRATE,
- import.id,
+ import.root_id,
import.span,
"deprecated `#[macro_use]` attribute used to \
import macros should be replaced at use sites \
@@ -244,13 +244,13 @@ impl Resolver<'_> {
}
}
}
- ImportKind::ExternCrate { .. } => {
- let def_id = self.local_def_id(import.id);
+ ImportKind::ExternCrate { id, .. } => {
+ let def_id = self.local_def_id(id);
self.maybe_unused_extern_crates.push((def_id, import.span));
}
ImportKind::MacroUse => {
let msg = "unused `#[macro_use]` import";
- self.lint_buffer.buffer_lint(UNUSED_IMPORTS, import.id, import.span, msg);
+ self.lint_buffer.buffer_lint(UNUSED_IMPORTS, import.root_id, import.span, msg);
}
_ => {}
}
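
The check_unused changes above stop reading a shared `import.id` and instead use either the per-kind id (as in `ImportKind::ExternCrate { id, .. }`) or `import.root_id`. A small sketch with invented stand-in types of how an optional per-kind id plus a root-id fallback fits together; the `id()` accessor mirrors the `import.id()` calls that appear later in this diff:

    type NodeId = u32;

    #[allow(dead_code)]
    enum ImportKind {
        Single { id: NodeId },
        Glob { id: NodeId },
        ExternCrate { id: NodeId },
        MacroUse,
        MacroExport,
    }

    struct Import {
        kind: ImportKind,
        root_id: NodeId,
    }

    impl Import {
        // Only some kinds carry their own node id; the rest yield None.
        fn id(&self) -> Option<NodeId> {
            match self.kind {
                ImportKind::Single { id }
                | ImportKind::Glob { id }
                | ImportKind::ExternCrate { id } => Some(id),
                ImportKind::MacroUse | ImportKind::MacroExport => None,
            }
        }
    }

    fn main() {
        let extern_crate = Import { kind: ImportKind::ExternCrate { id: 3 }, root_id: 1 };
        let macro_use = Import { kind: ImportKind::MacroUse, root_id: 11 };
        // A consumer with no per-kind id to hand falls back to root_id.
        println!("extern crate node: {}", extern_crate.id().unwrap_or(extern_crate.root_id));
        println!("macro_use lint node: {}", macro_use.id().unwrap_or(macro_use.root_id));
    }
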
diff --git a/compiler/rustc_resolve/src/def_collector.rs b/compiler/rustc_resolve/src/def_collector.rs
index d36e0f61d..2764a6c28 100644
--- a/compiler/rustc_resolve/src/def_collector.rs
+++ b/compiler/rustc_resolve/src/def_collector.rs
@@ -118,8 +118,8 @@ impl<'a, 'b> visit::Visitor<'a> for DefCollector<'a, 'b> {
match i.kind {
ItemKind::Struct(ref struct_def, _) | ItemKind::Union(ref struct_def, _) => {
// If this is a unit or tuple-like struct, register the constructor.
- if let Some(ctor_hir_id) = struct_def.ctor_id() {
- this.create_def(ctor_hir_id, DefPathData::Ctor, i.span);
+ if let Some(ctor_node_id) = struct_def.ctor_node_id() {
+ this.create_def(ctor_node_id, DefPathData::Ctor, i.span);
}
}
_ => {}
@@ -131,12 +131,9 @@ impl<'a, 'b> visit::Visitor<'a> for DefCollector<'a, 'b> {
fn visit_fn(&mut self, fn_kind: FnKind<'a>, span: Span, _: NodeId) {
if let FnKind::Fn(_, _, sig, _, generics, body) = fn_kind {
- if let Async::Yes { closure_id, return_impl_trait_id, .. } = sig.header.asyncness {
+ if let Async::Yes { closure_id, .. } = sig.header.asyncness {
self.visit_generics(generics);
- let return_impl_trait_id =
- self.create_def(return_impl_trait_id, DefPathData::ImplTrait, span);
-
// For async functions, we need to create their inner defs inside of a
// closure to match their desugared representation. Besides that,
// we must mirror everything that `visit::walk_fn` below does.
@@ -144,9 +141,7 @@ impl<'a, 'b> visit::Visitor<'a> for DefCollector<'a, 'b> {
for param in &sig.decl.inputs {
self.visit_param(param);
}
- self.with_parent(return_impl_trait_id, |this| {
- this.visit_fn_ret_ty(&sig.decl.output)
- });
+ self.visit_fn_ret_ty(&sig.decl.output);
// If this async fn has no body (i.e. it's an async fn signature in a trait)
// then the closure_def will never be used, and we should avoid generating a
// def-id for it.
@@ -163,14 +158,6 @@ impl<'a, 'b> visit::Visitor<'a> for DefCollector<'a, 'b> {
fn visit_use_tree(&mut self, use_tree: &'a UseTree, id: NodeId, _nested: bool) {
self.create_def(id, DefPathData::Use, use_tree.span);
- match use_tree.kind {
- UseTreeKind::Simple(_, id1, id2) => {
- self.create_def(id1, DefPathData::Use, use_tree.prefix.span);
- self.create_def(id2, DefPathData::Use, use_tree.prefix.span);
- }
- UseTreeKind::Glob => (),
- UseTreeKind::Nested(..) => {}
- }
visit::walk_use_tree(self, use_tree, id);
}
@@ -196,8 +183,8 @@ impl<'a, 'b> visit::Visitor<'a> for DefCollector<'a, 'b> {
}
let def = self.create_def(v.id, DefPathData::TypeNs(v.ident.name), v.span);
self.with_parent(def, |this| {
- if let Some(ctor_hir_id) = v.data.ctor_id() {
- this.create_def(ctor_hir_id, DefPathData::Ctor, v.span);
+ if let Some(ctor_node_id) = v.data.ctor_node_id() {
+ this.create_def(ctor_node_id, DefPathData::Ctor, v.span);
}
visit::walk_variant(this, v)
});
@@ -262,11 +249,11 @@ impl<'a, 'b> visit::Visitor<'a> for DefCollector<'a, 'b> {
fn visit_expr(&mut self, expr: &'a Expr) {
let parent_def = match expr.kind {
ExprKind::MacCall(..) => return self.visit_macro_invoc(expr.id),
- ExprKind::Closure(_, _, asyncness, ..) => {
+ ExprKind::Closure(ref closure) => {
// Async closures desugar to closures inside of closures, so
// we must create two defs.
let closure_def = self.create_def(expr.id, DefPathData::ClosureExpr, expr.span);
- match asyncness {
+ match closure.asyncness {
Async::Yes { closure_id, .. } => {
self.create_def(closure_id, DefPathData::ClosureExpr, expr.span)
}
diff --git a/compiler/rustc_resolve/src/diagnostics.rs b/compiler/rustc_resolve/src/diagnostics.rs
index 5d868ebec..e392df6c5 100644
--- a/compiler/rustc_resolve/src/diagnostics.rs
+++ b/compiler/rustc_resolve/src/diagnostics.rs
@@ -25,7 +25,9 @@ use rustc_span::lev_distance::find_best_match_for_name;
use rustc_span::source_map::SourceMap;
use rustc_span::symbol::{kw, sym, Ident, Symbol};
use rustc_span::{BytePos, Span, SyntaxContext};
+use thin_vec::ThinVec;
+use crate::errors as errs;
use crate::imports::{Import, ImportKind, ImportResolver};
use crate::late::{PatternSource, Rib};
use crate::path_names_to_string;
@@ -58,16 +60,32 @@ pub(crate) enum SuggestionTarget {
#[derive(Debug)]
pub(crate) struct TypoSuggestion {
pub candidate: Symbol,
+ /// The source location where the name is defined; None if the name is not defined
+ /// in source e.g. primitives
+ pub span: Option<Span>,
pub res: Res,
pub target: SuggestionTarget,
}
impl TypoSuggestion {
- pub(crate) fn typo_from_res(candidate: Symbol, res: Res) -> TypoSuggestion {
- Self { candidate, res, target: SuggestionTarget::SimilarlyNamed }
+ pub(crate) fn typo_from_ident(ident: Ident, res: Res) -> TypoSuggestion {
+ Self {
+ candidate: ident.name,
+ span: Some(ident.span),
+ res,
+ target: SuggestionTarget::SimilarlyNamed,
+ }
+ }
+ pub(crate) fn typo_from_name(candidate: Symbol, res: Res) -> TypoSuggestion {
+ Self { candidate, span: None, res, target: SuggestionTarget::SimilarlyNamed }
}
- pub(crate) fn single_item_from_res(candidate: Symbol, res: Res) -> TypoSuggestion {
- Self { candidate, res, target: SuggestionTarget::SingleItem }
+ pub(crate) fn single_item_from_ident(ident: Ident, res: Res) -> TypoSuggestion {
+ Self {
+ candidate: ident.name,
+ span: Some(ident.span),
+ res,
+ target: SuggestionTarget::SingleItem,
+ }
}
}
@@ -174,12 +192,12 @@ impl<'a> Resolver<'a> {
ModuleKind::Block => "block",
};
- let old_noun = match old_binding.is_import() {
+ let old_noun = match old_binding.is_import_user_facing() {
true => "import",
false => "definition",
};
- let new_participle = match new_binding.is_import() {
+ let new_participle = match new_binding.is_import_user_facing() {
true => "imported",
false => "defined",
};
@@ -210,7 +228,7 @@ impl<'a> Resolver<'a> {
true => struct_span_err!(self.session, span, E0254, "{}", msg),
false => struct_span_err!(self.session, span, E0260, "{}", msg),
},
- _ => match (old_binding.is_import(), new_binding.is_import()) {
+ _ => match (old_binding.is_import_user_facing(), new_binding.is_import_user_facing()) {
(false, false) => struct_span_err!(self.session, span, E0428, "{}", msg),
(true, true) => struct_span_err!(self.session, span, E0252, "{}", msg),
_ => struct_span_err!(self.session, span, E0255, "{}", msg),
@@ -225,21 +243,27 @@ impl<'a> Resolver<'a> {
));
err.span_label(span, format!("`{}` re{} here", name, new_participle));
- err.span_label(
- self.session.source_map().guess_head_span(old_binding.span),
- format!("previous {} of the {} `{}` here", old_noun, old_kind, name),
- );
+ if !old_binding.span.is_dummy() && old_binding.span != span {
+ err.span_label(
+ self.session.source_map().guess_head_span(old_binding.span),
+ format!("previous {} of the {} `{}` here", old_noun, old_kind, name),
+ );
+ }
// See https://github.com/rust-lang/rust/issues/32354
use NameBindingKind::Import;
+ let can_suggest = |binding: &NameBinding<'_>, import: &self::Import<'_>| {
+ !binding.span.is_dummy()
+ && !matches!(import.kind, ImportKind::MacroUse | ImportKind::MacroExport)
+ };
let import = match (&new_binding.kind, &old_binding.kind) {
// If there are two imports where one or both have attributes then prefer removing the
// import without attributes.
(Import { import: new, .. }, Import { import: old, .. })
if {
- !new_binding.span.is_dummy()
- && !old_binding.span.is_dummy()
- && (new.has_attributes || old.has_attributes)
+ (new.has_attributes || old.has_attributes)
+ && can_suggest(old_binding, old)
+ && can_suggest(new_binding, new)
} =>
{
if old.has_attributes {
@@ -249,10 +273,10 @@ impl<'a> Resolver<'a> {
}
}
// Otherwise prioritize the new binding.
- (Import { import, .. }, other) if !new_binding.span.is_dummy() => {
+ (Import { import, .. }, other) if can_suggest(new_binding, import) => {
Some((import, new_binding.span, other.is_import()))
}
- (other, Import { import, .. }) if !old_binding.span.is_dummy() => {
+ (other, Import { import, .. }) if can_suggest(old_binding, import) => {
Some((import, old_binding.span, other.is_import()))
}
_ => None,
@@ -337,7 +361,7 @@ impl<'a> Resolver<'a> {
}
}
}
- ImportKind::ExternCrate { source, target } => {
+ ImportKind::ExternCrate { source, target, .. } => {
suggestion = Some(format!(
"extern crate {} as {};",
source.unwrap_or(target.name),
@@ -490,7 +514,7 @@ impl<'a> Resolver<'a> {
if let Some(binding) = resolution.borrow().binding {
let res = binding.res();
if filter_fn(res) && ctxt.map_or(true, |ctxt| ctxt == key.ident.span.ctxt()) {
- names.push(TypoSuggestion::typo_from_res(key.ident.name, res));
+ names.push(TypoSuggestion::typo_from_ident(key.ident, res));
}
}
}
@@ -575,78 +599,41 @@ impl<'a> Resolver<'a> {
err
}
- ResolutionError::NameAlreadyUsedInParameterList(name, first_use_span) => {
- let mut err = struct_span_err!(
- self.session,
- span,
- E0403,
- "the name `{}` is already used for a generic \
- parameter in this item's generic parameters",
- name,
- );
- err.span_label(span, "already used");
- err.span_label(first_use_span, format!("first use of `{}`", name));
- err
- }
+ ResolutionError::NameAlreadyUsedInParameterList(name, first_use_span) => self
+ .session
+ .create_err(errs::NameAlreadyUsedInParameterList { span, first_use_span, name }),
ResolutionError::MethodNotMemberOfTrait(method, trait_, candidate) => {
- let mut err = struct_span_err!(
- self.session,
+ self.session.create_err(errs::MethodNotMemberOfTrait {
span,
- E0407,
- "method `{}` is not a member of trait `{}`",
method,
- trait_
- );
- err.span_label(span, format!("not a member of trait `{}`", trait_));
- if let Some(candidate) = candidate {
- err.span_suggestion(
- method.span,
- "there is an associated function with a similar name",
- candidate.to_ident_string(),
- Applicability::MaybeIncorrect,
- );
- }
- err
+ trait_,
+ sub: candidate.map(|c| errs::AssociatedFnWithSimilarNameExists {
+ span: method.span,
+ candidate: c,
+ }),
+ })
}
ResolutionError::TypeNotMemberOfTrait(type_, trait_, candidate) => {
- let mut err = struct_span_err!(
- self.session,
+ self.session.create_err(errs::TypeNotMemberOfTrait {
span,
- E0437,
- "type `{}` is not a member of trait `{}`",
type_,
- trait_
- );
- err.span_label(span, format!("not a member of trait `{}`", trait_));
- if let Some(candidate) = candidate {
- err.span_suggestion(
- type_.span,
- "there is an associated type with a similar name",
- candidate.to_ident_string(),
- Applicability::MaybeIncorrect,
- );
- }
- err
+ trait_,
+ sub: candidate.map(|c| errs::AssociatedTypeWithSimilarNameExists {
+ span: type_.span,
+ candidate: c,
+ }),
+ })
}
ResolutionError::ConstNotMemberOfTrait(const_, trait_, candidate) => {
- let mut err = struct_span_err!(
- self.session,
+ self.session.create_err(errs::ConstNotMemberOfTrait {
span,
- E0438,
- "const `{}` is not a member of trait `{}`",
const_,
- trait_
- );
- err.span_label(span, format!("not a member of trait `{}`", trait_));
- if let Some(candidate) = candidate {
- err.span_suggestion(
- const_.span,
- "there is an associated constant with a similar name",
- candidate.to_ident_string(),
- Applicability::MaybeIncorrect,
- );
- }
- err
+ trait_,
+ sub: candidate.map(|c| errs::AssociatedConstWithSimilarNameExists {
+ span: const_.span,
+ candidate: c,
+ }),
+ })
}
ResolutionError::VariableNotBoundInPattern(binding_error, parent_scope) => {
let BindingError { name, target, origin, could_be_path } = binding_error;
@@ -708,128 +695,78 @@ impl<'a> Resolver<'a> {
err
}
ResolutionError::VariableBoundWithDifferentMode(variable_name, first_binding_span) => {
- let mut err = struct_span_err!(
- self.session,
- span,
- E0409,
- "variable `{}` is bound inconsistently across alternatives separated by `|`",
- variable_name
- );
- err.span_label(span, "bound in different ways");
- err.span_label(first_binding_span, "first binding");
- err
- }
- ResolutionError::IdentifierBoundMoreThanOnceInParameterList(identifier) => {
- let mut err = struct_span_err!(
- self.session,
- span,
- E0415,
- "identifier `{}` is bound more than once in this parameter list",
- identifier
- );
- err.span_label(span, "used as parameter more than once");
- err
- }
- ResolutionError::IdentifierBoundMoreThanOnceInSamePattern(identifier) => {
- let mut err = struct_span_err!(
- self.session,
+ self.session.create_err(errs::VariableBoundWithDifferentMode {
span,
- E0416,
- "identifier `{}` is bound more than once in the same pattern",
- identifier
- );
- err.span_label(span, "used in a pattern more than once");
- err
- }
+ first_binding_span,
+ variable_name,
+ })
+ }
+ ResolutionError::IdentifierBoundMoreThanOnceInParameterList(identifier) => self
+ .session
+ .create_err(errs::IdentifierBoundMoreThanOnceInParameterList { span, identifier }),
+ ResolutionError::IdentifierBoundMoreThanOnceInSamePattern(identifier) => self
+ .session
+ .create_err(errs::IdentifierBoundMoreThanOnceInSamePattern { span, identifier }),
ResolutionError::UndeclaredLabel { name, suggestion } => {
- let mut err = struct_span_err!(
- self.session,
- span,
- E0426,
- "use of undeclared label `{}`",
- name
- );
-
- err.span_label(span, format!("undeclared label `{}`", name));
-
- match suggestion {
+ let ((sub_reachable, sub_reachable_suggestion), sub_unreachable) = match suggestion
+ {
// A reachable label with a similar name exists.
- Some((ident, true)) => {
- err.span_label(ident.span, "a label with a similar name is reachable");
- err.span_suggestion(
- span,
- "try using similarly named label",
- ident.name,
- Applicability::MaybeIncorrect,
- );
- }
+ Some((ident, true)) => (
+ (
+ Some(errs::LabelWithSimilarNameReachable(ident.span)),
+ Some(errs::TryUsingSimilarlyNamedLabel {
+ span,
+ ident_name: ident.name,
+ }),
+ ),
+ None,
+ ),
// An unreachable label with a similar name exists.
- Some((ident, false)) => {
- err.span_label(
- ident.span,
- "a label with a similar name exists but is unreachable",
- );
- }
+ Some((ident, false)) => (
+ (None, None),
+ Some(errs::UnreachableLabelWithSimilarNameExists {
+ ident_span: ident.span,
+ }),
+ ),
// No similarly-named labels exist.
- None => (),
- }
-
- err
+ None => ((None, None), None),
+ };
+ self.session.create_err(errs::UndeclaredLabel {
+ span,
+ name,
+ sub_reachable,
+ sub_reachable_suggestion,
+ sub_unreachable,
+ })
}
ResolutionError::SelfImportsOnlyAllowedWithin { root, span_with_rename } => {
- let mut err = struct_span_err!(
- self.session,
- span,
- E0429,
- "{}",
- "`self` imports are only allowed within a { } list"
- );
-
// None of the suggestions below would help with a case like `use self`.
- if !root {
+ let (suggestion, mpart_suggestion) = if root {
+ (None, None)
+ } else {
// use foo::bar::self -> foo::bar
// use foo::bar::self as abc -> foo::bar as abc
- err.span_suggestion(
- span,
- "consider importing the module directly",
- "",
- Applicability::MachineApplicable,
- );
+ let suggestion = errs::SelfImportsOnlyAllowedWithinSuggestion { span };
// use foo::bar::self -> foo::bar::{self}
// use foo::bar::self as abc -> foo::bar::{self as abc}
- let braces = vec![
- (span_with_rename.shrink_to_lo(), "{".to_string()),
- (span_with_rename.shrink_to_hi(), "}".to_string()),
- ];
- err.multipart_suggestion(
- "alternatively, use the multi-path `use` syntax to import `self`",
- braces,
- Applicability::MachineApplicable,
- );
- }
- err
+ let mpart_suggestion = errs::SelfImportsOnlyAllowedWithinMultipartSuggestion {
+ multipart_start: span_with_rename.shrink_to_lo(),
+ multipart_end: span_with_rename.shrink_to_hi(),
+ };
+ (Some(suggestion), Some(mpart_suggestion))
+ };
+ self.session.create_err(errs::SelfImportsOnlyAllowedWithin {
+ span,
+ suggestion,
+ mpart_suggestion,
+ })
}
ResolutionError::SelfImportCanOnlyAppearOnceInTheList => {
- let mut err = struct_span_err!(
- self.session,
- span,
- E0430,
- "`self` import can only appear once in an import list"
- );
- err.span_label(span, "can only appear once in an import list");
- err
+ self.session.create_err(errs::SelfImportCanOnlyAppearOnceInTheList { span })
}
ResolutionError::SelfImportOnlyInImportListWithNonEmptyPrefix => {
- let mut err = struct_span_err!(
- self.session,
- span,
- E0431,
- "`self` import can only appear in an import list with \
- a non-empty prefix"
- );
- err.span_label(span, "can only appear in an import list with a non-empty prefix");
- err
+ self.session.create_err(errs::SelfImportOnlyInImportListWithNonEmptyPrefix { span })
}
ResolutionError::FailedToResolve { label, suggestion } => {
let mut err =
@@ -847,23 +784,9 @@ impl<'a> Resolver<'a> {
err
}
ResolutionError::CannotCaptureDynamicEnvironmentInFnItem => {
- let mut err = struct_span_err!(
- self.session,
- span,
- E0434,
- "{}",
- "can't capture dynamic environment in a fn item"
- );
- err.help("use the `|| { ... }` closure form instead");
- err
+ self.session.create_err(errs::CannotCaptureDynamicEnvironmentInFnItem { span })
}
- ResolutionError::AttemptToUseNonConstantValueInConstant(ident, sugg, current) => {
- let mut err = struct_span_err!(
- self.session,
- span,
- E0435,
- "attempt to use a non-constant value in a constant"
- );
+ ResolutionError::AttemptToUseNonConstantValueInConstant(ident, suggestion, current) => {
// let foo =...
// ^^^ given this Span
// ------- get this Span to have an applicable suggestion
@@ -877,23 +800,34 @@ impl<'a> Resolver<'a> {
.source_map()
.span_extend_to_prev_str(ident.span, current, true, false);
- match sp {
+ let ((with, with_label), without) = match sp {
Some(sp) if !self.session.source_map().is_multiline(sp) => {
let sp = sp.with_lo(BytePos(sp.lo().0 - (current.len() as u32)));
- err.span_suggestion(
- sp,
- &format!("consider using `{}` instead of `{}`", sugg, current),
- format!("{} {}", sugg, ident),
- Applicability::MaybeIncorrect,
- );
- err.span_label(span, "non-constant value");
- }
- _ => {
- err.span_label(ident.span, &format!("this would need to be a `{}`", sugg));
+ (
+ (Some(errs::AttemptToUseNonConstantValueInConstantWithSuggestion {
+ span: sp,
+ ident,
+ suggestion,
+ current,
+ }), Some(errs::AttemptToUseNonConstantValueInConstantLabelWithSuggestion {span})),
+ None,
+ )
}
- }
+ _ => (
+ (None, None),
+ Some(errs::AttemptToUseNonConstantValueInConstantWithoutSuggestion {
+ ident_span: ident.span,
+ suggestion,
+ }),
+ ),
+ };
- err
+ self.session.create_err(errs::AttemptToUseNonConstantValueInConstant {
+ span,
+ with,
+ with_label,
+ without,
+ })
}
ResolutionError::BindingShadowsSomethingUnacceptable {
shadowing_binding,
@@ -902,135 +836,80 @@ impl<'a> Resolver<'a> {
article,
shadowed_binding,
shadowed_binding_span,
- } => {
- let shadowed_binding_descr = shadowed_binding.descr();
- let mut err = struct_span_err!(
- self.session,
- span,
- E0530,
- "{}s cannot shadow {}s",
- shadowing_binding.descr(),
- shadowed_binding_descr,
- );
- err.span_label(
- span,
- format!("cannot be named the same as {} {}", article, shadowed_binding_descr),
- );
- match (shadowing_binding, shadowed_binding) {
+ } => self.session.create_err(errs::BindingShadowsSomethingUnacceptable {
+ span,
+ shadowing_binding,
+ shadowed_binding,
+ article,
+ sub_suggestion: match (shadowing_binding, shadowed_binding) {
(
PatternSource::Match,
Res::Def(DefKind::Ctor(CtorOf::Variant | CtorOf::Struct, CtorKind::Fn), _),
- ) => {
- err.span_suggestion(
- span,
- "try specify the pattern arguments",
- format!("{}(..)", name),
- Applicability::Unspecified,
- );
- }
- _ => (),
- }
- let msg =
- format!("the {} `{}` is {} here", shadowed_binding_descr, name, participle);
- err.span_label(shadowed_binding_span, msg);
- err
- }
+ ) => Some(errs::BindingShadowsSomethingUnacceptableSuggestion { span, name }),
+ _ => None,
+ },
+ shadowed_binding_span,
+ participle,
+ name,
+ }),
ResolutionError::ForwardDeclaredGenericParam => {
- let mut err = struct_span_err!(
- self.session,
- span,
- E0128,
- "generic parameters with a default cannot use \
- forward declared identifiers"
- );
- err.span_label(span, "defaulted generic parameters cannot be forward declared");
- err
+ self.session.create_err(errs::ForwardDeclaredGenericParam { span })
}
ResolutionError::ParamInTyOfConstParam(name) => {
- let mut err = struct_span_err!(
- self.session,
- span,
- E0770,
- "the type of const parameters must not depend on other generic parameters"
- );
- err.span_label(
- span,
- format!("the type must not depend on the parameter `{}`", name),
- );
- err
+ self.session.create_err(errs::ParamInTyOfConstParam { span, name })
}
ResolutionError::ParamInNonTrivialAnonConst { name, is_type } => {
- let mut err = self.session.struct_span_err(
+ self.session.create_err(errs::ParamInNonTrivialAnonConst {
span,
- "generic parameters may not be used in const operations",
- );
- err.span_label(span, &format!("cannot perform const operation using `{}`", name));
-
- if is_type {
- err.note("type parameters may not be used in const expressions");
- } else {
- err.help(&format!(
- "const parameters may only be used as standalone arguments, i.e. `{}`",
- name
- ));
- }
-
- if self.session.is_nightly_build() {
- err.help(
- "use `#![feature(generic_const_exprs)]` to allow generic const expressions",
- );
- }
-
- err
+ name,
+ sub_is_type: if is_type {
+ errs::ParamInNonTrivialAnonConstIsType::AType
+ } else {
+ errs::ParamInNonTrivialAnonConstIsType::NotAType { name }
+ },
+ help: self
+ .session
+ .is_nightly_build()
+ .then_some(errs::ParamInNonTrivialAnonConstHelp),
+ })
}
ResolutionError::SelfInGenericParamDefault => {
- let mut err = struct_span_err!(
- self.session,
- span,
- E0735,
- "generic parameters cannot use `Self` in their defaults"
- );
- err.span_label(span, "`Self` in generic parameter default");
- err
+ self.session.create_err(errs::SelfInGenericParamDefault { span })
}
ResolutionError::UnreachableLabel { name, definition_span, suggestion } => {
- let mut err = struct_span_err!(
- self.session,
+ let ((sub_suggestion_label, sub_suggestion), sub_unreachable_label) =
+ match suggestion {
+ // A reachable label with a similar name exists.
+ Some((ident, true)) => (
+ (
+ Some(errs::UnreachableLabelSubLabel { ident_span: ident.span }),
+ Some(errs::UnreachableLabelSubSuggestion {
+ span,
+ // intentionally taking 'ident.name' instead of 'ident' itself, as this
+ // could be used in suggestion context
+ ident_name: ident.name,
+ }),
+ ),
+ None,
+ ),
+ // An unreachable label with a similar name exists.
+ Some((ident, false)) => (
+ (None, None),
+ Some(errs::UnreachableLabelSubLabelUnreachable {
+ ident_span: ident.span,
+ }),
+ ),
+ // No similarly-named labels exist.
+ None => ((None, None), None),
+ };
+ self.session.create_err(errs::UnreachableLabel {
span,
- E0767,
- "use of unreachable label `{}`",
name,
- );
-
- err.span_label(definition_span, "unreachable label defined here");
- err.span_label(span, format!("unreachable label `{}`", name));
- err.note(
- "labels are unreachable through functions, closures, async blocks and modules",
- );
-
- match suggestion {
- // A reachable label with a similar name exists.
- Some((ident, true)) => {
- err.span_label(ident.span, "a label with a similar name is reachable");
- err.span_suggestion(
- span,
- "try using similarly named label",
- ident.name,
- Applicability::MaybeIncorrect,
- );
- }
- // An unreachable label with a similar name exists.
- Some((ident, false)) => {
- err.span_label(
- ident.span,
- "a label with a similar name exists but is also unreachable",
- );
- }
- // No similarly-named labels exist.
- None => (),
- }
-
- err
+ definition_span,
+ sub_suggestion,
+ sub_suggestion_label,
+ sub_unreachable_label,
+ })
}
ResolutionError::TraitImplMismatch {
name,
@@ -1051,25 +930,10 @@ impl<'a> Resolver<'a> {
err.span_label(trait_item_span, "item in trait");
err
}
- ResolutionError::TraitImplDuplicate { name, trait_item_span, old_span } => {
- let mut err = struct_span_err!(
- self.session,
- span,
- E0201,
- "duplicate definitions with name `{}`:",
- name,
- );
- err.span_label(old_span, "previous definition here");
- err.span_label(trait_item_span, "item in trait");
- err.span_label(span, "duplicate definition");
- err
- }
- ResolutionError::InvalidAsmSym => {
- let mut err = self.session.struct_span_err(span, "invalid `sym` operand");
- err.span_label(span, "is a local variable");
- err.help("`sym` operands must refer to either a function or a static");
- err
- }
+ ResolutionError::TraitImplDuplicate { name, trait_item_span, old_span } => self
+ .session
+ .create_err(errs::TraitImplDuplicate { span, name, trait_item_span, old_span }),
+ ResolutionError::InvalidAsmSym => self.session.create_err(errs::InvalidAsmSym { span }),
}
}
@@ -1079,48 +943,27 @@ impl<'a> Resolver<'a> {
) -> ErrorGuaranteed {
match vis_resolution_error {
VisResolutionError::Relative2018(span, path) => {
- let mut err = self.session.struct_span_err(
+ self.session.create_err(errs::Relative2018 {
span,
- "relative paths are not supported in visibilities in 2018 edition or later",
- );
- err.span_suggestion(
- path.span,
- "try",
- format!("crate::{}", pprust::path_to_string(&path)),
- Applicability::MaybeIncorrect,
- );
- err
+ path_span: path.span,
+ // intentionally converting to String, as the text would also be used as
+ // in suggestion context
+ path_str: pprust::path_to_string(&path),
+ })
+ }
+ VisResolutionError::AncestorOnly(span) => {
+ self.session.create_err(errs::AncestorOnly(span))
}
- VisResolutionError::AncestorOnly(span) => struct_span_err!(
- self.session,
- span,
- E0742,
- "visibilities can only be restricted to ancestor modules"
- ),
VisResolutionError::FailedToResolve(span, label, suggestion) => {
self.into_struct_error(span, ResolutionError::FailedToResolve { label, suggestion })
}
VisResolutionError::ExpectedFound(span, path_str, res) => {
- let mut err = struct_span_err!(
- self.session,
- span,
- E0577,
- "expected module, found {} `{}`",
- res.descr(),
- path_str
- );
- err.span_label(span, "not a module");
- err
+ self.session.create_err(errs::ExpectedFound { span, res, path_str })
}
- VisResolutionError::Indeterminate(span) => struct_span_err!(
- self.session,
- span,
- E0578,
- "cannot determine resolution for the visibility"
- ),
- VisResolutionError::ModuleOnly(span) => {
- self.session.struct_span_err(span, "visibility must resolve to a module")
+ VisResolutionError::Indeterminate(span) => {
+ self.session.create_err(errs::Indeterminate(span))
}
+ VisResolutionError::ModuleOnly(span) => self.session.create_err(errs::ModuleOnly(span)),
}
.emit()
}
@@ -1145,7 +988,7 @@ impl<'a> Resolver<'a> {
.get(&expn_id)
.into_iter()
.flatten()
- .map(|ident| TypoSuggestion::typo_from_res(ident.name, res)),
+ .map(|ident| TypoSuggestion::typo_from_ident(*ident, res)),
);
}
}
@@ -1164,7 +1007,7 @@ impl<'a> Resolver<'a> {
suggestions.extend(
ext.helper_attrs
.iter()
- .map(|name| TypoSuggestion::typo_from_res(*name, res)),
+ .map(|name| TypoSuggestion::typo_from_name(*name, res)),
);
}
}
@@ -1174,8 +1017,8 @@ impl<'a> Resolver<'a> {
if let MacroRulesScope::Binding(macro_rules_binding) = macro_rules_scope.get() {
let res = macro_rules_binding.binding.res();
if filter_fn(res) {
- suggestions.push(TypoSuggestion::typo_from_res(
- macro_rules_binding.ident.name,
+ suggestions.push(TypoSuggestion::typo_from_ident(
+ macro_rules_binding.ident,
res,
))
}
@@ -1193,7 +1036,7 @@ impl<'a> Resolver<'a> {
suggestions.extend(this.macro_use_prelude.iter().filter_map(
|(name, binding)| {
let res = binding.res();
- filter_fn(res).then_some(TypoSuggestion::typo_from_res(*name, res))
+ filter_fn(res).then_some(TypoSuggestion::typo_from_name(*name, res))
},
));
}
@@ -1203,14 +1046,14 @@ impl<'a> Resolver<'a> {
suggestions.extend(
BUILTIN_ATTRIBUTES
.iter()
- .map(|attr| TypoSuggestion::typo_from_res(attr.name, res)),
+ .map(|attr| TypoSuggestion::typo_from_name(attr.name, res)),
);
}
}
Scope::ExternPrelude => {
suggestions.extend(this.extern_prelude.iter().filter_map(|(ident, _)| {
let res = Res::Def(DefKind::Mod, CRATE_DEF_ID.to_def_id());
- filter_fn(res).then_some(TypoSuggestion::typo_from_res(ident.name, res))
+ filter_fn(res).then_some(TypoSuggestion::typo_from_ident(*ident, res))
}));
}
Scope::ToolPrelude => {
@@ -1218,7 +1061,7 @@ impl<'a> Resolver<'a> {
suggestions.extend(
this.registered_tools
.iter()
- .map(|ident| TypoSuggestion::typo_from_res(ident.name, res)),
+ .map(|ident| TypoSuggestion::typo_from_ident(*ident, res)),
);
}
Scope::StdLibPrelude => {
@@ -1235,7 +1078,8 @@ impl<'a> Resolver<'a> {
Scope::BuiltinTypes => {
suggestions.extend(PrimTy::ALL.iter().filter_map(|prim_ty| {
let res = Res::PrimTy(*prim_ty);
- filter_fn(res).then_some(TypoSuggestion::typo_from_res(prim_ty.name(), res))
+ filter_fn(res)
+ .then_some(TypoSuggestion::typo_from_name(prim_ty.name(), res))
}))
}
}
@@ -1272,7 +1116,7 @@ impl<'a> Resolver<'a> {
{
let mut candidates = Vec::new();
let mut seen_modules = FxHashSet::default();
- let mut worklist = vec![(start_module, Vec::<ast::PathSegment>::new(), true)];
+ let mut worklist = vec![(start_module, ThinVec::<ast::PathSegment>::new(), true)];
let mut worklist_via_import = vec![];
while let Some((in_module, path_segments, accessible)) = match worklist.pop() {
@@ -1666,7 +1510,7 @@ impl<'a> Resolver<'a> {
let a = if built_in.is_empty() { res.article() } else { "a" };
format!("{a}{built_in} {thing}{from}", thing = res.descr())
} else {
- let introduced = if b.is_import() { "imported" } else { "defined" };
+ let introduced = if b.is_import_user_facing() { "imported" } else { "defined" };
format!("the {thing} {introduced} here", thing = res.descr())
}
}
@@ -1725,10 +1569,10 @@ impl<'a> Resolver<'a> {
/// If the binding refers to a tuple struct constructor with fields,
/// returns the span of its fields.
fn ctor_fields_span(&self, binding: &NameBinding<'_>) -> Option<Span> {
- if let NameBindingKind::Res(
- Res::Def(DefKind::Ctor(CtorOf::Struct, CtorKind::Fn), ctor_def_id),
- _,
- ) = binding.kind
+ if let NameBindingKind::Res(Res::Def(
+ DefKind::Ctor(CtorOf::Struct, CtorKind::Fn),
+ ctor_def_id,
+ )) = binding.kind
{
let def_id = self.parent(ctor_def_id);
let fields = self.field_names.get(&def_id)?;
@@ -1772,7 +1616,9 @@ impl<'a> Resolver<'a> {
next_ident = source;
Some(binding)
}
- ImportKind::Glob { .. } | ImportKind::MacroUse => Some(binding),
+ ImportKind::Glob { .. } | ImportKind::MacroUse | ImportKind::MacroExport => {
+ Some(binding)
+ }
ImportKind::ExternCrate { .. } => None,
},
_ => None,
@@ -1994,13 +1840,16 @@ impl<'a> Resolver<'a> {
(format!("use of undeclared type `{}`", ident), suggestion)
} else {
- let suggestion = if ident.name == sym::alloc {
- Some((
+ let mut suggestion = None;
+ if ident.name == sym::alloc {
+ suggestion = Some((
vec![],
String::from("add `extern crate alloc` to use the `alloc` crate"),
Applicability::MaybeIncorrect,
))
- } else {
+ }
+
+ suggestion = suggestion.or_else(|| {
self.find_similarly_named_module_or_crate(ident.name, &parent_scope.module).map(
|sugg| {
(
@@ -2010,7 +1859,7 @@ impl<'a> Resolver<'a> {
)
},
)
- };
+ });
(format!("use of undeclared crate or module `{}`", ident), suggestion)
}
}
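
In the diagnostics changes above, `TypoSuggestion` gains an optional definition span and is built through `typo_from_ident` or `typo_from_name`, depending on whether the candidate has a source location at all. The sketch below uses simplified stand-ins for `Ident`, `Span`, and the suggestion struct to show how the two constructors differ and how the optional span is consumed:

    use std::ops::Range;

    type Span = Range<usize>;

    #[derive(Debug)]
    struct Ident { name: String, span: Span }

    #[derive(Debug)]
    struct TypoSuggestion {
        candidate: String,
        /// Where the candidate is defined; None when there is no source
        /// location (e.g. primitives or built-in attribute names).
        span: Option<Span>,
    }

    impl TypoSuggestion {
        fn typo_from_ident(ident: &Ident) -> Self {
            Self { candidate: ident.name.clone(), span: Some(ident.span.clone()) }
        }
        fn typo_from_name(candidate: &str) -> Self {
            Self { candidate: candidate.to_string(), span: None }
        }
    }

    fn main() {
        let ident = Ident { name: "Vec".into(), span: 10..13 };
        let suggestions = [
            TypoSuggestion::typo_from_ident(&ident),
            TypoSuggestion::typo_from_name("u32"),
        ];
        for s in &suggestions {
            // A diagnostic can point at the definition only when a span exists.
            match &s.span {
                Some(sp) => println!("`{}` defined at {:?}", s.candidate, sp),
                None => println!("`{}` has no source definition to point at", s.candidate),
            }
        }
    }
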
diff --git a/compiler/rustc_resolve/src/effective_visibilities.rs b/compiler/rustc_resolve/src/effective_visibilities.rs
index c40669ac9..85399385d 100644
--- a/compiler/rustc_resolve/src/effective_visibilities.rs
+++ b/compiler/rustc_resolve/src/effective_visibilities.rs
@@ -1,41 +1,120 @@
-use crate::{ImportKind, NameBindingKind, Resolver};
+use crate::{NameBinding, NameBindingKind, Resolver, ResolverTree};
use rustc_ast::ast;
use rustc_ast::visit;
use rustc_ast::visit::Visitor;
use rustc_ast::Crate;
use rustc_ast::EnumDef;
+use rustc_data_structures::intern::Interned;
use rustc_hir::def_id::LocalDefId;
use rustc_hir::def_id::CRATE_DEF_ID;
-use rustc_middle::middle::privacy::Level;
+use rustc_middle::middle::privacy::{EffectiveVisibilities, EffectiveVisibility};
+use rustc_middle::middle::privacy::{IntoDefIdTree, Level};
use rustc_middle::ty::{DefIdTree, Visibility};
+use std::mem;
+
+type ImportId<'a> = Interned<'a, NameBinding<'a>>;
+
+#[derive(Clone, Copy)]
+enum ParentId<'a> {
+ Def(LocalDefId),
+ Import(ImportId<'a>),
+}
+
+impl ParentId<'_> {
+ fn level(self) -> Level {
+ match self {
+ ParentId::Def(_) => Level::Direct,
+ ParentId::Import(_) => Level::Reexported,
+ }
+ }
+}
pub struct EffectiveVisibilitiesVisitor<'r, 'a> {
r: &'r mut Resolver<'a>,
+ def_effective_visibilities: EffectiveVisibilities,
+ /// While walking import chains we need to track effective visibilities per-binding, and def id
+ /// keys in `Resolver::effective_visibilities` are not enough for that, because multiple
+ /// bindings can correspond to a single def id in imports. So we keep a separate table.
+ import_effective_visibilities: EffectiveVisibilities<ImportId<'a>>,
+ // It's possible to recalculate this at any point, but it's relatively expensive.
+ current_private_vis: Visibility,
changed: bool,
}
+impl Resolver<'_> {
+ fn nearest_normal_mod(&mut self, def_id: LocalDefId) -> LocalDefId {
+ self.get_nearest_non_block_module(def_id.to_def_id()).nearest_parent_mod().expect_local()
+ }
+
+ fn private_vis_import(&mut self, binding: ImportId<'_>) -> Visibility {
+ let NameBindingKind::Import { import, .. } = binding.kind else { unreachable!() };
+ Visibility::Restricted(
+ import
+ .id()
+ .map(|id| self.nearest_normal_mod(self.local_def_id(id)))
+ .unwrap_or(CRATE_DEF_ID),
+ )
+ }
+
+ fn private_vis_def(&mut self, def_id: LocalDefId) -> Visibility {
+ // For mod items `nearest_normal_mod` returns its argument, but we actually need its parent.
+ let normal_mod_id = self.nearest_normal_mod(def_id);
+ if normal_mod_id == def_id {
+ self.opt_local_parent(def_id).map_or(Visibility::Public, Visibility::Restricted)
+ } else {
+ Visibility::Restricted(normal_mod_id)
+ }
+ }
+}
+
+impl<'a, 'b> IntoDefIdTree for &'b mut Resolver<'a> {
+ type Tree = &'b Resolver<'a>;
+ fn tree(self) -> Self::Tree {
+ self
+ }
+}
+
impl<'r, 'a> EffectiveVisibilitiesVisitor<'r, 'a> {
/// Fills the `Resolver::effective_visibilities` table with public & exported items
/// For now, this doesn't resolve macros (FIXME) and cannot resolve Impl, as we
/// need access to a TyCtxt for that.
pub fn compute_effective_visibilities<'c>(r: &'r mut Resolver<'a>, krate: &'c Crate) {
- let mut visitor = EffectiveVisibilitiesVisitor { r, changed: false };
-
- visitor.update(CRATE_DEF_ID, Visibility::Public, CRATE_DEF_ID, Level::Direct);
+ let mut visitor = EffectiveVisibilitiesVisitor {
+ r,
+ def_effective_visibilities: Default::default(),
+ import_effective_visibilities: Default::default(),
+ current_private_vis: Visibility::Public,
+ changed: false,
+ };
+
+ visitor.update(CRATE_DEF_ID, CRATE_DEF_ID);
+ visitor.current_private_vis = Visibility::Restricted(CRATE_DEF_ID);
visitor.set_bindings_effective_visibilities(CRATE_DEF_ID);
while visitor.changed {
- visitor.reset();
+ visitor.changed = false;
visit::walk_crate(&mut visitor, krate);
}
+ visitor.r.effective_visibilities = visitor.def_effective_visibilities;
+
+ // Update visibilities for import def ids. These are not used during the
+ // `EffectiveVisibilitiesVisitor` pass, because we have more detailed binding-based
+ // information, but are used by later passes. Effective visibility of an import def id
+ // is the maximum value among visibilities of bindings corresponding to that def id.
+ for (binding, eff_vis) in visitor.import_effective_visibilities.iter() {
+ let NameBindingKind::Import { import, .. } = binding.kind else { unreachable!() };
+ if let Some(node_id) = import.id() {
+ r.effective_visibilities.update_eff_vis(
+ r.local_def_id(node_id),
+ eff_vis,
+ ResolverTree(&r.definitions, &r.crate_loader),
+ )
+ }
+ }
info!("resolve::effective_visibilities: {:#?}", r.effective_visibilities);
}
- fn reset(&mut self) {
- self.changed = false;
- }
-
/// Update effective visibilities of bindings in the given module,
/// including their whole reexport chains.
fn set_bindings_effective_visibilities(&mut self, module_id: LocalDefId) {
@@ -48,73 +127,68 @@ impl<'r, 'a> EffectiveVisibilitiesVisitor<'r, 'a> {
// Set the given effective visibility level to `Level::Direct` and
// sets the rest of the `use` chain to `Level::Reexported` until
// we hit the actual exported item.
+ let mut parent_id = ParentId::Def(module_id);
+ while let NameBindingKind::Import { binding: nested_binding, .. } = binding.kind {
+ let binding_id = ImportId::new_unchecked(binding);
+ self.update_import(binding_id, parent_id);
- // FIXME: tag and is_public() condition should be removed, but assertions occur.
- let tag = if binding.is_import() { Level::Reexported } else { Level::Direct };
- if binding.vis.is_public() {
- let mut prev_parent_id = module_id;
- let mut level = Level::Direct;
- while let NameBindingKind::Import { binding: nested_binding, import, .. } =
- binding.kind
- {
- let mut update = |node_id| self.update(
- self.r.local_def_id(node_id),
- binding.vis.expect_local(),
- prev_parent_id,
- level,
- );
- // In theory all the import IDs have individual visibilities and effective
- // visibilities, but in practice these IDs go straigth to HIR where all
- // their few uses assume that their (effective) visibility applies to the
- // whole syntactic `use` item. So we update them all to the maximum value
- // among the potential individual effective visibilities. Maybe HIR for
- // imports shouldn't use three IDs at all.
- update(import.id);
- if let ImportKind::Single { additional_ids, .. } = import.kind {
- update(additional_ids.0);
- update(additional_ids.1);
- }
-
- level = Level::Reexported;
- prev_parent_id = self.r.local_def_id(import.id);
- binding = nested_binding;
- }
+ parent_id = ParentId::Import(binding_id);
+ binding = nested_binding;
}
if let Some(def_id) = binding.res().opt_def_id().and_then(|id| id.as_local()) {
- self.update(def_id, binding.vis.expect_local(), module_id, tag);
+ self.update_def(def_id, binding.vis.expect_local(), parent_id);
}
}
}
}
- fn update(
- &mut self,
- def_id: LocalDefId,
- nominal_vis: Visibility,
- parent_id: LocalDefId,
- tag: Level,
- ) {
- let module_id = self
- .r
- .get_nearest_non_block_module(def_id.to_def_id())
- .nearest_parent_mod()
- .expect_local();
- if nominal_vis == Visibility::Restricted(module_id)
- || self.r.visibilities[&parent_id] == Visibility::Restricted(module_id)
- {
- return;
+ fn cheap_private_vis(&self, parent_id: ParentId<'_>) -> Option<Visibility> {
+ matches!(parent_id, ParentId::Def(_)).then_some(self.current_private_vis)
+ }
+
+ fn effective_vis_or_private(&mut self, parent_id: ParentId<'a>) -> EffectiveVisibility {
+ // Private nodes are only added to the table for caching, they could be added or removed at
+ // any moment without consequences, so we don't set `changed` to true when adding them.
+ *match parent_id {
+ ParentId::Def(def_id) => self
+ .def_effective_visibilities
+ .effective_vis_or_private(def_id, || self.r.private_vis_def(def_id)),
+ ParentId::Import(binding) => self
+ .import_effective_visibilities
+ .effective_vis_or_private(binding, || self.r.private_vis_import(binding)),
}
- let mut effective_visibilities = std::mem::take(&mut self.r.effective_visibilities);
- self.changed |= effective_visibilities.update(
+ }
+
+ fn update_import(&mut self, binding: ImportId<'a>, parent_id: ParentId<'a>) {
+ let nominal_vis = binding.vis.expect_local();
+ let private_vis = self.cheap_private_vis(parent_id);
+ let inherited_eff_vis = self.effective_vis_or_private(parent_id);
+ self.changed |= self.import_effective_visibilities.update(
+ binding,
+ nominal_vis,
+ |r| (private_vis.unwrap_or_else(|| r.private_vis_import(binding)), r),
+ inherited_eff_vis,
+ parent_id.level(),
+ &mut *self.r,
+ );
+ }
+
+ fn update_def(&mut self, def_id: LocalDefId, nominal_vis: Visibility, parent_id: ParentId<'a>) {
+ let private_vis = self.cheap_private_vis(parent_id);
+ let inherited_eff_vis = self.effective_vis_or_private(parent_id);
+ self.changed |= self.def_effective_visibilities.update(
def_id,
nominal_vis,
- || Visibility::Restricted(module_id),
- parent_id,
- tag,
- &*self.r,
+ |r| (private_vis.unwrap_or_else(|| r.private_vis_def(def_id)), r),
+ inherited_eff_vis,
+ parent_id.level(),
+ &mut *self.r,
);
- self.r.effective_visibilities = effective_visibilities;
+ }
+
+ fn update(&mut self, def_id: LocalDefId, parent_id: LocalDefId) {
+ self.update_def(def_id, self.r.visibilities[&def_id], ParentId::Def(parent_id));
}
}
@@ -132,22 +206,12 @@ impl<'r, 'ast> Visitor<'ast> for EffectiveVisibilitiesVisitor<'ast, 'r> {
"ast::ItemKind::MacCall encountered, this should not anymore appear at this stage"
),
- // Foreign modules inherit level from parents.
- ast::ItemKind::ForeignMod(..) => {
- let parent_id = self.r.local_parent(def_id);
- self.update(def_id, Visibility::Public, parent_id, Level::Direct);
- }
-
- // Only exported `macro_rules!` items are public, but they always are
- ast::ItemKind::MacroDef(ref macro_def) if macro_def.macro_rules => {
- let parent_id = self.r.local_parent(def_id);
- let vis = self.r.visibilities[&def_id];
- self.update(def_id, vis, parent_id, Level::Direct);
- }
-
ast::ItemKind::Mod(..) => {
+ let prev_private_vis =
+ mem::replace(&mut self.current_private_vis, Visibility::Restricted(def_id));
self.set_bindings_effective_visibilities(def_id);
visit::walk_item(self, item);
+ self.current_private_vis = prev_private_vis;
}
ast::ItemKind::Enum(EnumDef { ref variants }, _) => {
@@ -155,18 +219,14 @@ impl<'r, 'ast> Visitor<'ast> for EffectiveVisibilitiesVisitor<'ast, 'r> {
for variant in variants {
let variant_def_id = self.r.local_def_id(variant.id);
for field in variant.data.fields() {
- let field_def_id = self.r.local_def_id(field.id);
- let vis = self.r.visibilities[&field_def_id];
- self.update(field_def_id, vis, variant_def_id, Level::Direct);
+ self.update(self.r.local_def_id(field.id), variant_def_id);
}
}
}
ast::ItemKind::Struct(ref def, _) | ast::ItemKind::Union(ref def, _) => {
for field in def.fields() {
- let field_def_id = self.r.local_def_id(field.id);
- let vis = self.r.visibilities[&field_def_id];
- self.update(field_def_id, vis, def_id, Level::Direct);
+ self.update(self.r.local_def_id(field.id), def_id);
}
}
@@ -182,6 +242,7 @@ impl<'r, 'ast> Visitor<'ast> for EffectiveVisibilitiesVisitor<'ast, 'r> {
| ast::ItemKind::TyAlias(..)
| ast::ItemKind::TraitAlias(..)
| ast::ItemKind::MacroDef(..)
+ | ast::ItemKind::ForeignMod(..)
| ast::ItemKind::Fn(..) => return,
}
}
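
The rewritten effective-visibilities pass above keeps a `changed` flag and re-walks the crate until a full pass makes no update, propagating information along re-export chains binding by binding. The toy program below (invented data, not the resolver's tables) shows the same fixed-point shape on a small re-export chain:

    use std::collections::HashMap;

    fn main() {
        // Re-export chain: each entry says "item re-exports source".
        let reexports: HashMap<&str, &str> =
            HashMap::from([("b", "a"), ("c", "b"), ("d", "c")]);

        // Level of each item: 0 for the directly defined root, +1 per re-export hop.
        let mut level: HashMap<&str, u32> = HashMap::from([("a", 0)]);

        let mut changed = true;
        while changed {
            changed = false;
            for (&item, &source) in &reexports {
                let src_level = match level.get(source).copied() {
                    Some(l) => l,
                    None => continue, // source not reached yet; a later pass will see it
                };
                let new_level = src_level + 1;
                let cur = level.get(item).copied().unwrap_or(u32::MAX);
                if new_level < cur {
                    level.insert(item, new_level);
                    changed = true; // something improved, so run another full pass
                }
            }
        }

        let mut out: Vec<_> = level.into_iter().collect();
        out.sort();
        println!("{out:?}"); // [("a", 0), ("b", 1), ("c", 2), ("d", 3)]
    }

Iteration order within a pass does not matter for correctness here, only for how many passes are needed before the flag stays false.
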
diff --git a/compiler/rustc_resolve/src/errors.rs b/compiler/rustc_resolve/src/errors.rs
new file mode 100644
index 000000000..2c4427746
--- /dev/null
+++ b/compiler/rustc_resolve/src/errors.rs
@@ -0,0 +1,474 @@
+use rustc_macros::{Diagnostic, Subdiagnostic};
+use rustc_span::{
+ symbol::{Ident, Symbol},
+ Span,
+};
+
+use crate::{late::PatternSource, Res};
+
+#[derive(Diagnostic)]
+#[diag(resolve_parent_module_reset_for_binding, code = "E0637")]
+pub(crate) struct ParentModuleResetForBinding;
+
+#[derive(Diagnostic)]
+#[diag(resolve_ampersand_used_without_explicit_lifetime_name, code = "E0637")]
+#[note]
+pub(crate) struct AmpersandUsedWithoutExplicitLifetimeName(#[primary_span] pub(crate) Span);
+
+#[derive(Diagnostic)]
+#[diag(resolve_underscore_lifetime_name_cannot_be_used_here, code = "E0637")]
+#[note]
+pub(crate) struct UnderscoreLifetimeNameCannotBeUsedHere(#[primary_span] pub(crate) Span);
+
+#[derive(Diagnostic)]
+#[diag(resolve_crate_may_not_be_imported)]
+pub(crate) struct CrateMayNotBeImprted(#[primary_span] pub(crate) Span);
+
+#[derive(Diagnostic)]
+#[diag(resolve_crate_root_imports_must_be_named_explicitly)]
+pub(crate) struct CrateRootNamesMustBeNamedExplicitly(#[primary_span] pub(crate) Span);
+
+#[derive(Diagnostic)]
+#[diag(resolve_crate_root_imports_must_be_named_explicitly)]
+pub(crate) struct ResolutionError(#[primary_span] pub(crate) Span);
+
+#[derive(Diagnostic)]
+#[diag(resolve_name_is_already_used_as_generic_parameter, code = "E0403")]
+pub(crate) struct NameAlreadyUsedInParameterList {
+ #[primary_span]
+ #[label]
+ pub(crate) span: Span,
+ #[label(first_use_of_name)]
+ pub(crate) first_use_span: Span,
+ pub(crate) name: Symbol,
+}
+
+#[derive(Diagnostic)]
+#[diag(resolve_method_not_member_of_trait, code = "E0407")]
+pub(crate) struct MethodNotMemberOfTrait {
+ #[primary_span]
+ #[label]
+ pub(crate) span: Span,
+ pub(crate) method: Ident,
+ pub(crate) trait_: String,
+ #[subdiagnostic]
+ pub(crate) sub: Option<AssociatedFnWithSimilarNameExists>,
+}
+
+#[derive(Subdiagnostic)]
+#[suggestion(
+ resolve_associated_fn_with_similar_name_exists,
+ code = "{candidate}",
+ applicability = "maybe-incorrect"
+)]
+pub(crate) struct AssociatedFnWithSimilarNameExists {
+ #[primary_span]
+ pub(crate) span: Span,
+ pub(crate) candidate: Symbol,
+}
+
+#[derive(Diagnostic)]
+#[diag(resolve_type_not_member_of_trait, code = "E0437")]
+pub(crate) struct TypeNotMemberOfTrait {
+ #[primary_span]
+ #[label]
+ pub(crate) span: Span,
+ pub(crate) type_: Ident,
+ pub(crate) trait_: String,
+ #[subdiagnostic]
+ pub(crate) sub: Option<AssociatedTypeWithSimilarNameExists>,
+}
+
+#[derive(Subdiagnostic)]
+#[suggestion(
+ resolve_associated_type_with_similar_name_exists,
+ code = "{candidate}",
+ applicability = "maybe-incorrect"
+)]
+pub(crate) struct AssociatedTypeWithSimilarNameExists {
+ #[primary_span]
+ pub(crate) span: Span,
+ pub(crate) candidate: Symbol,
+}
+
+#[derive(Diagnostic)]
+#[diag(resolve_const_not_member_of_trait, code = "E0438")]
+pub(crate) struct ConstNotMemberOfTrait {
+ #[primary_span]
+ #[label]
+ pub(crate) span: Span,
+ pub(crate) const_: Ident,
+ pub(crate) trait_: String,
+ #[subdiagnostic]
+ pub(crate) sub: Option<AssociatedConstWithSimilarNameExists>,
+}
+
+#[derive(Subdiagnostic)]
+#[suggestion(
+ resolve_associated_const_with_similar_name_exists,
+ code = "{candidate}",
+ applicability = "maybe-incorrect"
+)]
+pub(crate) struct AssociatedConstWithSimilarNameExists {
+ #[primary_span]
+ pub(crate) span: Span,
+ pub(crate) candidate: Symbol,
+}
+
+#[derive(Diagnostic)]
+#[diag(resolve_variable_bound_with_different_mode, code = "E0409")]
+pub(crate) struct VariableBoundWithDifferentMode {
+ #[primary_span]
+ #[label]
+ pub(crate) span: Span,
+ #[label(first_binding_span)]
+ pub(crate) first_binding_span: Span,
+ pub(crate) variable_name: Symbol,
+}
+
+#[derive(Diagnostic)]
+#[diag(resolve_ident_bound_more_than_once_in_parameter_list, code = "E0415")]
+pub(crate) struct IdentifierBoundMoreThanOnceInParameterList {
+ #[primary_span]
+ #[label]
+ pub(crate) span: Span,
+ pub(crate) identifier: Symbol,
+}
+
+#[derive(Diagnostic)]
+#[diag(resolve_ident_bound_more_than_once_in_same_pattern, code = "E0416")]
+pub(crate) struct IdentifierBoundMoreThanOnceInSamePattern {
+ #[primary_span]
+ #[label]
+ pub(crate) span: Span,
+ pub(crate) identifier: Symbol,
+}
+
+#[derive(Diagnostic)]
+#[diag(resolve_undeclared_label, code = "E0426")]
+pub(crate) struct UndeclaredLabel {
+ #[primary_span]
+ #[label]
+ pub(crate) span: Span,
+ pub(crate) name: Symbol,
+ #[subdiagnostic]
+ pub(crate) sub_reachable: Option<LabelWithSimilarNameReachable>,
+ #[subdiagnostic]
+ pub(crate) sub_reachable_suggestion: Option<TryUsingSimilarlyNamedLabel>,
+ #[subdiagnostic]
+ pub(crate) sub_unreachable: Option<UnreachableLabelWithSimilarNameExists>,
+}
+
+#[derive(Subdiagnostic)]
+#[label(resolve_label_with_similar_name_reachable)]
+pub(crate) struct LabelWithSimilarNameReachable(#[primary_span] pub(crate) Span);
+
+#[derive(Subdiagnostic)]
+#[suggestion(
+ resolve_try_using_similarly_named_label,
+ code = "{ident_name}",
+ applicability = "maybe-incorrect"
+)]
+pub(crate) struct TryUsingSimilarlyNamedLabel {
+ #[primary_span]
+ pub(crate) span: Span,
+ pub(crate) ident_name: Symbol,
+}
+
+#[derive(Subdiagnostic)]
+#[label(resolve_unreachable_label_with_similar_name_exists)]
+pub(crate) struct UnreachableLabelWithSimilarNameExists {
+ #[primary_span]
+ pub(crate) ident_span: Span,
+}
+
+#[derive(Diagnostic)]
+#[diag(resolve_self_import_can_only_appear_once_in_the_list, code = "E0430")]
+pub(crate) struct SelfImportCanOnlyAppearOnceInTheList {
+ #[primary_span]
+ #[label]
+ pub(crate) span: Span,
+}
+
+#[derive(Diagnostic)]
+#[diag(resolve_self_import_only_in_import_list_with_non_empty_prefix, code = "E0431")]
+pub(crate) struct SelfImportOnlyInImportListWithNonEmptyPrefix {
+ #[primary_span]
+ #[label]
+ pub(crate) span: Span,
+}
+
+#[derive(Diagnostic)]
+#[diag(resolve_cannot_capture_dynamic_environment_in_fn_item, code = "E0434")]
+#[help]
+pub(crate) struct CannotCaptureDynamicEnvironmentInFnItem {
+ #[primary_span]
+ pub(crate) span: Span,
+}
+
+#[derive(Diagnostic)]
+#[diag(resolve_attempt_to_use_non_constant_value_in_constant, code = "E0435")]
+pub(crate) struct AttemptToUseNonConstantValueInConstant<'a> {
+ #[primary_span]
+ pub(crate) span: Span,
+ #[subdiagnostic]
+ pub(crate) with: Option<AttemptToUseNonConstantValueInConstantWithSuggestion<'a>>,
+ #[subdiagnostic]
+ pub(crate) with_label: Option<AttemptToUseNonConstantValueInConstantLabelWithSuggestion>,
+ #[subdiagnostic]
+ pub(crate) without: Option<AttemptToUseNonConstantValueInConstantWithoutSuggestion<'a>>,
+}
+
+#[derive(Subdiagnostic)]
+#[suggestion(
+ resolve_attempt_to_use_non_constant_value_in_constant_with_suggestion,
+ code = "{suggestion} {ident}",
+ applicability = "maybe-incorrect"
+)]
+pub(crate) struct AttemptToUseNonConstantValueInConstantWithSuggestion<'a> {
+ #[primary_span]
+ pub(crate) span: Span,
+ pub(crate) ident: Ident,
+ pub(crate) suggestion: &'a str,
+ pub(crate) current: &'a str,
+}
+
+#[derive(Subdiagnostic)]
+#[label(resolve_attempt_to_use_non_constant_value_in_constant_label_with_suggestion)]
+pub(crate) struct AttemptToUseNonConstantValueInConstantLabelWithSuggestion {
+ #[primary_span]
+ pub(crate) span: Span,
+}
+
+#[derive(Subdiagnostic)]
+#[label(resolve_attempt_to_use_non_constant_value_in_constant_without_suggestion)]
+pub(crate) struct AttemptToUseNonConstantValueInConstantWithoutSuggestion<'a> {
+ #[primary_span]
+ pub(crate) ident_span: Span,
+ pub(crate) suggestion: &'a str,
+}
+
+#[derive(Diagnostic)]
+#[diag(resolve_self_imports_only_allowed_within, code = "E0429")]
+pub(crate) struct SelfImportsOnlyAllowedWithin {
+ #[primary_span]
+ pub(crate) span: Span,
+ #[subdiagnostic]
+ pub(crate) suggestion: Option<SelfImportsOnlyAllowedWithinSuggestion>,
+ #[subdiagnostic]
+ pub(crate) mpart_suggestion: Option<SelfImportsOnlyAllowedWithinMultipartSuggestion>,
+}
+
+#[derive(Subdiagnostic)]
+#[suggestion(
+ resolve_self_imports_only_allowed_within_suggestion,
+ code = "",
+ applicability = "machine-applicable"
+)]
+pub(crate) struct SelfImportsOnlyAllowedWithinSuggestion {
+ #[primary_span]
+ pub(crate) span: Span,
+}
+
+#[derive(Subdiagnostic)]
+#[multipart_suggestion(
+ resolve_self_imports_only_allowed_within_multipart_suggestion,
+ applicability = "machine-applicable"
+)]
+pub(crate) struct SelfImportsOnlyAllowedWithinMultipartSuggestion {
+ #[suggestion_part(code = "{{")]
+ pub(crate) multipart_start: Span,
+ #[suggestion_part(code = "}}")]
+ pub(crate) multipart_end: Span,
+}
+
+#[derive(Diagnostic)]
+#[diag(resolve_binding_shadows_something_unacceptable, code = "E0530")]
+pub(crate) struct BindingShadowsSomethingUnacceptable<'a> {
+ #[primary_span]
+ #[label]
+ pub(crate) span: Span,
+ pub(crate) shadowing_binding: PatternSource,
+ pub(crate) shadowed_binding: Res,
+ pub(crate) article: &'a str,
+ #[subdiagnostic]
+ pub(crate) sub_suggestion: Option<BindingShadowsSomethingUnacceptableSuggestion>,
+ #[label(label_shadowed_binding)]
+ pub(crate) shadowed_binding_span: Span,
+ pub(crate) participle: &'a str,
+ pub(crate) name: Symbol,
+}
+
+#[derive(Subdiagnostic)]
+#[suggestion(
+ resolve_binding_shadows_something_unacceptable_suggestion,
+ code = "{name}(..)",
+ applicability = "unspecified"
+)]
+pub(crate) struct BindingShadowsSomethingUnacceptableSuggestion {
+ #[primary_span]
+ pub(crate) span: Span,
+ pub(crate) name: Symbol,
+}
+
+#[derive(Diagnostic)]
+#[diag(resolve_forward_declared_generic_param, code = "E0128")]
+pub(crate) struct ForwardDeclaredGenericParam {
+ #[primary_span]
+ #[label]
+ pub(crate) span: Span,
+}
+
+#[derive(Diagnostic)]
+#[diag(resolve_param_in_ty_of_const_param, code = "E0770")]
+pub(crate) struct ParamInTyOfConstParam {
+ #[primary_span]
+ #[label]
+ pub(crate) span: Span,
+ pub(crate) name: Symbol,
+}
+
+#[derive(Diagnostic)]
+#[diag(resolve_self_in_generic_param_default, code = "E0735")]
+pub(crate) struct SelfInGenericParamDefault {
+ #[primary_span]
+ #[label]
+ pub(crate) span: Span,
+}
+
+#[derive(Diagnostic)]
+#[diag(resolve_param_in_non_trivial_anon_const)]
+pub(crate) struct ParamInNonTrivialAnonConst {
+ #[primary_span]
+ #[label]
+ pub(crate) span: Span,
+ pub(crate) name: Symbol,
+ #[subdiagnostic]
+ pub(crate) sub_is_type: ParamInNonTrivialAnonConstIsType,
+ #[subdiagnostic]
+ pub(crate) help: Option<ParamInNonTrivialAnonConstHelp>,
+}
+
+#[derive(Subdiagnostic)]
+#[help(resolve_param_in_non_trivial_anon_const_help)]
+pub(crate) struct ParamInNonTrivialAnonConstHelp;
+
+#[derive(Subdiagnostic)]
+pub(crate) enum ParamInNonTrivialAnonConstIsType {
+ #[note(resolve_param_in_non_trivial_anon_const_sub_type)]
+ AType,
+ #[help(resolve_param_in_non_trivial_anon_const_sub_non_type)]
+ NotAType { name: Symbol },
+}
+
+#[derive(Diagnostic)]
+#[diag(resolve_unreachable_label, code = "E0767")]
+#[note]
+pub(crate) struct UnreachableLabel {
+ #[primary_span]
+ #[label]
+ pub(crate) span: Span,
+ pub(crate) name: Symbol,
+ #[label(label_definition_span)]
+ pub(crate) definition_span: Span,
+ #[subdiagnostic]
+ pub(crate) sub_suggestion: Option<UnreachableLabelSubSuggestion>,
+ #[subdiagnostic]
+ pub(crate) sub_suggestion_label: Option<UnreachableLabelSubLabel>,
+ #[subdiagnostic]
+ pub(crate) sub_unreachable_label: Option<UnreachableLabelSubLabelUnreachable>,
+}
+
+#[derive(Subdiagnostic)]
+#[suggestion(
+ resolve_unreachable_label_suggestion_use_similarly_named,
+ code = "{ident_name}",
+ applicability = "maybe-incorrect"
+)]
+pub(crate) struct UnreachableLabelSubSuggestion {
+ #[primary_span]
+ pub(crate) span: Span,
+ pub(crate) ident_name: Symbol,
+}
+
+#[derive(Subdiagnostic)]
+#[label(resolve_unreachable_label_similar_name_reachable)]
+pub(crate) struct UnreachableLabelSubLabel {
+ #[primary_span]
+ pub(crate) ident_span: Span,
+}
+
+#[derive(Subdiagnostic)]
+#[label(resolve_unreachable_label_similar_name_unreachable)]
+pub(crate) struct UnreachableLabelSubLabelUnreachable {
+ #[primary_span]
+ pub(crate) ident_span: Span,
+}
+
+#[derive(Diagnostic)]
+#[diag(resolve_trait_impl_mismatch, code = "{code}")]
+pub(crate) struct TraitImplMismatch {
+ #[primary_span]
+ #[label]
+ pub(crate) span: Span,
+ pub(crate) name: Symbol,
+ pub(crate) kind: String,
+ #[label(label_trait_item)]
+ pub(crate) trait_item_span: Span,
+ pub(crate) trait_path: String,
+ pub(crate) code: String,
+}
+
+#[derive(Diagnostic)]
+#[diag(resolve_invalid_asm_sym)]
+#[help]
+pub(crate) struct InvalidAsmSym {
+ #[primary_span]
+ #[label]
+ pub(crate) span: Span,
+}
+
+#[derive(Diagnostic)]
+#[diag(resolve_trait_impl_duplicate, code = "E0201")]
+pub(crate) struct TraitImplDuplicate {
+ #[primary_span]
+ #[label]
+ pub(crate) span: Span,
+ #[label(old_span_label)]
+ pub(crate) old_span: Span,
+ #[label(trait_item_span)]
+ pub(crate) trait_item_span: Span,
+ pub(crate) name: Symbol,
+}
+
+#[derive(Diagnostic)]
+#[diag(resolve_relative_2018)]
+pub(crate) struct Relative2018 {
+ #[primary_span]
+ pub(crate) span: Span,
+ #[suggestion(code = "crate::{path_str}", applicability = "maybe-incorrect")]
+ pub(crate) path_span: Span,
+ pub(crate) path_str: String,
+}
+
+#[derive(Diagnostic)]
+#[diag(resolve_ancestor_only, code = "E0742")]
+pub(crate) struct AncestorOnly(#[primary_span] pub(crate) Span);
+
+#[derive(Diagnostic)]
+#[diag(resolve_expected_found, code = "E0577")]
+pub(crate) struct ExpectedFound {
+ #[primary_span]
+ #[label]
+ pub(crate) span: Span,
+ pub(crate) res: Res,
+ pub(crate) path_str: String,
+}
+
+#[derive(Diagnostic)]
+#[diag(resolve_indeterminate, code = "E0578")]
+pub(crate) struct Indeterminate(#[primary_span] pub(crate) Span);
+
+#[derive(Diagnostic)]
+#[diag(resolve_module_only)]
+pub(crate) struct ModuleOnly(#[primary_span] pub(crate) Span);
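Editor's note: the structs above all follow the same derive-based diagnostics pattern, where `#[diag]`, `#[label]`, and `#[subdiagnostic]` attributes map fields onto a Fluent message. As a hedged sketch only (the emit sites are not part of this patch, and the `Session::emit_err` entry point is an assumption about the surrounding code), an error like `UndeclaredLabel` would be raised roughly like this:

```rust
// Illustrative sketch: assumes the usual `Session::emit_err` entry point and
// that the `resolve_undeclared_label` Fluent slug is registered; the real call
// sites live elsewhere in rustc_resolve and are not shown in this diff.
fn report_undeclared_label(
    sess: &rustc_session::Session,
    span: rustc_span::Span,
    name: rustc_span::Symbol,
) {
    sess.emit_err(UndeclaredLabel {
        span,
        name,
        // The subdiagnostics are optional and only attached when a similarly
        // named label is actually found in scope.
        sub_reachable: None,
        sub_reachable_suggestion: None,
        sub_unreachable: None,
    });
}
```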
diff --git a/compiler/rustc_resolve/src/ident.rs b/compiler/rustc_resolve/src/ident.rs
index e0542d547..759818856 100644
--- a/compiler/rustc_resolve/src/ident.rs
+++ b/compiler/rustc_resolve/src/ident.rs
@@ -19,7 +19,7 @@ use crate::late::{
};
use crate::macros::{sub_namespace_match, MacroRulesScope};
use crate::{AmbiguityError, AmbiguityErrorMisc, AmbiguityKind, Determinacy, Finalize};
-use crate::{ImportKind, LexicalScopeBinding, Module, ModuleKind, ModuleOrUniformRoot};
+use crate::{Import, ImportKind, LexicalScopeBinding, Module, ModuleKind, ModuleOrUniformRoot};
use crate::{NameBinding, NameBindingKind, ParentScope, PathResult, PrivacyError, Res};
use crate::{ResolutionError, Resolver, Scope, ScopeSet, Segment, ToNameBinding, Weak};
@@ -915,7 +915,11 @@ impl<'a> Resolver<'a> {
}
if !restricted_shadowing && binding.expansion != LocalExpnId::ROOT {
- if let NameBindingKind::Res(_, true) = binding.kind {
+ if let NameBindingKind::Import {
+ import: Import { kind: ImportKind::MacroExport, .. },
+ ..
+ } = binding.kind
+ {
self.macro_expanded_macro_export_errors.insert((path_span, binding.span));
}
}
diff --git a/compiler/rustc_resolve/src/imports.rs b/compiler/rustc_resolve/src/imports.rs
index f2cc50c19..b100a8c17 100644
--- a/compiler/rustc_resolve/src/imports.rs
+++ b/compiler/rustc_resolve/src/imports.rs
@@ -4,7 +4,10 @@ use crate::diagnostics::{import_candidates, Suggestion};
use crate::Determinacy::{self, *};
use crate::Namespace::*;
use crate::{module_to_string, names_to_string, ImportSuggestion};
-use crate::{AmbiguityKind, BindingKey, ModuleKind, ResolutionError, Resolver, Segment};
+use crate::{
+ AmbiguityError, AmbiguityErrorMisc, AmbiguityKind, BindingKey, ModuleKind, ResolutionError,
+ Resolver, Segment,
+};
use crate::{Finalize, Module, ModuleOrUniformRoot, ParentScope, PerNS, ScopeSet};
use crate::{NameBinding, NameBindingKind, PathResult};
@@ -44,20 +47,33 @@ pub enum ImportKind<'a> {
type_ns_only: bool,
/// Did this import result from a nested import? ie. `use foo::{bar, baz};`
nested: bool,
- /// Additional `NodeId`s allocated to a `ast::UseTree` for automatically generated `use` statement
- /// (eg. implicit struct constructors)
- additional_ids: (NodeId, NodeId),
+ /// The ID of the `UseTree` that imported this `Import`.
+ ///
+ /// In the case where the `Import` was expanded from a "nested" use tree,
+ /// this id is the ID of the leaf tree. For example:
+ ///
+ /// ```ignore (pacify the merciless tidy)
+ /// use foo::bar::{a, b}
+ /// ```
+ ///
+ /// If this is the import for `foo::bar::a`, we would have the ID of the `UseTree`
+ /// for `a` in this field.
+ id: NodeId,
},
Glob {
is_prelude: bool,
- max_vis: Cell<Option<ty::Visibility>>, // The visibility of the greatest re-export.
- // n.b. `max_vis` is only used in `finalize_import` to check for re-export errors.
+ // The visibility of the greatest re-export.
+ // n.b. `max_vis` is only used in `finalize_import` to check for re-export errors.
+ max_vis: Cell<Option<ty::Visibility>>,
+ id: NodeId,
},
ExternCrate {
source: Option<Symbol>,
target: Ident,
+ id: NodeId,
},
MacroUse,
+ MacroExport,
}
/// Manually implement `Debug` for `ImportKind` because the `source/target_bindings`
@@ -71,7 +87,7 @@ impl<'a> std::fmt::Debug for ImportKind<'a> {
ref target,
ref type_ns_only,
ref nested,
- ref additional_ids,
+ ref id,
// Ignore the following to avoid an infinite loop while printing.
source_bindings: _,
target_bindings: _,
@@ -81,19 +97,22 @@ impl<'a> std::fmt::Debug for ImportKind<'a> {
.field("target", target)
.field("type_ns_only", type_ns_only)
.field("nested", nested)
- .field("additional_ids", additional_ids)
+ .field("id", id)
.finish_non_exhaustive(),
- Glob { ref is_prelude, ref max_vis } => f
+ Glob { ref is_prelude, ref max_vis, ref id } => f
.debug_struct("Glob")
.field("is_prelude", is_prelude)
.field("max_vis", max_vis)
+ .field("id", id)
.finish(),
- ExternCrate { ref source, ref target } => f
+ ExternCrate { ref source, ref target, ref id } => f
.debug_struct("ExternCrate")
.field("source", source)
.field("target", target)
+ .field("id", id)
.finish(),
MacroUse => f.debug_struct("MacroUse").finish(),
+ MacroExport => f.debug_struct("MacroExport").finish(),
}
}
}
@@ -103,24 +122,15 @@ impl<'a> std::fmt::Debug for ImportKind<'a> {
pub(crate) struct Import<'a> {
pub kind: ImportKind<'a>,
- /// The ID of the `extern crate`, `UseTree` etc that imported this `Import`.
- ///
- /// In the case where the `Import` was expanded from a "nested" use tree,
- /// this id is the ID of the leaf tree. For example:
- ///
- /// ```ignore (pacify the merciless tidy)
+ /// Node ID of the "root" use item -- this is always the same as `ImportKind`'s `id`
+ /// (if it exists) except in the case of "nested" use trees, in which case
+ /// it will be the ID of the root use tree. e.g., in the example
+ /// ```ignore (incomplete code)
/// use foo::bar::{a, b}
/// ```
- ///
- /// If this is the import for `foo::bar::a`, we would have the ID of the `UseTree`
- /// for `a` in this field.
- pub id: NodeId,
-
- /// The `id` of the "root" use-kind -- this is always the same as
- /// `id` except in the case of "nested" use trees, in which case
- /// it will be the `id` of the root use tree. e.g., in the example
- /// from `id`, this would be the ID of the `use foo::bar`
- /// `UseTree` node.
+ /// this would be the ID of the `use foo::bar` `UseTree` node.
+    /// In the case of imports without their own node ID, it's the closest node that can be
+    /// used, for example, for reporting lints.
pub root_id: NodeId,
/// Span of the entire use statement.
@@ -161,6 +171,15 @@ impl<'a> Import<'a> {
pub(crate) fn expect_vis(&self) -> ty::Visibility {
self.vis.get().expect("encountered cleared import visibility")
}
+
+ pub(crate) fn id(&self) -> Option<NodeId> {
+ match self.kind {
+ ImportKind::Single { id, .. }
+ | ImportKind::Glob { id, .. }
+ | ImportKind::ExternCrate { id, .. } => Some(id),
+ ImportKind::MacroUse | ImportKind::MacroExport => None,
+ }
+ }
}
/// Records information about the resolution of a name in a namespace of a module.
@@ -175,7 +194,7 @@ pub(crate) struct NameResolution<'a> {
}
impl<'a> NameResolution<'a> {
- // Returns the binding for the name if it is known or None if it not known.
+    /// Returns the binding for the name if it is known, or `None` if it is not known.
pub(crate) fn binding(&self) -> Option<&'a NameBinding<'a>> {
self.binding.and_then(|binding| {
if !binding.is_glob_import() || self.single_imports.is_empty() {
@@ -207,8 +226,8 @@ fn pub_use_of_private_extern_crate_hack(import: &Import<'_>, binding: &NameBindi
}
impl<'a> Resolver<'a> {
- // Given a binding and an import that resolves to it,
- // return the corresponding binding defined by the import.
+ /// Given a binding and an import that resolves to it,
+ /// return the corresponding binding defined by the import.
pub(crate) fn import(
&self,
binding: &'a NameBinding<'a>,
@@ -240,7 +259,7 @@ impl<'a> Resolver<'a> {
})
}
- // Define the name or return the existing binding if there is a collision.
+ /// Define the name or return the existing binding if there is a collision.
pub(crate) fn try_define(
&mut self,
module: Module<'a>,
@@ -368,7 +387,9 @@ impl<'a> Resolver<'a> {
self.record_use(target, dummy_binding, false);
} else if import.imported_module.get().is_none() {
import.used.set(true);
- self.used_imports.insert(import.id);
+ if let Some(id) = import.id() {
+ self.used_imports.insert(id);
+ }
}
}
}
@@ -450,7 +471,7 @@ impl<'a, 'b> ImportResolver<'a, 'b> {
{
// In the case of a new import line, throw a diagnostic message
// for the previous line.
- self.throw_unresolved_import_error(errors, None);
+ self.throw_unresolved_import_error(errors);
errors = vec![];
}
if seen_spans.insert(err.span) {
@@ -482,29 +503,21 @@ impl<'a, 'b> ImportResolver<'a, 'b> {
}
if !errors.is_empty() {
- self.throw_unresolved_import_error(errors, None);
+ self.throw_unresolved_import_error(errors);
}
}
- fn throw_unresolved_import_error(
- &self,
- errors: Vec<(String, UnresolvedImportError)>,
- span: Option<MultiSpan>,
- ) {
+ fn throw_unresolved_import_error(&self, errors: Vec<(String, UnresolvedImportError)>) {
+ if errors.is_empty() {
+ return;
+ }
+
/// Upper limit on the number of `span_label` messages.
const MAX_LABEL_COUNT: usize = 10;
- let (span, msg) = if errors.is_empty() {
- (span.unwrap(), "unresolved import".to_string())
- } else {
- let span = MultiSpan::from_spans(errors.iter().map(|(_, err)| err.span).collect());
-
- let paths = errors.iter().map(|(path, _)| format!("`{}`", path)).collect::<Vec<_>>();
-
- let msg = format!("unresolved import{} {}", pluralize!(paths.len()), paths.join(", "),);
-
- (span, msg)
- };
+ let span = MultiSpan::from_spans(errors.iter().map(|(_, err)| err.span).collect());
+ let paths = errors.iter().map(|(path, _)| format!("`{}`", path)).collect::<Vec<_>>();
+ let msg = format!("unresolved import{} {}", pluralize!(paths.len()), paths.join(", "),);
let mut diag = struct_span_err!(self.r.session, span, E0432, "{}", &msg);
@@ -718,47 +731,51 @@ impl<'a, 'b> ImportResolver<'a, 'b> {
PathResult::Indeterminate => unreachable!(),
};
- let (ident, target, source_bindings, target_bindings, type_ns_only) = match import.kind {
- ImportKind::Single {
- source,
- target,
- ref source_bindings,
- ref target_bindings,
- type_ns_only,
- ..
- } => (source, target, source_bindings, target_bindings, type_ns_only),
- ImportKind::Glob { is_prelude, ref max_vis } => {
- if import.module_path.len() <= 1 {
- // HACK(eddyb) `lint_if_path_starts_with_module` needs at least
- // 2 segments, so the `resolve_path` above won't trigger it.
- let mut full_path = import.module_path.clone();
- full_path.push(Segment::from_ident(Ident::empty()));
- self.r.lint_if_path_starts_with_module(Some(finalize), &full_path, None);
- }
+ let (ident, target, source_bindings, target_bindings, type_ns_only, import_id) =
+ match import.kind {
+ ImportKind::Single {
+ source,
+ target,
+ ref source_bindings,
+ ref target_bindings,
+ type_ns_only,
+ id,
+ ..
+ } => (source, target, source_bindings, target_bindings, type_ns_only, id),
+ ImportKind::Glob { is_prelude, ref max_vis, id } => {
+ if import.module_path.len() <= 1 {
+ // HACK(eddyb) `lint_if_path_starts_with_module` needs at least
+ // 2 segments, so the `resolve_path` above won't trigger it.
+ let mut full_path = import.module_path.clone();
+ full_path.push(Segment::from_ident(Ident::empty()));
+ self.r.lint_if_path_starts_with_module(Some(finalize), &full_path, None);
+ }
- if let ModuleOrUniformRoot::Module(module) = module {
- if ptr::eq(module, import.parent_scope.module) {
- // Importing a module into itself is not allowed.
- return Some(UnresolvedImportError {
- span: import.span,
- label: Some(String::from("cannot glob-import a module into itself")),
- note: None,
- suggestion: None,
- candidate: None,
- });
+ if let ModuleOrUniformRoot::Module(module) = module {
+ if ptr::eq(module, import.parent_scope.module) {
+ // Importing a module into itself is not allowed.
+ return Some(UnresolvedImportError {
+ span: import.span,
+ label: Some(String::from(
+ "cannot glob-import a module into itself",
+ )),
+ note: None,
+ suggestion: None,
+ candidate: None,
+ });
+ }
}
- }
- if !is_prelude
+ if !is_prelude
&& let Some(max_vis) = max_vis.get()
&& !max_vis.is_at_least(import.expect_vis(), &*self.r)
{
let msg = "glob import doesn't reexport anything because no candidate is public enough";
- self.r.lint_buffer.buffer_lint(UNUSED_IMPORTS, import.id, import.span, msg);
+ self.r.lint_buffer.buffer_lint(UNUSED_IMPORTS, id, import.span, msg);
}
- return None;
- }
- _ => unreachable!(),
- };
+ return None;
+ }
+ _ => unreachable!(),
+ };
let mut all_ns_err = true;
self.r.per_ns(|this, ns| {
@@ -777,7 +794,7 @@ impl<'a, 'b> ImportResolver<'a, 'b> {
match binding {
Ok(binding) => {
// Consistency checks, analogous to `finalize_macro_resolutions`.
- let initial_res = source_bindings[ns].get().map(|initial_binding| {
+ let initial_binding = source_bindings[ns].get().map(|initial_binding| {
all_ns_err = false;
if let Some(target_binding) = target_bindings[ns].get() {
if target.name == kw::Underscore
@@ -791,12 +808,20 @@ impl<'a, 'b> ImportResolver<'a, 'b> {
);
}
}
- initial_binding.res()
+ initial_binding
});
let res = binding.res();
- if let Ok(initial_res) = initial_res {
+ if let Ok(initial_binding) = initial_binding {
+ let initial_res = initial_binding.res();
if res != initial_res && this.ambiguity_errors.is_empty() {
- span_bug!(import.span, "inconsistent resolution for an import");
+ this.ambiguity_errors.push(AmbiguityError {
+ kind: AmbiguityKind::Import,
+ ident,
+ b1: initial_binding,
+ b2: binding,
+ misc1: AmbiguityErrorMisc::None,
+ misc2: AmbiguityErrorMisc::None,
+ });
}
} else if res != Res::Err
&& this.ambiguity_errors.is_empty()
@@ -858,7 +883,7 @@ impl<'a, 'b> ImportResolver<'a, 'b> {
match binding.kind {
// Never suggest the name that has binding error
// i.e., the name that cannot be previously resolved
- NameBindingKind::Res(Res::Err, _) => None,
+ NameBindingKind::Res(Res::Err) => None,
_ => Some(i.name),
}
}
@@ -960,7 +985,7 @@ impl<'a, 'b> ImportResolver<'a, 'b> {
);
self.r.lint_buffer.buffer_lint(
PUB_USE_OF_PRIVATE_EXTERN_CRATE,
- import.id,
+ import_id,
import.span,
&msg,
);
@@ -989,7 +1014,7 @@ impl<'a, 'b> ImportResolver<'a, 'b> {
let mut err =
struct_span_err!(self.r.session, import.span, E0364, "{error_msg}");
match binding.kind {
- NameBindingKind::Res(Res::Def(DefKind::Macro(_), def_id), _)
+ NameBindingKind::Res(Res::Def(DefKind::Macro(_), def_id))
// exclude decl_macro
if self.r.get_macro_by_def_id(def_id).macro_rules =>
{
@@ -1029,7 +1054,7 @@ impl<'a, 'b> ImportResolver<'a, 'b> {
// purposes it's good enough to just favor one over the other.
self.r.per_ns(|this, ns| {
if let Ok(binding) = source_bindings[ns].get() {
- this.import_res_map.entry(import.id).or_default()[ns] = Some(binding.res());
+ this.import_res_map.entry(import_id).or_default()[ns] = Some(binding.res());
}
});
@@ -1047,6 +1072,9 @@ impl<'a, 'b> ImportResolver<'a, 'b> {
target_bindings: &PerNS<Cell<Option<&'b NameBinding<'b>>>>,
target: Ident,
) {
+ // This function is only called for single imports.
+ let ImportKind::Single { id, .. } = import.kind else { unreachable!() };
+
// Skip if the import was produced by a macro.
if import.parent_scope.expansion != LocalExpnId::ROOT {
return;
@@ -1094,7 +1122,7 @@ impl<'a, 'b> ImportResolver<'a, 'b> {
redundant_spans.dedup();
self.r.lint_buffer.buffer_lint_with_diagnostic(
UNUSED_IMPORTS,
- import.id,
+ id,
import.span,
&format!("the item `{}` is imported redundantly", ident),
BuiltinLintDiagnostics::RedundantImport(redundant_spans, ident),
@@ -1103,6 +1131,9 @@ impl<'a, 'b> ImportResolver<'a, 'b> {
}
fn resolve_glob_import(&mut self, import: &'b Import<'b>) {
+ // This function is only called for glob imports.
+ let ImportKind::Glob { id, is_prelude, .. } = import.kind else { unreachable!() };
+
let ModuleOrUniformRoot::Module(module) = import.imported_module.get().unwrap() else {
self.r.session.span_err(import.span, "cannot glob-import all possible crates");
return;
@@ -1113,7 +1144,7 @@ impl<'a, 'b> ImportResolver<'a, 'b> {
return;
} else if ptr::eq(module, import.parent_scope.module) {
return;
- } else if let ImportKind::Glob { is_prelude: true, .. } = import.kind {
+ } else if is_prelude {
self.r.prelude = Some(module);
return;
}
@@ -1145,7 +1176,7 @@ impl<'a, 'b> ImportResolver<'a, 'b> {
}
// Record the destination of this import
- self.r.record_partial_res(import.id, PartialRes::new(module.res().unwrap()));
+ self.r.record_partial_res(id, PartialRes::new(module.res().unwrap()));
}
// Miscellaneous post-processing, including recording re-exports,
@@ -1204,5 +1235,6 @@ fn import_kind_to_string(import_kind: &ImportKind<'_>) -> String {
ImportKind::Glob { .. } => "*".to_string(),
ImportKind::ExternCrate { .. } => "<extern crate>".to_string(),
ImportKind::MacroUse => "#[macro_use]".to_string(),
+ ImportKind::MacroExport => "#[macro_export]".to_string(),
}
}
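Editor's note: the key structural change in this file is that the `use`-item `NodeId` now lives on the `ImportKind` variants that actually have one, with `Import::id()` returning `Option<NodeId>`. A minimal stand-alone mirror of that accessor pattern (simplified placeholder types, not the real definitions) looks like this:

```rust
// Simplified mirror of the accessor introduced above; `NodeId` is a stand-in
// for rustc's type and the real variants carry many more fields.
type NodeId = u32;

#[allow(dead_code)] // only two variants are exercised in the example below
enum ImportKind {
    Single { id: NodeId },
    Glob { id: NodeId },
    ExternCrate { id: NodeId },
    MacroUse,
    MacroExport,
}

impl ImportKind {
    // Only imports that originate from a real `use`/`extern crate` item have a
    // node ID; macro-based imports return `None`, so callers such as
    // `used_imports.insert(..)` must now handle the `None` case explicitly.
    fn id(&self) -> Option<NodeId> {
        match *self {
            ImportKind::Single { id }
            | ImportKind::Glob { id }
            | ImportKind::ExternCrate { id } => Some(id),
            ImportKind::MacroUse | ImportKind::MacroExport => None,
        }
    }
}

fn main() {
    assert_eq!(ImportKind::Glob { id: 7 }.id(), Some(7));
    assert_eq!(ImportKind::MacroExport.id(), None);
}
```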
diff --git a/compiler/rustc_resolve/src/late.rs b/compiler/rustc_resolve/src/late.rs
index 00eb768ad..cf3e59460 100644
--- a/compiler/rustc_resolve/src/late.rs
+++ b/compiler/rustc_resolve/src/late.rs
@@ -16,7 +16,7 @@ use rustc_ast::ptr::P;
use rustc_ast::visit::{self, AssocCtxt, BoundKind, FnCtxt, FnKind, Visitor};
use rustc_ast::*;
use rustc_data_structures::fx::{FxHashMap, FxHashSet, FxIndexMap};
-use rustc_errors::DiagnosticId;
+use rustc_errors::{DiagnosticArgValue, DiagnosticId, IntoDiagnosticArg};
use rustc_hir::def::Namespace::{self, *};
use rustc_hir::def::{self, CtorKind, DefKind, LifetimeRes, PartialRes, PerNS};
use rustc_hir::def_id::{DefId, LocalDefId, CRATE_DEF_ID, LOCAL_CRATE};
@@ -31,8 +31,9 @@ use smallvec::{smallvec, SmallVec};
use rustc_span::source_map::{respan, Spanned};
use std::assert_matches::debug_assert_matches;
+use std::borrow::Cow;
use std::collections::{hash_map::Entry, BTreeSet};
-use std::mem::{replace, take};
+use std::mem::{replace, swap, take};
mod diagnostics;
@@ -78,6 +79,12 @@ impl PatternSource {
}
}
+impl IntoDiagnosticArg for PatternSource {
+ fn into_diagnostic_arg(self) -> DiagnosticArgValue<'static> {
+ DiagnosticArgValue::Str(Cow::Borrowed(self.descr()))
+ }
+}
+
/// Denotes whether the context for the set of already bound bindings is a `Product`
/// or `Or` context. This is used in e.g., `fresh_binding` and `resolve_pattern_inner`.
/// See those functions for more information.
@@ -527,6 +534,7 @@ struct DiagnosticMetadata<'ast> {
/// Used to detect possible new binding written without `let` and to provide structured suggestion.
in_assignment: Option<&'ast Expr>,
+ is_assign_rhs: bool,
/// If we are currently in a trait object definition. Used to point at the bounds when
/// encountering a struct or enum.
@@ -647,7 +655,7 @@ impl<'a: 'ast, 'ast> Visitor<'ast> for LateResolutionVisitor<'a, '_, 'ast> {
}
TyKind::Path(ref qself, ref path) => {
self.diagnostic_metadata.current_type_path = Some(ty);
- self.smart_resolve_path(ty.id, qself.as_ref(), path, PathSource::Type);
+ self.smart_resolve_path(ty.id, &qself, path, PathSource::Type);
// Check whether we should interpret this as a bare trait object.
if qself.is_none()
@@ -748,7 +756,7 @@ impl<'a: 'ast, 'ast> Visitor<'ast> for LateResolutionVisitor<'a, '_, 'ast> {
this.visit_generic_params(&tref.bound_generic_params, false);
this.smart_resolve_path(
tref.trait_ref.ref_id,
- None,
+ &None,
&tref.trait_ref.path,
PathSource::Trait(AliasPossibility::Maybe),
);
@@ -977,7 +985,7 @@ impl<'a: 'ast, 'ast> Visitor<'ast> for LateResolutionVisitor<'a, '_, 'ast> {
|this| {
this.smart_resolve_path(
ty.id,
- qself.as_ref(),
+ qself,
path,
PathSource::Expr(None),
);
@@ -1137,12 +1145,7 @@ impl<'a: 'ast, 'ast> Visitor<'ast> for LateResolutionVisitor<'a, '_, 'ast> {
self.with_rib(ValueNS, InlineAsmSymRibKind, |this| {
this.with_rib(TypeNS, InlineAsmSymRibKind, |this| {
this.with_label_rib(InlineAsmSymRibKind, |this| {
- this.smart_resolve_path(
- sym.id,
- sym.qself.as_ref(),
- &sym.path,
- PathSource::Expr(None),
- );
+ this.smart_resolve_path(sym.id, &sym.qself, &sym.path, PathSource::Expr(None));
visit::walk_inline_asm_sym(this, sym);
});
})
@@ -1835,6 +1838,7 @@ impl<'a: 'ast, 'b, 'ast> LateResolutionVisitor<'a, 'b, 'ast> {
let outer_failures = take(&mut self.diagnostic_metadata.current_elision_failures);
let output_rib = if let Ok(res) = elision_lifetime.as_ref() {
+ self.r.lifetime_elision_allowed.insert(fn_id);
LifetimeRibKind::Elided(*res)
} else {
LifetimeRibKind::ElisionFailure
@@ -1923,7 +1927,7 @@ impl<'a: 'ast, 'b, 'ast> LateResolutionVisitor<'a, 'b, 'ast> {
// We have a single lifetime => success.
elision_lifetime = Elision::Param(res)
} else {
- // We have have multiple lifetimes => error.
+ // We have multiple lifetimes => error.
elision_lifetime = Elision::Err;
}
}
@@ -2356,8 +2360,8 @@ impl<'a: 'ast, 'b, 'ast> LateResolutionVisitor<'a, 'b, 'ast> {
if let GenericParamKind::Lifetime = param.kind {
// Record lifetime res, so lowering knows there is something fishy.
self.record_lifetime_param(param.id, LifetimeRes::Error);
- continue;
}
+ continue;
}
Entry::Vacant(entry) => {
entry.insert(param.ident.span);
@@ -2570,7 +2574,7 @@ impl<'a: 'ast, 'b, 'ast> LateResolutionVisitor<'a, 'b, 'ast> {
self.diagnostic_metadata.currently_processing_impl_trait =
Some((trait_ref.clone(), self_type.clone()));
let res = self.smart_resolve_path_fragment(
- None,
+ &None,
&path,
PathSource::Trait(AliasPossibility::No),
Finalize::new(trait_ref.ref_id, trait_ref.path.span),
@@ -3093,7 +3097,7 @@ impl<'a: 'ast, 'b, 'ast> LateResolutionVisitor<'a, 'b, 'ast> {
PatKind::TupleStruct(ref qself, ref path, ref sub_patterns) => {
self.smart_resolve_path(
pat.id,
- qself.as_ref(),
+ qself,
path,
PathSource::TupleStruct(
pat.span,
@@ -3102,10 +3106,10 @@ impl<'a: 'ast, 'b, 'ast> LateResolutionVisitor<'a, 'b, 'ast> {
);
}
PatKind::Path(ref qself, ref path) => {
- self.smart_resolve_path(pat.id, qself.as_ref(), path, PathSource::Pat);
+ self.smart_resolve_path(pat.id, qself, path, PathSource::Pat);
}
PatKind::Struct(ref qself, ref path, ..) => {
- self.smart_resolve_path(pat.id, qself.as_ref(), path, PathSource::Struct);
+ self.smart_resolve_path(pat.id, qself, path, PathSource::Struct);
}
PatKind::Or(ref ps) => {
// Add a new set of bindings to the stack. `Or` here records that when a
@@ -3298,7 +3302,7 @@ impl<'a: 'ast, 'b, 'ast> LateResolutionVisitor<'a, 'b, 'ast> {
fn smart_resolve_path(
&mut self,
id: NodeId,
- qself: Option<&QSelf>,
+ qself: &Option<P<QSelf>>,
path: &Path,
source: PathSource<'ast>,
) {
@@ -3312,7 +3316,7 @@ impl<'a: 'ast, 'b, 'ast> LateResolutionVisitor<'a, 'b, 'ast> {
fn smart_resolve_path_fragment(
&mut self,
- qself: Option<&QSelf>,
+ qself: &Option<P<QSelf>>,
path: &[Segment],
source: PathSource<'ast>,
finalize: Finalize,
@@ -3361,18 +3365,13 @@ impl<'a: 'ast, 'b, 'ast> LateResolutionVisitor<'a, 'b, 'ast> {
// Before we start looking for candidates, we have to get our hands
// on the type user is trying to perform invocation on; basically:
// we're transforming `HashMap::new` into just `HashMap`.
- let path = match path.split_last() {
+ let prefix_path = match path.split_last() {
Some((_, path)) if !path.is_empty() => path,
_ => return Some(parent_err),
};
let (mut err, candidates) =
- this.smart_resolve_report_errors(path, path_span, PathSource::Type, None);
-
- if candidates.is_empty() {
- err.cancel();
- return Some(parent_err);
- }
+ this.smart_resolve_report_errors(prefix_path, path_span, PathSource::Type, None);
// There are two different error messages user might receive at
// this point:
@@ -3383,37 +3382,74 @@ impl<'a: 'ast, 'b, 'ast> LateResolutionVisitor<'a, 'b, 'ast> {
// latter one - for paths in expression-position.
//
// Thus (since we're in expression-position at this point), not to
- // confuse the user, we want to keep the *message* from E0432 (so
+ // confuse the user, we want to keep the *message* from E0433 (so
// `parent_err`), but we want *hints* from E0412 (so `err`).
//
// And that's what happens below - we're just mixing both messages
// into a single one.
let mut parent_err = this.r.into_struct_error(parent_err.span, parent_err.node);
+ // overwrite all properties with the parent's error message
err.message = take(&mut parent_err.message);
err.code = take(&mut parent_err.code);
+ swap(&mut err.span, &mut parent_err.span);
err.children = take(&mut parent_err.children);
+ err.sort_span = parent_err.sort_span;
+ err.is_lint = parent_err.is_lint;
+
+ // merge the parent's suggestions with the typo suggestions
+ fn append_result<T, E>(res1: &mut Result<Vec<T>, E>, res2: Result<Vec<T>, E>) {
+ match res1 {
+ Ok(vec1) => match res2 {
+ Ok(mut vec2) => vec1.append(&mut vec2),
+ Err(e) => *res1 = Err(e),
+ },
+ Err(_) => (),
+ };
+ }
+ append_result(&mut err.suggestions, parent_err.suggestions.clone());
parent_err.cancel();
let def_id = this.parent_scope.module.nearest_parent_mod();
if this.should_report_errs() {
- this.r.use_injections.push(UseError {
- err,
- candidates,
- def_id,
- instead: false,
- suggestion: None,
- path: path.into(),
- is_call: source.is_call(),
- });
+ if candidates.is_empty() {
+ if path.len() == 2 && prefix_path.len() == 1 {
+                        // Delay the error and check whether the method name is an associated function or not
+                        // ```
+                        // let foo = Foo {};
+                        // foo::bar(); // we may want to suggest `foo.bar()` instead
+                        // ```
+ err.stash(
+ prefix_path[0].ident.span,
+ rustc_errors::StashKey::CallAssocMethod,
+ );
+ } else {
+                        // When there are no suggested imports, we can just emit the error and the
+                        // suggestions immediately. Note that we bypass the usual error-reporting
+                        // routine (i.e., via `self.r.report_error`) because we need
+ // to post-process the `ResolutionError` above.
+ err.emit();
+ }
+ } else {
+ // If there are suggested imports, the error reporting is delayed
+ this.r.use_injections.push(UseError {
+ err,
+ candidates,
+ def_id,
+ instead: false,
+ suggestion: None,
+ path: prefix_path.into(),
+ is_call: source.is_call(),
+ });
+ }
} else {
err.cancel();
}
// We don't return `Some(parent_err)` here, because the error will
- // be already printed as part of the `use` injections
+            // have already been printed, either immediately or as part of the `use` injections
None
};
@@ -3513,7 +3549,7 @@ impl<'a: 'ast, 'b, 'ast> LateResolutionVisitor<'a, 'b, 'ast> {
// Resolve in alternative namespaces if resolution in the primary namespace fails.
fn resolve_qpath_anywhere(
&mut self,
- qself: Option<&QSelf>,
+ qself: &Option<P<QSelf>>,
path: &[Segment],
primary_ns: Namespace,
span: Span,
@@ -3557,7 +3593,7 @@ impl<'a: 'ast, 'b, 'ast> LateResolutionVisitor<'a, 'b, 'ast> {
/// Handles paths that may refer to associated items.
fn resolve_qpath(
&mut self,
- qself: Option<&QSelf>,
+ qself: &Option<P<QSelf>>,
path: &[Segment],
ns: Namespace,
finalize: Finalize,
@@ -3587,7 +3623,7 @@ impl<'a: 'ast, 'b, 'ast> LateResolutionVisitor<'a, 'b, 'ast> {
// but with `qself` set to `None`.
let ns = if qself.position + 1 == path.len() { ns } else { TypeNS };
let partial_res = self.smart_resolve_path_fragment(
- None,
+ &None,
&path[..=qself.position],
PathSource::TraitItem(ns),
Finalize::with_root_span(finalize.node_id, finalize.path_span, qself.path_span),
@@ -3770,12 +3806,12 @@ impl<'a: 'ast, 'b, 'ast> LateResolutionVisitor<'a, 'b, 'ast> {
// Next, resolve the node.
match expr.kind {
ExprKind::Path(ref qself, ref path) => {
- self.smart_resolve_path(expr.id, qself.as_ref(), path, PathSource::Expr(parent));
+ self.smart_resolve_path(expr.id, qself, path, PathSource::Expr(parent));
visit::walk_expr(self, expr);
}
ExprKind::Struct(ref se) => {
- self.smart_resolve_path(expr.id, se.qself.as_ref(), &se.path, PathSource::Struct);
+ self.smart_resolve_path(expr.id, &se.qself, &se.path, PathSource::Struct);
visit::walk_expr(self, expr);
}
@@ -3818,7 +3854,9 @@ impl<'a: 'ast, 'b, 'ast> LateResolutionVisitor<'a, 'b, 'ast> {
}
}
- ExprKind::Loop(ref block, label) => self.resolve_labeled_block(label, expr.id, &block),
+ ExprKind::Loop(ref block, label, _) => {
+ self.resolve_labeled_block(label, expr.id, &block)
+ }
ExprKind::While(ref cond, ref block, label) => {
self.with_resolved_label(label, expr.id, |this| {
@@ -3845,12 +3883,12 @@ impl<'a: 'ast, 'b, 'ast> LateResolutionVisitor<'a, 'b, 'ast> {
ExprKind::Field(ref subexpression, _) => {
self.resolve_expr(subexpression, Some(expr));
}
- ExprKind::MethodCall(ref segment, ref receiver, ref arguments, _) => {
+ ExprKind::MethodCall(box MethodCall { ref seg, ref receiver, ref args, .. }) => {
self.resolve_expr(receiver, Some(expr));
- for argument in arguments {
- self.resolve_expr(argument, None);
+ for arg in args {
+ self.resolve_expr(arg, None);
}
- self.visit_path_segment(segment);
+ self.visit_path_segment(seg);
}
ExprKind::Call(ref callee, ref arguments) => {
@@ -3889,10 +3927,15 @@ impl<'a: 'ast, 'b, 'ast> LateResolutionVisitor<'a, 'b, 'ast> {
visit::walk_expr(self, expr);
self.diagnostic_metadata.current_type_ascription.pop();
}
- // `async |x| ...` gets desugared to `|x| future_from_generator(|| ...)`, so we need to
+ // `async |x| ...` gets desugared to `|x| async {...}`, so we need to
// resolve the arguments within the proper scopes so that usages of them inside the
// closure are detected as upvars rather than normal closure arg usages.
- ExprKind::Closure(_, _, Async::Yes { .. }, _, ref fn_decl, ref body, _span) => {
+ ExprKind::Closure(box ast::Closure {
+ asyncness: Async::Yes { .. },
+ ref fn_decl,
+ ref body,
+ ..
+ }) => {
self.with_rib(ValueNS, NormalRibKind, |this| {
this.with_label_rib(ClosureOrAsyncRibKind, |this| {
// Resolve arguments:
@@ -3912,7 +3955,10 @@ impl<'a: 'ast, 'b, 'ast> LateResolutionVisitor<'a, 'b, 'ast> {
});
}
// For closures, ClosureOrAsyncRibKind is added in visit_fn
- ExprKind::Closure(ClosureBinder::For { ref generic_params, span }, ..) => {
+ ExprKind::Closure(box ast::Closure {
+ binder: ClosureBinder::For { ref generic_params, span },
+ ..
+ }) => {
self.with_generic_param_rib(
&generic_params,
NormalRibKind,
@@ -3943,10 +3989,15 @@ impl<'a: 'ast, 'b, 'ast> LateResolutionVisitor<'a, 'b, 'ast> {
self.resolve_expr(elem, Some(expr));
self.visit_expr(idx);
}
- ExprKind::Assign(..) => {
- let old = self.diagnostic_metadata.in_assignment.replace(expr);
- visit::walk_expr(self, expr);
- self.diagnostic_metadata.in_assignment = old;
+ ExprKind::Assign(ref lhs, ref rhs, _) => {
+ if !self.diagnostic_metadata.is_assign_rhs {
+ self.diagnostic_metadata.in_assignment = Some(expr);
+ }
+ self.visit_expr(lhs);
+ self.diagnostic_metadata.is_assign_rhs = true;
+ self.diagnostic_metadata.in_assignment = None;
+ self.visit_expr(rhs);
+ self.diagnostic_metadata.is_assign_rhs = false;
}
_ => {
visit::walk_expr(self, expr);
@@ -3964,9 +4015,9 @@ impl<'a: 'ast, 'b, 'ast> LateResolutionVisitor<'a, 'b, 'ast> {
let traits = self.traits_in_scope(ident, ValueNS);
self.r.trait_map.insert(expr.id, traits);
}
- ExprKind::MethodCall(ref segment, ..) => {
+ ExprKind::MethodCall(ref call) => {
debug!("(recording candidate traits for expr) recording traits for {}", expr.id);
- let traits = self.traits_in_scope(segment.ident, ValueNS);
+ let traits = self.traits_in_scope(call.seg.ident, ValueNS);
self.r.trait_map.insert(expr.id, traits);
}
_ => {
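Editor's note: one piece of this file's changes that is easy to check in isolation is the `append_result` helper added earlier in this diff, used to merge the parent error's suggestions with the typo suggestions. A stand-alone copy, with a small usage example of its merge-or-poison behaviour (the element and error types are placeholders):

```rust
// Stand-alone copy of the suggestion-merging helper shown in the hunk above.
fn append_result<T, E>(res1: &mut Result<Vec<T>, E>, res2: Result<Vec<T>, E>) {
    match res1 {
        Ok(vec1) => match res2 {
            Ok(mut vec2) => vec1.append(&mut vec2),
            Err(e) => *res1 = Err(e),
        },
        Err(_) => (),
    }
}

fn main() {
    let mut suggestions: Result<Vec<&str>, &str> = Ok(vec!["use crate::Foo;"]);
    append_result(&mut suggestions, Ok(vec!["use crate::Bar;"]));
    assert_eq!(suggestions, Ok(vec!["use crate::Foo;", "use crate::Bar;"]));
    // An error on the right-hand side replaces the accumulated suggestions.
    append_result(&mut suggestions, Err("no suggestions"));
    assert!(suggestions.is_err());
}
```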
diff --git a/compiler/rustc_resolve/src/late/diagnostics.rs b/compiler/rustc_resolve/src/late/diagnostics.rs
index 850f023b1..df59a350e 100644
--- a/compiler/rustc_resolve/src/late/diagnostics.rs
+++ b/compiler/rustc_resolve/src/late/diagnostics.rs
@@ -8,7 +8,7 @@ use crate::{PathResult, PathSource, Segment};
use rustc_ast::visit::{FnCtxt, FnKind, LifetimeCtxt};
use rustc_ast::{
self as ast, AssocItemKind, Expr, ExprKind, GenericParam, GenericParamKind, Item, ItemKind,
- NodeId, Path, Ty, TyKind, DUMMY_NODE_ID,
+ MethodCall, NodeId, Path, Ty, TyKind, DUMMY_NODE_ID,
};
use rustc_ast_pretty::pprust::path_segment_to_string;
use rustc_data_structures::fx::FxHashSet;
@@ -21,6 +21,7 @@ use rustc_hir::def::Namespace::{self, *};
use rustc_hir::def::{self, CtorKind, CtorOf, DefKind};
use rustc_hir::def_id::{DefId, CRATE_DEF_ID, LOCAL_CRATE};
use rustc_hir::PrimTy;
+use rustc_middle::ty::DefIdTree;
use rustc_session::lint;
use rustc_session::parse::feature_err;
use rustc_session::Session;
@@ -33,6 +34,8 @@ use rustc_span::{BytePos, Span};
use std::iter;
use std::ops::Deref;
+use thin_vec::ThinVec;
+
type Res = def::Res<ast::NodeId>;
/// A field or associated item from self type suggested in case of resolution failure.
@@ -78,7 +81,7 @@ fn import_candidate_to_enum_paths(suggestion: &ImportSuggestion) -> (String, Str
let path_len = suggestion.path.segments.len();
let enum_path = ast::Path {
span: suggestion.path.span,
- segments: suggestion.path.segments[0..path_len - 1].to_vec(),
+ segments: suggestion.path.segments[0..path_len - 1].iter().cloned().collect(),
tokens: None,
};
let enum_path_string = path_names_to_string(&enum_path);
@@ -150,7 +153,7 @@ struct BaseError {
#[derive(Debug)]
enum TypoCandidate {
Typo(TypoSuggestion),
- Shadowed(Res),
+ Shadowed(Res, Option<Span>),
None,
}
@@ -158,7 +161,7 @@ impl TypoCandidate {
fn to_opt_suggestion(self) -> Option<TypoSuggestion> {
match self {
TypoCandidate::Typo(sugg) => Some(sugg),
- TypoCandidate::Shadowed(_) | TypoCandidate::None => None,
+ TypoCandidate::Shadowed(_, _) | TypoCandidate::None => None,
}
}
}
@@ -219,26 +222,26 @@ impl<'a: 'ast, 'ast> LateResolutionVisitor<'a, '_, 'ast> {
let (mod_prefix, mod_str, suggestion) = if path.len() == 1 {
debug!(?self.diagnostic_metadata.current_impl_items);
debug!(?self.diagnostic_metadata.current_function);
- let suggestion = if let Some(items) = self.diagnostic_metadata.current_impl_items
+ let suggestion = if self.current_trait_ref.is_none()
&& let Some((fn_kind, _)) = self.diagnostic_metadata.current_function
- && self.current_trait_ref.is_none()
&& let Some(FnCtxt::Assoc(_)) = fn_kind.ctxt()
+ && let Some(items) = self.diagnostic_metadata.current_impl_items
&& let Some(item) = items.iter().find(|i| {
- if let AssocItemKind::Fn(fn_) = &i.kind
- && !fn_.sig.decl.has_self()
- && i.ident.name == item_str.name
+ if let AssocItemKind::Fn(_) = &i.kind && i.ident.name == item_str.name
{
debug!(?item_str.name);
- debug!(?fn_.sig.decl.inputs);
return true
}
false
})
+ && let AssocItemKind::Fn(fn_) = &item.kind
{
+ debug!(?fn_);
+ let self_sugg = if fn_.sig.decl.has_self() { "self." } else { "Self::" };
Some((
- item_span,
+ item_span.shrink_to_lo(),
"consider using the associated function",
- format!("Self::{}", item.ident)
+ self_sugg.to_string()
))
} else {
None
@@ -279,6 +282,14 @@ impl<'a: 'ast, 'ast> LateResolutionVisitor<'a, '_, 'ast> {
"you may want to use a bool value instead",
format!("{}", item_typo),
))
+            // FIXME(vincenzopalazzo): make the check smarter,
+            // and maybe expand it with Levenshtein distance checks
+ } else if item_str.as_str() == "printf" {
+ Some((
+ item_span,
+ "you may have meant to use the `print` macro",
+ "print!".to_owned(),
+ ))
} else {
suggestion
};
@@ -322,7 +333,12 @@ impl<'a: 'ast, 'ast> LateResolutionVisitor<'a, '_, 'ast> {
}
self.suggest_bare_struct_literal(&mut err);
- self.suggest_pattern_match_with_let(&mut err, source, span);
+
+ if self.suggest_pattern_match_with_let(&mut err, source, span) {
+ // Fallback label.
+ err.span_label(base_error.span, &base_error.fallback_label);
+ return (err, Vec::new());
+ }
self.suggest_self_or_self_ref(&mut err, path, span);
self.detect_assoct_type_constraint_meant_as_path(&mut err, &base_error);
@@ -341,7 +357,11 @@ impl<'a: 'ast, 'ast> LateResolutionVisitor<'a, '_, 'ast> {
if !self.type_ascription_suggestion(&mut err, base_error.span) {
let mut fallback =
self.suggest_trait_and_bounds(&mut err, source, res, span, &base_error);
+
+ // if we have suggested using pattern matching, then don't add needless suggestions
+ // for typos.
fallback |= self.suggest_typo(&mut err, source, path, span, &base_error);
+
if fallback {
// Fallback label.
err.span_label(base_error.span, &base_error.fallback_label);
@@ -387,11 +407,13 @@ impl<'a: 'ast, 'ast> LateResolutionVisitor<'a, '_, 'ast> {
}
fn suggest_self_or_self_ref(&mut self, err: &mut Diagnostic, path: &[Segment], span: Span) {
- let is_assoc_fn = self.self_type_is_available();
+ if !self.self_type_is_available() {
+ return;
+ }
let Some(path_last_segment) = path.last() else { return };
let item_str = path_last_segment.ident;
// Emit help message for fake-self from other languages (e.g., `this` in Javascript).
- if ["this", "my"].contains(&item_str.as_str()) && is_assoc_fn {
+ if ["this", "my"].contains(&item_str.as_str()) {
err.span_suggestion_short(
span,
"you might have meant to use `self` here instead",
@@ -428,7 +450,7 @@ impl<'a: 'ast, 'ast> LateResolutionVisitor<'a, '_, 'ast> {
fn try_lookup_name_relaxed(
&mut self,
- err: &mut DiagnosticBuilder<'_, ErrorGuaranteed>,
+ err: &mut Diagnostic,
source: PathSource<'_>,
path: &[Segment],
span: Span,
@@ -442,7 +464,6 @@ impl<'a: 'ast, 'ast> LateResolutionVisitor<'a, '_, 'ast> {
let is_enum_variant = &|res| matches!(res, Res::Def(DefKind::Variant, _));
let path_str = Segment::names_to_string(path);
let ident_span = path.last().map_or(span, |ident| ident.ident.span);
-
let mut candidates = self
.r
.lookup_import_candidates(ident, ns, &self.parent_scope, is_expected)
@@ -488,7 +509,7 @@ impl<'a: 'ast, 'ast> LateResolutionVisitor<'a, '_, 'ast> {
.contains(span)
{
// Already reported this issue on the lhs of the type ascription.
- err.delay_as_bug();
+ err.downgrade_to_delayed_bug();
return (true, candidates);
}
}
@@ -607,7 +628,7 @@ impl<'a: 'ast, 'ast> LateResolutionVisitor<'a, '_, 'ast> {
fn suggest_trait_and_bounds(
&mut self,
- err: &mut DiagnosticBuilder<'_, ErrorGuaranteed>,
+ err: &mut Diagnostic,
source: PathSource<'_>,
res: Option<Res>,
span: Span,
@@ -682,7 +703,7 @@ impl<'a: 'ast, 'ast> LateResolutionVisitor<'a, '_, 'ast> {
fn suggest_typo(
&mut self,
- err: &mut DiagnosticBuilder<'_, ErrorGuaranteed>,
+ err: &mut Diagnostic,
source: PathSource<'_>,
path: &[Segment],
span: Span,
@@ -691,9 +712,20 @@ impl<'a: 'ast, 'ast> LateResolutionVisitor<'a, '_, 'ast> {
let is_expected = &|res| source.is_expected(res);
let ident_span = path.last().map_or(span, |ident| ident.ident.span);
let typo_sugg = self.lookup_typo_candidate(path, source.namespace(), is_expected);
- if let TypoCandidate::Shadowed(res) = typo_sugg
- && let Some(id) = res.opt_def_id()
- && let Some(sugg_span) = self.r.opt_span(id)
+ let is_in_same_file = &|sp1, sp2| {
+ let source_map = self.r.session.source_map();
+ let file1 = source_map.span_to_filename(sp1);
+ let file2 = source_map.span_to_filename(sp2);
+ file1 == file2
+ };
+        // Print 'you might have meant' if the candidate (1) is a shadowed name with an
+        // accessible definition and (2) is either defined in the same crate as the typo
+        // (possibly in a different file) or introduced in the same file as the typo
+        // (possibly belonging to a different crate).
+ if let TypoCandidate::Shadowed(res, Some(sugg_span)) = typo_sugg
+ && res
+ .opt_def_id()
+ .map_or(false, |id| id.is_local() || is_in_same_file(span, sugg_span))
{
err.span_label(
sugg_span,
@@ -730,7 +762,7 @@ impl<'a: 'ast, 'ast> LateResolutionVisitor<'a, '_, 'ast> {
fn err_code_special_cases(
&mut self,
- err: &mut DiagnosticBuilder<'_, ErrorGuaranteed>,
+ err: &mut Diagnostic,
source: PathSource<'_>,
path: &[Segment],
span: Span,
@@ -784,19 +816,18 @@ impl<'a: 'ast, 'ast> LateResolutionVisitor<'a, '_, 'ast> {
return false;
}
err.code(rustc_errors::error_code!(E0411));
- err.span_label(
- span,
- "`Self` is only available in impls, traits, and type definitions".to_string(),
- );
+ err.span_label(span, "`Self` is only available in impls, traits, and type definitions");
if let Some(item_kind) = self.diagnostic_metadata.current_item {
- err.span_label(
- item_kind.ident.span,
- format!(
- "`Self` not allowed in {} {}",
- item_kind.kind.article(),
- item_kind.kind.descr()
- ),
- );
+ if !item_kind.ident.span.is_dummy() {
+ err.span_label(
+ item_kind.ident.span,
+ format!(
+ "`Self` not allowed in {} {}",
+ item_kind.kind.article(),
+ item_kind.kind.descr()
+ ),
+ );
+ }
}
true
}
@@ -929,7 +960,7 @@ impl<'a: 'ast, 'ast> LateResolutionVisitor<'a, '_, 'ast> {
err: &mut Diagnostic,
source: PathSource<'_>,
span: Span,
- ) {
+ ) -> bool {
if let PathSource::Expr(_) = source &&
let Some(Expr {
span: expr_span,
@@ -946,8 +977,10 @@ impl<'a: 'ast, 'ast> LateResolutionVisitor<'a, '_, 'ast> {
"let ",
Applicability::MaybeIncorrect,
);
+ return true;
}
}
+ false
}
fn get_single_associated_item(
@@ -973,10 +1006,7 @@ impl<'a: 'ast, 'ast> LateResolutionVisitor<'a, '_, 'ast> {
.collect();
if targets.len() == 1 {
let target = targets[0];
- return Some(TypoSuggestion::single_item_from_res(
- target.0.ident.name,
- target.1,
- ));
+ return Some(TypoSuggestion::single_item_from_ident(target.0.ident, target.1));
}
}
}
@@ -1003,11 +1033,7 @@ impl<'a: 'ast, 'ast> LateResolutionVisitor<'a, '_, 'ast> {
};
// Confirm that the target is an associated type.
- let (ty, position, path) = if let ast::TyKind::Path(
- Some(ast::QSelf { ty, position, .. }),
- path,
- ) = &bounded_ty.kind
- {
+ let (ty, position, path) = if let ast::TyKind::Path(Some(qself), path) = &bounded_ty.kind {
// use this to verify that ident is a type param.
let Some(partial_res) = self.r.partial_res_map.get(&bounded_ty.id) else {
return false;
@@ -1018,7 +1044,7 @@ impl<'a: 'ast, 'ast> LateResolutionVisitor<'a, '_, 'ast> {
) {
return false;
}
- (ty, position, path)
+ (&qself.ty, qself.position, path)
} else {
return false;
};
@@ -1054,12 +1080,12 @@ impl<'a: 'ast, 'ast> LateResolutionVisitor<'a, '_, 'ast> {
.source_map()
.span_to_snippet(ty.span) // Account for `<&'a T as Foo>::Bar`.
.unwrap_or_else(|_| constrain_ident.to_string()),
- path.segments[..*position]
+ path.segments[..position]
.iter()
.map(|segment| path_segment_to_string(segment))
.collect::<Vec<_>>()
.join("::"),
- path.segments[*position..]
+ path.segments[position..]
.iter()
.map(|segment| path_segment_to_string(segment))
.collect::<Vec<_>>()
@@ -1151,7 +1177,9 @@ impl<'a: 'ast, 'ast> LateResolutionVisitor<'a, '_, 'ast> {
let (lhs_span, rhs_span) = match &expr.kind {
ExprKind::Field(base, ident) => (base.span, ident.span),
- ExprKind::MethodCall(_, receiver, _, span) => (receiver.span, *span),
+ ExprKind::MethodCall(box MethodCall { receiver, span, .. }) => {
+ (receiver.span, *span)
+ }
_ => return false,
};
@@ -1423,13 +1451,7 @@ impl<'a: 'ast, 'ast> LateResolutionVisitor<'a, '_, 'ast> {
err.span_label(span, "constructor is not visible here due to private fields");
}
- (
- Res::Def(
- DefKind::Union | DefKind::Variant | DefKind::Ctor(_, CtorKind::Fictive),
- def_id,
- ),
- _,
- ) if ns == ValueNS => {
+ (Res::Def(DefKind::Union | DefKind::Variant, def_id), _) if ns == ValueNS => {
bad_struct_syntax_suggestion(def_id);
}
(Res::Def(DefKind::Ctor(_, CtorKind::Const), def_id), _) if ns == ValueNS => {
@@ -1449,7 +1471,8 @@ impl<'a: 'ast, 'ast> LateResolutionVisitor<'a, '_, 'ast> {
_ => return false,
}
}
- (Res::Def(DefKind::Ctor(_, CtorKind::Fn), def_id), _) if ns == ValueNS => {
+ (Res::Def(DefKind::Ctor(_, CtorKind::Fn), ctor_def_id), _) if ns == ValueNS => {
+ let def_id = self.r.parent(ctor_def_id);
if let Some(span) = self.def_span(def_id) {
err.span_label(span, &format!("`{}` defined here", path_str));
}
@@ -1526,7 +1549,6 @@ impl<'a: 'ast, 'ast> LateResolutionVisitor<'a, '_, 'ast> {
_ => None,
}
}
-
// Fields are generally expected in the same contexts as locals.
if filter_fn(Res::Local(ast::DUMMY_NODE_ID)) {
if let Some(node_id) =
@@ -1618,7 +1640,7 @@ impl<'a: 'ast, 'ast> LateResolutionVisitor<'a, '_, 'ast> {
// Locals and type parameters
for (ident, &res) in &rib.bindings {
if filter_fn(res) && ident.span.ctxt() == rib_ctxt {
- names.push(TypoSuggestion::typo_from_res(ident.name, res));
+ names.push(TypoSuggestion::typo_from_ident(*ident, res));
}
}
@@ -1647,9 +1669,7 @@ impl<'a: 'ast, 'ast> LateResolutionVisitor<'a, '_, 'ast> {
Res::Def(DefKind::Mod, crate_id.as_def_id());
if filter_fn(crate_mod) {
- Some(TypoSuggestion::typo_from_res(
- ident.name, crate_mod,
- ))
+ Some(TypoSuggestion::typo_from_ident(*ident, crate_mod))
} else {
None
}
@@ -1668,7 +1688,7 @@ impl<'a: 'ast, 'ast> LateResolutionVisitor<'a, '_, 'ast> {
// Add primitive types to the mix
if filter_fn(Res::PrimTy(PrimTy::Bool)) {
names.extend(PrimTy::ALL.iter().map(|prim_ty| {
- TypoSuggestion::typo_from_res(prim_ty.name(), Res::PrimTy(*prim_ty))
+ TypoSuggestion::typo_from_name(prim_ty.name(), Res::PrimTy(*prim_ty))
}))
}
} else {
@@ -1695,7 +1715,7 @@ impl<'a: 'ast, 'ast> LateResolutionVisitor<'a, '_, 'ast> {
return TypoCandidate::None;
};
if found == name {
- TypoCandidate::Shadowed(sugg.res)
+ TypoCandidate::Shadowed(sugg.res, sugg.span)
} else {
TypoCandidate::Typo(sugg)
}
@@ -1796,35 +1816,28 @@ impl<'a: 'ast, 'ast> LateResolutionVisitor<'a, '_, 'ast> {
false
}
- fn let_binding_suggestion(&self, err: &mut Diagnostic, ident_span: Span) -> bool {
- // try to give a suggestion for this pattern: `name = 1`, which is common in other languages
- let mut added_suggestion = false;
- if let Some(Expr { kind: ExprKind::Assign(lhs, _rhs, _), .. }) = self.diagnostic_metadata.in_assignment &&
+ // try to give a suggestion for this pattern: `name = blah`, which is common in other languages
+ // suggest `let name = blah` to introduce a new binding
+ fn let_binding_suggestion(&mut self, err: &mut Diagnostic, ident_span: Span) -> bool {
+ if let Some(Expr { kind: ExprKind::Assign(lhs, .. ), .. }) = self.diagnostic_metadata.in_assignment &&
let ast::ExprKind::Path(None, _) = lhs.kind {
- let sm = self.r.session.source_map();
- let line_span = sm.span_extend_to_line(ident_span);
- let ident_name = sm.span_to_snippet(ident_span).unwrap();
- // HACK(chenyukang): make sure ident_name is at the starting of the line to protect against macros
- if sm
- .span_to_snippet(line_span)
- .map_or(false, |s| s.trim().starts_with(&ident_name))
- {
+ if !ident_span.from_expansion() {
err.span_suggestion_verbose(
ident_span.shrink_to_lo(),
"you might have meant to introduce a new binding",
"let ".to_string(),
Applicability::MaybeIncorrect,
);
- added_suggestion = true;
+ return true;
}
}
- added_suggestion
+ false
}
fn find_module(&mut self, def_id: DefId) -> Option<(Module<'a>, ImportSuggestion)> {
let mut result = None;
let mut seen_modules = FxHashSet::default();
- let mut worklist = vec![(self.r.graph_root, Vec::new())];
+ let mut worklist = vec![(self.r.graph_root, ThinVec::new())];
while let Some((in_module, path_segments)) = worklist.pop() {
// abort if the module is already found
@@ -1927,7 +1940,7 @@ impl<'a: 'ast, 'ast> LateResolutionVisitor<'a, '_, 'ast> {
err.span_suggestions(
span,
&msg,
- suggestable_variants.into_iter(),
+ suggestable_variants,
Applicability::MaybeIncorrect,
);
}
@@ -1950,11 +1963,12 @@ impl<'a: 'ast, 'ast> LateResolutionVisitor<'a, '_, 'ast> {
));
}
} else {
- let needs_placeholder = |def_id: DefId, kind: CtorKind| {
+ let needs_placeholder = |ctor_def_id: DefId, kind: CtorKind| {
+ let def_id = self.r.parent(ctor_def_id);
let has_no_fields = self.r.field_names.get(&def_id).map_or(false, |f| f.is_empty());
match kind {
CtorKind::Const => false,
- CtorKind::Fn | CtorKind::Fictive if has_no_fields => false,
+ CtorKind::Fn if has_no_fields => false,
_ => true,
}
};
@@ -1966,7 +1980,6 @@ impl<'a: 'ast, 'ast> LateResolutionVisitor<'a, '_, 'ast> {
.map(|(variant, kind)| match kind {
CtorKind::Const => variant,
CtorKind::Fn => format!("({}())", variant),
- CtorKind::Fictive => format!("({} {{}})", variant),
})
.collect::<Vec<_>>();
let no_suggestable_variant = suggestable_variants.is_empty();
@@ -1981,7 +1994,7 @@ impl<'a: 'ast, 'ast> LateResolutionVisitor<'a, '_, 'ast> {
err.span_suggestions(
span,
msg,
- suggestable_variants.into_iter(),
+ suggestable_variants,
Applicability::MaybeIncorrect,
);
}
@@ -1992,7 +2005,6 @@ impl<'a: 'ast, 'ast> LateResolutionVisitor<'a, '_, 'ast> {
.map(|(variant, _, kind)| (path_names_to_string(variant), kind))
.filter_map(|(variant, kind)| match kind {
CtorKind::Fn => Some(format!("({}(/* fields */))", variant)),
- CtorKind::Fictive => Some(format!("({} {{ /* fields */ }})", variant)),
_ => None,
})
.collect::<Vec<_>>();
@@ -2011,7 +2023,7 @@ impl<'a: 'ast, 'ast> LateResolutionVisitor<'a, '_, 'ast> {
err.span_suggestions(
span,
msg,
- suggestable_variants_with_placeholders.into_iter(),
+ suggestable_variants_with_placeholders,
Applicability::HasPlaceholders,
);
}
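Editor's note: for context on the reworked "consider using the associated function" suggestion near the top of this file's diff: before this change it only covered associated functions without a `self` receiver and rewrote the path to `Self::name`; now it also covers methods and suggests inserting a `self.` or `Self::` prefix at the start of the call. A hedged illustration of the kind of user code it targets (deliberately non-compiling; the exact diagnostic wording and error code come from the compiler, not this sketch):

```rust
// Deliberately erroneous example: calling an associated function as if it
// were a free function from inside another inherent method.
struct Cache;

impl Cache {
    fn refresh(&mut self) {}

    fn tick(&mut self) {
        // ERROR: cannot find function `refresh` in this scope.
        // With the change above, the compiler suggests inserting `self.` here
        // (because `refresh` takes `self`), i.e. `self.refresh();`.
        refresh();
    }
}
```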
diff --git a/compiler/rustc_resolve/src/lib.rs b/compiler/rustc_resolve/src/lib.rs
index 8aebb7da1..a0fa61c45 100644
--- a/compiler/rustc_resolve/src/lib.rs
+++ b/compiler/rustc_resolve/src/lib.rs
@@ -73,6 +73,7 @@ mod check_unused;
mod def_collector;
mod diagnostics;
mod effective_visibilities;
+mod errors;
mod ident;
mod imports;
mod late;
@@ -646,7 +647,7 @@ impl<'a> ToNameBinding<'a> for &'a NameBinding<'a> {
#[derive(Clone, Debug)]
enum NameBindingKind<'a> {
- Res(Res, /* is_macro_export */ bool),
+ Res(Res),
Module(Module<'a>),
Import { binding: &'a NameBinding<'a>, import: &'a Import<'a>, used: Cell<bool> },
}
@@ -745,7 +746,7 @@ impl<'a> NameBinding<'a> {
fn res(&self) -> Res {
match self.kind {
- NameBindingKind::Res(res, _) => res,
+ NameBindingKind::Res(res) => res,
NameBindingKind::Module(module) => module.res().unwrap(),
NameBindingKind::Import { binding, .. } => binding.res(),
}
@@ -762,10 +763,10 @@ impl<'a> NameBinding<'a> {
fn is_possibly_imported_variant(&self) -> bool {
match self.kind {
NameBindingKind::Import { binding, .. } => binding.is_possibly_imported_variant(),
- NameBindingKind::Res(
- Res::Def(DefKind::Variant | DefKind::Ctor(CtorOf::Variant, ..), _),
+ NameBindingKind::Res(Res::Def(
+ DefKind::Variant | DefKind::Ctor(CtorOf::Variant, ..),
_,
- ) => true,
+ )) => true,
NameBindingKind::Res(..) | NameBindingKind::Module(..) => false,
}
}
@@ -788,6 +789,13 @@ impl<'a> NameBinding<'a> {
matches!(self.kind, NameBindingKind::Import { .. })
}
+ /// The binding introduced by `#[macro_export] macro_rules` is a public import, but it might
+ /// not be perceived as such by users, so treat it as a non-import in some diagnostics.
+ fn is_import_user_facing(&self) -> bool {
+ matches!(self.kind, NameBindingKind::Import { import, .. }
+ if !matches!(import.kind, ImportKind::MacroExport))
+ }
+
fn is_glob_import(&self) -> bool {
match self.kind {
NameBindingKind::Import { import, .. } => import.is_glob(),
@@ -1030,6 +1038,8 @@ pub struct Resolver<'a> {
/// they are declared in the static array generated by proc_macro_harness.
proc_macros: Vec<NodeId>,
confused_type_with_std_module: FxHashMap<Span, Span>,
+    /// Node IDs of functions for which lifetime elision was successful.
+ lifetime_elision_allowed: FxHashSet<NodeId>,
effective_visibilities: EffectiveVisibilities,
}
@@ -1101,17 +1111,30 @@ impl<'a> AsMut<Resolver<'a>> for Resolver<'a> {
}
}
-impl<'a, 'b> DefIdTree for &'a Resolver<'b> {
+/// A minimal subset of the resolver that can implement `DefIdTree`, sometimes
+/// required to satisfy the borrow checker by avoiding borrowing the whole resolver.
+#[derive(Clone, Copy)]
+struct ResolverTree<'a, 'b>(&'a Definitions, &'a CrateLoader<'b>);
+
+impl DefIdTree for ResolverTree<'_, '_> {
#[inline]
fn opt_parent(self, id: DefId) -> Option<DefId> {
+ let ResolverTree(definitions, crate_loader) = self;
match id.as_local() {
- Some(id) => self.definitions.def_key(id).parent,
- None => self.cstore().def_key(id).parent,
+ Some(id) => definitions.def_key(id).parent,
+ None => crate_loader.cstore().def_key(id).parent,
}
.map(|index| DefId { index, ..id })
}
}
+impl<'a, 'b> DefIdTree for &'a Resolver<'b> {
+ #[inline]
+ fn opt_parent(self, id: DefId) -> Option<DefId> {
+ ResolverTree(&self.definitions, &self.crate_loader).opt_parent(id)
+ }
+}
+
impl Resolver<'_> {
fn opt_local_def_id(&self, node: NodeId) -> Option<LocalDefId> {
self.node_id_to_def_id.get(&node).copied()
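
The `ResolverTree` change above is an instance of a general borrow-splitting pattern: implement the trait on a small view struct that borrows only the fields it needs, rather than on a reference to the whole owner. A minimal standalone sketch of the idea (the types and fields below are illustrative, not rustc's):

trait NameLookup {
    fn name(&self, idx: usize) -> &str;
}

struct Definitions {
    names: Vec<String>,
}

struct Resolver {
    definitions: Definitions,
    visit_log: Vec<usize>, // unrelated state we still want to mutate
}

/// A view over only the fields `NameLookup` needs.
#[derive(Clone, Copy)]
struct ResolverTree<'a> {
    definitions: &'a Definitions,
}

impl NameLookup for ResolverTree<'_> {
    fn name(&self, idx: usize) -> &str {
        &self.definitions.names[idx]
    }
}

impl Resolver {
    fn record_name_lengths(&mut self) {
        // Borrows only `definitions`; if `NameLookup` were implemented for
        // `&Resolver` and that whole-struct borrow were held across the loop,
        // the `visit_log` mutation below would be rejected.
        let tree = ResolverTree { definitions: &self.definitions };
        for idx in 0..tree.definitions.names.len() {
            let len = tree.name(idx).len();
            self.visit_log.push(len);
        }
    }
}

fn main() {
    let mut resolver = Resolver {
        definitions: Definitions { names: vec!["foo".into(), "quux".into()] },
        visit_log: Vec::new(),
    };
    resolver.record_name_lengths();
    assert_eq!(resolver.visit_log, [3, 4]);
}
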
@@ -1175,7 +1198,7 @@ impl<'a> Resolver<'a> {
pub fn new(
session: &'a Session,
krate: &Crate,
- crate_name: &str,
+ crate_name: Symbol,
metadata_loader: Box<MetadataLoaderDyn>,
arenas: &'a ResolverArenas<'a>,
) -> Resolver<'a> {
@@ -1283,7 +1306,7 @@ impl<'a> Resolver<'a> {
arenas,
dummy_binding: arenas.alloc_name_binding(NameBinding {
- kind: NameBindingKind::Res(Res::Err, false),
+ kind: NameBindingKind::Res(Res::Err),
ambiguity: None,
expansion: LocalExpnId::ROOT,
span: DUMMY_SP,
@@ -1335,6 +1358,7 @@ impl<'a> Resolver<'a> {
trait_impls: Default::default(),
proc_macros: Default::default(),
confused_type_with_std_module: Default::default(),
+ lifetime_elision_allowed: Default::default(),
effective_visibilities: Default::default(),
};
@@ -1429,6 +1453,7 @@ impl<'a> Resolver<'a> {
def_id_to_node_id: self.def_id_to_node_id,
trait_map: self.trait_map,
builtin_macro_kinds: self.builtin_macro_kinds,
+ lifetime_elision_allowed: self.lifetime_elision_allowed,
};
ResolverOutputs { definitions, global_ctxt, ast_lowering }
}
@@ -1472,6 +1497,7 @@ impl<'a> Resolver<'a> {
def_id_to_node_id: self.def_id_to_node_id.clone(),
trait_map: self.trait_map.clone(),
builtin_macro_kinds: self.builtin_macro_kinds.clone(),
+ lifetime_elision_allowed: self.lifetime_elision_allowed.clone(),
};
ResolverOutputs { definitions, global_ctxt, ast_lowering }
}
@@ -1613,10 +1639,12 @@ impl<'a> Resolver<'a> {
) -> SmallVec<[LocalDefId; 1]> {
let mut import_ids = smallvec![];
while let NameBindingKind::Import { import, binding, .. } = kind {
- let id = self.local_def_id(import.id);
- self.maybe_unused_trait_imports.insert(id);
+ if let Some(node_id) = import.id() {
+ let def_id = self.local_def_id(node_id);
+ self.maybe_unused_trait_imports.insert(def_id);
+ import_ids.push(def_id);
+ }
self.add_to_glob_map(&import, trait_name);
- import_ids.push(id);
kind = &binding.kind;
}
import_ids
@@ -1683,7 +1711,9 @@ impl<'a> Resolver<'a> {
}
used.set(true);
import.used.set(true);
- self.used_imports.insert(import.id);
+ if let Some(id) = import.id() {
+ self.used_imports.insert(id);
+ }
self.add_to_glob_map(&import, ident);
self.record_use(ident, binding, false);
}
@@ -1691,8 +1721,8 @@ impl<'a> Resolver<'a> {
#[inline]
fn add_to_glob_map(&mut self, import: &Import<'_>, ident: Ident) {
- if import.is_glob() {
- let def_id = self.local_def_id(import.id);
+ if let ImportKind::Glob { id, .. } = import.kind {
+ let def_id = self.local_def_id(id);
self.glob_map.entry(def_id).or_default().insert(ident.name);
}
}
@@ -1897,7 +1927,7 @@ impl<'a> Resolver<'a> {
if let Some(def_id) = def_id.as_local() {
self.reexport_map.get(&def_id).cloned().unwrap_or_default()
} else {
- self.cstore().module_children_untracked(def_id, self.session)
+ self.cstore().module_children_untracked(def_id, self.session).collect()
}
}
@@ -1961,7 +1991,7 @@ impl<'a> Resolver<'a> {
.find(|a| a.has_name(sym::rustc_legacy_const_generics))?;
let mut ret = Vec::new();
for meta in attr.meta_item_list()? {
- match meta.literal()?.kind {
+ match meta.lit()?.kind {
LitKind::Int(a, _) => ret.push(a as usize),
_ => panic!("invalid arg index"),
}
@@ -1999,11 +2029,7 @@ impl<'a> Resolver<'a> {
// Items that go to reexport table encoded to metadata and visible through it to other crates.
fn is_reexport(&self, binding: &NameBinding<'a>) -> Option<def::Res<!>> {
- // FIXME: Consider changing the binding inserted by `#[macro_export] macro_rules`
- // into the crate root to actual `NameBindingKind::Import`.
- if binding.is_import()
- || matches!(binding.kind, NameBindingKind::Res(_, _is_macro_export @ true))
- {
+ if binding.is_import() {
let res = binding.res().expect_non_local();
// Ambiguous imports are treated as errors at this point and are
// not exposed to other crates (see #36837 for more details).
diff --git a/compiler/rustc_resolve/src/macros.rs b/compiler/rustc_resolve/src/macros.rs
index 9526296f9..8c7972f8e 100644
--- a/compiler/rustc_resolve/src/macros.rs
+++ b/compiler/rustc_resolve/src/macros.rs
@@ -356,7 +356,7 @@ impl<'a> ResolverExpand for Resolver<'a> {
has_derive_copy: false,
});
let parent_scope = self.invocation_parent_scopes[&expn_id];
- for (i, (path, _, opt_ext)) in entry.resolutions.iter_mut().enumerate() {
+ for (i, (path, _, opt_ext, _)) in entry.resolutions.iter_mut().enumerate() {
if opt_ext.is_none() {
*opt_ext = Some(
match self.resolve_macro_path(
diff --git a/compiler/rustc_save_analysis/src/dump_visitor.rs b/compiler/rustc_save_analysis/src/dump_visitor.rs
index df5d992f6..9ae07cb00 100644
--- a/compiler/rustc_save_analysis/src/dump_visitor.rs
+++ b/compiler/rustc_save_analysis/src/dump_visitor.rs
@@ -111,7 +111,7 @@ impl<'tcx> DumpVisitor<'tcx> {
self.save_ctxt.lookup_def_id(ref_id)
}
- pub fn dump_crate_info(&mut self, name: &str) {
+ pub fn dump_crate_info(&mut self, name: Symbol) {
let source_file = self.tcx.sess.local_crate_source_file.as_ref();
let crate_root = source_file.map(|source_file| {
let source_file = Path::new(source_file);
@@ -124,7 +124,7 @@ impl<'tcx> DumpVisitor<'tcx> {
let data = CratePreludeData {
crate_id: GlobalCrateId {
- name: name.into(),
+ name: name.to_string(),
disambiguator: (self.tcx.sess.local_stable_crate_id().to_u64(), 0),
},
crate_root: crate_root.unwrap_or_else(|| "<no source>".to_owned()),
@@ -135,7 +135,7 @@ impl<'tcx> DumpVisitor<'tcx> {
self.dumper.crate_prelude(data);
}
- pub fn dump_compilation_options(&mut self, input: &Input, crate_name: &str) {
+ pub fn dump_compilation_options(&mut self, input: &Input, crate_name: Symbol) {
// Apply possible `remap-path-prefix` remapping to the input source file
// (and don't include remapping args anymore)
let (program, arguments) = {
@@ -185,13 +185,13 @@ impl<'tcx> DumpVisitor<'tcx> {
}
}
- fn write_sub_paths(&mut self, path: &'tcx hir::Path<'tcx>) {
+ fn write_sub_paths<R>(&mut self, path: &'tcx hir::Path<'tcx, R>) {
self.write_segments(path.segments)
}
// As write_sub_paths, but does not process the last ident in the path (assuming it
// will be processed elsewhere). See note on write_sub_paths about global.
- fn write_sub_paths_truncated(&mut self, path: &'tcx hir::Path<'tcx>) {
+ fn write_sub_paths_truncated<R>(&mut self, path: &'tcx hir::Path<'tcx, R>) {
if let [segments @ .., _] = path.segments {
self.write_segments(segments)
}
@@ -527,9 +527,9 @@ impl<'tcx> DumpVisitor<'tcx> {
let value = format!("{}::{} {{ {} }}", enum_data.name, name, fields_str);
if !self.span.filter_generated(name_span) {
let span = self.span_from_span(name_span);
- let id = id_from_hir_id(variant.id, &self.save_ctxt);
+ let id = id_from_hir_id(variant.hir_id, &self.save_ctxt);
let parent = Some(id_from_def_id(item.owner_id.to_def_id()));
- let attrs = self.tcx.hir().attrs(variant.id);
+ let attrs = self.tcx.hir().attrs(variant.hir_id);
self.dumper.dump_def(
&access,
@@ -552,7 +552,7 @@ impl<'tcx> DumpVisitor<'tcx> {
}
ref v => {
let mut value = format!("{}::{}", enum_data.name, name);
- if let hir::VariantData::Tuple(fields, _) = v {
+ if let hir::VariantData::Tuple(fields, _, _) = v {
value.push('(');
value.push_str(
&fields
@@ -565,9 +565,9 @@ impl<'tcx> DumpVisitor<'tcx> {
}
if !self.span.filter_generated(name_span) {
let span = self.span_from_span(name_span);
- let id = id_from_hir_id(variant.id, &self.save_ctxt);
+ let id = id_from_hir_id(variant.hir_id, &self.save_ctxt);
let parent = Some(id_from_def_id(item.owner_id.to_def_id()));
- let attrs = self.tcx.hir().attrs(variant.id);
+ let attrs = self.tcx.hir().attrs(variant.hir_id);
self.dumper.dump_def(
&access,
@@ -591,7 +591,7 @@ impl<'tcx> DumpVisitor<'tcx> {
}
for field in variant.data.fields() {
- self.process_struct_field_def(field, variant.id);
+ self.process_struct_field_def(field, variant.hir_id);
self.visit_ty(field.ty);
}
}
@@ -1029,7 +1029,7 @@ impl<'tcx> DumpVisitor<'tcx> {
trait_item.hir_id(),
trait_item.ident,
Some(bounds),
- default_ty.as_ref().map(|ty| &**ty),
+ default_ty.as_deref(),
&self.save_ctxt,
),
attributes: lower_attributes(attrs.to_vec(), &self.save_ctxt),
diff --git a/compiler/rustc_save_analysis/src/lib.rs b/compiler/rustc_save_analysis/src/lib.rs
index d0155c908..7735c5713 100644
--- a/compiler/rustc_save_analysis/src/lib.rs
+++ b/compiler/rustc_save_analysis/src/lib.rs
@@ -94,8 +94,8 @@ impl<'tcx> SaveContext<'tcx> {
}
}
- // Returns path to the compilation output (e.g., libfoo-12345678.rmeta)
- pub fn compilation_output(&self, crate_name: &str) -> PathBuf {
+ /// Returns path to the compilation output (e.g., libfoo-12345678.rmeta)
+ pub fn compilation_output(&self, crate_name: Symbol) -> PathBuf {
let sess = &self.tcx.sess;
// Save-analysis is emitted per whole session, not per each crate type
let crate_type = sess.crate_types()[0];
@@ -112,7 +112,7 @@ impl<'tcx> SaveContext<'tcx> {
}
}
- // List external crates used by the current crate.
+ /// List external crates used by the current crate.
pub fn get_external_crates(&self) -> Vec<ExternalCrateData> {
let mut result = Vec::with_capacity(self.tcx.crates(()).len());
@@ -319,7 +319,7 @@ impl<'tcx> SaveContext<'tcx> {
qualname,
value,
parent: None,
- children: def.variants.iter().map(|v| id_from_hir_id(v.id, self)).collect(),
+ children: def.variants.iter().map(|v| id_from_hir_id(v.hir_id, self)).collect(),
decl_id: None,
docs: self.docs_for_attrs(attrs),
sig: sig::item_signature(item, self),
@@ -594,7 +594,9 @@ impl<'tcx> SaveContext<'tcx> {
match self.tcx.hir().get(hir_id) {
Node::TraitRef(tr) => tr.path.res,
- Node::Item(&hir::Item { kind: hir::ItemKind::Use(path, _), .. }) => path.res,
+ Node::Item(&hir::Item { kind: hir::ItemKind::Use(path, _), .. }) => {
+ path.res.get(0).copied().unwrap_or(Res::Err)
+ }
Node::PathSegment(seg) => {
if seg.res != Res::Err {
seg.res
@@ -892,8 +894,8 @@ pub struct DumpHandler<'a> {
}
impl<'a> DumpHandler<'a> {
- pub fn new(odir: Option<&'a Path>, cratename: &str) -> DumpHandler<'a> {
- DumpHandler { odir, cratename: cratename.to_owned() }
+ pub fn new(odir: Option<&'a Path>, cratename: Symbol) -> DumpHandler<'a> {
+ DumpHandler { odir, cratename: cratename.to_string() }
}
fn output_file(&self, ctx: &SaveContext<'_>) -> (BufWriter<File>, PathBuf) {
@@ -958,7 +960,7 @@ impl SaveHandler for CallbackHandler<'_> {
pub fn process_crate<'l, 'tcx, H: SaveHandler>(
tcx: TyCtxt<'tcx>,
- cratename: &str,
+ cratename: Symbol,
input: &'l Input,
config: Option<Config>,
mut handler: H,
diff --git a/compiler/rustc_save_analysis/src/sig.rs b/compiler/rustc_save_analysis/src/sig.rs
index 83c51d213..9197a28c1 100644
--- a/compiler/rustc_save_analysis/src/sig.rs
+++ b/compiler/rustc_save_analysis/src/sig.rs
@@ -167,9 +167,9 @@ impl<'hir> Sig for hir::Ty<'hir> {
}
hir::TyKind::Rptr(ref lifetime, ref mt) => {
let mut prefix = "&".to_owned();
- prefix.push_str(&lifetime.name.ident().to_string());
+ prefix.push_str(&lifetime.ident.to_string());
prefix.push(' ');
- if let hir::Mutability::Mut = mt.mutbl {
+ if mt.mutbl.is_mut() {
prefix.push_str("mut ");
};
@@ -332,7 +332,7 @@ impl<'hir> Sig for hir::Item<'hir> {
match self.kind {
hir::ItemKind::Static(ref ty, m, ref body) => {
let mut text = "static ".to_owned();
- if m == hir::Mutability::Mut {
+ if m.is_mut() {
text.push_str("mut ");
}
let name = self.ident.to_string();
@@ -693,7 +693,7 @@ impl<'hir> Sig for hir::Variant<'hir> {
text.push('}');
Ok(Signature { text, defs, refs })
}
- hir::VariantData::Tuple(fields, id) => {
+ hir::VariantData::Tuple(fields, id, _) => {
let name_def = SigElement {
id: id_from_hir_id(id, scx),
start: offset,
@@ -712,7 +712,7 @@ impl<'hir> Sig for hir::Variant<'hir> {
text.push(')');
Ok(Signature { text, defs, refs })
}
- hir::VariantData::Unit(id) => {
+ hir::VariantData::Unit(id, _) => {
let name_def = SigElement {
id: id_from_hir_id(id, scx),
start: offset,
diff --git a/compiler/rustc_serialize/Cargo.toml b/compiler/rustc_serialize/Cargo.toml
index 3b0b3144f..db0ef7354 100644
--- a/compiler/rustc_serialize/Cargo.toml
+++ b/compiler/rustc_serialize/Cargo.toml
@@ -6,7 +6,7 @@ edition = "2021"
[dependencies]
indexmap = "1.9.1"
smallvec = { version = "1.8.1", features = ["union", "may_dangle"] }
-thin-vec = "0.2.8"
+thin-vec = "0.2.9"
[dev-dependencies]
rustc_macros = { path = "../rustc_macros" }
diff --git a/compiler/rustc_serialize/src/leb128.rs b/compiler/rustc_serialize/src/leb128.rs
index 08b3c0542..7dad9aa01 100644
--- a/compiler/rustc_serialize/src/leb128.rs
+++ b/compiler/rustc_serialize/src/leb128.rs
@@ -1,22 +1,19 @@
-#![macro_use]
-
-macro_rules! max_leb128_len {
- ($int_ty:ty) => {
- // The longest LEB128 encoding for an integer uses 7 bits per byte.
- (std::mem::size_of::<$int_ty>() * 8 + 6) / 7
- };
+/// Returns the length of the longest LEB128 encoding for `T`, assuming `T` is an integer type
+pub const fn max_leb128_len<T>() -> usize {
+ // The longest LEB128 encoding for an integer uses 7 bits per byte.
+ (std::mem::size_of::<T>() * 8 + 6) / 7
}
-// Returns the longest LEB128 encoding of all supported integer types.
-pub const fn max_leb128_len() -> usize {
- max_leb128_len!(u128)
+/// Returns the length of the longest LEB128 encoding of all supported integer types.
+pub const fn largest_max_leb128_len() -> usize {
+ max_leb128_len::<u128>()
}
macro_rules! impl_write_unsigned_leb128 {
($fn_name:ident, $int_ty:ty) => {
#[inline]
pub fn $fn_name(
- out: &mut [::std::mem::MaybeUninit<u8>; max_leb128_len!($int_ty)],
+ out: &mut [::std::mem::MaybeUninit<u8>; max_leb128_len::<$int_ty>()],
mut value: $int_ty,
) -> &[u8] {
let mut i = 0;
@@ -90,7 +87,7 @@ macro_rules! impl_write_signed_leb128 {
($fn_name:ident, $int_ty:ty) => {
#[inline]
pub fn $fn_name(
- out: &mut [::std::mem::MaybeUninit<u8>; max_leb128_len!($int_ty)],
+ out: &mut [::std::mem::MaybeUninit<u8>; max_leb128_len::<$int_ty>()],
mut value: $int_ty,
) -> &[u8] {
let mut i = 0;
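
The macro-to-`const fn` change above keeps the same arithmetic: LEB128 stores 7 payload bits per byte, so the worst-case length is the type's bit width rounded up to a multiple of 7. A standalone sketch (independent of rustc's encoder) showing the computation next to a simple unsigned encoder:

// Not rustc's API: a self-contained illustration of the LEB128 length bound.
const fn max_leb128_len<T>() -> usize {
    // 7 payload bits per byte, rounded up.
    (std::mem::size_of::<T>() * 8 + 6) / 7
}

fn write_u32_leb128(mut value: u32, out: &mut Vec<u8>) {
    loop {
        if value < 0x80 {
            out.push(value as u8);
            break;
        } else {
            // Low 7 bits plus a continuation bit.
            out.push(((value & 0x7f) | 0x80) as u8);
            value >>= 7;
        }
    }
}

fn main() {
    assert_eq!(max_leb128_len::<u32>(), 5);   // ceil(32 / 7)
    assert_eq!(max_leb128_len::<u128>(), 19); // ceil(128 / 7), the "largest" case
    let mut buf = Vec::new();
    write_u32_leb128(300, &mut buf);
    assert_eq!(buf, [0xac, 0x02]); // 300 = 0b1_0010_1100
}
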
diff --git a/compiler/rustc_serialize/src/opaque.rs b/compiler/rustc_serialize/src/opaque.rs
index f35cc08f4..0afeb86fc 100644
--- a/compiler/rustc_serialize/src/opaque.rs
+++ b/compiler/rustc_serialize/src/opaque.rs
@@ -1,4 +1,4 @@
-use crate::leb128::{self, max_leb128_len};
+use crate::leb128::{self, largest_max_leb128_len};
use crate::serialize::{Decodable, Decoder, Encodable, Encoder};
use std::convert::TryInto;
use std::fs::File;
@@ -32,7 +32,7 @@ impl MemEncoder {
macro_rules! write_leb128 {
($enc:expr, $value:expr, $int_ty:ty, $fun:ident) => {{
- const MAX_ENCODED_LEN: usize = max_leb128_len!($int_ty);
+ const MAX_ENCODED_LEN: usize = $crate::leb128::max_leb128_len::<$int_ty>();
let old_len = $enc.data.len();
if MAX_ENCODED_LEN > $enc.data.capacity() - old_len {
@@ -155,19 +155,19 @@ impl Encoder for MemEncoder {
pub type FileEncodeResult = Result<usize, io::Error>;
-// `FileEncoder` encodes data to file via fixed-size buffer.
-//
-// When encoding large amounts of data to a file, using `FileEncoder` may be
-// preferred over using `MemEncoder` to encode to a `Vec`, and then writing the
-// `Vec` to file, as the latter uses as much memory as there is encoded data,
-// while the former uses the fixed amount of memory allocated to the buffer.
-// `FileEncoder` also has the advantage of not needing to reallocate as data
-// is appended to it, but the disadvantage of requiring more error handling,
-// which has some runtime overhead.
+/// `FileEncoder` encodes data to file via fixed-size buffer.
+///
+/// When encoding large amounts of data to a file, using `FileEncoder` may be
+/// preferred over using `MemEncoder` to encode to a `Vec`, and then writing the
+/// `Vec` to file, as the latter uses as much memory as there is encoded data,
+/// while the former uses the fixed amount of memory allocated to the buffer.
+/// `FileEncoder` also has the advantage of not needing to reallocate as data
+/// is appended to it, but the disadvantage of requiring more error handling,
+/// which has some runtime overhead.
pub struct FileEncoder {
- // The input buffer. For adequate performance, we need more control over
- // buffering than `BufWriter` offers. If `BufWriter` ever offers a raw
- // buffer access API, we can use it, and remove `buf` and `buffered`.
+ /// The input buffer. For adequate performance, we need more control over
+ /// buffering than `BufWriter` offers. If `BufWriter` ever offers a raw
+ /// buffer access API, we can use it, and remove `buf` and `buffered`.
buf: Box<[MaybeUninit<u8>]>,
buffered: usize,
flushed: usize,
@@ -186,12 +186,12 @@ impl FileEncoder {
pub fn with_capacity<P: AsRef<Path>>(path: P, capacity: usize) -> io::Result<Self> {
// Require capacity at least as large as the largest LEB128 encoding
// here, so that we don't have to check or handle this on every write.
- assert!(capacity >= max_leb128_len());
+ assert!(capacity >= largest_max_leb128_len());
// Require capacity small enough such that some capacity checks can be
// done using guaranteed non-overflowing add rather than sub, which
// shaves an instruction off those code paths (on x86 at least).
- assert!(capacity <= usize::MAX - max_leb128_len());
+ assert!(capacity <= usize::MAX - largest_max_leb128_len());
// Create the file for reading and writing, because some encoders do both
// (e.g. the metadata encoder when -Zmeta-stats is enabled)
@@ -411,7 +411,7 @@ impl Drop for FileEncoder {
macro_rules! file_encoder_write_leb128 {
($enc:expr, $value:expr, $int_ty:ty, $fun:ident) => {{
- const MAX_ENCODED_LEN: usize = max_leb128_len!($int_ty);
+ const MAX_ENCODED_LEN: usize = $crate::leb128::max_leb128_len::<$int_ty>();
// We ensure this during `FileEncoder` construction.
debug_assert!($enc.capacity() >= MAX_ENCODED_LEN);
@@ -711,7 +711,7 @@ impl<'a> Decodable<MemDecoder<'a>> for Vec<u8> {
}
}
-// An integer that will always encode to 8 bytes.
+/// An integer that will always encode to 8 bytes.
pub struct IntEncodedWithFixedSize(pub u64);
impl IntEncodedWithFixedSize {
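
As the promoted doc comment explains, `FileEncoder` trades a fixed-size buffer (bounded memory, more error handling) against `MemEncoder`'s grow-as-you-go `Vec`. A minimal standalone sketch of that fixed-buffer idea, not rustc's actual implementation:

use std::fs::File;
use std::io::{self, Write};

struct FixedBufEncoder {
    file: File,
    buf: Vec<u8>,
    capacity: usize,
}

impl FixedBufEncoder {
    fn new(path: &str, capacity: usize) -> io::Result<Self> {
        Ok(Self { file: File::create(path)?, buf: Vec::with_capacity(capacity), capacity })
    }

    fn write(&mut self, bytes: &[u8]) -> io::Result<()> {
        // Flush when the next write would not fit, so memory stays bounded.
        if self.buf.len() + bytes.len() > self.capacity {
            self.flush()?;
        }
        if bytes.len() > self.capacity {
            // Oversized writes bypass the buffer entirely.
            self.file.write_all(bytes)
        } else {
            self.buf.extend_from_slice(bytes);
            Ok(())
        }
    }

    fn flush(&mut self) -> io::Result<()> {
        self.file.write_all(&self.buf)?;
        self.buf.clear();
        Ok(())
    }
}

fn main() -> io::Result<()> {
    let mut enc = FixedBufEncoder::new("out.bin", 8192)?;
    enc.write(&42u64.to_le_bytes())?;
    enc.flush()
}
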
diff --git a/compiler/rustc_session/Cargo.toml b/compiler/rustc_session/Cargo.toml
index 6b1eaa4d3..cbbba2252 100644
--- a/compiler/rustc_session/Cargo.toml
+++ b/compiler/rustc_session/Cargo.toml
@@ -17,3 +17,11 @@ rustc_span = { path = "../rustc_span" }
rustc_fs_util = { path = "../rustc_fs_util" }
rustc_ast = { path = "../rustc_ast" }
rustc_lint_defs = { path = "../rustc_lint_defs" }
+smallvec = "1.8.1"
+termize = "0.1.1"
+
+[target.'cfg(unix)'.dependencies]
+libc = "0.2"
+
+[target.'cfg(windows)'.dependencies]
+winapi = { version = "0.3", features = ["libloaderapi"] }
diff --git a/compiler/rustc_session/src/cgu_reuse_tracker.rs b/compiler/rustc_session/src/cgu_reuse_tracker.rs
index 2336d9936..8703e5754 100644
--- a/compiler/rustc_session/src/cgu_reuse_tracker.rs
+++ b/compiler/rustc_session/src/cgu_reuse_tracker.rs
@@ -121,7 +121,7 @@ impl CguReuseTracker {
let at_least = if at_least { 1 } else { 0 };
IncorrectCguReuseType {
span: error_span.0,
- cgu_user_name: &cgu_user_name,
+ cgu_user_name,
actual_reuse,
expected_reuse,
at_least,
diff --git a/compiler/rustc_session/src/config.rs b/compiler/rustc_session/src/config.rs
index f2ee52262..7a20100fd 100644
--- a/compiler/rustc_session/src/config.rs
+++ b/compiler/rustc_session/src/config.rs
@@ -10,8 +10,8 @@ use crate::{lint, HashStableContext};
use rustc_data_structures::fx::{FxHashMap, FxHashSet};
-use rustc_data_structures::stable_hasher::ToStableHashKey;
-use rustc_target::abi::{Align, TargetDataLayout};
+use rustc_data_structures::stable_hasher::{StableOrd, ToStableHashKey};
+use rustc_target::abi::Align;
use rustc_target::spec::{PanicStrategy, SanitizerSet, SplitDebuginfo};
use rustc_target::spec::{Target, TargetTriple, TargetWarnings, TARGETS};
@@ -288,6 +288,9 @@ pub enum OutputType {
DepInfo,
}
+// Safety: Trivial C-Style enums have a stable sort order across compilation sessions.
+unsafe impl StableOrd for OutputType {}
+
impl<HCX: HashStableContext> ToStableHashKey<HCX> for OutputType {
type KeyType = Self;
@@ -548,6 +551,7 @@ pub enum PrintRequest {
NativeStaticLibs,
StackProtectorStrategies,
LinkArgs,
+ SplitDebuginfo,
}
pub enum Input {
@@ -621,7 +625,7 @@ impl OutputFilenames {
/// should be placed on disk.
pub fn output_path(&self, flavor: OutputType) -> PathBuf {
let extension = flavor.extension();
- self.with_directory_and_extension(&self.out_directory, &extension)
+ self.with_directory_and_extension(&self.out_directory, extension)
}
/// Gets the path where a compilation artifact of the given type for the
@@ -658,7 +662,7 @@ impl OutputFilenames {
let temps_directory = self.temps_directory.as_ref().unwrap_or(&self.out_directory);
- self.with_directory_and_extension(&temps_directory, &extension)
+ self.with_directory_and_extension(temps_directory, &extension)
}
pub fn with_extension(&self, extension: &str) -> PathBuf {
@@ -738,7 +742,7 @@ impl Default for Options {
actually_rustdoc: false,
trimmed_def_paths: TrimmedDefPaths::default(),
cli_forced_codegen_units: None,
- cli_forced_thinlto_off: false,
+ cli_forced_local_thinlto_off: false,
remap_path_prefix: Vec::new(),
real_rust_source_base_dir: None,
edition: DEFAULT_EDITION,
@@ -794,6 +798,7 @@ impl UnstableOptions {
report_delayed_bugs: self.report_delayed_bugs,
macro_backtrace: self.macro_backtrace,
deduplicate_diagnostics: self.deduplicate_diagnostics,
+ track_diagnostics: self.track_diagnostics,
}
}
}
@@ -898,7 +903,7 @@ fn default_configuration(sess: &Session) -> CrateConfig {
let min_atomic_width = sess.target.min_atomic_width();
let max_atomic_width = sess.target.max_atomic_width();
let atomic_cas = sess.target.atomic_cas;
- let layout = TargetDataLayout::parse(&sess.target).unwrap_or_else(|err| {
+ let layout = sess.target.parse_data_layout().unwrap_or_else(|err| {
sess.emit_fatal(err);
});
@@ -1157,7 +1162,7 @@ impl CrateCheckConfig {
values_target_family
.extend(target.options.families.iter().map(|family| Symbol::intern(family)));
values_target_arch.insert(Symbol::intern(&target.arch));
- values_target_endian.insert(Symbol::intern(&target.options.endian.as_str()));
+ values_target_endian.insert(Symbol::intern(target.options.endian.as_str()));
values_target_env.insert(Symbol::intern(&target.options.env));
values_target_abi.insert(Symbol::intern(&target.options.abi));
values_target_vendor.insert(Symbol::intern(&target.options.vendor));
@@ -1478,7 +1483,7 @@ pub fn get_cmd_lint_options(
/// Parses the `--color` flag.
pub fn parse_color(matches: &getopts::Matches) -> ColorConfig {
- match matches.opt_str("color").as_ref().map(|s| &s[..]) {
+ match matches.opt_str("color").as_deref() {
Some("auto") => ColorConfig::Auto,
Some("always") => ColorConfig::Always,
Some("never") => ColorConfig::Never,
@@ -1587,7 +1592,7 @@ pub fn parse_error_format(
// is unstable, it will not be present. We have to use `opts_present` not
// `opt_present` because the latter will panic.
let error_format = if matches.opts_present(&["error-format".to_owned()]) {
- match matches.opt_str("error-format").as_ref().map(|s| &s[..]) {
+ match matches.opt_str("error-format").as_deref() {
None | Some("human") => {
ErrorOutputType::HumanReadable(HumanReadableErrorType::Default(color))
}
@@ -1720,7 +1725,7 @@ fn should_override_cgus_and_disable_thinlto(
error_format: ErrorOutputType,
mut codegen_units: Option<usize>,
) -> (bool, Option<usize>) {
- let mut disable_thinlto = false;
+ let mut disable_local_thinlto = false;
// Issue #30063: if user requests LLVM-related output to one
// particular path, disable codegen-units.
let incompatible: Vec<_> = output_types
@@ -1745,12 +1750,12 @@ fn should_override_cgus_and_disable_thinlto(
}
early_warn(error_format, "resetting to default -C codegen-units=1");
codegen_units = Some(1);
- disable_thinlto = true;
+ disable_local_thinlto = true;
}
}
_ => {
codegen_units = Some(1);
- disable_thinlto = true;
+ disable_local_thinlto = true;
}
}
}
@@ -1759,7 +1764,7 @@ fn should_override_cgus_and_disable_thinlto(
early_error(error_format, "value for codegen units must be a positive non-zero integer");
}
- (disable_thinlto, codegen_units)
+ (disable_local_thinlto, codegen_units)
}
fn check_thread_count(unstable_opts: &UnstableOptions, error_format: ErrorOutputType) {
@@ -1788,34 +1793,50 @@ fn collect_print_requests(
cg.target_feature = String::new();
}
- prints.extend(matches.opt_strs("print").into_iter().map(|s| match &*s {
- "crate-name" => PrintRequest::CrateName,
- "file-names" => PrintRequest::FileNames,
- "sysroot" => PrintRequest::Sysroot,
- "target-libdir" => PrintRequest::TargetLibdir,
- "cfg" => PrintRequest::Cfg,
- "calling-conventions" => PrintRequest::CallingConventions,
- "target-list" => PrintRequest::TargetList,
- "target-cpus" => PrintRequest::TargetCPUs,
- "target-features" => PrintRequest::TargetFeatures,
- "relocation-models" => PrintRequest::RelocationModels,
- "code-models" => PrintRequest::CodeModels,
- "tls-models" => PrintRequest::TlsModels,
- "native-static-libs" => PrintRequest::NativeStaticLibs,
- "stack-protector-strategies" => PrintRequest::StackProtectorStrategies,
- "target-spec-json" => {
- if unstable_opts.unstable_options {
- PrintRequest::TargetSpec
- } else {
+ const PRINT_REQUESTS: &[(&str, PrintRequest)] = &[
+ ("crate-name", PrintRequest::CrateName),
+ ("file-names", PrintRequest::FileNames),
+ ("sysroot", PrintRequest::Sysroot),
+ ("target-libdir", PrintRequest::TargetLibdir),
+ ("cfg", PrintRequest::Cfg),
+ ("calling-conventions", PrintRequest::CallingConventions),
+ ("target-list", PrintRequest::TargetList),
+ ("target-cpus", PrintRequest::TargetCPUs),
+ ("target-features", PrintRequest::TargetFeatures),
+ ("relocation-models", PrintRequest::RelocationModels),
+ ("code-models", PrintRequest::CodeModels),
+ ("tls-models", PrintRequest::TlsModels),
+ ("native-static-libs", PrintRequest::NativeStaticLibs),
+ ("stack-protector-strategies", PrintRequest::StackProtectorStrategies),
+ ("target-spec-json", PrintRequest::TargetSpec),
+ ("link-args", PrintRequest::LinkArgs),
+ ("split-debuginfo", PrintRequest::SplitDebuginfo),
+ ];
+
+ prints.extend(matches.opt_strs("print").into_iter().map(|req| {
+ match PRINT_REQUESTS.iter().find(|&&(name, _)| name == req) {
+ Some((_, PrintRequest::TargetSpec)) => {
+ if unstable_opts.unstable_options {
+ PrintRequest::TargetSpec
+ } else {
+ early_error(
+ error_format,
+ "the `-Z unstable-options` flag must also be passed to \
+ enable the target-spec-json print option",
+ );
+ }
+ }
+ Some(&(_, print_request)) => print_request,
+ None => {
+ let prints =
+ PRINT_REQUESTS.iter().map(|(name, _)| format!("`{name}`")).collect::<Vec<_>>();
+ let prints = prints.join(", ");
early_error(
error_format,
- "the `-Z unstable-options` flag must also be passed to \
- enable the target-spec-json print option",
+ &format!("unknown print request `{req}`. Valid print requests are: {prints}"),
);
}
}
- "link-args" => PrintRequest::LinkArgs,
- req => early_error(error_format, &format!("unknown print request `{req}`")),
}));
prints
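
The rewrite above replaces a hand-written `match` with a const lookup table, which also lets the error message enumerate every valid value for free. A standalone sketch of the same pattern with a trimmed-down enum (illustrative, not the full rustc list):

#[derive(Clone, Copy, Debug, PartialEq)]
enum PrintRequest {
    CrateName,
    FileNames,
    Sysroot,
}

const PRINT_REQUESTS: &[(&str, PrintRequest)] = &[
    ("crate-name", PrintRequest::CrateName),
    ("file-names", PrintRequest::FileNames),
    ("sysroot", PrintRequest::Sysroot),
];

fn parse_print_request(req: &str) -> Result<PrintRequest, String> {
    match PRINT_REQUESTS.iter().find(|&&(name, _)| name == req) {
        Some(&(_, print_request)) => Ok(print_request),
        None => {
            // The table doubles as the source of the "valid values" list.
            let valid = PRINT_REQUESTS
                .iter()
                .map(|(name, _)| format!("`{name}`"))
                .collect::<Vec<_>>()
                .join(", ");
            Err(format!("unknown print request `{req}`. Valid print requests are: {valid}"))
        }
    }
}

fn main() {
    assert_eq!(parse_print_request("sysroot"), Ok(PrintRequest::Sysroot));
    assert!(parse_print_request("bogus").unwrap_err().contains("`crate-name`"));
}
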
@@ -1828,7 +1849,7 @@ pub fn parse_target_triple(
match matches.opt_str("target") {
Some(target) if target.ends_with(".json") => {
let path = Path::new(&target);
- TargetTriple::from_path(&path).unwrap_or_else(|_| {
+ TargetTriple::from_path(path).unwrap_or_else(|_| {
early_error(error_format, &format!("target file {path:?} does not exist"))
})
}
@@ -1974,7 +1995,7 @@ fn parse_native_lib_modifiers(
) -> (NativeLibKind, Option<bool>) {
let mut verbatim = None;
for modifier in modifiers.split(',') {
- let (modifier, value) = match modifier.strip_prefix(&['+', '-']) {
+ let (modifier, value) = match modifier.strip_prefix(['+', '-']) {
Some(m) => (m, modifier.starts_with('+')),
None => early_error(
error_format,
@@ -2011,10 +2032,7 @@ fn parse_native_lib_modifiers(
"linking modifier `bundle` is only compatible with `static` linking kind",
),
- ("verbatim", _) => {
- report_unstable_modifier();
- assign_modifier(&mut verbatim)
- }
+ ("verbatim", _) => assign_modifier(&mut verbatim),
("whole-archive", NativeLibKind::Static { whole_archive, .. }) => {
assign_modifier(whole_archive)
@@ -2249,7 +2267,7 @@ pub fn build_session_options(matches: &getopts::Matches) -> Options {
let output_types = parse_output_types(&unstable_opts, matches, error_format);
let mut cg = CodegenOptions::build(matches, error_format);
- let (disable_thinlto, mut codegen_units) = should_override_cgus_and_disable_thinlto(
+ let (disable_local_thinlto, mut codegen_units) = should_override_cgus_and_disable_thinlto(
&output_types,
matches,
error_format,
@@ -2406,7 +2424,7 @@ pub fn build_session_options(matches: &getopts::Matches) -> Options {
let mut search_paths = vec![];
for s in &matches.opt_strs("L") {
- search_paths.push(SearchPath::from_cli_opt(&s, error_format));
+ search_paths.push(SearchPath::from_cli_opt(s, error_format));
}
let libs = parse_libs(matches, error_format);
@@ -2431,7 +2449,7 @@ pub fn build_session_options(matches: &getopts::Matches) -> Options {
let sysroot = match &sysroot_opt {
Some(s) => s,
None => {
- tmp_buf = crate::filesearch::get_or_default_sysroot();
+ tmp_buf = crate::filesearch::get_or_default_sysroot().expect("Failed finding sysroot");
&tmp_buf
}
};
@@ -2492,7 +2510,7 @@ pub fn build_session_options(matches: &getopts::Matches) -> Options {
actually_rustdoc: false,
trimmed_def_paths: TrimmedDefPaths::default(),
cli_forced_codegen_units: codegen_units,
- cli_forced_thinlto_off: disable_thinlto,
+ cli_forced_local_thinlto_off: disable_local_thinlto,
remap_path_prefix,
real_rust_source_base_dir,
edition,
diff --git a/compiler/rustc_session/src/errors.rs b/compiler/rustc_session/src/errors.rs
index bf542faec..ee492f802 100644
--- a/compiler/rustc_session/src/errors.rs
+++ b/compiler/rustc_session/src/errors.rs
@@ -1,6 +1,9 @@
use std::num::NonZeroU32;
use crate::cgu_reuse_tracker::CguReuse;
+use crate::parse::ParseSess;
+use rustc_ast::token;
+use rustc_ast::util::literal::LitError;
use rustc_errors::MultiSpan;
use rustc_macros::Diagnostic;
use rustc_span::{Span, Symbol};
@@ -126,10 +129,10 @@ pub struct FileIsNotWriteable<'a> {
#[derive(Diagnostic)]
#[diag(session_crate_name_does_not_match)]
-pub struct CrateNameDoesNotMatch<'a> {
+pub struct CrateNameDoesNotMatch {
#[primary_span]
pub span: Span,
- pub s: &'a str,
+ pub s: Symbol,
pub name: Symbol,
}
@@ -148,11 +151,11 @@ pub struct CrateNameEmpty {
#[derive(Diagnostic)]
#[diag(session_invalid_character_in_create_name)]
-pub struct InvalidCharacterInCrateName<'a> {
+pub struct InvalidCharacterInCrateName {
#[primary_span]
pub span: Option<Span>,
pub character: char,
- pub crate_name: &'a str,
+ pub crate_name: Symbol,
}
#[derive(Subdiagnostic)]
@@ -191,3 +194,158 @@ pub enum UnleashedFeatureHelp {
span: Span,
},
}
+
+#[derive(Diagnostic)]
+#[diag(session_invalid_literal_suffix)]
+pub(crate) struct InvalidLiteralSuffix<'a> {
+ #[primary_span]
+ #[label]
+ pub span: Span,
+ // FIXME(#100717)
+ pub kind: &'a str,
+ pub suffix: Symbol,
+}
+
+#[derive(Diagnostic)]
+#[diag(session_invalid_int_literal_width)]
+#[help]
+pub(crate) struct InvalidIntLiteralWidth {
+ #[primary_span]
+ pub span: Span,
+ pub width: String,
+}
+
+#[derive(Diagnostic)]
+#[diag(session_invalid_num_literal_base_prefix)]
+#[note]
+pub(crate) struct InvalidNumLiteralBasePrefix {
+ #[primary_span]
+ #[suggestion(applicability = "maybe-incorrect", code = "{fixed}")]
+ pub span: Span,
+ pub fixed: String,
+}
+
+#[derive(Diagnostic)]
+#[diag(session_invalid_num_literal_suffix)]
+#[help]
+pub(crate) struct InvalidNumLiteralSuffix {
+ #[primary_span]
+ #[label]
+ pub span: Span,
+ pub suffix: String,
+}
+
+#[derive(Diagnostic)]
+#[diag(session_invalid_float_literal_width)]
+#[help]
+pub(crate) struct InvalidFloatLiteralWidth {
+ #[primary_span]
+ pub span: Span,
+ pub width: String,
+}
+
+#[derive(Diagnostic)]
+#[diag(session_invalid_float_literal_suffix)]
+#[help]
+pub(crate) struct InvalidFloatLiteralSuffix {
+ #[primary_span]
+ #[label]
+ pub span: Span,
+ pub suffix: String,
+}
+
+#[derive(Diagnostic)]
+#[diag(session_int_literal_too_large)]
+pub(crate) struct IntLiteralTooLarge {
+ #[primary_span]
+ pub span: Span,
+}
+
+#[derive(Diagnostic)]
+#[diag(session_hexadecimal_float_literal_not_supported)]
+pub(crate) struct HexadecimalFloatLiteralNotSupported {
+ #[primary_span]
+ #[label(session_not_supported)]
+ pub span: Span,
+}
+
+#[derive(Diagnostic)]
+#[diag(session_octal_float_literal_not_supported)]
+pub(crate) struct OctalFloatLiteralNotSupported {
+ #[primary_span]
+ #[label(session_not_supported)]
+ pub span: Span,
+}
+
+#[derive(Diagnostic)]
+#[diag(session_binary_float_literal_not_supported)]
+pub(crate) struct BinaryFloatLiteralNotSupported {
+ #[primary_span]
+ #[label(session_not_supported)]
+ pub span: Span,
+}
+
+pub fn report_lit_error(sess: &ParseSess, err: LitError, lit: token::Lit, span: Span) {
+ // Checks if `s` looks like i32 or u1234 etc.
+ fn looks_like_width_suffix(first_chars: &[char], s: &str) -> bool {
+ s.len() > 1 && s.starts_with(first_chars) && s[1..].chars().all(|c| c.is_ascii_digit())
+ }
+
+ // Try to lowercase the prefix if it's a valid base prefix.
+ fn fix_base_capitalisation(s: &str) -> Option<String> {
+ if let Some(stripped) = s.strip_prefix('B') {
+ Some(format!("0b{stripped}"))
+ } else if let Some(stripped) = s.strip_prefix('O') {
+ Some(format!("0o{stripped}"))
+ } else if let Some(stripped) = s.strip_prefix('X') {
+ Some(format!("0x{stripped}"))
+ } else {
+ None
+ }
+ }
+
+ let token::Lit { kind, suffix, .. } = lit;
+ match err {
+ // `LexerError` is an error, but it was already reported
+ // by lexer, so here we don't report it the second time.
+ LitError::LexerError => {}
+ LitError::InvalidSuffix => {
+ if let Some(suffix) = suffix {
+ sess.emit_err(InvalidLiteralSuffix { span, kind: kind.descr(), suffix });
+ }
+ }
+ LitError::InvalidIntSuffix => {
+ let suf = suffix.expect("suffix error with no suffix");
+ let suf = suf.as_str();
+ if looks_like_width_suffix(&['i', 'u'], suf) {
+ // If it looks like a width, try to be helpful.
+ sess.emit_err(InvalidIntLiteralWidth { span, width: suf[1..].into() });
+ } else if let Some(fixed) = fix_base_capitalisation(suf) {
+ sess.emit_err(InvalidNumLiteralBasePrefix { span, fixed });
+ } else {
+ sess.emit_err(InvalidNumLiteralSuffix { span, suffix: suf.to_string() });
+ }
+ }
+ LitError::InvalidFloatSuffix => {
+ let suf = suffix.expect("suffix error with no suffix");
+ let suf = suf.as_str();
+ if looks_like_width_suffix(&['f'], suf) {
+ // If it looks like a width, try to be helpful.
+ sess.emit_err(InvalidFloatLiteralWidth { span, width: suf[1..].to_string() });
+ } else {
+ sess.emit_err(InvalidFloatLiteralSuffix { span, suffix: suf.to_string() });
+ }
+ }
+ LitError::NonDecimalFloat(base) => {
+ match base {
+ 16 => sess.emit_err(HexadecimalFloatLiteralNotSupported { span }),
+ 8 => sess.emit_err(OctalFloatLiteralNotSupported { span }),
+ 2 => sess.emit_err(BinaryFloatLiteralNotSupported { span }),
+ _ => unreachable!(),
+ };
+ }
+ LitError::IntTooLarge => {
+ sess.emit_err(IntLiteralTooLarge { span });
+ }
+ }
+}
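
For reference, the base-prefix fix-up above can be exercised on its own: an uppercase `0B`/`0O`/`0X` literal reaches this helper as its suffix (e.g. `B101`), and the suggestion lowercases the prefix. A standalone copy of the helper plus a few checks (inputs are illustrative):

fn fix_base_capitalisation(s: &str) -> Option<String> {
    if let Some(stripped) = s.strip_prefix('B') {
        Some(format!("0b{stripped}"))
    } else if let Some(stripped) = s.strip_prefix('O') {
        Some(format!("0o{stripped}"))
    } else if let Some(stripped) = s.strip_prefix('X') {
        Some(format!("0x{stripped}"))
    } else {
        None
    }
}

fn main() {
    assert_eq!(fix_base_capitalisation("B101").as_deref(), Some("0b101"));
    assert_eq!(fix_base_capitalisation("O77").as_deref(), Some("0o77"));
    assert_eq!(fix_base_capitalisation("X1F").as_deref(), Some("0x1F"));
    // Anything else falls through to the generic invalid-suffix error.
    assert_eq!(fix_base_capitalisation("u32"), None);
}
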
diff --git a/compiler/rustc_session/src/filesearch.rs b/compiler/rustc_session/src/filesearch.rs
index e8edb38f5..1b66773be 100644
--- a/compiler/rustc_session/src/filesearch.rs
+++ b/compiler/rustc_session/src/filesearch.rs
@@ -1,5 +1,6 @@
//! A module for searching for libraries
+use smallvec::{smallvec, SmallVec};
use std::env;
use std::fs;
use std::iter::FromIterator;
@@ -62,9 +63,99 @@ pub fn make_target_lib_path(sysroot: &Path, target_triple: &str) -> PathBuf {
PathBuf::from_iter([sysroot, Path::new(&rustlib_path), Path::new("lib")])
}
+#[cfg(unix)]
+fn current_dll_path() -> Result<PathBuf, String> {
+ use std::ffi::{CStr, OsStr};
+ use std::os::unix::prelude::*;
+
+ unsafe {
+ let addr = current_dll_path as usize as *mut _;
+ let mut info = std::mem::zeroed();
+ if libc::dladdr(addr, &mut info) == 0 {
+ return Err("dladdr failed".into());
+ }
+ if info.dli_fname.is_null() {
+ return Err("dladdr returned null pointer".into());
+ }
+ let bytes = CStr::from_ptr(info.dli_fname).to_bytes();
+ let os = OsStr::from_bytes(bytes);
+ Ok(PathBuf::from(os))
+ }
+}
+
+#[cfg(windows)]
+fn current_dll_path() -> Result<PathBuf, String> {
+ use std::ffi::OsString;
+ use std::io;
+ use std::os::windows::prelude::*;
+ use std::ptr;
+
+ use winapi::um::libloaderapi::{
+ GetModuleFileNameW, GetModuleHandleExW, GET_MODULE_HANDLE_EX_FLAG_FROM_ADDRESS,
+ };
+
+ unsafe {
+ let mut module = ptr::null_mut();
+ let r = GetModuleHandleExW(
+ GET_MODULE_HANDLE_EX_FLAG_FROM_ADDRESS,
+ current_dll_path as usize as *mut _,
+ &mut module,
+ );
+ if r == 0 {
+ return Err(format!("GetModuleHandleExW failed: {}", io::Error::last_os_error()));
+ }
+ let mut space = Vec::with_capacity(1024);
+ let r = GetModuleFileNameW(module, space.as_mut_ptr(), space.capacity() as u32);
+ if r == 0 {
+ return Err(format!("GetModuleFileNameW failed: {}", io::Error::last_os_error()));
+ }
+ let r = r as usize;
+ if r >= space.capacity() {
+ return Err(format!("our buffer was too small? {}", io::Error::last_os_error()));
+ }
+ space.set_len(r);
+ let os = OsString::from_wide(&space);
+ Ok(PathBuf::from(os))
+ }
+}
+
+pub fn sysroot_candidates() -> SmallVec<[PathBuf; 2]> {
+ let target = crate::config::host_triple();
+ let mut sysroot_candidates: SmallVec<[PathBuf; 2]> =
+ smallvec![get_or_default_sysroot().expect("Failed finding sysroot")];
+ let path = current_dll_path().and_then(|s| Ok(s.canonicalize().map_err(|e| e.to_string())?));
+ if let Ok(dll) = path {
+ // use `parent` twice to chop off the file name and then also the
+ // directory containing the dll which should be either `lib` or `bin`.
+ if let Some(path) = dll.parent().and_then(|p| p.parent()) {
+ // The original `path` pointed at the `rustc_driver` crate's dll.
+ // Now that dll should only be in one of two locations. The first is
+ // in the compiler's libdir, for example `$sysroot/lib/*.dll`. The
+ // other is the target's libdir, for example
+ // `$sysroot/lib/rustlib/$target/lib/*.dll`.
+ //
+ // We don't know which, so let's assume that if our `path` above
+ // ends in `$target` we *could* be in the target libdir, and always
+ // assume that we may be in the main libdir.
+ sysroot_candidates.push(path.to_owned());
+
+ if path.ends_with(target) {
+ sysroot_candidates.extend(
+ path.parent() // chop off `$target`
+ .and_then(|p| p.parent()) // chop off `rustlib`
+ .and_then(|p| p.parent()) // chop off `lib`
+ .map(|s| s.to_owned()),
+ );
+ }
+ }
+ }
+
+ return sysroot_candidates;
+}
+
/// This function checks if sysroot is found using env::args().next(), and if it
-/// is not found, uses env::current_exe() to imply sysroot.
-pub fn get_or_default_sysroot() -> PathBuf {
+/// is not found, finds sysroot from current rustc_driver dll.
+pub fn get_or_default_sysroot() -> Result<PathBuf, String> {
// Follow symlinks. If the resolved path is relative, make it absolute.
fn canonicalize(path: PathBuf) -> PathBuf {
let path = fs::canonicalize(&path).unwrap_or(path);
@@ -74,17 +165,32 @@ pub fn get_or_default_sysroot() -> PathBuf {
fix_windows_verbatim_for_gcc(&path)
}
- // Use env::current_exe() to get the path of the executable following
- // symlinks/canonicalizing components.
- fn from_current_exe() -> PathBuf {
- match env::current_exe() {
- Ok(exe) => {
- let mut p = canonicalize(exe);
- p.pop();
- p.pop();
- p
- }
- Err(e) => panic!("failed to get current_exe: {e}"),
+ fn default_from_rustc_driver_dll() -> Result<PathBuf, String> {
+ let dll = current_dll_path().and_then(|s| Ok(canonicalize(s)))?;
+
+ // `dll` will be in one of the following two:
+ // - compiler's libdir: $sysroot/lib/*.dll
+ // - target's libdir: $sysroot/lib/rustlib/$target/lib/*.dll
+ //
+ // use `parent` twice to chop off the file name and then also the
+ // directory containing the dll
+ let dir = dll.parent().and_then(|p| p.parent()).ok_or(format!(
+ "Could not move 2 levels upper using `parent()` on {}",
+ dll.display()
+ ))?;
+
+ // if `dir` points to the target's libdir, move up to the sysroot
+ if dir.ends_with(crate::config::host_triple()) {
+ dir.parent() // chop off `$target`
+ .and_then(|p| p.parent()) // chop off `rustlib`
+ .and_then(|p| p.parent()) // chop off `lib`
+ .map(|s| s.to_owned())
+ .ok_or(format!(
+ "Could not move 3 levels upper using `parent()` on {}",
+ dir.display()
+ ))
+ } else {
+ Ok(dir.to_owned())
}
}
@@ -118,7 +224,5 @@ pub fn get_or_default_sysroot() -> PathBuf {
}
}
- // Check if sysroot is found using env::args().next(), and if is not found,
- // use env::current_exe() to imply sysroot.
- from_env_args_next().unwrap_or_else(from_current_exe)
+ Ok(from_env_args_next().unwrap_or(default_from_rustc_driver_dll()?))
}
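
The sysroot discovery above is plain path arithmetic once the `rustc_driver` library's path is known: go up two levels, and if that lands in the per-target libdir, go up three more. A standalone sketch with made-up paths (not rustc's actual code):

use std::path::{Path, PathBuf};

fn sysroot_from_driver_dll(dll: &Path, host_triple: &str) -> Option<PathBuf> {
    // Chop off the file name and the `lib`/`bin` directory containing the dll.
    let dir = dll.parent()?.parent()?;
    if dir.ends_with(host_triple) {
        // .../lib/rustlib/$target -> chop `$target`, `rustlib`, `lib`.
        Some(dir.parent()?.parent()?.parent()?.to_path_buf())
    } else {
        Some(dir.to_path_buf())
    }
}

fn main() {
    let host = "x86_64-unknown-linux-gnu";
    let in_compiler_libdir = Path::new("/opt/sysroot/lib/librustc_driver.so");
    let in_target_libdir = Path::new(
        "/opt/sysroot/lib/rustlib/x86_64-unknown-linux-gnu/lib/librustc_driver.so",
    );
    assert_eq!(
        sysroot_from_driver_dll(in_compiler_libdir, host),
        Some(PathBuf::from("/opt/sysroot"))
    );
    assert_eq!(
        sysroot_from_driver_dll(in_target_libdir, host),
        Some(PathBuf::from("/opt/sysroot"))
    );
}
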
diff --git a/compiler/rustc_session/src/options.rs b/compiler/rustc_session/src/options.rs
index 3f234a47a..8e9198b79 100644
--- a/compiler/rustc_session/src/options.rs
+++ b/compiler/rustc_session/src/options.rs
@@ -181,7 +181,7 @@ top_level_options!(
#[rustc_lint_opt_deny_field_access("use `Session::codegen_units` instead of this field")]
cli_forced_codegen_units: Option<usize> [UNTRACKED],
#[rustc_lint_opt_deny_field_access("use `Session::lto` instead of this field")]
- cli_forced_thinlto_off: bool [UNTRACKED],
+ cli_forced_local_thinlto_off: bool [UNTRACKED],
/// Remap source path prefixes in all output (messages, object files, debug, etc.).
remap_path_prefix: Vec<(PathBuf, PathBuf)> [TRACKED_NO_CRATE_HASH],
@@ -1382,6 +1382,9 @@ options! {
"list the symbols defined by a library crate (default: no)"),
macro_backtrace: bool = (false, parse_bool, [UNTRACKED],
"show macro backtraces (default: no)"),
+ maximal_hir_to_mir_coverage: bool = (false, parse_bool, [TRACKED],
+ "save as much information as possible about the correspondence between MIR and HIR \
+ as source scopes (default: no)"),
merge_functions: Option<MergeFunctions> = (None, parse_merge_functions, [TRACKED],
"control the operation of the MergeFunctions LLVM pass, taking \
the same values as the target option of the same name"),
@@ -1414,8 +1417,6 @@ options! {
"run all passes except codegen; no output"),
no_generate_arange_section: bool = (false, parse_no_flag, [TRACKED],
"omit DWARF address ranges that give faster lookups"),
- no_interleave_lints: bool = (false, parse_no_flag, [UNTRACKED],
- "execute lints separately; allows benchmarking individual lints"),
no_leak_check: bool = (false, parse_no_flag, [UNTRACKED],
"disable the 'leak check' for subtyping; unsound, but useful for tests"),
no_link: bool = (false, parse_no_flag, [TRACKED],
@@ -1587,6 +1588,8 @@ options! {
"choose the TLS model to use (`rustc --print tls-models` for details)"),
trace_macros: bool = (false, parse_bool, [UNTRACKED],
"for every macro invocation, print its name and arguments (default: no)"),
+ track_diagnostics: bool = (false, parse_bool, [UNTRACKED],
+ "tracks where in rustc a diagnostic was emitted"),
// Diagnostics are considered side-effects of a query (see `QuerySideEffects`) and are saved
// alongside query results and changes to translation options can affect diagnostics - so
// translation options should be tracked.
diff --git a/compiler/rustc_session/src/output.rs b/compiler/rustc_session/src/output.rs
index 2511bee46..8ee3057de 100644
--- a/compiler/rustc_session/src/output.rs
+++ b/compiler/rustc_session/src/output.rs
@@ -7,14 +7,14 @@ use crate::errors::{
use crate::Session;
use rustc_ast as ast;
use rustc_span::symbol::sym;
-use rustc_span::Span;
+use rustc_span::{Span, Symbol};
use std::path::{Path, PathBuf};
pub fn out_filename(
sess: &Session,
crate_type: CrateType,
outputs: &OutputFilenames,
- crate_name: &str,
+ crate_name: Symbol,
) -> PathBuf {
let default_filename = filename_for_input(sess, crate_type, crate_name, outputs);
let out_filename = outputs
@@ -45,9 +45,9 @@ fn is_writeable(p: &Path) -> bool {
}
}
-pub fn find_crate_name(sess: &Session, attrs: &[ast::Attribute], input: &Input) -> String {
- let validate = |s: String, span: Option<Span>| {
- validate_crate_name(sess, &s, span);
+pub fn find_crate_name(sess: &Session, attrs: &[ast::Attribute], input: &Input) -> Symbol {
+ let validate = |s: Symbol, span: Option<Span>| {
+ validate_crate_name(sess, s, span);
s
};
@@ -59,38 +59,39 @@ pub fn find_crate_name(sess: &Session, attrs: &[ast::Attribute], input: &Input)
sess.find_by_name(attrs, sym::crate_name).and_then(|at| at.value_str().map(|s| (at, s)));
if let Some(ref s) = sess.opts.crate_name {
+ let s = Symbol::intern(s);
if let Some((attr, name)) = attr_crate_name {
- if name.as_str() != s {
+ if name != s {
sess.emit_err(CrateNameDoesNotMatch { span: attr.span, s, name });
}
}
- return validate(s.clone(), None);
+ return validate(s, None);
}
if let Some((attr, s)) = attr_crate_name {
- return validate(s.to_string(), Some(attr.span));
+ return validate(s, Some(attr.span));
}
if let Input::File(ref path) = *input {
if let Some(s) = path.file_stem().and_then(|s| s.to_str()) {
if s.starts_with('-') {
sess.emit_err(CrateNameInvalid { s });
} else {
- return validate(s.replace('-', "_"), None);
+ return validate(Symbol::intern(&s.replace('-', "_")), None);
}
}
}
- "rust_out".to_string()
+ Symbol::intern("rust_out")
}
-pub fn validate_crate_name(sess: &Session, s: &str, sp: Option<Span>) {
+pub fn validate_crate_name(sess: &Session, s: Symbol, sp: Option<Span>) {
let mut err_count = 0;
{
if s.is_empty() {
err_count += 1;
sess.emit_err(CrateNameEmpty { span: sp });
}
- for c in s.chars() {
+ for c in s.as_str().chars() {
if c.is_alphanumeric() {
continue;
}
@@ -109,7 +110,7 @@ pub fn validate_crate_name(sess: &Session, s: &str, sp: Option<Span>) {
pub fn filename_for_metadata(
sess: &Session,
- crate_name: &str,
+ crate_name: Symbol,
outputs: &OutputFilenames,
) -> PathBuf {
// If the command-line specified the path, use that directly.
@@ -132,7 +133,7 @@ pub fn filename_for_metadata(
pub fn filename_for_input(
sess: &Session,
crate_type: CrateType,
- crate_name: &str,
+ crate_name: Symbol,
outputs: &OutputFilenames,
) -> PathBuf {
let libname = format!("{}{}", crate_name, sess.opts.cg.extra_filename);
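
Passing the crate name around as a `Symbol` rather than `&str`/`String` works because symbols are interned: each distinct string is stored once and represented by a small `Copy` index, so the name can be copied and compared cheaply. A minimal interner sketch to illustrate the idea (this is not rustc's `Symbol` implementation):

use std::collections::HashMap;

#[derive(Clone, Copy, PartialEq, Eq, Debug)]
struct Symbol(u32);

#[derive(Default)]
struct Interner {
    names: Vec<String>,
    map: HashMap<String, Symbol>,
}

impl Interner {
    fn intern(&mut self, s: &str) -> Symbol {
        if let Some(&sym) = self.map.get(s) {
            return sym;
        }
        let sym = Symbol(self.names.len() as u32);
        self.names.push(s.to_owned());
        self.map.insert(s.to_owned(), sym);
        sym
    }

    fn as_str(&self, sym: Symbol) -> &str {
        &self.names[sym.0 as usize]
    }
}

fn main() {
    let mut interner = Interner::default();
    let a = interner.intern("rust_out");
    let b = interner.intern("rust_out");
    assert_eq!(a, b); // comparison is an integer compare, no string walk
    assert_eq!(interner.as_str(a), "rust_out");
}
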
diff --git a/compiler/rustc_session/src/parse.rs b/compiler/rustc_session/src/parse.rs
index a199947eb..f9f4f2979 100644
--- a/compiler/rustc_session/src/parse.rs
+++ b/compiler/rustc_session/src/parse.rs
@@ -97,6 +97,7 @@ pub fn feature_err<'a>(
///
/// This variant allows you to control whether it is a library or language feature.
/// Almost always, you want to use this for a language feature. If so, prefer `feature_err`.
+#[track_caller]
pub fn feature_err_issue<'a>(
sess: &'a ParseSess,
feature: Symbol,
@@ -332,6 +333,7 @@ impl ParseSess {
self.proc_macro_quoted_spans.lock().clone()
}
+ #[track_caller]
pub fn create_err<'a>(
&'a self,
err: impl IntoDiagnostic<'a>,
@@ -339,10 +341,12 @@ impl ParseSess {
err.into_diagnostic(&self.span_diagnostic)
}
+ #[track_caller]
pub fn emit_err<'a>(&'a self, err: impl IntoDiagnostic<'a>) -> ErrorGuaranteed {
self.create_err(err).emit()
}
+ #[track_caller]
pub fn create_warning<'a>(
&'a self,
warning: impl IntoDiagnostic<'a, ()>,
@@ -350,6 +354,7 @@ impl ParseSess {
warning.into_diagnostic(&self.span_diagnostic)
}
+ #[track_caller]
pub fn emit_warning<'a>(&'a self, warning: impl IntoDiagnostic<'a, ()>) {
self.create_warning(warning).emit()
}
@@ -377,6 +382,7 @@ impl ParseSess {
}
#[rustc_lint_diagnostics]
+ #[track_caller]
pub fn struct_err(
&self,
msg: impl Into<DiagnosticMessage>,
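
The `#[track_caller]` attributes added to these diagnostic wrappers make `std::panic::Location::caller()` report the line that called the wrapper rather than the wrapper's own body, which is the kind of location the new `track_diagnostics` option wants to surface. A tiny standalone demonstration:

#[track_caller]
fn emit_err(msg: &str) {
    // With #[track_caller], this is the *caller's* file/line, not this function's.
    let loc = std::panic::Location::caller();
    eprintln!("error: {msg} (emitted at {}:{})", loc.file(), loc.line());
}

fn main() {
    emit_err("something went wrong"); // prints this line's location
}
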
diff --git a/compiler/rustc_session/src/session.rs b/compiler/rustc_session/src/session.rs
index 100c66f63..4c049a8d6 100644
--- a/compiler/rustc_session/src/session.rs
+++ b/compiler/rustc_session/src/session.rs
@@ -286,6 +286,7 @@ impl Session {
}
#[rustc_lint_diagnostics]
+ #[track_caller]
pub fn struct_span_warn<S: Into<MultiSpan>>(
&self,
sp: S,
@@ -294,6 +295,7 @@ impl Session {
self.diagnostic().struct_span_warn(sp, msg)
}
#[rustc_lint_diagnostics]
+ #[track_caller]
pub fn struct_span_warn_with_expectation<S: Into<MultiSpan>>(
&self,
sp: S,
@@ -303,6 +305,7 @@ impl Session {
self.diagnostic().struct_span_warn_with_expectation(sp, msg, id)
}
#[rustc_lint_diagnostics]
+ #[track_caller]
pub fn struct_span_warn_with_code<S: Into<MultiSpan>>(
&self,
sp: S,
@@ -312,10 +315,12 @@ impl Session {
self.diagnostic().struct_span_warn_with_code(sp, msg, code)
}
#[rustc_lint_diagnostics]
+ #[track_caller]
pub fn struct_warn(&self, msg: impl Into<DiagnosticMessage>) -> DiagnosticBuilder<'_, ()> {
self.diagnostic().struct_warn(msg)
}
#[rustc_lint_diagnostics]
+ #[track_caller]
pub fn struct_warn_with_expectation(
&self,
msg: impl Into<DiagnosticMessage>,
@@ -324,6 +329,7 @@ impl Session {
self.diagnostic().struct_warn_with_expectation(msg, id)
}
#[rustc_lint_diagnostics]
+ #[track_caller]
pub fn struct_span_allow<S: Into<MultiSpan>>(
&self,
sp: S,
@@ -332,10 +338,12 @@ impl Session {
self.diagnostic().struct_span_allow(sp, msg)
}
#[rustc_lint_diagnostics]
+ #[track_caller]
pub fn struct_allow(&self, msg: impl Into<DiagnosticMessage>) -> DiagnosticBuilder<'_, ()> {
self.diagnostic().struct_allow(msg)
}
#[rustc_lint_diagnostics]
+ #[track_caller]
pub fn struct_expect(
&self,
msg: impl Into<DiagnosticMessage>,
@@ -344,6 +352,7 @@ impl Session {
self.diagnostic().struct_expect(msg, id)
}
#[rustc_lint_diagnostics]
+ #[track_caller]
pub fn struct_span_err<S: Into<MultiSpan>>(
&self,
sp: S,
@@ -352,6 +361,7 @@ impl Session {
self.diagnostic().struct_span_err(sp, msg)
}
#[rustc_lint_diagnostics]
+ #[track_caller]
pub fn struct_span_err_with_code<S: Into<MultiSpan>>(
&self,
sp: S,
@@ -362,12 +372,14 @@ impl Session {
}
// FIXME: This method should be removed (every error should have an associated error code).
#[rustc_lint_diagnostics]
+ #[track_caller]
pub fn struct_err(
&self,
msg: impl Into<DiagnosticMessage>,
) -> DiagnosticBuilder<'_, ErrorGuaranteed> {
self.parse_sess.struct_err(msg)
}
+ #[track_caller]
#[rustc_lint_diagnostics]
pub fn struct_err_with_code(
&self,
@@ -377,6 +389,7 @@ impl Session {
self.diagnostic().struct_err_with_code(msg, code)
}
#[rustc_lint_diagnostics]
+ #[track_caller]
pub fn struct_warn_with_code(
&self,
msg: impl Into<DiagnosticMessage>,
@@ -385,6 +398,7 @@ impl Session {
self.diagnostic().struct_warn_with_code(msg, code)
}
#[rustc_lint_diagnostics]
+ #[track_caller]
pub fn struct_span_fatal<S: Into<MultiSpan>>(
&self,
sp: S,
@@ -407,6 +421,7 @@ impl Session {
}
#[rustc_lint_diagnostics]
+ #[track_caller]
pub fn span_fatal<S: Into<MultiSpan>>(&self, sp: S, msg: impl Into<DiagnosticMessage>) -> ! {
self.diagnostic().span_fatal(sp, msg)
}
@@ -424,6 +439,7 @@ impl Session {
self.diagnostic().fatal(msg).raise()
}
#[rustc_lint_diagnostics]
+ #[track_caller]
pub fn span_err_or_warn<S: Into<MultiSpan>>(
&self,
is_warning: bool,
@@ -437,6 +453,7 @@ impl Session {
}
}
#[rustc_lint_diagnostics]
+ #[track_caller]
pub fn span_err<S: Into<MultiSpan>>(
&self,
sp: S,
@@ -457,12 +474,14 @@ impl Session {
pub fn err(&self, msg: impl Into<DiagnosticMessage>) -> ErrorGuaranteed {
self.diagnostic().err(msg)
}
+ #[track_caller]
pub fn create_err<'a>(
&'a self,
err: impl IntoDiagnostic<'a>,
) -> DiagnosticBuilder<'a, ErrorGuaranteed> {
self.parse_sess.create_err(err)
}
+ #[track_caller]
pub fn create_feature_err<'a>(
&'a self,
err: impl IntoDiagnostic<'a>,
@@ -475,33 +494,40 @@ impl Session {
add_feature_diagnostics(&mut err, &self.parse_sess, feature);
err
}
+ #[track_caller]
pub fn emit_err<'a>(&'a self, err: impl IntoDiagnostic<'a>) -> ErrorGuaranteed {
self.parse_sess.emit_err(err)
}
+ #[track_caller]
pub fn create_warning<'a>(
&'a self,
err: impl IntoDiagnostic<'a, ()>,
) -> DiagnosticBuilder<'a, ()> {
self.parse_sess.create_warning(err)
}
+ #[track_caller]
pub fn emit_warning<'a>(&'a self, warning: impl IntoDiagnostic<'a, ()>) {
self.parse_sess.emit_warning(warning)
}
+ #[track_caller]
pub fn create_note<'a>(
&'a self,
note: impl IntoDiagnostic<'a, Noted>,
) -> DiagnosticBuilder<'a, Noted> {
self.parse_sess.create_note(note)
}
+ #[track_caller]
pub fn emit_note<'a>(&'a self, note: impl IntoDiagnostic<'a, Noted>) -> Noted {
self.parse_sess.emit_note(note)
}
+ #[track_caller]
pub fn create_fatal<'a>(
&'a self,
fatal: impl IntoDiagnostic<'a, !>,
) -> DiagnosticBuilder<'a, !> {
self.parse_sess.create_fatal(fatal)
}
+ #[track_caller]
pub fn emit_fatal<'a>(&'a self, fatal: impl IntoDiagnostic<'a, !>) -> ! {
self.parse_sess.emit_fatal(fatal)
}
@@ -512,9 +538,12 @@ impl Session {
pub fn has_errors(&self) -> Option<ErrorGuaranteed> {
self.diagnostic().has_errors()
}
- pub fn has_errors_or_delayed_span_bugs(&self) -> bool {
+ pub fn has_errors_or_delayed_span_bugs(&self) -> Option<ErrorGuaranteed> {
self.diagnostic().has_errors_or_delayed_span_bugs()
}
+ pub fn is_compilation_going_to_fail(&self) -> Option<ErrorGuaranteed> {
+ self.diagnostic().is_compilation_going_to_fail()
+ }
pub fn abort_if_errors(&self) {
self.diagnostic().abort_if_errors();
}
@@ -536,11 +565,15 @@ impl Session {
if self.err_count() == old_count {
Ok(result)
} else {
- Err(ErrorGuaranteed::unchecked_claim_error_was_emitted())
+ Err(self.delay_span_bug(
+ rustc_span::DUMMY_SP,
+ "`self.err_count()` changed but an error was not emitted",
+ ))
}
}
#[allow(rustc::untranslatable_diagnostic)]
#[allow(rustc::diagnostic_outside_of_impl)]
+ #[track_caller]
pub fn span_warn<S: Into<MultiSpan>>(&self, sp: S, msg: impl Into<DiagnosticMessage>) {
self.diagnostic().span_warn(sp, msg)
}
@@ -587,6 +620,8 @@ impl Session {
pub fn note_without_error(&self, msg: impl Into<DiagnosticMessage>) {
self.diagnostic().note_without_error(msg)
}
+
+ #[track_caller]
pub fn span_note_without_error<S: Into<MultiSpan>>(
&self,
sp: S,
@@ -917,6 +952,17 @@ impl Session {
) -> Option<Symbol> {
attrs.iter().find(|at| at.has_name(name)).and_then(|at| at.value_str())
}
+
+ pub fn diagnostic_width(&self) -> usize {
+ let default_column_width = 140;
+ if let Some(width) = self.opts.diagnostic_width {
+ width
+ } else if self.opts.unstable_opts.ui_testing {
+ default_column_width
+ } else {
+ termize::dimensions().map_or(default_column_width, |(w, _)| w)
+ }
+ }
}
// JUSTIFICATION: defn of the suggested wrapper fns
@@ -989,11 +1035,8 @@ impl Session {
return config::Lto::Fat;
}
config::LtoCli::Thin => {
- return if self.opts.cli_forced_thinlto_off {
- config::Lto::Fat
- } else {
- config::Lto::Thin
- };
+ // The user explicitly asked for ThinLTO
+ return config::Lto::Thin;
}
}
@@ -1005,7 +1048,7 @@ impl Session {
// If processing command line options determined that we're incompatible
// with ThinLTO (e.g., `-C lto --emit llvm-ir`) then return that option.
- if self.opts.cli_forced_thinlto_off {
+ if self.opts.cli_forced_local_thinlto_off {
return config::Lto::No;
}
@@ -1213,6 +1256,7 @@ fn default_emitter(
fallback_bundle: LazyFallbackBundle,
) -> Box<dyn Emitter + sync::Send> {
let macro_backtrace = sopts.unstable_opts.macro_backtrace;
+ let track_diagnostics = sopts.unstable_opts.track_diagnostics;
match sopts.error_format {
config::ErrorOutputType::HumanReadable(kind) => {
let (short, color_config) = kind.unzip();
@@ -1236,6 +1280,7 @@ fn default_emitter(
sopts.unstable_opts.teach,
sopts.diagnostic_width,
macro_backtrace,
+ track_diagnostics,
);
Box::new(emitter.ui_testing(sopts.unstable_opts.ui_testing))
}
@@ -1250,6 +1295,7 @@ fn default_emitter(
json_rendered,
sopts.diagnostic_width,
macro_backtrace,
+ track_diagnostics,
)
.ui_testing(sopts.unstable_opts.ui_testing),
),
@@ -1280,7 +1326,7 @@ pub fn build_session(
let sysroot = match &sopts.maybe_sysroot {
Some(sysroot) => sysroot.clone(),
- None => filesearch::get_or_default_sysroot(),
+ None => filesearch::get_or_default_sysroot().expect("Failed finding sysroot"),
};
let target_cfg = config::build_target_config(&sopts, target_override, &sysroot);
@@ -1325,7 +1371,7 @@ pub fn build_session(
let profiler = SelfProfiler::new(
directory,
sopts.crate_name.as_deref(),
- sopts.unstable_opts.self_profile_events.as_ref().map(|xs| &xs[..]),
+ sopts.unstable_opts.self_profile_events.as_deref(),
&sopts.unstable_opts.self_profile_counter,
);
match profiler {
@@ -1359,7 +1405,7 @@ pub fn build_session(
local_crate_source_file.map(|path| file_path_mapping.map_prefix(path).0);
let optimization_fuel = Lock::new(OptimizationFuel {
- remaining: sopts.unstable_opts.fuel.as_ref().map_or(0, |i| i.1),
+ remaining: sopts.unstable_opts.fuel.as_ref().map_or(0, |&(_, i)| i),
out_of_fuel: false,
});
let print_fuel = AtomicU64::new(0);
@@ -1552,11 +1598,18 @@ fn early_error_handler(output: config::ErrorOutputType) -> rustc_errors::Handler
false,
None,
false,
+ false,
))
}
- config::ErrorOutputType::Json { pretty, json_rendered } => {
- Box::new(JsonEmitter::basic(pretty, json_rendered, None, fallback_bundle, None, false))
- }
+ config::ErrorOutputType::Json { pretty, json_rendered } => Box::new(JsonEmitter::basic(
+ pretty,
+ json_rendered,
+ None,
+ fallback_bundle,
+ None,
+ false,
+ false,
+ )),
};
rustc_errors::Handler::with_emitter(true, None, emitter)
}
diff --git a/compiler/rustc_span/src/analyze_source_file.rs b/compiler/rustc_span/src/analyze_source_file.rs
index 5987fb2a1..d3c2c5113 100644
--- a/compiler/rustc_span/src/analyze_source_file.rs
+++ b/compiler/rustc_span/src/analyze_source_file.rs
@@ -41,7 +41,7 @@ pub fn analyze_source_file(
}
cfg_if::cfg_if! {
- if #[cfg(all(any(target_arch = "x86", target_arch = "x86_64")))] {
+ if #[cfg(any(target_arch = "x86", target_arch = "x86_64"))] {
fn analyze_source_file_dispatch(src: &str,
source_file_start_pos: BytePos,
lines: &mut Vec<BytePos>,
@@ -247,7 +247,7 @@ fn analyze_source_file_generic(
// The slow path:
// This is either ASCII control character "DEL" or the beginning of
// a multibyte char. Just decode to `char`.
- let c = (&src[i..]).chars().next().unwrap();
+ let c = src[i..].chars().next().unwrap();
char_len = c.len_utf8();
let pos = BytePos::from_usize(i) + output_offset;
diff --git a/compiler/rustc_span/src/caching_source_map_view.rs b/compiler/rustc_span/src/caching_source_map_view.rs
index fdabf404a..886112769 100644
--- a/compiler/rustc_span/src/caching_source_map_view.rs
+++ b/compiler/rustc_span/src/caching_source_map_view.rs
@@ -165,7 +165,7 @@ impl<'sm> CachingSourceMapView<'sm> {
Some(new_file_and_idx)
} else {
let file = &self.line_cache[oldest].file;
- if !file_contains(&file, span_data.hi) {
+ if !file_contains(file, span_data.hi) {
return None;
}
diff --git a/compiler/rustc_span/src/def_id.rs b/compiler/rustc_span/src/def_id.rs
index bbeabdb55..e62ce2c26 100644
--- a/compiler/rustc_span/src/def_id.rs
+++ b/compiler/rustc_span/src/def_id.rs
@@ -1,4 +1,4 @@
-use crate::HashStableContext;
+use crate::{HashStableContext, Symbol};
use rustc_data_structures::fingerprint::Fingerprint;
use rustc_data_structures::stable_hasher::{HashStable, StableHasher, ToStableHashKey};
use rustc_data_structures::AtomicRef;
@@ -34,7 +34,7 @@ impl CrateNum {
impl fmt::Display for CrateNum {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
- fmt::Display::fmt(&self.private, f)
+ fmt::Display::fmt(&self.as_u32(), f)
}
}
@@ -149,9 +149,11 @@ impl StableCrateId {
/// Computes the stable ID for a crate with the given name and
/// `-Cmetadata` arguments.
- pub fn new(crate_name: &str, is_exe: bool, mut metadata: Vec<String>) -> StableCrateId {
+ pub fn new(crate_name: Symbol, is_exe: bool, mut metadata: Vec<String>) -> StableCrateId {
let mut hasher = StableHasher::new();
- crate_name.hash(&mut hasher);
+ // We must hash the string text of the crate name, not the id, as the id is not stable
+ // across builds.
+ crate_name.as_str().hash(&mut hasher);
// We don't want the stable crate ID to depend on the order of
// -C metadata arguments, so sort them:
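The comment added above is the crux of this change: a `Symbol` is only an index into a per-session interner, so two builds can assign different indices to the same crate name. A tiny hypothetical illustration of why only the string text is a stable hash input (the hasher and values below are made up for the sketch, not what rustc uses):

    use std::collections::hash_map::DefaultHasher;
    use std::hash::{Hash, Hasher};

    // Hash the crate name text, never the interner index (which varies per session).
    fn stable_part(crate_name: &str) -> u64 {
        let mut hasher = DefaultHasher::new();
        crate_name.hash(&mut hasher);
        hasher.finish()
    }

    fn main() {
        // A made-up index (say 4123 in one build, 4087 in another) would differ
        // between builds; the text "serde" does not.
        assert_eq!(stable_part("serde"), stable_part("serde"));
    }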
@@ -274,7 +276,7 @@ impl Ord for DefId {
impl PartialOrd for DefId {
#[inline]
fn partial_cmp(&self, other: &DefId) -> Option<std::cmp::Ordering> {
- Some(Ord::cmp(self, other))
+ Some(self.cmp(other))
}
}
diff --git a/compiler/rustc_span/src/hygiene.rs b/compiler/rustc_span/src/hygiene.rs
index 191186af6..038699154 100644
--- a/compiler/rustc_span/src/hygiene.rs
+++ b/compiler/rustc_span/src/hygiene.rs
@@ -76,7 +76,7 @@ pub struct ExpnId {
impl fmt::Debug for ExpnId {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
// Generate crate_::{{expn_}}.
- write!(f, "{:?}::{{{{expn{}}}}}", self.krate, self.local_id.private)
+ write!(f, "{:?}::{{{{expn{}}}}}", self.krate, self.local_id.as_u32())
}
}
@@ -381,7 +381,7 @@ impl HygieneData {
}
pub fn with<T, F: FnOnce(&mut HygieneData) -> T>(f: F) -> T {
- with_session_globals(|session_globals| f(&mut *session_globals.hygiene_data.borrow_mut()))
+ with_session_globals(|session_globals| f(&mut session_globals.hygiene_data.borrow_mut()))
}
#[inline]
diff --git a/compiler/rustc_span/src/lib.rs b/compiler/rustc_span/src/lib.rs
index 322c7104b..cef4c6f79 100644
--- a/compiler/rustc_span/src/lib.rs
+++ b/compiler/rustc_span/src/lib.rs
@@ -78,10 +78,10 @@ use sha2::Sha256;
#[cfg(test)]
mod tests;
-// Per-session global variables: this struct is stored in thread-local storage
-// in such a way that it is accessible without any kind of handle to all
-// threads within the compilation session, but is not accessible outside the
-// session.
+/// Per-session global variables: this struct is stored in thread-local storage
+/// in such a way that it is accessible without any kind of handle to all
+/// threads within the compilation session, but is not accessible outside the
+/// session.
pub struct SessionGlobals {
symbol_interner: symbol::Interner,
span_interner: Lock<span_encoding::SpanInterner>,
@@ -217,9 +217,7 @@ impl RealFileName {
pub fn local_path(&self) -> Option<&Path> {
match self {
RealFileName::LocalPath(p) => Some(p),
- RealFileName::Remapped { local_path: p, virtual_name: _ } => {
- p.as_ref().map(PathBuf::as_path)
- }
+ RealFileName::Remapped { local_path, virtual_name: _ } => local_path.as_deref(),
}
}
@@ -240,7 +238,7 @@ impl RealFileName {
pub fn remapped_path_if_available(&self) -> &Path {
match self {
RealFileName::LocalPath(p)
- | RealFileName::Remapped { local_path: _, virtual_name: p } => &p,
+ | RealFileName::Remapped { local_path: _, virtual_name: p } => p,
}
}
@@ -361,8 +359,8 @@ impl FileName {
FileNameDisplay { inner: self, display_pref: FileNameDisplayPreference::Remapped }
}
- // This may include transient local filesystem information.
- // Must not be embedded in build outputs.
+ /// This may include transient local filesystem information.
+ /// Must not be embedded in build outputs.
pub fn prefer_local(&self) -> FileNameDisplay<'_> {
FileNameDisplay { inner: self, display_pref: FileNameDisplayPreference::Local }
}
@@ -753,7 +751,7 @@ impl Span {
/// Checks if a span is "internal" to a macro in which `unsafe`
/// can be used without triggering the `unsafe_code` lint.
- // (that is, a macro marked with `#[allow_internal_unsafe]`).
+ /// (that is, a macro marked with `#[allow_internal_unsafe]`).
pub fn allows_unsafe(self) -> bool {
self.ctxt().outer_expn_data().allow_internal_unsafe
}
diff --git a/compiler/rustc_span/src/source_map.rs b/compiler/rustc_span/src/source_map.rs
index f9566eeee..2ae57d9e5 100644
--- a/compiler/rustc_span/src/source_map.rs
+++ b/compiler/rustc_span/src/source_map.rs
@@ -130,14 +130,14 @@ impl FileLoader for RealFileLoader {
/// different has no real downsides.
#[derive(Copy, Clone, PartialEq, Eq, Hash, Encodable, Decodable, Debug)]
pub struct StableSourceFileId {
- // A hash of the source file's FileName. This is hash so that it's size
- // is more predictable than if we included the actual FileName value.
+ /// A hash of the source file's [`FileName`]. This is hashed so that its size
+ /// is more predictable than if we included the actual [`FileName`] value.
pub file_name_hash: u64,
- // The CrateNum of the crate this source file was originally parsed for.
- // We cannot include this information in the hash because at the time
- // of hashing we don't have the context to map from the CrateNum's numeric
- // value to a StableCrateId.
+ /// The [`CrateNum`] of the crate this source file was originally parsed for.
+ /// We cannot include this information in the hash because at the time
+ /// of hashing we don't have the context to map from the [`CrateNum`]'s numeric
+ /// value to a `StableCrateId`.
pub cnum: CrateNum,
}
@@ -402,7 +402,7 @@ impl SourceMap {
source_file
}
- // If there is a doctest offset, applies it to the line.
+ /// If there is a doctest offset, applies it to the line.
pub fn doctest_offset_line(&self, file: &FileName, orig: usize) -> usize {
match file {
FileName::DocTest(_, offset) => {
@@ -429,7 +429,7 @@ impl SourceMap {
Loc { file: sf, line, col, col_display }
}
- // If the corresponding `SourceFile` is empty, does not return a line number.
+ /// If the corresponding `SourceFile` is empty, does not return a line number.
pub fn lookup_line(&self, pos: BytePos) -> Result<SourceFileAndLine, Lrc<SourceFile>> {
let f = self.lookup_source_file(pos);
@@ -753,6 +753,67 @@ impl SourceMap {
}
}
+ /// Given a `Span`, tries to tell whether it is wrapped by "<>" or "()".
+ /// The algorithm checks whether the next character is '>' or ')' after skipping whitespace,
+ /// then checks whether the previous character is '<' or '(' after skipping whitespace.
+ /// Returns true if the span is wrapped by '<>' or '()'.
+ pub fn span_wrapped_by_angle_or_parentheses(&self, span: Span) -> bool {
+ self.span_to_source(span, |src, start_index, end_index| {
+ if src.get(start_index..end_index).is_none() {
+ return Ok(false);
+ }
+ // test the right side to match '>' or ')' after skipping whitespace
+ let end_src = &src[end_index..];
+ let mut i = 0;
+ let mut found_right_parentheses = false;
+ let mut found_right_angle = false;
+ while let Some(cc) = end_src.chars().nth(i) {
+ if cc == ' ' {
+ i = i + 1;
+ } else if cc == '>' {
+ // found '>' on the right
+ found_right_angle = true;
+ break;
+ } else if cc == ')' {
+ found_right_parentheses = true;
+ break;
+ } else {
+ // neither '>' nor ')': return false immediately
+ return Ok(false);
+ }
+ }
+ // test the left side to match '<' or '(' after skipping whitespace
+ i = start_index;
+ let start_src = &src[0..start_index];
+ while let Some(cc) = start_src.chars().nth(i) {
+ if cc == ' ' {
+ if i == 0 {
+ return Ok(false);
+ }
+ i = i - 1;
+ } else if cc == '<' {
+ // found '<' on the left
+ if !found_right_angle {
+ // skip something like "(< )>"
+ return Ok(false);
+ }
+ break;
+ } else if cc == '(' {
+ if !found_right_parentheses {
+ // skip something like "<(>)"
+ return Ok(false);
+ }
+ break;
+ } else {
+ // neither '<' nor '(': return false immediately
+ return Ok(false);
+ }
+ }
+ return Ok(true);
+ })
+ .map_or(false, |is_accessible| is_accessible)
+ }
+
/// Given a `Span`, tries to get a shorter span ending just after the first occurrence of `char`
/// `c`.
pub fn span_through_char(&self, sp: Span, c: char) -> Span {
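The bracket check added in this hunk only looks at the first non-space character on each side of the span and requires the pair to match. As a rough, self-contained illustration of that intent on a plain string (this is not the rustc `SourceMap` API, and it skips all whitespace rather than only spaces):

    // Does the text between byte indices `start..end` sit directly inside <...> or (...)?
    fn wrapped_by_angle_or_parens(src: &str, start: usize, end: usize) -> bool {
        let after = src[end..].chars().find(|c| !c.is_whitespace());
        let before = src[..start].chars().rev().find(|c| !c.is_whitespace());
        matches!(
            (before, after),
            (Some('<'), Some('>')) | (Some('('), Some(')'))
        )
    }

    fn main() {
        assert!(wrapped_by_angle_or_parens("Vec< u8 >", 5, 7));  // "u8" inside <...>
        assert!(!wrapped_by_angle_or_parens("Vec< u8 )", 5, 7)); // mismatched pair
    }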
@@ -855,7 +916,8 @@ impl SourceMap {
/// Returns a new span representing the next character after the end-point of this span.
/// Special cases:
/// - if span is a dummy one, returns the same span
- /// - if next_point reached the end of source, return span with lo = hi
+ /// - if next_point reaches the end of the source, returns a span exceeding the end of the source,
+ /// which means `sm.span_to_snippet(next_point)` will return `Err`
/// - respect multi-byte characters
pub fn next_point(&self, sp: Span) -> Span {
if sp.is_dummy() {
@@ -864,9 +926,6 @@ impl SourceMap {
let start_of_next_point = sp.hi().0;
let width = self.find_width_of_character_at_span(sp, true);
- if width == 0 {
- return Span::new(sp.hi(), sp.hi(), sp.ctxt(), None);
- }
// If the width is 1, then the next span should only contain the next char besides current ending.
// However, in the case of a multibyte character, where the width != 1, the next span should
// span multiple bytes to include the whole character.
@@ -938,7 +997,7 @@ impl SourceMap {
// Ensure indexes are also not malformed.
if start_index > end_index || end_index > source_len - 1 {
debug!("find_width_of_character_at_span: source indexes are malformed");
- return 0;
+ return 1;
}
let src = local_begin.sf.external_src.borrow();
@@ -994,9 +1053,9 @@ impl SourceMap {
SourceFileAndBytePos { sf, pos: offset }
}
- // Returns the index of the `SourceFile` (in `self.files`) that contains `pos`.
- // This index is guaranteed to be valid for the lifetime of this `SourceMap`,
- // since `source_files` is a `MonotonicVec`
+ /// Returns the index of the [`SourceFile`] (in `self.files`) that contains `pos`.
+ /// This index is guaranteed to be valid for the lifetime of this `SourceMap`,
+ /// since `source_files` is a `MonotonicVec`
pub fn lookup_source_file_idx(&self, pos: BytePos) -> usize {
self.files
.borrow()
diff --git a/compiler/rustc_span/src/source_map/tests.rs b/compiler/rustc_span/src/source_map/tests.rs
index 1fd81018f..3cab59e8d 100644
--- a/compiler/rustc_span/src/source_map/tests.rs
+++ b/compiler/rustc_span/src/source_map/tests.rs
@@ -511,16 +511,17 @@ fn test_next_point() {
assert_eq!(span.lo().0, 4);
assert_eq!(span.hi().0, 5);
- // A non-empty span at the last byte should advance to create an empty
- // span pointing at the end of the file.
+ // On reaching the end of the file, return a span for which `span_to_snippet` errors
let span = Span::with_root_ctxt(BytePos(4), BytePos(5));
let span = sm.next_point(span);
assert_eq!(span.lo().0, 5);
- assert_eq!(span.hi().0, 5);
+ assert_eq!(span.hi().0, 6);
+ assert!(sm.span_to_snippet(span).is_err());
- // Empty span pointing just past the last byte.
+ // On reaching the end of the file, return a span for which `span_to_snippet` errors
let span = Span::with_root_ctxt(BytePos(5), BytePos(5));
let span = sm.next_point(span);
assert_eq!(span.lo().0, 5);
- assert_eq!(span.hi().0, 5);
+ assert_eq!(span.hi().0, 6);
+ assert!(sm.span_to_snippet(span).is_err());
}
diff --git a/compiler/rustc_span/src/span_encoding.rs b/compiler/rustc_span/src/span_encoding.rs
index b3de67415..f0e91e5a6 100644
--- a/compiler/rustc_span/src/span_encoding.rs
+++ b/compiler/rustc_span/src/span_encoding.rs
@@ -166,5 +166,5 @@ impl SpanInterner {
// If an interner exists, return it. Otherwise, prepare a fresh one.
#[inline]
fn with_span_interner<T, F: FnOnce(&mut SpanInterner) -> T>(f: F) -> T {
- crate::with_session_globals(|session_globals| f(&mut *session_globals.span_interner.lock()))
+ crate::with_session_globals(|session_globals| f(&mut session_globals.span_interner.lock()))
}
diff --git a/compiler/rustc_span/src/symbol.rs b/compiler/rustc_span/src/symbol.rs
index 7f16da52b..9e446c96d 100644
--- a/compiler/rustc_span/src/symbol.rs
+++ b/compiler/rustc_span/src/symbol.rs
@@ -170,8 +170,6 @@ symbols! {
Count,
Cow,
Debug,
- DebugStruct,
- DebugTuple,
Decodable,
Decoder,
DecorateLint,
@@ -190,9 +188,6 @@ symbols! {
Error,
File,
FileType,
- Fn,
- FnMut,
- FnOnce,
FormatSpec,
Formatter,
From,
@@ -211,7 +206,6 @@ symbols! {
Input,
Into,
IntoDiagnostic,
- IntoFuture,
IntoIterator,
IoRead,
IoWrite,
@@ -256,7 +250,6 @@ symbols! {
Pointer,
Poll,
ProcMacro,
- ProcMacroHack,
ProceduralMasqueradeDummyType,
Range,
RangeFrom,
@@ -271,6 +264,7 @@ symbols! {
Relaxed,
Release,
Result,
+ ResumeTy,
Return,
Right,
Rust,
@@ -332,7 +326,6 @@ symbols! {
abi_vectorcall,
abi_x86_interrupt,
abort,
- aborts,
add,
add_assign,
add_with_overflow,
@@ -344,7 +337,6 @@ symbols! {
align,
align_offset,
alignment,
- alignstack,
all,
alloc,
alloc_error_handler,
@@ -433,7 +425,6 @@ symbols! {
bool,
borrowck_graphviz_format,
borrowck_graphviz_postflow,
- borrowck_graphviz_preflow,
box_free,
box_patterns,
box_syntax,
@@ -462,7 +453,6 @@ symbols! {
cfg_doctest,
cfg_eval,
cfg_hide,
- cfg_macro,
cfg_panic,
cfg_sanitize,
cfg_target_abi,
@@ -470,7 +460,6 @@ symbols! {
cfg_target_feature,
cfg_target_has_atomic,
cfg_target_has_atomic_equal_alignment,
- cfg_target_has_atomic_load_store,
cfg_target_thread_local,
cfg_target_vendor,
cfg_version,
@@ -495,19 +484,15 @@ symbols! {
cold,
collapse_debuginfo,
column,
- column_macro,
- compare_and_swap,
compare_exchange,
compare_exchange_weak,
compile_error,
- compile_error_macro,
compiler,
compiler_builtins,
compiler_fence,
concat,
concat_bytes,
concat_idents,
- concat_macro,
conservative_impl_trait,
console,
const_allocate,
@@ -528,7 +513,6 @@ symbols! {
const_fn_unsize,
const_for,
const_format_args,
- const_generic_defaults,
const_generics,
const_generics_defaults,
const_if_match,
@@ -547,22 +531,19 @@ symbols! {
const_trait,
const_trait_bound_opt_out,
const_trait_impl,
- const_transmute,
const_try,
constant,
constructor,
- contents,
context,
- convert,
copy,
copy_closures,
copy_nonoverlapping,
copysignf32,
copysignf64,
core,
- core_intrinsics,
core_panic,
core_panic_2015_macro,
+ core_panic_2021_macro,
core_panic_macro,
cosf32,
cosf64,
@@ -584,6 +565,7 @@ symbols! {
custom_attribute,
custom_derive,
custom_inner_attributes,
+ custom_mir,
custom_test_frameworks,
d,
d32,
@@ -597,7 +579,6 @@ symbols! {
debug_assertions,
debug_struct,
debug_struct_fields_finish,
- debug_trait_builder,
debug_tuple,
debug_tuple_fields_finish,
debugger_visualizer,
@@ -619,6 +600,7 @@ symbols! {
deref_mut,
deref_target,
derive,
+ derive_const,
derive_default_enum,
destruct,
destructuring_assignment,
@@ -628,7 +610,6 @@ symbols! {
discriminant_type,
discriminant_value,
dispatch_from_dyn,
- display_trait,
div,
div_assign,
doc,
@@ -659,7 +640,6 @@ symbols! {
dyn_star,
dyn_trait,
e,
- edition_macro_pats,
edition_panic,
eh_catch_typeinfo,
eh_personality,
@@ -672,7 +652,6 @@ symbols! {
encode,
end,
env,
- env_macro,
eprint_macro,
eprintln_macro,
eq,
@@ -694,6 +673,7 @@ symbols! {
export_name,
expr,
extended_key_value_attributes,
+ extended_varargs_abi_support,
extern_absolute_paths,
extern_crate_item_prelude,
extern_crate_self,
@@ -721,9 +701,7 @@ symbols! {
field,
field_init_shorthand,
file,
- file_macro,
fill,
- finish,
flags,
float,
float_to_int_unchecked,
@@ -732,8 +710,6 @@ symbols! {
fmaf32,
fmaf64,
fmt,
- fmt_as_str,
- fmt_internals,
fmul_fast,
fn_align,
fn_must_use,
@@ -748,13 +724,11 @@ symbols! {
format_args_macro,
format_args_nl,
format_macro,
- fp,
freeze,
freg,
frem_fast,
from,
from_desugaring,
- from_generator,
from_iter,
from_method,
from_output,
@@ -805,14 +779,16 @@ symbols! {
i64,
i8,
ident,
+ identity_future,
if_let,
if_let_guard,
if_while_or_patterns,
ignore,
impl_header_lifetime_elision,
impl_lint_pass,
- impl_macros,
impl_trait_in_bindings,
+ impl_trait_in_fn_trait_return,
+ impl_trait_projections,
implied_by,
import,
import_name_type,
@@ -822,7 +798,6 @@ symbols! {
include,
include_bytes,
include_bytes_macro,
- include_macro,
include_str,
include_str_macro,
inclusive_range_syntax,
@@ -840,7 +815,6 @@ symbols! {
instruction_set,
integer_: "integer",
integral,
- intel,
into_future,
into_iter,
intra_doc_pointers,
@@ -877,7 +851,6 @@ symbols! {
lifetimes,
likely,
line,
- line_macro,
link,
link_args,
link_cfg,
@@ -961,7 +934,6 @@ symbols! {
modifiers,
module,
module_path,
- module_path_macro,
more_qualified_paths,
more_struct_aliases,
movbe_target_feature,
@@ -1031,7 +1003,6 @@ symbols! {
non_exhaustive,
non_exhaustive_omitted_patterns_lint,
non_modrs_mods,
- none_error,
nontemporal_store,
noop_method_borrow,
noop_method_clone,
@@ -1056,7 +1027,6 @@ symbols! {
optin_builtin_traits,
option,
option_env,
- option_env_macro,
options,
or,
or_patterns,
@@ -1099,7 +1069,7 @@ symbols! {
plugins,
pointee_trait,
pointer,
- pointer_trait_fmt,
+ pointer_sized,
poll,
position,
post_dash_lto: "post-lto",
@@ -1126,7 +1096,6 @@ symbols! {
proc_dash_macro: "proc-macro",
proc_macro,
proc_macro_attribute,
- proc_macro_def_site,
proc_macro_derive,
proc_macro_expr,
proc_macro_gen,
@@ -1227,9 +1196,6 @@ symbols! {
rust_cold_cc,
rust_eh_catch_typeinfo,
rust_eh_personality,
- rust_eh_register_frames,
- rust_eh_unregister_frames,
- rust_oom,
rustc,
rustc_allocator,
rustc_allocator_zeroed,
@@ -1248,6 +1214,7 @@ symbols! {
rustc_deallocator,
rustc_def_path,
rustc_default_body_unstable,
+ rustc_deny_explicit_impl,
rustc_diagnostic_item,
rustc_diagnostic_macros,
rustc_dirty,
@@ -1302,7 +1269,6 @@ symbols! {
rustc_serialize,
rustc_skip_array_during_method_dispatch,
rustc_specialization_trait,
- rustc_stable,
rustc_std_internal_symbol,
rustc_strict_coherence,
rustc_symbol_name,
@@ -1430,7 +1396,6 @@ symbols! {
static_recursion,
staticlib,
std,
- std_inject,
std_panic,
std_panic_2015_macro,
std_panic_macro,
@@ -1444,8 +1409,8 @@ symbols! {
str_trim_end,
str_trim_start,
strict_provenance,
+ string_deref_patterns,
stringify,
- stringify_macro,
struct_field_attributes,
struct_inherit,
struct_variant,
@@ -1473,10 +1438,8 @@ symbols! {
target_has_atomic_load_store,
target_os,
target_pointer_width,
- target_target_vendor,
target_thread_local,
target_vendor,
- task,
tbm_target_feature,
termination,
termination_trait,
@@ -1488,7 +1451,6 @@ symbols! {
test_removed_feature,
test_runner,
test_unstable_lint,
- then_with,
thread,
thread_local,
thread_local_macro,
@@ -1520,13 +1482,13 @@ symbols! {
try_trait_v2,
tt,
tuple,
- tuple_from_req,
tuple_indexing,
tuple_trait,
two_phase,
ty,
type_alias_enum_variants,
type_alias_impl_trait,
+ type_ascribe,
type_ascription,
type_changing_struct_update,
type_id,
@@ -1564,7 +1526,6 @@ symbols! {
unreachable_2015,
unreachable_2015_macro,
unreachable_2021,
- unreachable_2021_macro,
unreachable_code,
unreachable_display,
unreachable_macro,
@@ -1583,7 +1544,6 @@ symbols! {
from crates.io via `Cargo.toml` instead?",
untagged_unions,
unused_imports,
- unused_qualifications,
unwind,
unwind_attributes,
unwind_safe_trait,
@@ -1917,7 +1877,7 @@ impl<S: Encoder> Encodable<S> for Symbol {
impl<D: Decoder> Decodable<D> for Symbol {
#[inline]
default fn decode(d: &mut D) -> Symbol {
- Symbol::intern(&d.read_str())
+ Symbol::intern(d.read_str())
}
}
@@ -2092,8 +2052,8 @@ impl Symbol {
}
impl Ident {
- // Returns `true` for reserved identifiers used internally for elided lifetimes,
- // unnamed method parameters, crate root module, error recovery etc.
+ /// Returns `true` for reserved identifiers used internally for elided lifetimes,
+ /// unnamed method parameters, crate root module, error recovery etc.
pub fn is_special(self) -> bool {
self.name.is_special()
}
diff --git a/compiler/rustc_symbol_mangling/src/legacy.rs b/compiler/rustc_symbol_mangling/src/legacy.rs
index 46c5fe78f..c60a2f467 100644
--- a/compiler/rustc_symbol_mangling/src/legacy.rs
+++ b/compiler/rustc_symbol_mangling/src/legacy.rs
@@ -244,7 +244,7 @@ impl<'tcx> Printer<'tcx> for &mut SymbolPrinter<'tcx> {
fn print_dyn_existential(
mut self,
- predicates: &'tcx ty::List<ty::Binder<'tcx, ty::ExistentialPredicate<'tcx>>>,
+ predicates: &'tcx ty::List<ty::PolyExistentialPredicate<'tcx>>,
) -> Result<Self::DynExistential, Self::Error> {
let mut first = true;
for p in predicates {
diff --git a/compiler/rustc_symbol_mangling/src/typeid/typeid_itanium_cxx_abi.rs b/compiler/rustc_symbol_mangling/src/typeid/typeid_itanium_cxx_abi.rs
index 6aa031c83..87128e0f8 100644
--- a/compiler/rustc_symbol_mangling/src/typeid/typeid_itanium_cxx_abi.rs
+++ b/compiler/rustc_symbol_mangling/src/typeid/typeid_itanium_cxx_abi.rs
@@ -12,8 +12,8 @@ use rustc_data_structures::fx::FxHashMap;
use rustc_hir as hir;
use rustc_middle::ty::subst::{GenericArg, GenericArgKind, SubstsRef};
use rustc_middle::ty::{
- self, Binder, Const, ExistentialPredicate, FloatTy, FnSig, IntTy, List, Region, RegionKind,
- TermKind, Ty, TyCtxt, UintTy,
+ self, Const, ExistentialPredicate, FloatTy, FnSig, IntTy, List, Region, RegionKind, TermKind,
+ Ty, TyCtxt, UintTy,
};
use rustc_span::def_id::DefId;
use rustc_span::symbol::sym;
@@ -226,7 +226,7 @@ fn encode_fnsig<'tcx>(
/// Rust types that are not used at the FFI boundary.
fn encode_predicate<'tcx>(
tcx: TyCtxt<'tcx>,
- predicate: Binder<'tcx, ExistentialPredicate<'tcx>>,
+ predicate: ty::PolyExistentialPredicate<'tcx>,
dict: &mut FxHashMap<DictKey<'tcx>, usize>,
options: EncodeTyOptions,
) -> String {
@@ -261,13 +261,13 @@ fn encode_predicate<'tcx>(
/// Rust types that are not used at the FFI boundary.
fn encode_predicates<'tcx>(
tcx: TyCtxt<'tcx>,
- predicates: &List<Binder<'tcx, ExistentialPredicate<'tcx>>>,
+ predicates: &List<ty::PolyExistentialPredicate<'tcx>>,
dict: &mut FxHashMap<DictKey<'tcx>, usize>,
options: EncodeTyOptions,
) -> String {
// <predicate1[..predicateN]>E as part of vendor extended type
let mut s = String::new();
- let predicates: Vec<Binder<'tcx, ExistentialPredicate<'tcx>>> =
+ let predicates: Vec<ty::PolyExistentialPredicate<'tcx>> =
predicates.iter().map(|predicate| predicate).collect();
for predicate in predicates {
s.push_str(&encode_predicate(tcx, predicate, dict, options));
diff --git a/compiler/rustc_symbol_mangling/src/v0.rs b/compiler/rustc_symbol_mangling/src/v0.rs
index ecfe6861e..2cca480f2 100644
--- a/compiler/rustc_symbol_mangling/src/v0.rs
+++ b/compiler/rustc_symbol_mangling/src/v0.rs
@@ -218,7 +218,7 @@ impl<'tcx> SymbolMangler<'tcx> {
let lifetimes = regions
.into_iter()
.map(|br| match br {
- ty::BrAnon(i) => i,
+ ty::BrAnon(i, _) => i,
_ => bug!("symbol_names: non-anonymized region `{:?}` in `{:?}`", br, value),
})
.max()
@@ -335,7 +335,7 @@ impl<'tcx> Printer<'tcx> for &mut SymbolMangler<'tcx> {
// Late-bound lifetimes use indices starting at 1,
// see `BinderLevel` for more details.
- ty::ReLateBound(debruijn, ty::BoundRegion { kind: ty::BrAnon(i), .. }) => {
+ ty::ReLateBound(debruijn, ty::BoundRegion { kind: ty::BrAnon(i, _), .. }) => {
let binder = &self.binders[self.binders.len() - 1 - debruijn.index()];
let depth = binder.lifetime_depths.start + i;
@@ -502,7 +502,7 @@ impl<'tcx> Printer<'tcx> for &mut SymbolMangler<'tcx> {
fn print_dyn_existential(
mut self,
- predicates: &'tcx ty::List<ty::Binder<'tcx, ty::ExistentialPredicate<'tcx>>>,
+ predicates: &'tcx ty::List<ty::PolyExistentialPredicate<'tcx>>,
) -> Result<Self::DynExistential, Self::Error> {
// Okay, so this is a bit tricky. Imagine we have a trait object like
// `dyn for<'a> Foo<'a, Bar = &'a ()>`. When we mangle this, the
@@ -575,6 +575,7 @@ impl<'tcx> Printer<'tcx> for &mut SymbolMangler<'tcx> {
// a path), even for it we still need to encode a placeholder, as
// the path could refer back to e.g. an `impl` using the constant.
ty::ConstKind::Unevaluated(_)
+ | ty::ConstKind::Expr(_)
| ty::ConstKind::Param(_)
| ty::ConstKind::Infer(_)
| ty::ConstKind::Bound(..)
@@ -621,7 +622,7 @@ impl<'tcx> Printer<'tcx> for &mut SymbolMangler<'tcx> {
});
match inner_ty.kind() {
- ty::Str if *mutbl == hir::Mutability::Not => {
+ ty::Str if mutbl.is_not() => {
match ct.kind() {
ty::ConstKind::Value(valtree) => {
let slice =
@@ -654,8 +655,7 @@ impl<'tcx> Printer<'tcx> for &mut SymbolMangler<'tcx> {
.builtin_deref(true)
.expect("tried to dereference on non-ptr type")
.ty;
- let dereferenced_const =
- self.tcx.mk_const(ty::ConstS { kind: ct.kind(), ty: pointee_ty });
+ let dereferenced_const = self.tcx.mk_const(ct.kind(), pointee_ty);
self = dereferenced_const.print(self)?;
}
}
@@ -690,15 +690,15 @@ impl<'tcx> Printer<'tcx> for &mut SymbolMangler<'tcx> {
self.push("V");
self = self.print_def_path(variant_def.def_id, substs)?;
- match variant_def.ctor_kind {
- CtorKind::Const => {
+ match variant_def.ctor_kind() {
+ Some(CtorKind::Const) => {
self.push("U");
}
- CtorKind::Fn => {
+ Some(CtorKind::Fn) => {
self.push("T");
self = print_field_list(self)?;
}
- CtorKind::Fictive => {
+ None => {
self.push("S");
for (field_def, field) in iter::zip(&variant_def.fields, fields) {
// HACK(eddyb) this mimics `path_append`,
diff --git a/compiler/rustc_target/Cargo.toml b/compiler/rustc_target/Cargo.toml
index fc37fdb1c..568c916a1 100644
--- a/compiler/rustc_target/Cargo.toml
+++ b/compiler/rustc_target/Cargo.toml
@@ -7,6 +7,7 @@ edition = "2021"
bitflags = "1.2.1"
tracing = "0.1"
serde_json = "1.0.59"
+rustc_abi = { path = "../rustc_abi" }
rustc_data_structures = { path = "../rustc_data_structures" }
rustc_feature = { path = "../rustc_feature" }
rustc_index = { path = "../rustc_index" }
diff --git a/compiler/rustc_target/src/abi/call/loongarch.rs b/compiler/rustc_target/src/abi/call/loongarch.rs
new file mode 100644
index 000000000..d29b479de
--- /dev/null
+++ b/compiler/rustc_target/src/abi/call/loongarch.rs
@@ -0,0 +1,342 @@
+use crate::abi::call::{ArgAbi, ArgExtension, CastTarget, FnAbi, PassMode, Reg, RegKind, Uniform};
+use crate::abi::{self, Abi, FieldsShape, HasDataLayout, Size, TyAbiInterface, TyAndLayout};
+use crate::spec::HasTargetSpec;
+
+#[derive(Copy, Clone)]
+enum RegPassKind {
+ Float(Reg),
+ Integer(Reg),
+ Unknown,
+}
+
+#[derive(Copy, Clone)]
+enum FloatConv {
+ FloatPair(Reg, Reg),
+ Float(Reg),
+ MixedPair(Reg, Reg),
+}
+
+#[derive(Copy, Clone)]
+struct CannotUseFpConv;
+
+fn is_loongarch_aggregate<'a, Ty>(arg: &ArgAbi<'a, Ty>) -> bool {
+ match arg.layout.abi {
+ Abi::Vector { .. } => true,
+ _ => arg.layout.is_aggregate(),
+ }
+}
+
+fn should_use_fp_conv_helper<'a, Ty, C>(
+ cx: &C,
+ arg_layout: &TyAndLayout<'a, Ty>,
+ xlen: u64,
+ flen: u64,
+ field1_kind: &mut RegPassKind,
+ field2_kind: &mut RegPassKind,
+) -> Result<(), CannotUseFpConv>
+where
+ Ty: TyAbiInterface<'a, C> + Copy,
+{
+ match arg_layout.abi {
+ Abi::Scalar(scalar) => match scalar.primitive() {
+ abi::Int(..) | abi::Pointer => {
+ if arg_layout.size.bits() > xlen {
+ return Err(CannotUseFpConv);
+ }
+ match (*field1_kind, *field2_kind) {
+ (RegPassKind::Unknown, _) => {
+ *field1_kind = RegPassKind::Integer(Reg {
+ kind: RegKind::Integer,
+ size: arg_layout.size,
+ });
+ }
+ (RegPassKind::Float(_), RegPassKind::Unknown) => {
+ *field2_kind = RegPassKind::Integer(Reg {
+ kind: RegKind::Integer,
+ size: arg_layout.size,
+ });
+ }
+ _ => return Err(CannotUseFpConv),
+ }
+ }
+ abi::F32 | abi::F64 => {
+ if arg_layout.size.bits() > flen {
+ return Err(CannotUseFpConv);
+ }
+ match (*field1_kind, *field2_kind) {
+ (RegPassKind::Unknown, _) => {
+ *field1_kind =
+ RegPassKind::Float(Reg { kind: RegKind::Float, size: arg_layout.size });
+ }
+ (_, RegPassKind::Unknown) => {
+ *field2_kind =
+ RegPassKind::Float(Reg { kind: RegKind::Float, size: arg_layout.size });
+ }
+ _ => return Err(CannotUseFpConv),
+ }
+ }
+ },
+ Abi::Vector { .. } | Abi::Uninhabited => return Err(CannotUseFpConv),
+ Abi::ScalarPair(..) | Abi::Aggregate { .. } => match arg_layout.fields {
+ FieldsShape::Primitive => {
+ unreachable!("aggregates can't have `FieldsShape::Primitive`")
+ }
+ FieldsShape::Union(_) => {
+ if !arg_layout.is_zst() {
+ return Err(CannotUseFpConv);
+ }
+ }
+ FieldsShape::Array { count, .. } => {
+ for _ in 0..count {
+ let elem_layout = arg_layout.field(cx, 0);
+ should_use_fp_conv_helper(
+ cx,
+ &elem_layout,
+ xlen,
+ flen,
+ field1_kind,
+ field2_kind,
+ )?;
+ }
+ }
+ FieldsShape::Arbitrary { .. } => {
+ match arg_layout.variants {
+ abi::Variants::Multiple { .. } => return Err(CannotUseFpConv),
+ abi::Variants::Single { .. } => (),
+ }
+ for i in arg_layout.fields.index_by_increasing_offset() {
+ let field = arg_layout.field(cx, i);
+ should_use_fp_conv_helper(cx, &field, xlen, flen, field1_kind, field2_kind)?;
+ }
+ }
+ },
+ }
+ Ok(())
+}
+
+fn should_use_fp_conv<'a, Ty, C>(
+ cx: &C,
+ arg: &TyAndLayout<'a, Ty>,
+ xlen: u64,
+ flen: u64,
+) -> Option<FloatConv>
+where
+ Ty: TyAbiInterface<'a, C> + Copy,
+{
+ let mut field1_kind = RegPassKind::Unknown;
+ let mut field2_kind = RegPassKind::Unknown;
+ if should_use_fp_conv_helper(cx, arg, xlen, flen, &mut field1_kind, &mut field2_kind).is_err() {
+ return None;
+ }
+ match (field1_kind, field2_kind) {
+ (RegPassKind::Integer(l), RegPassKind::Float(r)) => Some(FloatConv::MixedPair(l, r)),
+ (RegPassKind::Float(l), RegPassKind::Integer(r)) => Some(FloatConv::MixedPair(l, r)),
+ (RegPassKind::Float(l), RegPassKind::Float(r)) => Some(FloatConv::FloatPair(l, r)),
+ (RegPassKind::Float(f), RegPassKind::Unknown) => Some(FloatConv::Float(f)),
+ _ => None,
+ }
+}
+
+fn classify_ret<'a, Ty, C>(cx: &C, arg: &mut ArgAbi<'a, Ty>, xlen: u64, flen: u64) -> bool
+where
+ Ty: TyAbiInterface<'a, C> + Copy,
+{
+ if let Some(conv) = should_use_fp_conv(cx, &arg.layout, xlen, flen) {
+ match conv {
+ FloatConv::Float(f) => {
+ arg.cast_to(f);
+ }
+ FloatConv::FloatPair(l, r) => {
+ arg.cast_to(CastTarget::pair(l, r));
+ }
+ FloatConv::MixedPair(l, r) => {
+ arg.cast_to(CastTarget::pair(l, r));
+ }
+ }
+ return false;
+ }
+
+ let total = arg.layout.size;
+
+ // "Scalars wider than 2✕XLEN are passed by reference and are replaced in
+ // the argument list with the address."
+ // "Aggregates larger than 2✕XLEN bits are passed by reference and are
+ // replaced in the argument list with the address, as are C++ aggregates
+ // with nontrivial copy constructors, destructors, or vtables."
+ if total.bits() > 2 * xlen {
+ // We rely on the LLVM backend lowering code to lower passing a scalar larger than 2*XLEN.
+ if is_loongarch_aggregate(arg) {
+ arg.make_indirect();
+ }
+ return true;
+ }
+
+ let xlen_reg = match xlen {
+ 32 => Reg::i32(),
+ 64 => Reg::i64(),
+ _ => unreachable!("Unsupported XLEN: {}", xlen),
+ };
+ if is_loongarch_aggregate(arg) {
+ if total.bits() <= xlen {
+ arg.cast_to(xlen_reg);
+ } else {
+ arg.cast_to(Uniform { unit: xlen_reg, total: Size::from_bits(xlen * 2) });
+ }
+ return false;
+ }
+
+ // "When passed in registers, scalars narrower than XLEN bits are widened
+ // according to the sign of their type up to 32 bits, then sign-extended to
+ // XLEN bits."
+ extend_integer_width(arg, xlen);
+ false
+}
+
+fn classify_arg<'a, Ty, C>(
+ cx: &C,
+ arg: &mut ArgAbi<'a, Ty>,
+ xlen: u64,
+ flen: u64,
+ is_vararg: bool,
+ avail_gprs: &mut u64,
+ avail_fprs: &mut u64,
+) where
+ Ty: TyAbiInterface<'a, C> + Copy,
+{
+ if !is_vararg {
+ match should_use_fp_conv(cx, &arg.layout, xlen, flen) {
+ Some(FloatConv::Float(f)) if *avail_fprs >= 1 => {
+ *avail_fprs -= 1;
+ arg.cast_to(f);
+ return;
+ }
+ Some(FloatConv::FloatPair(l, r)) if *avail_fprs >= 2 => {
+ *avail_fprs -= 2;
+ arg.cast_to(CastTarget::pair(l, r));
+ return;
+ }
+ Some(FloatConv::MixedPair(l, r)) if *avail_fprs >= 1 && *avail_gprs >= 1 => {
+ *avail_gprs -= 1;
+ *avail_fprs -= 1;
+ arg.cast_to(CastTarget::pair(l, r));
+ return;
+ }
+ _ => (),
+ }
+ }
+
+ let total = arg.layout.size;
+ let align = arg.layout.align.abi.bits();
+
+ // "Scalars wider than 2✕XLEN are passed by reference and are replaced in
+ // the argument list with the address."
+ // "Aggregates larger than 2✕XLEN bits are passed by reference and are
+ // replaced in the argument list with the address, as are C++ aggregates
+ // with nontrivial copy constructors, destructors, or vtables."
+ if total.bits() > 2 * xlen {
+ // We rely on the LLVM backend lowering code to lower passing a scalar larger than 2*XLEN.
+ if is_loongarch_aggregate(arg) {
+ arg.make_indirect();
+ }
+ if *avail_gprs >= 1 {
+ *avail_gprs -= 1;
+ }
+ return;
+ }
+
+ let double_xlen_reg = match xlen {
+ 32 => Reg::i64(),
+ 64 => Reg::i128(),
+ _ => unreachable!("Unsupported XLEN: {}", xlen),
+ };
+
+ let xlen_reg = match xlen {
+ 32 => Reg::i32(),
+ 64 => Reg::i64(),
+ _ => unreachable!("Unsupported XLEN: {}", xlen),
+ };
+
+ if total.bits() > xlen {
+ let align_regs = align > xlen;
+ if is_loongarch_aggregate(arg) {
+ arg.cast_to(Uniform {
+ unit: if align_regs { double_xlen_reg } else { xlen_reg },
+ total: Size::from_bits(xlen * 2),
+ });
+ }
+ if align_regs && is_vararg {
+ *avail_gprs -= *avail_gprs % 2;
+ }
+ if *avail_gprs >= 2 {
+ *avail_gprs -= 2;
+ } else {
+ *avail_gprs = 0;
+ }
+ return;
+ } else if is_loongarch_aggregate(arg) {
+ arg.cast_to(xlen_reg);
+ if *avail_gprs >= 1 {
+ *avail_gprs -= 1;
+ }
+ return;
+ }
+
+ // "When passed in registers, scalars narrower than XLEN bits are widened
+ // according to the sign of their type up to 32 bits, then sign-extended to
+ // XLEN bits."
+ if *avail_gprs >= 1 {
+ extend_integer_width(arg, xlen);
+ *avail_gprs -= 1;
+ }
+}
+
+fn extend_integer_width<'a, Ty>(arg: &mut ArgAbi<'a, Ty>, xlen: u64) {
+ if let Abi::Scalar(scalar) = arg.layout.abi {
+ if let abi::Int(i, _) = scalar.primitive() {
+ // 32-bit integers are always sign-extended
+ if i.size().bits() == 32 && xlen > 32 {
+ if let PassMode::Direct(ref mut attrs) = arg.mode {
+ attrs.ext(ArgExtension::Sext);
+ return;
+ }
+ }
+ }
+ }
+
+ arg.extend_integer_width_to(xlen);
+}
+
+pub fn compute_abi_info<'a, Ty, C>(cx: &C, fn_abi: &mut FnAbi<'a, Ty>)
+where
+ Ty: TyAbiInterface<'a, C> + Copy,
+ C: HasDataLayout + HasTargetSpec,
+{
+ let xlen = cx.data_layout().pointer_size.bits();
+ let flen = match &cx.target_spec().llvm_abiname[..] {
+ "ilp32f" | "lp64f" => 32,
+ "ilp32d" | "lp64d" => 64,
+ _ => 0,
+ };
+
+ let mut avail_gprs = 8;
+ let mut avail_fprs = 8;
+
+ if !fn_abi.ret.is_ignore() && classify_ret(cx, &mut fn_abi.ret, xlen, flen) {
+ avail_gprs -= 1;
+ }
+
+ for (i, arg) in fn_abi.args.iter_mut().enumerate() {
+ if arg.is_ignore() {
+ continue;
+ }
+ classify_arg(
+ cx,
+ arg,
+ xlen,
+ flen,
+ i >= fn_abi.fixed_count as usize,
+ &mut avail_gprs,
+ &mut avail_fprs,
+ );
+ }
+}
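A worked reading of the classification rules above for the lp64d ABI (xlen = 64, flen = 64); the example types are illustrative and not taken from a test in this patch:

    fn main() {
        let (xlen, flen) = (64u64, 64u64); // lp64d
        // #[repr(C)] struct Pair { a: f64, b: i32 }: one float field and one integer
        // field, each no wider than flen/xlen -> FloatConv::MixedPair, so with
        // registers available the argument is cast to a pair: one FPR plus one GPR.
        assert!(64 <= flen && 32 <= xlen);
        // #[repr(C)] struct Big { a: u64, b: u64, c: u64 }: 192 bits > 2 * xlen,
        // so the aggregate is made indirect and only its address consumes a GPR.
        assert!(3 * 64 > 2 * xlen);
        // A lone u8 argument: narrower than xlen, so extend_integer_width widens it
        // and it occupies a single GPR.
        assert!(8 < xlen);
    }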
diff --git a/compiler/rustc_target/src/abi/call/mod.rs b/compiler/rustc_target/src/abi/call/mod.rs
index 9e5f0e4d1..a5ffaebea 100644
--- a/compiler/rustc_target/src/abi/call/mod.rs
+++ b/compiler/rustc_target/src/abi/call/mod.rs
@@ -3,6 +3,7 @@ use crate::abi::{HasDataLayout, TyAbiInterface, TyAndLayout};
use crate::spec::{self, HasTargetSpec};
use rustc_span::Symbol;
use std::fmt;
+use std::str::FromStr;
mod aarch64;
mod amdgpu;
@@ -10,6 +11,7 @@ mod arm;
mod avr;
mod bpf;
mod hexagon;
+mod loongarch;
mod m68k;
mod mips;
mod mips64;
@@ -260,7 +262,7 @@ impl CastTarget {
let mut size = self.rest.total;
for i in 0..self.prefix.iter().count() {
match self.prefix[i] {
- Some(v) => size += Size { raw: v.size.bytes() },
+ Some(v) => size += v.size,
None => {}
}
}
@@ -696,6 +698,7 @@ impl<'a, Ty> FnAbi<'a, Ty> {
"amdgpu" => amdgpu::compute_abi_info(cx, self),
"arm" => arm::compute_abi_info(cx, self),
"avr" => avr::compute_abi_info(self),
+ "loongarch64" => loongarch::compute_abi_info(cx, self),
"m68k" => m68k::compute_abi_info(self),
"mips" => mips::compute_abi_info(cx, self),
"mips64" => mips64::compute_abi_info(cx, self),
@@ -735,6 +738,33 @@ impl<'a, Ty> FnAbi<'a, Ty> {
}
}
+impl FromStr for Conv {
+ type Err = String;
+
+ fn from_str(s: &str) -> Result<Self, Self::Err> {
+ match s {
+ "C" => Ok(Conv::C),
+ "Rust" => Ok(Conv::Rust),
+ "RustCold" => Ok(Conv::Rust),
+ "ArmAapcs" => Ok(Conv::ArmAapcs),
+ "CCmseNonSecureCall" => Ok(Conv::CCmseNonSecureCall),
+ "Msp430Intr" => Ok(Conv::Msp430Intr),
+ "PtxKernel" => Ok(Conv::PtxKernel),
+ "X86Fastcall" => Ok(Conv::X86Fastcall),
+ "X86Intr" => Ok(Conv::X86Intr),
+ "X86Stdcall" => Ok(Conv::X86Stdcall),
+ "X86ThisCall" => Ok(Conv::X86ThisCall),
+ "X86VectorCall" => Ok(Conv::X86VectorCall),
+ "X86_64SysV" => Ok(Conv::X86_64SysV),
+ "X86_64Win64" => Ok(Conv::X86_64Win64),
+ "AmdGpuKernel" => Ok(Conv::AmdGpuKernel),
+ "AvrInterrupt" => Ok(Conv::AvrInterrupt),
+ "AvrNonBlockingInterrupt" => Ok(Conv::AvrNonBlockingInterrupt),
+ _ => Err(format!("'{}' is not a valid value for entry function call convention.", s)),
+ }
+ }
+}
+
// Some types are used a lot. Make sure they don't unintentionally get bigger.
#[cfg(all(target_arch = "x86_64", target_pointer_width = "64"))]
mod size_asserts {
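The new `FromStr` impl above accepts the exact variant names and rejects anything else with a descriptive error. A small usage sketch (it assumes `rustc_target` as a dependency so that `Conv` is in scope; the option that would feed such a string in is not part of this hunk):

    use rustc_target::abi::call::Conv;
    use std::str::FromStr;

    fn parse_conv(s: &str) -> Result<Conv, String> {
        // Delegates to the FromStr impl added above.
        Conv::from_str(s)
    }

    // parse_conv("C")        -> Ok(Conv::C)
    // parse_conv("RustCold") -> Ok(Conv::Rust)   (note: "RustCold" maps to Conv::Rust here)
    // parse_conv("stdcall")  -> Err(..) with a message naming the rejected value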
diff --git a/compiler/rustc_target/src/abi/call/sparc64.rs b/compiler/rustc_target/src/abi/call/sparc64.rs
index 1b74959ad..c8b6ac5ae 100644
--- a/compiler/rustc_target/src/abi/call/sparc64.rs
+++ b/compiler/rustc_target/src/abi/call/sparc64.rs
@@ -78,7 +78,7 @@ fn arg_scalar_pair<C>(
where
C: HasDataLayout,
{
- data = arg_scalar(cx, &scalar1, offset, data);
+ data = arg_scalar(cx, scalar1, offset, data);
match (scalar1.primitive(), scalar2.primitive()) {
(abi::F32, _) => offset += Reg::f32().size,
(_, abi::F64) => offset += Reg::f64().size,
@@ -87,10 +87,10 @@ where
_ => {}
}
- if (offset.raw % 4) != 0 && scalar2.primitive().is_float() {
- offset.raw += 4 - (offset.raw % 4);
+ if (offset.bytes() % 4) != 0 && scalar2.primitive().is_float() {
+ offset += Size::from_bytes(4 - (offset.bytes() % 4));
}
- data = arg_scalar(cx, &scalar2, offset, data);
+ data = arg_scalar(cx, scalar2, offset, data);
return data;
}
@@ -169,14 +169,14 @@ where
has_float: false,
arg_attribute: ArgAttribute::default(),
},
- Size { raw: 0 },
+ Size::ZERO,
);
if data.has_float {
// Structure { float, int, int } doesn't like to be handled like
// { float, long int }. Other way around it doesn't mind.
if data.last_offset < arg.layout.size
- && (data.last_offset.raw % 8) != 0
+ && (data.last_offset.bytes() % 8) != 0
&& data.prefix_index < data.prefix.len()
{
data.prefix[data.prefix_index] = Some(Reg::i32());
@@ -185,7 +185,7 @@ where
}
let mut rest_size = arg.layout.size - data.last_offset;
- if (rest_size.raw % 8) != 0 && data.prefix_index < data.prefix.len() {
+ if (rest_size.bytes() % 8) != 0 && data.prefix_index < data.prefix.len() {
data.prefix[data.prefix_index] = Some(Reg::i32());
rest_size = rest_size - Reg::i32().size;
}
@@ -214,13 +214,13 @@ where
C: HasDataLayout,
{
if !fn_abi.ret.is_ignore() {
- classify_arg(cx, &mut fn_abi.ret, Size { raw: 32 });
+ classify_arg(cx, &mut fn_abi.ret, Size::from_bytes(32));
}
for arg in fn_abi.args.iter_mut() {
if arg.is_ignore() {
continue;
}
- classify_arg(cx, arg, Size { raw: 16 });
+ classify_arg(cx, arg, Size::from_bytes(16));
}
}
diff --git a/compiler/rustc_target/src/abi/mod.rs b/compiler/rustc_target/src/abi/mod.rs
index 7171ca7bf..53c9878ab 100644
--- a/compiler/rustc_target/src/abi/mod.rs
+++ b/compiler/rustc_target/src/abi/mod.rs
@@ -2,284 +2,16 @@ pub use Integer::*;
pub use Primitive::*;
use crate::json::{Json, ToJson};
-use crate::spec::Target;
-use std::convert::{TryFrom, TryInto};
use std::fmt;
-use std::iter::Step;
-use std::num::{NonZeroUsize, ParseIntError};
-use std::ops::{Add, AddAssign, Deref, Mul, RangeInclusive, Sub};
-use std::str::FromStr;
+use std::ops::Deref;
use rustc_data_structures::intern::Interned;
-use rustc_index::vec::{Idx, IndexVec};
use rustc_macros::HashStable_Generic;
pub mod call;
-/// Parsed [Data layout](https://llvm.org/docs/LangRef.html#data-layout)
-/// for a target, which contains everything needed to compute layouts.
-pub struct TargetDataLayout {
- pub endian: Endian,
- pub i1_align: AbiAndPrefAlign,
- pub i8_align: AbiAndPrefAlign,
- pub i16_align: AbiAndPrefAlign,
- pub i32_align: AbiAndPrefAlign,
- pub i64_align: AbiAndPrefAlign,
- pub i128_align: AbiAndPrefAlign,
- pub f32_align: AbiAndPrefAlign,
- pub f64_align: AbiAndPrefAlign,
- pub pointer_size: Size,
- pub pointer_align: AbiAndPrefAlign,
- pub aggregate_align: AbiAndPrefAlign,
-
- /// Alignments for vector types.
- pub vector_align: Vec<(Size, AbiAndPrefAlign)>,
-
- pub instruction_address_space: AddressSpace,
-
- /// Minimum size of #[repr(C)] enums (default I32 bits)
- pub c_enum_min_size: Integer,
-}
-
-impl Default for TargetDataLayout {
- /// Creates an instance of `TargetDataLayout`.
- fn default() -> TargetDataLayout {
- let align = |bits| Align::from_bits(bits).unwrap();
- TargetDataLayout {
- endian: Endian::Big,
- i1_align: AbiAndPrefAlign::new(align(8)),
- i8_align: AbiAndPrefAlign::new(align(8)),
- i16_align: AbiAndPrefAlign::new(align(16)),
- i32_align: AbiAndPrefAlign::new(align(32)),
- i64_align: AbiAndPrefAlign { abi: align(32), pref: align(64) },
- i128_align: AbiAndPrefAlign { abi: align(32), pref: align(64) },
- f32_align: AbiAndPrefAlign::new(align(32)),
- f64_align: AbiAndPrefAlign::new(align(64)),
- pointer_size: Size::from_bits(64),
- pointer_align: AbiAndPrefAlign::new(align(64)),
- aggregate_align: AbiAndPrefAlign { abi: align(0), pref: align(64) },
- vector_align: vec![
- (Size::from_bits(64), AbiAndPrefAlign::new(align(64))),
- (Size::from_bits(128), AbiAndPrefAlign::new(align(128))),
- ],
- instruction_address_space: AddressSpace::DATA,
- c_enum_min_size: Integer::I32,
- }
- }
-}
-
-pub enum TargetDataLayoutErrors<'a> {
- InvalidAddressSpace { addr_space: &'a str, cause: &'a str, err: ParseIntError },
- InvalidBits { kind: &'a str, bit: &'a str, cause: &'a str, err: ParseIntError },
- MissingAlignment { cause: &'a str },
- InvalidAlignment { cause: &'a str, err: String },
- InconsistentTargetArchitecture { dl: &'a str, target: &'a str },
- InconsistentTargetPointerWidth { pointer_size: u64, target: u32 },
- InvalidBitsSize { err: String },
-}
-
-impl TargetDataLayout {
- pub fn parse<'a>(target: &'a Target) -> Result<TargetDataLayout, TargetDataLayoutErrors<'a>> {
- // Parse an address space index from a string.
- let parse_address_space = |s: &'a str, cause: &'a str| {
- s.parse::<u32>().map(AddressSpace).map_err(|err| {
- TargetDataLayoutErrors::InvalidAddressSpace { addr_space: s, cause, err }
- })
- };
-
- // Parse a bit count from a string.
- let parse_bits = |s: &'a str, kind: &'a str, cause: &'a str| {
- s.parse::<u64>().map_err(|err| TargetDataLayoutErrors::InvalidBits {
- kind,
- bit: s,
- cause,
- err,
- })
- };
-
- // Parse a size string.
- let size = |s: &'a str, cause: &'a str| parse_bits(s, "size", cause).map(Size::from_bits);
-
- // Parse an alignment string.
- let align = |s: &[&'a str], cause: &'a str| {
- if s.is_empty() {
- return Err(TargetDataLayoutErrors::MissingAlignment { cause });
- }
- let align_from_bits = |bits| {
- Align::from_bits(bits)
- .map_err(|err| TargetDataLayoutErrors::InvalidAlignment { cause, err })
- };
- let abi = parse_bits(s[0], "alignment", cause)?;
- let pref = s.get(1).map_or(Ok(abi), |pref| parse_bits(pref, "alignment", cause))?;
- Ok(AbiAndPrefAlign { abi: align_from_bits(abi)?, pref: align_from_bits(pref)? })
- };
-
- let mut dl = TargetDataLayout::default();
- let mut i128_align_src = 64;
- for spec in target.data_layout.split('-') {
- let spec_parts = spec.split(':').collect::<Vec<_>>();
-
- match &*spec_parts {
- ["e"] => dl.endian = Endian::Little,
- ["E"] => dl.endian = Endian::Big,
- [p] if p.starts_with('P') => {
- dl.instruction_address_space = parse_address_space(&p[1..], "P")?
- }
- ["a", ref a @ ..] => dl.aggregate_align = align(a, "a")?,
- ["f32", ref a @ ..] => dl.f32_align = align(a, "f32")?,
- ["f64", ref a @ ..] => dl.f64_align = align(a, "f64")?,
- [p @ "p", s, ref a @ ..] | [p @ "p0", s, ref a @ ..] => {
- dl.pointer_size = size(s, p)?;
- dl.pointer_align = align(a, p)?;
- }
- [s, ref a @ ..] if s.starts_with('i') => {
- let Ok(bits) = s[1..].parse::<u64>() else {
- size(&s[1..], "i")?; // For the user error.
- continue;
- };
- let a = align(a, s)?;
- match bits {
- 1 => dl.i1_align = a,
- 8 => dl.i8_align = a,
- 16 => dl.i16_align = a,
- 32 => dl.i32_align = a,
- 64 => dl.i64_align = a,
- _ => {}
- }
- if bits >= i128_align_src && bits <= 128 {
- // Default alignment for i128 is decided by taking the alignment of
- // largest-sized i{64..=128}.
- i128_align_src = bits;
- dl.i128_align = a;
- }
- }
- [s, ref a @ ..] if s.starts_with('v') => {
- let v_size = size(&s[1..], "v")?;
- let a = align(a, s)?;
- if let Some(v) = dl.vector_align.iter_mut().find(|v| v.0 == v_size) {
- v.1 = a;
- continue;
- }
- // No existing entry, add a new one.
- dl.vector_align.push((v_size, a));
- }
- _ => {} // Ignore everything else.
- }
- }
-
- // Perform consistency checks against the Target information.
- if dl.endian != target.endian {
- return Err(TargetDataLayoutErrors::InconsistentTargetArchitecture {
- dl: dl.endian.as_str(),
- target: target.endian.as_str(),
- });
- }
-
- let target_pointer_width: u64 = target.pointer_width.into();
- if dl.pointer_size.bits() != target_pointer_width {
- return Err(TargetDataLayoutErrors::InconsistentTargetPointerWidth {
- pointer_size: dl.pointer_size.bits(),
- target: target.pointer_width,
- });
- }
-
- dl.c_enum_min_size = match Integer::from_size(Size::from_bits(target.c_enum_min_bits)) {
- Ok(bits) => bits,
- Err(err) => return Err(TargetDataLayoutErrors::InvalidBitsSize { err }),
- };
-
- Ok(dl)
- }
-
- /// Returns exclusive upper bound on object size.
- ///
- /// The theoretical maximum object size is defined as the maximum positive `isize` value.
- /// This ensures that the `offset` semantics remain well-defined by allowing it to correctly
- /// index every address within an object along with one byte past the end, along with allowing
- /// `isize` to store the difference between any two pointers into an object.
- ///
- /// The upper bound on 64-bit currently needs to be lower because LLVM uses a 64-bit integer
- /// to represent object size in bits. It would need to be 1 << 61 to account for this, but is
- /// currently conservatively bounded to 1 << 47 as that is enough to cover the current usable
- /// address space on 64-bit ARMv8 and x86_64.
- #[inline]
- pub fn obj_size_bound(&self) -> u64 {
- match self.pointer_size.bits() {
- 16 => 1 << 15,
- 32 => 1 << 31,
- 64 => 1 << 47,
- bits => panic!("obj_size_bound: unknown pointer bit size {}", bits),
- }
- }
-
- #[inline]
- pub fn ptr_sized_integer(&self) -> Integer {
- match self.pointer_size.bits() {
- 16 => I16,
- 32 => I32,
- 64 => I64,
- bits => panic!("ptr_sized_integer: unknown pointer bit size {}", bits),
- }
- }
-
- #[inline]
- pub fn vector_align(&self, vec_size: Size) -> AbiAndPrefAlign {
- for &(size, align) in &self.vector_align {
- if size == vec_size {
- return align;
- }
- }
- // Default to natural alignment, which is what LLVM does.
- // That is, use the size, rounded up to a power of 2.
- AbiAndPrefAlign::new(Align::from_bytes(vec_size.bytes().next_power_of_two()).unwrap())
- }
-}
-
-pub trait HasDataLayout {
- fn data_layout(&self) -> &TargetDataLayout;
-}
-
-impl HasDataLayout for TargetDataLayout {
- #[inline]
- fn data_layout(&self) -> &TargetDataLayout {
- self
- }
-}
-
-/// Endianness of the target, which must match cfg(target-endian).
-#[derive(Copy, Clone, PartialEq)]
-pub enum Endian {
- Little,
- Big,
-}
-
-impl Endian {
- pub fn as_str(&self) -> &'static str {
- match self {
- Self::Little => "little",
- Self::Big => "big",
- }
- }
-}
-
-impl fmt::Debug for Endian {
- fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
- f.write_str(self.as_str())
- }
-}
-
-impl FromStr for Endian {
- type Err = String;
-
- fn from_str(s: &str) -> Result<Self, Self::Err> {
- match s {
- "little" => Ok(Self::Little),
- "big" => Ok(Self::Big),
- _ => Err(format!(r#"unknown endian: "{}""#, s)),
- }
- }
-}
+pub use rustc_abi::*;
impl ToJson for Endian {
fn to_json(&self) -> Json {
@@ -287,1025 +19,15 @@ impl ToJson for Endian {
}
}
-/// Size of a type in bytes.
-#[derive(Copy, Clone, PartialEq, Eq, PartialOrd, Ord, Hash, Encodable, Decodable)]
-#[derive(HashStable_Generic)]
-pub struct Size {
- raw: u64,
-}
-
-// This is debug-printed a lot in larger structs, don't waste too much space there
-impl fmt::Debug for Size {
- fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
- write!(f, "Size({} bytes)", self.bytes())
- }
-}
-
-impl Size {
- pub const ZERO: Size = Size { raw: 0 };
-
- /// Rounds `bits` up to the next-higher byte boundary, if `bits` is
- /// not a multiple of 8.
- pub fn from_bits(bits: impl TryInto<u64>) -> Size {
- let bits = bits.try_into().ok().unwrap();
- // Avoid potential overflow from `bits + 7`.
- Size { raw: bits / 8 + ((bits % 8) + 7) / 8 }
- }
-
- #[inline]
- pub fn from_bytes(bytes: impl TryInto<u64>) -> Size {
- let bytes: u64 = bytes.try_into().ok().unwrap();
- Size { raw: bytes }
- }
-
- #[inline]
- pub fn bytes(self) -> u64 {
- self.raw
- }
-
- #[inline]
- pub fn bytes_usize(self) -> usize {
- self.bytes().try_into().unwrap()
- }
-
- #[inline]
- pub fn bits(self) -> u64 {
- #[cold]
- fn overflow(bytes: u64) -> ! {
- panic!("Size::bits: {} bytes in bits doesn't fit in u64", bytes)
- }
-
- self.bytes().checked_mul(8).unwrap_or_else(|| overflow(self.bytes()))
- }
-
- #[inline]
- pub fn bits_usize(self) -> usize {
- self.bits().try_into().unwrap()
- }
-
- #[inline]
- pub fn align_to(self, align: Align) -> Size {
- let mask = align.bytes() - 1;
- Size::from_bytes((self.bytes() + mask) & !mask)
- }
-
- #[inline]
- pub fn is_aligned(self, align: Align) -> bool {
- let mask = align.bytes() - 1;
- self.bytes() & mask == 0
- }
-
- #[inline]
- pub fn checked_add<C: HasDataLayout>(self, offset: Size, cx: &C) -> Option<Size> {
- let dl = cx.data_layout();
-
- let bytes = self.bytes().checked_add(offset.bytes())?;
-
- if bytes < dl.obj_size_bound() { Some(Size::from_bytes(bytes)) } else { None }
- }
-
- #[inline]
- pub fn checked_mul<C: HasDataLayout>(self, count: u64, cx: &C) -> Option<Size> {
- let dl = cx.data_layout();
-
- let bytes = self.bytes().checked_mul(count)?;
- if bytes < dl.obj_size_bound() { Some(Size::from_bytes(bytes)) } else { None }
- }
-
- /// Truncates `value` to `self` bits and then sign-extends it to 128 bits
- /// (i.e., if it is negative, fill with 1's on the left).
- #[inline]
- pub fn sign_extend(self, value: u128) -> u128 {
- let size = self.bits();
- if size == 0 {
- // Truncated until nothing is left.
- return 0;
- }
- // Sign-extend it.
- let shift = 128 - size;
- // Shift the unsigned value to the left, then shift back to the right as signed
- // (essentially fills with sign bit on the left).
- (((value << shift) as i128) >> shift) as u128
- }
-
- /// Truncates `value` to `self` bits.
- #[inline]
- pub fn truncate(self, value: u128) -> u128 {
- let size = self.bits();
- if size == 0 {
- // Truncated until nothing is left.
- return 0;
- }
- let shift = 128 - size;
- // Truncate (shift left to drop out leftover values, shift right to fill with zeroes).
- (value << shift) >> shift
- }
-
- #[inline]
- pub fn signed_int_min(&self) -> i128 {
- self.sign_extend(1_u128 << (self.bits() - 1)) as i128
- }
-
- #[inline]
- pub fn signed_int_max(&self) -> i128 {
- i128::MAX >> (128 - self.bits())
- }
-
- #[inline]
- pub fn unsigned_int_max(&self) -> u128 {
- u128::MAX >> (128 - self.bits())
- }
-}
-
-// Panicking addition, subtraction and multiplication for convenience.
-// Avoid during layout computation, return `LayoutError` instead.
-
-impl Add for Size {
- type Output = Size;
- #[inline]
- fn add(self, other: Size) -> Size {
- Size::from_bytes(self.bytes().checked_add(other.bytes()).unwrap_or_else(|| {
- panic!("Size::add: {} + {} doesn't fit in u64", self.bytes(), other.bytes())
- }))
- }
-}
-
-impl Sub for Size {
- type Output = Size;
- #[inline]
- fn sub(self, other: Size) -> Size {
- Size::from_bytes(self.bytes().checked_sub(other.bytes()).unwrap_or_else(|| {
- panic!("Size::sub: {} - {} would result in negative size", self.bytes(), other.bytes())
- }))
- }
-}
-
-impl Mul<Size> for u64 {
- type Output = Size;
- #[inline]
- fn mul(self, size: Size) -> Size {
- size * self
- }
-}
-
-impl Mul<u64> for Size {
- type Output = Size;
- #[inline]
- fn mul(self, count: u64) -> Size {
- match self.bytes().checked_mul(count) {
- Some(bytes) => Size::from_bytes(bytes),
- None => panic!("Size::mul: {} * {} doesn't fit in u64", self.bytes(), count),
- }
- }
-}
-
-impl AddAssign for Size {
- #[inline]
- fn add_assign(&mut self, other: Size) {
- *self = *self + other;
- }
-}
-
-impl Step for Size {
- #[inline]
- fn steps_between(start: &Self, end: &Self) -> Option<usize> {
- u64::steps_between(&start.bytes(), &end.bytes())
- }
-
- #[inline]
- fn forward_checked(start: Self, count: usize) -> Option<Self> {
- u64::forward_checked(start.bytes(), count).map(Self::from_bytes)
- }
-
- #[inline]
- fn forward(start: Self, count: usize) -> Self {
- Self::from_bytes(u64::forward(start.bytes(), count))
- }
-
- #[inline]
- unsafe fn forward_unchecked(start: Self, count: usize) -> Self {
- Self::from_bytes(u64::forward_unchecked(start.bytes(), count))
- }
-
- #[inline]
- fn backward_checked(start: Self, count: usize) -> Option<Self> {
- u64::backward_checked(start.bytes(), count).map(Self::from_bytes)
- }
-
- #[inline]
- fn backward(start: Self, count: usize) -> Self {
- Self::from_bytes(u64::backward(start.bytes(), count))
- }
-
- #[inline]
- unsafe fn backward_unchecked(start: Self, count: usize) -> Self {
- Self::from_bytes(u64::backward_unchecked(start.bytes(), count))
- }
-}
-
-/// Alignment of a type in bytes (always a power of two).
-#[derive(Copy, Clone, PartialEq, Eq, PartialOrd, Ord, Hash, Encodable, Decodable)]
-#[derive(HashStable_Generic)]
-pub struct Align {
- pow2: u8,
-}
-
-// This is debug-printed a lot in larger structs, don't waste too much space there
-impl fmt::Debug for Align {
- fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
- write!(f, "Align({} bytes)", self.bytes())
- }
-}
-
-impl Align {
- pub const ONE: Align = Align { pow2: 0 };
- pub const MAX: Align = Align { pow2: 29 };
-
- #[inline]
- pub fn from_bits(bits: u64) -> Result<Align, String> {
- Align::from_bytes(Size::from_bits(bits).bytes())
- }
-
- #[inline]
- pub fn from_bytes(align: u64) -> Result<Align, String> {
- // Treat an alignment of 0 bytes like 1-byte alignment.
- if align == 0 {
- return Ok(Align::ONE);
- }
-
- #[cold]
- fn not_power_of_2(align: u64) -> String {
- format!("`{}` is not a power of 2", align)
- }
-
- #[cold]
- fn too_large(align: u64) -> String {
- format!("`{}` is too large", align)
- }
-
- let mut bytes = align;
- let mut pow2: u8 = 0;
- while (bytes & 1) == 0 {
- pow2 += 1;
- bytes >>= 1;
- }
- if bytes != 1 {
- return Err(not_power_of_2(align));
- }
- if pow2 > Self::MAX.pow2 {
- return Err(too_large(align));
- }
-
- Ok(Align { pow2 })
- }
-
- #[inline]
- pub fn bytes(self) -> u64 {
- 1 << self.pow2
- }
-
- #[inline]
- pub fn bits(self) -> u64 {
- self.bytes() * 8
- }
-
- /// Computes the best alignment possible for the given offset
- /// (the largest power of two that the offset is a multiple of).
- ///
- /// N.B., for an offset of `0`, this happens to return `2^64`.
- #[inline]
- pub fn max_for_offset(offset: Size) -> Align {
- Align { pow2: offset.bytes().trailing_zeros() as u8 }
- }
-
- /// Lower the alignment, if necessary, such that the given offset
- /// is aligned to it (the offset is a multiple of the alignment).
- #[inline]
- pub fn restrict_for_offset(self, offset: Size) -> Align {
- self.min(Align::max_for_offset(offset))
- }
-}
-
-/// A pair of alignments, ABI-mandated and preferred.
-#[derive(Copy, Clone, PartialEq, Eq, Hash, Debug)]
-#[derive(HashStable_Generic)]
-pub struct AbiAndPrefAlign {
- pub abi: Align,
- pub pref: Align,
-}
-
-impl AbiAndPrefAlign {
- #[inline]
- pub fn new(align: Align) -> AbiAndPrefAlign {
- AbiAndPrefAlign { abi: align, pref: align }
- }
-
- #[inline]
- pub fn min(self, other: AbiAndPrefAlign) -> AbiAndPrefAlign {
- AbiAndPrefAlign { abi: self.abi.min(other.abi), pref: self.pref.min(other.pref) }
- }
-
- #[inline]
- pub fn max(self, other: AbiAndPrefAlign) -> AbiAndPrefAlign {
- AbiAndPrefAlign { abi: self.abi.max(other.abi), pref: self.pref.max(other.pref) }
- }
-}
-
-/// Integers, also used for enum discriminants.
-#[derive(Copy, Clone, PartialEq, Eq, PartialOrd, Ord, Hash, Debug, HashStable_Generic)]
-pub enum Integer {
- I8,
- I16,
- I32,
- I64,
- I128,
-}
-
-impl Integer {
- #[inline]
- pub fn size(self) -> Size {
- match self {
- I8 => Size::from_bytes(1),
- I16 => Size::from_bytes(2),
- I32 => Size::from_bytes(4),
- I64 => Size::from_bytes(8),
- I128 => Size::from_bytes(16),
- }
- }
-
- pub fn align<C: HasDataLayout>(self, cx: &C) -> AbiAndPrefAlign {
- let dl = cx.data_layout();
-
- match self {
- I8 => dl.i8_align,
- I16 => dl.i16_align,
- I32 => dl.i32_align,
- I64 => dl.i64_align,
- I128 => dl.i128_align,
- }
- }
-
- /// Finds the smallest Integer type which can represent the signed value.
- #[inline]
- pub fn fit_signed(x: i128) -> Integer {
- match x {
- -0x0000_0000_0000_0080..=0x0000_0000_0000_007f => I8,
- -0x0000_0000_0000_8000..=0x0000_0000_0000_7fff => I16,
- -0x0000_0000_8000_0000..=0x0000_0000_7fff_ffff => I32,
- -0x8000_0000_0000_0000..=0x7fff_ffff_ffff_ffff => I64,
- _ => I128,
- }
- }
-
- /// Finds the smallest Integer type which can represent the unsigned value.
- #[inline]
- pub fn fit_unsigned(x: u128) -> Integer {
- match x {
- 0..=0x0000_0000_0000_00ff => I8,
- 0..=0x0000_0000_0000_ffff => I16,
- 0..=0x0000_0000_ffff_ffff => I32,
- 0..=0xffff_ffff_ffff_ffff => I64,
- _ => I128,
- }
- }
-
- /// Finds the smallest integer with the given alignment.
- pub fn for_align<C: HasDataLayout>(cx: &C, wanted: Align) -> Option<Integer> {
- let dl = cx.data_layout();
-
- for candidate in [I8, I16, I32, I64, I128] {
- if wanted == candidate.align(dl).abi && wanted.bytes() == candidate.size().bytes() {
- return Some(candidate);
- }
- }
- None
- }
-
- /// Find the largest integer with the given alignment or less.
- pub fn approximate_align<C: HasDataLayout>(cx: &C, wanted: Align) -> Integer {
- let dl = cx.data_layout();
-
- // FIXME(eddyb) maybe include I128 in the future, when it works everywhere.
- for candidate in [I64, I32, I16] {
- if wanted >= candidate.align(dl).abi && wanted.bytes() >= candidate.size().bytes() {
- return candidate;
- }
- }
- I8
- }
-
- // FIXME(eddyb) consolidate this and other methods that find the appropriate
- // `Integer` given some requirements.
- #[inline]
- fn from_size(size: Size) -> Result<Self, String> {
- match size.bits() {
- 8 => Ok(Integer::I8),
- 16 => Ok(Integer::I16),
- 32 => Ok(Integer::I32),
- 64 => Ok(Integer::I64),
- 128 => Ok(Integer::I128),
- _ => Err(format!("rust does not support integers with {} bits", size.bits())),
- }
- }
-}
-
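`fit_signed` and `fit_unsigned` pick the narrowest `Integer` whose range still contains the value, which is how enum discriminants end up with the smallest possible tag. A runnable sketch of the signed case, returning a bit width instead of the `Integer` enum (illustrative stand-in, not rustc API):

// Mirrors the range checks in `Integer::fit_signed`, returning bits.
fn fit_signed_bits(x: i128) -> u32 {
    match x {
        -0x80..=0x7f => 8,
        -0x8000..=0x7fff => 16,
        -0x8000_0000..=0x7fff_ffff => 32,
        -0x8000_0000_0000_0000..=0x7fff_ffff_ffff_ffff => 64,
        _ => 128,
    }
}

fn main() {
    assert_eq!(fit_signed_bits(127), 8);
    assert_eq!(fit_signed_bits(128), 16); // 128 no longer fits in an i8
    assert_eq!(fit_signed_bits(-129), 16); // neither does -129
    assert_eq!(fit_signed_bits(i64::MAX as i128 + 1), 128);
}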
-/// Fundamental unit of memory access and layout.
-#[derive(Copy, Clone, PartialEq, Eq, Hash, Debug, HashStable_Generic)]
-pub enum Primitive {
- /// The `bool` is the signedness of the `Integer` type.
- ///
- /// One would think we would not care about such details this low down,
- /// but some ABIs are described in terms of C types and ISAs where the
- /// integer arithmetic is done on {sign,zero}-extended registers, e.g.
- /// a negative integer passed by zero-extension will appear positive in
- /// the callee, and most operations on it will produce the wrong values.
- Int(Integer, bool),
- F32,
- F64,
- Pointer,
-}
-
-impl Primitive {
- pub fn size<C: HasDataLayout>(self, cx: &C) -> Size {
- let dl = cx.data_layout();
-
- match self {
- Int(i, _) => i.size(),
- F32 => Size::from_bits(32),
- F64 => Size::from_bits(64),
- Pointer => dl.pointer_size,
- }
- }
-
- pub fn align<C: HasDataLayout>(self, cx: &C) -> AbiAndPrefAlign {
- let dl = cx.data_layout();
-
- match self {
- Int(i, _) => i.align(dl),
- F32 => dl.f32_align,
- F64 => dl.f64_align,
- Pointer => dl.pointer_align,
- }
- }
-
- // FIXME(eddyb) remove, it's trivial thanks to `matches!`.
- #[inline]
- pub fn is_float(self) -> bool {
- matches!(self, F32 | F64)
- }
-
- // FIXME(eddyb) remove, it's completely unused.
- #[inline]
- pub fn is_int(self) -> bool {
- matches!(self, Int(..))
- }
-
- #[inline]
- pub fn is_ptr(self) -> bool {
- matches!(self, Pointer)
- }
-}
-
-/// Inclusive wrap-around range of valid values, that is, if
-/// start > end, it represents `start..=MAX`,
-/// followed by `0..=end`.
-///
-/// That is, for an i8 primitive, a range of `254..=2` means the following
-/// sequence:
-///
-/// 254 (-2), 255 (-1), 0, 1, 2
-///
-/// This is intended specifically to mirror LLVM’s `!range` metadata semantics.
-#[derive(Clone, Copy, PartialEq, Eq, Hash)]
-#[derive(HashStable_Generic)]
-pub struct WrappingRange {
- pub start: u128,
- pub end: u128,
-}
-
-impl WrappingRange {
- pub fn full(size: Size) -> Self {
- Self { start: 0, end: size.unsigned_int_max() }
- }
-
- /// Returns `true` if `v` is contained in the range.
- #[inline(always)]
- pub fn contains(&self, v: u128) -> bool {
- if self.start <= self.end {
- self.start <= v && v <= self.end
- } else {
- self.start <= v || v <= self.end
- }
- }
-
- /// Returns `self` with replaced `start`
- #[inline(always)]
- pub fn with_start(mut self, start: u128) -> Self {
- self.start = start;
- self
- }
-
- /// Returns `self` with replaced `end`
- #[inline(always)]
- pub fn with_end(mut self, end: u128) -> Self {
- self.end = end;
- self
- }
-
- /// Returns `true` if `size` completely fills the range.
- #[inline]
- pub fn is_full_for(&self, size: Size) -> bool {
- let max_value = size.unsigned_int_max();
- debug_assert!(self.start <= max_value && self.end <= max_value);
- self.start == (self.end.wrapping_add(1) & max_value)
- }
-}
-
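The two interesting operations on `WrappingRange` are containment, which has to handle the wrapped case where `start > end`, and `is_full_for`, where a range fills an N-bit integer exactly when `start` is one past `end` modulo 2^N. A standalone sketch with plain `u128` bounds, mirroring the logic above:

// Containment for a wrap-around range `start..=end` that may wrap past MAX.
fn wrapping_contains(start: u128, end: u128, v: u128) -> bool {
    if start <= end { start <= v && v <= end } else { start <= v || v <= end }
}

// A range fills an N-bit integer iff `start` is exactly one past `end` mod 2^N.
fn is_full_for(start: u128, end: u128, bits: u32) -> bool {
    let max_value = if bits >= 128 { u128::MAX } else { (1u128 << bits) - 1 };
    start == (end.wrapping_add(1) & max_value)
}

fn main() {
    // The doc comment's example: for an i8, 254..=2 covers 254, 255, 0, 1, 2.
    assert!(wrapping_contains(254, 2, 255));
    assert!(wrapping_contains(254, 2, 1));
    assert!(!wrapping_contains(254, 2, 3));

    assert!(is_full_for(0, 255, 8)); // 0..=255 fills a u8
    assert!(is_full_for(128, 127, 8)); // so does the wrapped 128..=127
    assert!(!is_full_for(0, 1, 8)); // the `bool` range 0..=1 does not
}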
-impl fmt::Debug for WrappingRange {
- fn fmt(&self, fmt: &mut fmt::Formatter<'_>) -> fmt::Result {
- if self.start > self.end {
- write!(fmt, "(..={}) | ({}..)", self.end, self.start)?;
- } else {
- write!(fmt, "{}..={}", self.start, self.end)?;
- }
- Ok(())
- }
-}
-
-/// Information about one scalar component of a Rust type.
-#[derive(Clone, Copy, PartialEq, Eq, Hash, Debug)]
-#[derive(HashStable_Generic)]
-pub enum Scalar {
- Initialized {
- value: Primitive,
-
- // FIXME(eddyb) always use the shortest range, e.g., by finding
- // the largest space between two consecutive valid values and
- // taking everything else as the (shortest) valid range.
- valid_range: WrappingRange,
- },
- Union {
- /// Even for unions, we need to use the correct registers for the kind of
- /// values inside the union, so we keep the `Primitive` type around. We
- /// also use it to compute the size of the scalar.
- /// However, unions never have niches and even allow undef,
- /// so there is no `valid_range`.
- value: Primitive,
- },
-}
-
-impl Scalar {
- #[inline]
- pub fn is_bool(&self) -> bool {
- matches!(
- self,
- Scalar::Initialized {
- value: Int(I8, false),
- valid_range: WrappingRange { start: 0, end: 1 }
- }
- )
- }
-
- /// Get the primitive representation of this type, ignoring the valid range and whether the
- /// value is allowed to be undefined (due to being a union).
- pub fn primitive(&self) -> Primitive {
- match *self {
- Scalar::Initialized { value, .. } | Scalar::Union { value } => value,
- }
- }
-
- pub fn align(self, cx: &impl HasDataLayout) -> AbiAndPrefAlign {
- self.primitive().align(cx)
- }
-
- pub fn size(self, cx: &impl HasDataLayout) -> Size {
- self.primitive().size(cx)
- }
-
- #[inline]
- pub fn to_union(&self) -> Self {
- Self::Union { value: self.primitive() }
- }
-
- #[inline]
- pub fn valid_range(&self, cx: &impl HasDataLayout) -> WrappingRange {
- match *self {
- Scalar::Initialized { valid_range, .. } => valid_range,
- Scalar::Union { value } => WrappingRange::full(value.size(cx)),
- }
- }
-
- #[inline]
- /// Allows the caller to mutate the valid range. This operation will panic if attempted on a union.
- pub fn valid_range_mut(&mut self) -> &mut WrappingRange {
- match self {
- Scalar::Initialized { valid_range, .. } => valid_range,
- Scalar::Union { .. } => panic!("cannot change the valid range of a union"),
- }
- }
-
-    /// Returns `true` if all possible numbers are valid, i.e. `valid_range` covers the whole layout.
- #[inline]
- pub fn is_always_valid<C: HasDataLayout>(&self, cx: &C) -> bool {
- match *self {
- Scalar::Initialized { valid_range, .. } => valid_range.is_full_for(self.size(cx)),
- Scalar::Union { .. } => true,
- }
- }
-
- /// Returns `true` if this type can be left uninit.
- #[inline]
- pub fn is_uninit_valid(&self) -> bool {
- match *self {
- Scalar::Initialized { .. } => false,
- Scalar::Union { .. } => true,
- }
- }
-}
-
-/// Describes how the fields of a type are located in memory.
-#[derive(PartialEq, Eq, Hash, Debug, HashStable_Generic)]
-pub enum FieldsShape {
- /// Scalar primitives and `!`, which never have fields.
- Primitive,
-
- /// All fields start at no offset. The `usize` is the field count.
- Union(NonZeroUsize),
-
- /// Array/vector-like placement, with all fields of identical types.
- Array { stride: Size, count: u64 },
-
- /// Struct-like placement, with precomputed offsets.
- ///
- /// Fields are guaranteed to not overlap, but note that gaps
- /// before, between and after all the fields are NOT always
- /// padding, and as such their contents may not be discarded.
- /// For example, enum variants leave a gap at the start,
- /// where the discriminant field in the enum layout goes.
- Arbitrary {
- /// Offsets for the first byte of each field,
- /// ordered to match the source definition order.
-        /// This vector is not necessarily in increasing order.
- // FIXME(eddyb) use small vector optimization for the common case.
- offsets: Vec<Size>,
-
- /// Maps source order field indices to memory order indices,
- /// depending on how the fields were reordered (if at all).
- /// This is a permutation, with both the source order and the
- /// memory order using the same (0..n) index ranges.
- ///
- /// Note that during computation of `memory_index`, sometimes
- /// it is easier to operate on the inverse mapping (that is,
- /// from memory order to source order), and that is usually
- /// named `inverse_memory_index`.
- ///
- // FIXME(eddyb) build a better abstraction for permutations, if possible.
- // FIXME(camlorn) also consider small vector optimization here.
- memory_index: Vec<u32>,
- },
-}
-
-impl FieldsShape {
- #[inline]
- pub fn count(&self) -> usize {
- match *self {
- FieldsShape::Primitive => 0,
- FieldsShape::Union(count) => count.get(),
- FieldsShape::Array { count, .. } => count.try_into().unwrap(),
- FieldsShape::Arbitrary { ref offsets, .. } => offsets.len(),
- }
- }
-
- #[inline]
- pub fn offset(&self, i: usize) -> Size {
- match *self {
- FieldsShape::Primitive => {
- unreachable!("FieldsShape::offset: `Primitive`s have no fields")
- }
- FieldsShape::Union(count) => {
- assert!(
- i < count.get(),
- "tried to access field {} of union with {} fields",
- i,
- count
- );
- Size::ZERO
- }
- FieldsShape::Array { stride, count } => {
- let i = u64::try_from(i).unwrap();
- assert!(i < count);
- stride * i
- }
- FieldsShape::Arbitrary { ref offsets, .. } => offsets[i],
- }
- }
-
- #[inline]
- pub fn memory_index(&self, i: usize) -> usize {
- match *self {
- FieldsShape::Primitive => {
- unreachable!("FieldsShape::memory_index: `Primitive`s have no fields")
- }
- FieldsShape::Union(_) | FieldsShape::Array { .. } => i,
- FieldsShape::Arbitrary { ref memory_index, .. } => memory_index[i].try_into().unwrap(),
- }
- }
-
- /// Gets source indices of the fields by increasing offsets.
- #[inline]
- pub fn index_by_increasing_offset<'a>(&'a self) -> impl Iterator<Item = usize> + 'a {
- let mut inverse_small = [0u8; 64];
- let mut inverse_big = vec![];
- let use_small = self.count() <= inverse_small.len();
-
- // We have to write this logic twice in order to keep the array small.
- if let FieldsShape::Arbitrary { ref memory_index, .. } = *self {
- if use_small {
- for i in 0..self.count() {
- inverse_small[memory_index[i] as usize] = i as u8;
- }
- } else {
- inverse_big = vec![0; self.count()];
- for i in 0..self.count() {
- inverse_big[memory_index[i] as usize] = i as u32;
- }
- }
- }
-
- (0..self.count()).map(move |i| match *self {
- FieldsShape::Primitive | FieldsShape::Union(_) | FieldsShape::Array { .. } => i,
- FieldsShape::Arbitrary { .. } => {
- if use_small {
- inverse_small[i] as usize
- } else {
- inverse_big[i] as usize
- }
- }
- })
- }
-}
-
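For `FieldsShape::Arbitrary`, `memory_index[i]` is the memory position of source-order field `i`, and `index_by_increasing_offset` walks the inverse of that permutation so callers can visit fields by ascending offset; the small-array path above is an allocation-avoiding fast path for exactly this inversion. A minimal sketch (illustrative, not the rustc API):

// Invert the source-order -> memory-order permutation.
fn index_by_increasing_offset(memory_index: &[u32]) -> Vec<usize> {
    let mut inverse = vec![0usize; memory_index.len()];
    for (source_order, &mem) in memory_index.iter().enumerate() {
        inverse[mem as usize] = source_order;
    }
    inverse
}

fn main() {
    // memory_index[i] is the memory position of source-order field i:
    // here field 0 is stored second, field 1 third, field 2 first.
    let memory_index = [1u32, 2, 0];
    // Visiting fields by increasing offset yields source indices 2, 0, 1.
    assert_eq!(index_by_increasing_offset(&memory_index), vec![2, 0, 1]);
}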
-/// An identifier that specifies the address space that some operation
-/// should operate on. Special address spaces have an effect on code generation,
-/// depending on the target and the address spaces it implements.
-#[derive(Copy, Clone, Debug, PartialEq, Eq, PartialOrd, Ord)]
-pub struct AddressSpace(pub u32);
-
-impl AddressSpace {
- /// The default address space, corresponding to data space.
- pub const DATA: Self = AddressSpace(0);
-}
-
-/// Describes how values of the type are passed by target ABIs,
-/// in terms of categories of C types there are ABI rules for.
-#[derive(Clone, Copy, PartialEq, Eq, Hash, Debug, HashStable_Generic)]
-pub enum Abi {
- Uninhabited,
- Scalar(Scalar),
- ScalarPair(Scalar, Scalar),
- Vector {
- element: Scalar,
- count: u64,
- },
- Aggregate {
- /// If true, the size is exact, otherwise it's only a lower bound.
- sized: bool,
- },
-}
-
-impl Abi {
- /// Returns `true` if the layout corresponds to an unsized type.
- #[inline]
- pub fn is_unsized(&self) -> bool {
- match *self {
- Abi::Uninhabited | Abi::Scalar(_) | Abi::ScalarPair(..) | Abi::Vector { .. } => false,
- Abi::Aggregate { sized } => !sized,
- }
- }
-
- /// Returns `true` if this is a single signed integer scalar
- #[inline]
- pub fn is_signed(&self) -> bool {
- match self {
- Abi::Scalar(scal) => match scal.primitive() {
- Primitive::Int(_, signed) => signed,
- _ => false,
- },
- _ => panic!("`is_signed` on non-scalar ABI {:?}", self),
- }
- }
-
- /// Returns `true` if this is an uninhabited type
- #[inline]
- pub fn is_uninhabited(&self) -> bool {
- matches!(*self, Abi::Uninhabited)
- }
-
-    /// Returns `true` if this is a scalar type
- #[inline]
- pub fn is_scalar(&self) -> bool {
- matches!(*self, Abi::Scalar(_))
- }
-}
-
rustc_index::newtype_index! {
pub struct VariantIdx {
derive [HashStable_Generic]
}
}
-#[derive(PartialEq, Eq, Hash, Debug, HashStable_Generic)]
-pub enum Variants<'a> {
- /// Single enum variants, structs/tuples, unions, and all non-ADTs.
- Single { index: VariantIdx },
-
- /// Enum-likes with more than one inhabited variant: each variant comes with
- /// a *discriminant* (usually the same as the variant index but the user can
- /// assign explicit discriminant values). That discriminant is encoded
- /// as a *tag* on the machine. The layout of each variant is
- /// a struct, and they all have space reserved for the tag.
- /// For enums, the tag is the sole field of the layout.
- Multiple {
- tag: Scalar,
- tag_encoding: TagEncoding,
- tag_field: usize,
- variants: IndexVec<VariantIdx, Layout<'a>>,
- },
-}
-
-#[derive(PartialEq, Eq, Hash, Debug, HashStable_Generic)]
-pub enum TagEncoding {
- /// The tag directly stores the discriminant, but possibly with a smaller layout
- /// (so converting the tag to the discriminant can require sign extension).
- Direct,
-
- /// Niche (values invalid for a type) encoding the discriminant:
- /// Discriminant and variant index coincide.
- /// The variant `untagged_variant` contains a niche at an arbitrary
- /// offset (field `tag_field` of the enum), which for a variant with
- /// discriminant `d` is set to
- /// `(d - niche_variants.start).wrapping_add(niche_start)`.
- ///
- /// For example, `Option<(usize, &T)>` is represented such that
- /// `None` has a null pointer for the second tuple field, and
- /// `Some` is the identity function (with a non-null reference).
- Niche {
- untagged_variant: VariantIdx,
- niche_variants: RangeInclusive<VariantIdx>,
- niche_start: u128,
- },
-}
-
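A concrete instance of the niche encoding described above is `Option<bool>`: the `bool` payload only uses the values 0 and 1, so `None` can be stored in the same byte. The exact value below follows the formula under the layout rustc currently picks (an observation, not a language guarantee):

fn main() {
    // The niche keeps `Option<bool>` at one byte instead of two.
    assert_eq!(std::mem::size_of::<Option<bool>>(), 1);
    // With `niche_variants.start` = 0 (the `None` index) and `niche_start` = 2
    // (the first value invalid for `bool`), the formula gives
    // (0 - 0).wrapping_add(2) = 2 as the in-memory encoding of `None`.
}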
-#[derive(Clone, Copy, PartialEq, Eq, Hash, Debug, HashStable_Generic)]
-pub struct Niche {
- pub offset: Size,
- pub value: Primitive,
- pub valid_range: WrappingRange,
-}
-
-impl Niche {
- pub fn from_scalar<C: HasDataLayout>(cx: &C, offset: Size, scalar: Scalar) -> Option<Self> {
- let Scalar::Initialized { value, valid_range } = scalar else { return None };
- let niche = Niche { offset, value, valid_range };
- if niche.available(cx) > 0 { Some(niche) } else { None }
- }
-
- pub fn available<C: HasDataLayout>(&self, cx: &C) -> u128 {
- let Self { value, valid_range: v, .. } = *self;
- let size = value.size(cx);
- assert!(size.bits() <= 128);
- let max_value = size.unsigned_int_max();
-
- // Find out how many values are outside the valid range.
- let niche = v.end.wrapping_add(1)..v.start;
- niche.end.wrapping_sub(niche.start) & max_value
- }
-
- pub fn reserve<C: HasDataLayout>(&self, cx: &C, count: u128) -> Option<(u128, Scalar)> {
- assert!(count > 0);
-
- let Self { value, valid_range: v, .. } = *self;
- let size = value.size(cx);
- assert!(size.bits() <= 128);
- let max_value = size.unsigned_int_max();
-
- let niche = v.end.wrapping_add(1)..v.start;
- let available = niche.end.wrapping_sub(niche.start) & max_value;
- if count > available {
- return None;
- }
-
- // Extend the range of valid values being reserved by moving either `v.start` or `v.end` bound.
- // Given an eventual `Option<T>`, we try to maximize the chance for `None` to occupy the niche of zero.
-        // This is accomplished by preferring enums with 2 variants (`count == 1`) and always taking the shortest path to niche zero.
- // Having `None` in niche zero can enable some special optimizations.
- //
- // Bound selection criteria:
- // 1. Select closest to zero given wrapping semantics.
- // 2. Avoid moving past zero if possible.
- //
- // In practice this means that enums with `count > 1` are unlikely to claim niche zero, since they have to fit perfectly.
-        // If niche zero is already reserved, the selection of bounds is of little interest.
- let move_start = |v: WrappingRange| {
- let start = v.start.wrapping_sub(count) & max_value;
- Some((start, Scalar::Initialized { value, valid_range: v.with_start(start) }))
- };
- let move_end = |v: WrappingRange| {
- let start = v.end.wrapping_add(1) & max_value;
- let end = v.end.wrapping_add(count) & max_value;
- Some((start, Scalar::Initialized { value, valid_range: v.with_end(end) }))
- };
- let distance_end_zero = max_value - v.end;
- if v.start > v.end {
- // zero is unavailable because wrapping occurs
- move_end(v)
- } else if v.start <= distance_end_zero {
- if count <= v.start {
- move_start(v)
- } else {
- // moved past zero, use other bound
- move_end(v)
- }
- } else {
- let end = v.end.wrapping_add(count) & max_value;
- let overshot_zero = (1..=v.end).contains(&end);
- if overshot_zero {
- // moved past zero, use other bound
- move_start(v)
- } else {
- move_end(v)
- }
- }
- }
-}
-
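`Niche::available` counts how many values of the underlying primitive fall outside the valid range; that count is the budget `reserve` can hand out to additional enum variants. A standalone sketch of the wrap-around count (plain integer arguments, not the rustc API):

// Illustrative stand-in for `Niche::available`.
fn niche_available(start: u128, end: u128, bits: u32) -> u128 {
    let max_value = if bits >= 128 { u128::MAX } else { (1u128 << bits) - 1 };
    // The invalid values form the wrap-around range `end + 1 .. start`.
    start.wrapping_sub(end.wrapping_add(1)) & max_value
}

fn main() {
    // `bool` is an 8-bit scalar valid for 0..=1, leaving 254 niche values.
    assert_eq!(niche_available(0, 1, 8), 254);
    // A range covering all of u8 leaves no niche at all.
    assert_eq!(niche_available(0, 255, 8), 0);
}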
-#[derive(PartialEq, Eq, Hash, HashStable_Generic)]
-pub struct LayoutS<'a> {
- /// Says where the fields are located within the layout.
- pub fields: FieldsShape,
-
- /// Encodes information about multi-variant layouts.
- /// Even with `Multiple` variants, a layout still has its own fields! Those are then
- /// shared between all variants. One of them will be the discriminant,
- /// but e.g. generators can have more.
- ///
- /// To access all fields of this layout, both `fields` and the fields of the active variant
- /// must be taken into account.
- pub variants: Variants<'a>,
-
- /// The `abi` defines how this data is passed between functions, and it defines
- /// value restrictions via `valid_range`.
- ///
- /// Note that this is entirely orthogonal to the recursive structure defined by
- /// `variants` and `fields`; for example, `ManuallyDrop<Result<isize, isize>>` has
- /// `Abi::ScalarPair`! So, even with non-`Aggregate` `abi`, `fields` and `variants`
- /// have to be taken into account to find all fields of this layout.
- pub abi: Abi,
-
- /// The leaf scalar with the largest number of invalid values
- /// (i.e. outside of its `valid_range`), if it exists.
- pub largest_niche: Option<Niche>,
-
- pub align: AbiAndPrefAlign,
- pub size: Size,
-}
-
-impl<'a> LayoutS<'a> {
- pub fn scalar<C: HasDataLayout>(cx: &C, scalar: Scalar) -> Self {
- let largest_niche = Niche::from_scalar(cx, Size::ZERO, scalar);
- let size = scalar.size(cx);
- let align = scalar.align(cx);
- LayoutS {
- variants: Variants::Single { index: VariantIdx::new(0) },
- fields: FieldsShape::Primitive,
- abi: Abi::Scalar(scalar),
- largest_niche,
- size,
- align,
- }
- }
-}
-
-impl<'a> fmt::Debug for LayoutS<'a> {
- fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
-        // This is how `Layout` used to print before it became
- // `Interned<LayoutS>`. We print it like this to avoid having to update
- // expected output in a lot of tests.
- let LayoutS { size, align, abi, fields, largest_niche, variants } = self;
- f.debug_struct("Layout")
- .field("size", size)
- .field("align", align)
- .field("abi", abi)
- .field("fields", fields)
- .field("largest_niche", largest_niche)
- .field("variants", variants)
- .finish()
- }
-}
-
#[derive(Copy, Clone, PartialEq, Eq, Hash, HashStable_Generic)]
#[rustc_pass_by_value]
-pub struct Layout<'a>(pub Interned<'a, LayoutS<'a>>);
+pub struct Layout<'a>(pub Interned<'a, LayoutS<VariantIdx>>);
impl<'a> fmt::Debug for Layout<'a> {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
@@ -1319,7 +41,7 @@ impl<'a> Layout<'a> {
&self.0.0.fields
}
- pub fn variants(self) -> &'a Variants<'a> {
+ pub fn variants(self) -> &'a Variants<VariantIdx> {
&self.0.0.variants
}
@@ -1354,47 +76,12 @@ pub struct TyAndLayout<'a, Ty> {
}
impl<'a, Ty> Deref for TyAndLayout<'a, Ty> {
- type Target = &'a LayoutS<'a>;
- fn deref(&self) -> &&'a LayoutS<'a> {
+ type Target = &'a LayoutS<VariantIdx>;
+ fn deref(&self) -> &&'a LayoutS<VariantIdx> {
&self.layout.0.0
}
}
-#[derive(Copy, Clone, PartialEq, Eq, Debug)]
-pub enum PointerKind {
- /// Most general case, we know no restrictions to tell LLVM.
- SharedMutable,
-
- /// `&T` where `T` contains no `UnsafeCell`, is `dereferenceable`, `noalias` and `readonly`.
- Frozen,
-
- /// `&mut T` which is `dereferenceable` and `noalias` but not `readonly`.
- UniqueBorrowed,
-
- /// `&mut !Unpin`, which is `dereferenceable` but neither `noalias` nor `readonly`.
- UniqueBorrowedPinned,
-
- /// `Box<T>`, which is `noalias` (even on return types, unlike the above) but neither `readonly`
- /// nor `dereferenceable`.
- UniqueOwned,
-}
-
-#[derive(Copy, Clone, Debug)]
-pub struct PointeeInfo {
- pub size: Size,
- pub align: Align,
- pub safe: Option<PointerKind>,
- pub address_space: AddressSpace,
-}
-
-/// Used in `might_permit_raw_init` to indicate the kind of initialisation
-/// that is checked to be valid
-#[derive(Copy, Clone, Debug, PartialEq, Eq)]
-pub enum InitKind {
- Zero,
- UninitMitigated0x01Fill,
-}
-
/// Trait that needs to be implemented by the higher-level type representation
/// (e.g. `rustc_middle::ty::Ty`), to provide `rustc_target::abi` functionality.
pub trait TyAbiInterface<'a, C>: Sized {
@@ -1490,6 +177,11 @@ impl<'a, Ty> TyAndLayout<'a, Ty> {
self.abi.is_unsized()
}
+ #[inline]
+ pub fn is_sized(&self) -> bool {
+ self.abi.is_sized()
+ }
+
/// Returns `true` if the type is a ZST and not unsized.
pub fn is_zst(&self) -> bool {
match self.abi {
diff --git a/compiler/rustc_target/src/json.rs b/compiler/rustc_target/src/json.rs
index b5d926352..75bb76a9d 100644
--- a/compiler/rustc_target/src/json.rs
+++ b/compiler/rustc_target/src/json.rs
@@ -89,3 +89,28 @@ impl<A: ToJson> ToJson for Option<A> {
}
}
}
+
+impl ToJson for crate::abi::call::Conv {
+ fn to_json(&self) -> Json {
+ let s = match self {
+ Self::C => "C",
+ Self::Rust => "Rust",
+ Self::RustCold => "RustCold",
+ Self::ArmAapcs => "ArmAapcs",
+ Self::CCmseNonSecureCall => "CCmseNonSecureCall",
+ Self::Msp430Intr => "Msp430Intr",
+ Self::PtxKernel => "PtxKernel",
+ Self::X86Fastcall => "X86Fastcall",
+ Self::X86Intr => "X86Intr",
+ Self::X86Stdcall => "X86Stdcall",
+ Self::X86ThisCall => "X86ThisCall",
+ Self::X86VectorCall => "X86VectorCall",
+ Self::X86_64SysV => "X86_64SysV",
+ Self::X86_64Win64 => "X86_64Win64",
+ Self::AmdGpuKernel => "AmdGpuKernel",
+ Self::AvrInterrupt => "AvrInterrupt",
+ Self::AvrNonBlockingInterrupt => "AvrNonBlockingInterrupt",
+ };
+ Json::String(s.to_owned())
+ }
+}
diff --git a/compiler/rustc_target/src/lib.rs b/compiler/rustc_target/src/lib.rs
index aaba0d7f0..b69a0a645 100644
--- a/compiler/rustc_target/src/lib.rs
+++ b/compiler/rustc_target/src/lib.rs
@@ -35,10 +35,7 @@ pub mod spec;
#[cfg(test)]
mod tests;
-/// Requirements for a `StableHashingContext` to be used in this crate.
-/// This is a hack to allow using the `HashStable_Generic` derive macro
-/// instead of implementing everything in `rustc_middle`.
-pub trait HashStableContext {}
+pub use rustc_abi::HashStableContext;
/// The name of rustc's own place to organize libraries.
///
diff --git a/compiler/rustc_target/src/spec/aarch64_apple_darwin.rs b/compiler/rustc_target/src/spec/aarch64_apple_darwin.rs
index 6d919a4c2..e72cab629 100644
--- a/compiler/rustc_target/src/spec/aarch64_apple_darwin.rs
+++ b/compiler/rustc_target/src/spec/aarch64_apple_darwin.rs
@@ -1,26 +1,23 @@
+use super::apple_base::{macos_llvm_target, opts, Arch};
use crate::spec::{FramePointer, SanitizerSet, Target, TargetOptions};
pub fn target() -> Target {
- let arch = "arm64";
- let mut base = super::apple_base::opts("macos", arch, "");
+ let arch = Arch::Arm64;
+ let mut base = opts("macos", arch);
base.cpu = "apple-a14".into();
base.max_atomic_width = Some(128);
// FIXME: The leak sanitizer currently fails the tests, see #88132.
base.supported_sanitizers = SanitizerSet::ADDRESS | SanitizerSet::CFI | SanitizerSet::THREAD;
- base.link_env_remove.to_mut().extend(super::apple_base::macos_link_env_remove());
-
- // Clang automatically chooses a more specific target based on
- // MACOSX_DEPLOYMENT_TARGET. To enable cross-language LTO to work
- // correctly, we do too.
- let llvm_target = super::apple_base::macos_llvm_target(arch);
-
Target {
- llvm_target: llvm_target.into(),
+ // Clang automatically chooses a more specific target based on
+ // MACOSX_DEPLOYMENT_TARGET. To enable cross-language LTO to work
+ // correctly, we do too.
+ llvm_target: macos_llvm_target(arch).into(),
pointer_width: 64,
data_layout: "e-m:o-i64:64-i128:128-n32:64-S128".into(),
- arch: "aarch64".into(),
+ arch: arch.target_arch(),
options: TargetOptions {
mcount: "\u{1}mcount".into(),
frame_pointer: FramePointer::NonLeaf,
diff --git a/compiler/rustc_target/src/spec/aarch64_apple_ios.rs b/compiler/rustc_target/src/spec/aarch64_apple_ios.rs
index beb904239..b5f9eb125 100644
--- a/compiler/rustc_target/src/spec/aarch64_apple_ios.rs
+++ b/compiler/rustc_target/src/spec/aarch64_apple_ios.rs
@@ -1,19 +1,17 @@
-use super::apple_sdk_base::{opts, Arch};
+use super::apple_base::{ios_llvm_target, opts, Arch};
use crate::spec::{FramePointer, Target, TargetOptions};
pub fn target() -> Target {
- // Clang automatically chooses a more specific target based on
- // IPHONEOS_DEPLOYMENT_TARGET.
- // This is required for the target to pick the right
- // MACH-O commands, so we do too.
- let arch = "arm64";
- let llvm_target = super::apple_base::ios_llvm_target(arch);
-
+ let arch = Arch::Arm64;
Target {
- llvm_target: llvm_target.into(),
+ // Clang automatically chooses a more specific target based on
+ // IPHONEOS_DEPLOYMENT_TARGET.
+ // This is required for the target to pick the right
+ // MACH-O commands, so we do too.
+ llvm_target: ios_llvm_target(arch).into(),
pointer_width: 64,
data_layout: "e-m:o-i64:64-i128:128-n32:64-S128".into(),
- arch: "aarch64".into(),
+ arch: arch.target_arch(),
options: TargetOptions {
features: "+neon,+fp-armv8,+apple-a7".into(),
max_atomic_width: Some(128),
@@ -30,7 +28,7 @@ pub fn target() -> Target {
darwinpcs\0\
-Os\0"
.into(),
- ..opts("ios", Arch::Arm64)
+ ..opts("ios", arch)
},
}
}
diff --git a/compiler/rustc_target/src/spec/aarch64_apple_ios_macabi.rs b/compiler/rustc_target/src/spec/aarch64_apple_ios_macabi.rs
index 2d2671549..0009972cf 100644
--- a/compiler/rustc_target/src/spec/aarch64_apple_ios_macabi.rs
+++ b/compiler/rustc_target/src/spec/aarch64_apple_ios_macabi.rs
@@ -1,17 +1,18 @@
-use super::apple_sdk_base::{opts, Arch};
+use super::apple_base::{opts, Arch};
use crate::spec::{Cc, FramePointer, LinkerFlavor, Lld, Target, TargetOptions};
pub fn target() -> Target {
let llvm_target = "arm64-apple-ios14.0-macabi";
- let mut base = opts("ios", Arch::Arm64_macabi);
+ let arch = Arch::Arm64_macabi;
+ let mut base = opts("ios", arch);
base.add_pre_link_args(LinkerFlavor::Darwin(Cc::Yes, Lld::No), &["-target", llvm_target]);
Target {
llvm_target: llvm_target.into(),
pointer_width: 64,
data_layout: "e-m:o-i64:64-i128:128-n32:64-S128".into(),
- arch: "aarch64".into(),
+ arch: arch.target_arch(),
options: TargetOptions {
features: "+neon,+fp-armv8,+apple-a12".into(),
max_atomic_width: Some(128),
diff --git a/compiler/rustc_target/src/spec/aarch64_apple_ios_sim.rs b/compiler/rustc_target/src/spec/aarch64_apple_ios_sim.rs
index b4e135f66..3374755e2 100644
--- a/compiler/rustc_target/src/spec/aarch64_apple_ios_sim.rs
+++ b/compiler/rustc_target/src/spec/aarch64_apple_ios_sim.rs
@@ -1,21 +1,17 @@
-use super::apple_sdk_base::{opts, Arch};
+use super::apple_base::{ios_sim_llvm_target, opts, Arch};
use crate::spec::{FramePointer, Target, TargetOptions};
pub fn target() -> Target {
- let base = opts("ios", Arch::Arm64_sim);
-
- // Clang automatically chooses a more specific target based on
- // IPHONEOS_DEPLOYMENT_TARGET.
- // This is required for the simulator target to pick the right
- // MACH-O commands, so we do too.
- let arch = "arm64";
- let llvm_target = super::apple_base::ios_sim_llvm_target(arch);
-
+ let arch = Arch::Arm64_sim;
Target {
- llvm_target: llvm_target.into(),
+ // Clang automatically chooses a more specific target based on
+ // IPHONEOS_DEPLOYMENT_TARGET.
+ // This is required for the simulator target to pick the right
+ // MACH-O commands, so we do too.
+ llvm_target: ios_sim_llvm_target(arch).into(),
pointer_width: 64,
data_layout: "e-m:o-i64:64-i128:128-n32:64-S128".into(),
- arch: "aarch64".into(),
+ arch: arch.target_arch(),
options: TargetOptions {
features: "+neon,+fp-armv8,+apple-a7".into(),
max_atomic_width: Some(128),
@@ -32,7 +28,7 @@ pub fn target() -> Target {
darwinpcs\0\
-Os\0"
.into(),
- ..base
+ ..opts("ios", arch)
},
}
}
diff --git a/compiler/rustc_target/src/spec/aarch64_apple_tvos.rs b/compiler/rustc_target/src/spec/aarch64_apple_tvos.rs
index 2e31d16dc..bb7c39ff2 100644
--- a/compiler/rustc_target/src/spec/aarch64_apple_tvos.rs
+++ b/compiler/rustc_target/src/spec/aarch64_apple_tvos.rs
@@ -1,18 +1,19 @@
-use super::apple_sdk_base::{opts, Arch};
+use super::apple_base::{opts, Arch};
use crate::spec::{FramePointer, Target, TargetOptions};
pub fn target() -> Target {
+ let arch = Arch::Arm64;
Target {
llvm_target: "arm64-apple-tvos".into(),
pointer_width: 64,
data_layout: "e-m:o-i64:64-i128:128-n32:64-S128".into(),
- arch: "aarch64".into(),
+ arch: arch.target_arch(),
options: TargetOptions {
features: "+neon,+fp-armv8,+apple-a7".into(),
max_atomic_width: Some(128),
forces_embed_bitcode: true,
frame_pointer: FramePointer::NonLeaf,
- ..opts("tvos", Arch::Arm64)
+ ..opts("tvos", arch)
},
}
}
diff --git a/compiler/rustc_target/src/spec/aarch64_apple_watchos_sim.rs b/compiler/rustc_target/src/spec/aarch64_apple_watchos_sim.rs
index 3059f4214..e4af4127c 100644
--- a/compiler/rustc_target/src/spec/aarch64_apple_watchos_sim.rs
+++ b/compiler/rustc_target/src/spec/aarch64_apple_watchos_sim.rs
@@ -1,21 +1,17 @@
-use super::apple_sdk_base::{opts, Arch};
+use super::apple_base::{opts, watchos_sim_llvm_target, Arch};
use crate::spec::{FramePointer, Target, TargetOptions};
pub fn target() -> Target {
- let base = opts("watchos", Arch::Arm64_sim);
-
- // Clang automatically chooses a more specific target based on
- // WATCHOS_DEPLOYMENT_TARGET.
- // This is required for the simulator target to pick the right
- // MACH-O commands, so we do too.
- let arch = "arm64";
- let llvm_target = super::apple_base::watchos_sim_llvm_target(arch);
-
+ let arch = Arch::Arm64_sim;
Target {
- llvm_target: llvm_target.into(),
+ // Clang automatically chooses a more specific target based on
+ // WATCHOS_DEPLOYMENT_TARGET.
+ // This is required for the simulator target to pick the right
+ // MACH-O commands, so we do too.
+ llvm_target: watchos_sim_llvm_target(arch).into(),
pointer_width: 64,
data_layout: "e-m:o-i64:64-i128:128-n32:64-S128".into(),
- arch: "aarch64".into(),
+ arch: arch.target_arch(),
options: TargetOptions {
features: "+neon,+fp-armv8,+apple-a7".into(),
max_atomic_width: Some(128),
@@ -32,7 +28,7 @@ pub fn target() -> Target {
darwinpcs\0\
-Os\0"
.into(),
- ..base
+ ..opts("watchos", arch)
},
}
}
diff --git a/compiler/rustc_target/src/spec/aarch64_unknown_nto_qnx_710.rs b/compiler/rustc_target/src/spec/aarch64_unknown_nto_qnx_710.rs
new file mode 100644
index 000000000..916b6137b
--- /dev/null
+++ b/compiler/rustc_target/src/spec/aarch64_unknown_nto_qnx_710.rs
@@ -0,0 +1,28 @@
+use super::nto_qnx_base;
+use crate::spec::{Cc, LinkerFlavor, Lld, Target, TargetOptions};
+
+pub fn target() -> Target {
+ Target {
+ llvm_target: "aarch64-unknown-unknown".into(),
+ pointer_width: 64,
+ // from: https://llvm.org/docs/LangRef.html#data-layout
+ // e = little endian
+ // m:e = ELF mangling: Private symbols get a .L prefix
+ // i8:8:32 = 8-bit-integer, minimum_alignment=8, preferred_alignment=32
+ // i16:16:32 = 16-bit-integer, minimum_alignment=16, preferred_alignment=32
+ // i64:64 = 64-bit-integer, minimum_alignment=64, preferred_alignment=64
+ // i128:128 = 128-bit-integer, minimum_alignment=128, preferred_alignment=128
+ // n32:64 = 32 and 64 are native integer widths; Elements of this set are considered to support most general arithmetic operations efficiently.
+ // S128 = 128 bits are the natural alignment of the stack in bits.
+ data_layout: "e-m:e-i8:8:32-i16:16:32-i64:64-i128:128-n32:64-S128".into(),
+ arch: "aarch64".into(),
+ options: TargetOptions {
+ max_atomic_width: Some(128),
+ pre_link_args: TargetOptions::link_args(
+ LinkerFlavor::Gnu(Cc::Yes, Lld::No),
+ &["-Vgcc_ntoaarch64le_cxx"],
+ ),
+ ..nto_qnx_base::opts()
+ },
+ }
+}
diff --git a/compiler/rustc_target/src/spec/abi.rs b/compiler/rustc_target/src/spec/abi.rs
index ce45fa139..cb2a0c04c 100644
--- a/compiler/rustc_target/src/spec/abi.rs
+++ b/compiler/rustc_target/src/spec/abi.rs
@@ -40,6 +40,28 @@ pub enum Abi {
RustCold,
}
+impl Abi {
+ pub fn supports_varargs(self) -> bool {
+ // * C and Cdecl obviously support varargs.
+ // * C can be based on SysV64 or Win64, so they must support varargs.
+ // * EfiApi is based on Win64 or C, so it also supports it.
+ //
+ // * Stdcall does not, because it would be impossible for the callee to clean
+        //     up the arguments. (callee doesn't know how many arguments there are)
+ // * Same for Fastcall, Vectorcall and Thiscall.
+ // * System can become Stdcall, so is also a no-no.
+ // * Other calling conventions are related to hardware or the compiler itself.
+ match self {
+ Self::C { .. }
+ | Self::Cdecl { .. }
+ | Self::Win64 { .. }
+ | Self::SysV64 { .. }
+ | Self::EfiApi => true,
+ _ => false,
+ }
+ }
+}
+
#[derive(Copy, Clone)]
pub struct AbiData {
abi: Abi,
diff --git a/compiler/rustc_target/src/spec/aix_base.rs b/compiler/rustc_target/src/spec/aix_base.rs
new file mode 100644
index 000000000..c71c4ba2c
--- /dev/null
+++ b/compiler/rustc_target/src/spec/aix_base.rs
@@ -0,0 +1,32 @@
+use crate::abi::Endian;
+use crate::spec::{crt_objects, cvs, Cc, CodeModel, LinkOutputKind, LinkerFlavor, TargetOptions};
+
+pub fn opts() -> TargetOptions {
+ TargetOptions {
+ abi: "vec-extabi".into(),
+ code_model: Some(CodeModel::Small),
+ cpu: "pwr7".into(),
+ os: "aix".into(),
+ vendor: "ibm".into(),
+ dynamic_linking: true,
+ endian: Endian::Big,
+ executables: true,
+ archive_format: "aix_big".into(),
+ families: cvs!["unix"],
+ has_rpath: false,
+ has_thread_local: true,
+ crt_static_respected: true,
+ linker_flavor: LinkerFlavor::Unix(Cc::No),
+ linker: Some("ld".into()),
+ eh_frame_header: false,
+ is_like_aix: true,
+ default_dwarf_version: 3,
+ function_sections: true,
+ pre_link_objects: crt_objects::new(&[
+ (LinkOutputKind::DynamicNoPicExe, &["/usr/lib/crt0_64.o", "/usr/lib/crti_64.o"]),
+ (LinkOutputKind::DynamicPicExe, &["/usr/lib/crt0_64.o", "/usr/lib/crti_64.o"]),
+ ]),
+ dll_suffix: ".a".into(),
+ ..Default::default()
+ }
+}
diff --git a/compiler/rustc_target/src/spec/apple/tests.rs b/compiler/rustc_target/src/spec/apple/tests.rs
new file mode 100644
index 000000000..3c90a5e7e
--- /dev/null
+++ b/compiler/rustc_target/src/spec/apple/tests.rs
@@ -0,0 +1,35 @@
+use crate::spec::{
+ aarch64_apple_darwin, aarch64_apple_ios_sim, aarch64_apple_watchos_sim, i686_apple_darwin,
+ x86_64_apple_darwin, x86_64_apple_ios, x86_64_apple_tvos, x86_64_apple_watchos_sim,
+};
+
+#[test]
+fn simulator_targets_set_abi() {
+ let all_sim_targets = [
+ x86_64_apple_ios::target(),
+ x86_64_apple_tvos::target(),
+ x86_64_apple_watchos_sim::target(),
+ aarch64_apple_ios_sim::target(),
+ // Note: There is currently no ARM64 tvOS simulator target
+ aarch64_apple_watchos_sim::target(),
+ ];
+
+ for target in all_sim_targets {
+ assert_eq!(target.abi, "sim")
+ }
+}
+
+#[test]
+fn macos_link_environment_unmodified() {
+ let all_macos_targets = [
+ aarch64_apple_darwin::target(),
+ i686_apple_darwin::target(),
+ x86_64_apple_darwin::target(),
+ ];
+
+ for target in all_macos_targets {
+ // macOS targets should only remove information for cross-compiling, but never
+ // for the host.
+ assert_eq!(target.link_env_remove, crate::spec::cvs!["IPHONEOS_DEPLOYMENT_TARGET"]);
+ }
+}
diff --git a/compiler/rustc_target/src/spec/apple_base.rs b/compiler/rustc_target/src/spec/apple_base.rs
index 40bc59ca1..7f8160b5d 100644
--- a/compiler/rustc_target/src/spec/apple_base.rs
+++ b/compiler/rustc_target/src/spec/apple_base.rs
@@ -3,7 +3,78 @@ use std::{borrow::Cow, env};
use crate::spec::{cvs, Cc, DebuginfoKind, FramePointer, LinkArgs};
use crate::spec::{LinkerFlavor, Lld, SplitDebuginfo, StaticCow, TargetOptions};
-fn pre_link_args(os: &'static str, arch: &'static str, abi: &'static str) -> LinkArgs {
+#[cfg(test)]
+#[path = "apple/tests.rs"]
+mod tests;
+
+use Arch::*;
+#[allow(non_camel_case_types)]
+#[derive(Copy, Clone)]
+pub enum Arch {
+ Armv7,
+ Armv7k,
+ Armv7s,
+ Arm64,
+ Arm64_32,
+ I386,
+ I686,
+ X86_64,
+ X86_64_sim,
+ X86_64_macabi,
+ Arm64_macabi,
+ Arm64_sim,
+}
+
+impl Arch {
+ pub fn target_name(self) -> &'static str {
+ match self {
+ Armv7 => "armv7",
+ Armv7k => "armv7k",
+ Armv7s => "armv7s",
+ Arm64 | Arm64_macabi | Arm64_sim => "arm64",
+ Arm64_32 => "arm64_32",
+ I386 => "i386",
+ I686 => "i686",
+ X86_64 | X86_64_sim | X86_64_macabi => "x86_64",
+ }
+ }
+
+ pub fn target_arch(self) -> Cow<'static, str> {
+ Cow::Borrowed(match self {
+ Armv7 | Armv7k | Armv7s => "arm",
+ Arm64 | Arm64_32 | Arm64_macabi | Arm64_sim => "aarch64",
+ I386 | I686 => "x86",
+ X86_64 | X86_64_sim | X86_64_macabi => "x86_64",
+ })
+ }
+
+ fn target_abi(self) -> &'static str {
+ match self {
+ Armv7 | Armv7k | Armv7s | Arm64 | Arm64_32 | I386 | I686 | X86_64 => "",
+ X86_64_macabi | Arm64_macabi => "macabi",
+ // x86_64-apple-ios is a simulator target, even though it isn't
+            // declared that way in the target name like the other ones...
+ Arm64_sim | X86_64_sim => "sim",
+ }
+ }
+
+ fn target_cpu(self) -> &'static str {
+ match self {
+ Armv7 => "cortex-a8", // iOS7 is supported on iPhone 4 and higher
+ Armv7k => "cortex-a8",
+ Armv7s => "cortex-a9",
+ Arm64 => "apple-a7",
+ Arm64_32 => "apple-s4",
+ I386 | I686 => "yonah",
+ X86_64 | X86_64_sim => "core2",
+ X86_64_macabi => "core2",
+ Arm64_macabi => "apple-a12",
+ Arm64_sim => "apple-a12",
+ }
+ }
+}
+
+fn pre_link_args(os: &'static str, arch: Arch, abi: &'static str) -> LinkArgs {
let platform_name: StaticCow<str> = match abi {
"sim" => format!("{}-simulator", os).into(),
"macabi" => "mac-catalyst".into(),
@@ -19,6 +90,8 @@ fn pre_link_args(os: &'static str, arch: &'static str, abi: &'static str) -> Lin
}
.into();
+ let arch = arch.target_name();
+
let mut args = TargetOptions::link_args(
LinkerFlavor::Darwin(Cc::No, Lld::No),
&["-arch", arch, "-platform_version"],
@@ -35,24 +108,29 @@ fn pre_link_args(os: &'static str, arch: &'static str, abi: &'static str) -> Lin
args
}
-pub fn opts(os: &'static str, arch: &'static str, abi: &'static str) -> TargetOptions {
- // ELF TLS is only available in macOS 10.7+. If you try to compile for 10.6
+pub fn opts(os: &'static str, arch: Arch) -> TargetOptions {
+ // Static TLS is only available in macOS 10.7+. If you try to compile for 10.6
// either the linker will complain if it is used or the binary will end up
// segfaulting at runtime when run on 10.6. Rust by default supports macOS
// 10.7+, but there is a standard environment variable,
// MACOSX_DEPLOYMENT_TARGET, which is used to signal targeting older
// versions of macOS. For example compiling on 10.10 with
// MACOSX_DEPLOYMENT_TARGET set to 10.6 will cause the linker to generate
- // warnings about the usage of ELF TLS.
+ // warnings about the usage of static TLS.
//
- // Here we detect what version is being requested, defaulting to 10.7. ELF
+ // Here we detect what version is being requested, defaulting to 10.7. Static
// TLS is flagged as enabled if it looks to be supported. The architecture
     // only matters for the default deployment target, which is 11.0 for ARM64 and
// 10.7 for everything else.
- let has_thread_local = macos_deployment_target("x86_64") >= (10, 7);
+ let has_thread_local = os == "macos" && macos_deployment_target(Arch::X86_64) >= (10, 7);
+
+ let abi = arch.target_abi();
TargetOptions {
+ abi: abi.into(),
os: os.into(),
+ cpu: arch.target_cpu().into(),
+ link_env_remove: link_env_remove(arch, os),
vendor: "apple".into(),
linker_flavor: LinkerFlavor::Darwin(Cc::Yes, Lld::No),
// macOS has -dead_strip, which doesn't rely on function_sections
@@ -103,46 +181,65 @@ fn deployment_target(var_name: &str) -> Option<(u32, u32)> {
.and_then(|(a, b)| a.parse::<u32>().and_then(|a| b.parse::<u32>().map(|b| (a, b))).ok())
}
-fn macos_default_deployment_target(arch: &str) -> (u32, u32) {
- if arch == "arm64" { (11, 0) } else { (10, 7) }
+fn macos_default_deployment_target(arch: Arch) -> (u32, u32) {
+ // Note: Arm64_sim is not included since macOS has no simulator.
+ if matches!(arch, Arm64 | Arm64_macabi) { (11, 0) } else { (10, 7) }
}
-fn macos_deployment_target(arch: &str) -> (u32, u32) {
+fn macos_deployment_target(arch: Arch) -> (u32, u32) {
deployment_target("MACOSX_DEPLOYMENT_TARGET")
.unwrap_or_else(|| macos_default_deployment_target(arch))
}
-fn macos_lld_platform_version(arch: &str) -> String {
+fn macos_lld_platform_version(arch: Arch) -> String {
let (major, minor) = macos_deployment_target(arch);
format!("{}.{}", major, minor)
}
-pub fn macos_llvm_target(arch: &str) -> String {
+pub fn macos_llvm_target(arch: Arch) -> String {
let (major, minor) = macos_deployment_target(arch);
- format!("{}-apple-macosx{}.{}.0", arch, major, minor)
+ format!("{}-apple-macosx{}.{}.0", arch.target_name(), major, minor)
}
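As the comments in the darwin target files note, the deployment-target version is baked into the LLVM triple so that rustc and Clang agree on it (needed for cross-language LTO to work). A sketch of the resulting string, assuming the arm64 default of 11.0 with no `MACOSX_DEPLOYMENT_TARGET` override (helper name is illustrative):

// Mirrors the format string used by `macos_llvm_target` above.
fn versioned_macos_triple(arch_name: &str, major: u32, minor: u32) -> String {
    format!("{arch_name}-apple-macosx{major}.{minor}.0")
}

fn main() {
    assert_eq!(versioned_macos_triple("arm64", 11, 0), "arm64-apple-macosx11.0.0");
}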
-pub fn macos_link_env_remove() -> Vec<StaticCow<str>> {
- let mut env_remove = Vec::with_capacity(2);
- // Remove the `SDKROOT` environment variable if it's clearly set for the wrong platform, which
- // may occur when we're linking a custom build script while targeting iOS for example.
- if let Ok(sdkroot) = env::var("SDKROOT") {
- if sdkroot.contains("iPhoneOS.platform") || sdkroot.contains("iPhoneSimulator.platform") {
- env_remove.push("SDKROOT".into())
+fn link_env_remove(arch: Arch, os: &'static str) -> StaticCow<[StaticCow<str>]> {
+ // Apple platforms only officially support macOS as a host for any compilation.
+ //
+    // If building for macOS, we go ahead and remove any erroneous environment state
+ // that's only applicable to cross-OS compilation. Always leave anything for the
+ // host OS alone though.
+ if os == "macos" {
+ let mut env_remove = Vec::with_capacity(2);
+ // Remove the `SDKROOT` environment variable if it's clearly set for the wrong platform, which
+ // may occur when we're linking a custom build script while targeting iOS for example.
+ if let Ok(sdkroot) = env::var("SDKROOT") {
+ if sdkroot.contains("iPhoneOS.platform") || sdkroot.contains("iPhoneSimulator.platform")
+ {
+ env_remove.push("SDKROOT".into())
+ }
+ }
+ // Additionally, `IPHONEOS_DEPLOYMENT_TARGET` must not be set when using the Xcode linker at
+ // "/Applications/Xcode.app/Contents/Developer/Toolchains/XcodeDefault.xctoolchain/usr/bin/ld",
+ // although this is apparently ignored when using the linker at "/usr/bin/ld".
+ env_remove.push("IPHONEOS_DEPLOYMENT_TARGET".into());
+ env_remove.into()
+ } else {
+ // Otherwise if cross-compiling for a different OS/SDK, remove any part
+ // of the linking environment that's wrong and reversed.
+ match arch {
+ Armv7 | Armv7k | Armv7s | Arm64 | Arm64_32 | I386 | I686 | X86_64 | X86_64_sim
+ | Arm64_sim => {
+ cvs!["MACOSX_DEPLOYMENT_TARGET"]
+ }
+ X86_64_macabi | Arm64_macabi => cvs!["IPHONEOS_DEPLOYMENT_TARGET"],
}
}
- // Additionally, `IPHONEOS_DEPLOYMENT_TARGET` must not be set when using the Xcode linker at
- // "/Applications/Xcode.app/Contents/Developer/Toolchains/XcodeDefault.xctoolchain/usr/bin/ld",
- // although this is apparently ignored when using the linker at "/usr/bin/ld".
- env_remove.push("IPHONEOS_DEPLOYMENT_TARGET".into());
- env_remove
}
fn ios_deployment_target() -> (u32, u32) {
deployment_target("IPHONEOS_DEPLOYMENT_TARGET").unwrap_or((7, 0))
}
-pub fn ios_llvm_target(arch: &str) -> String {
+pub fn ios_llvm_target(arch: Arch) -> String {
// Modern iOS tooling extracts information about deployment target
// from LC_BUILD_VERSION. This load command will only be emitted when
// we build with a version specific `llvm_target`, with the version
@@ -150,7 +247,7 @@ pub fn ios_llvm_target(arch: &str) -> String {
// to pick it up (since std and core are still built with the fallback
// of version 7.0 and hence emit the old LC_IPHONE_MIN_VERSION).
let (major, minor) = ios_deployment_target();
- format!("{}-apple-ios{}.{}.0", arch, major, minor)
+ format!("{}-apple-ios{}.{}.0", arch.target_name(), major, minor)
}
fn ios_lld_platform_version() -> String {
@@ -158,9 +255,9 @@ fn ios_lld_platform_version() -> String {
format!("{}.{}", major, minor)
}
-pub fn ios_sim_llvm_target(arch: &str) -> String {
+pub fn ios_sim_llvm_target(arch: Arch) -> String {
let (major, minor) = ios_deployment_target();
- format!("{}-apple-ios{}.{}.0-simulator", arch, major, minor)
+ format!("{}-apple-ios{}.{}.0-simulator", arch.target_name(), major, minor)
}
fn tvos_deployment_target() -> (u32, u32) {
@@ -181,7 +278,7 @@ fn watchos_lld_platform_version() -> String {
format!("{}.{}", major, minor)
}
-pub fn watchos_sim_llvm_target(arch: &str) -> String {
+pub fn watchos_sim_llvm_target(arch: Arch) -> String {
let (major, minor) = watchos_deployment_target();
- format!("{}-apple-watchos{}.{}.0-simulator", arch, major, minor)
+ format!("{}-apple-watchos{}.{}.0-simulator", arch.target_name(), major, minor)
}
diff --git a/compiler/rustc_target/src/spec/apple_sdk_base.rs b/compiler/rustc_target/src/spec/apple_sdk_base.rs
deleted file mode 100644
index 49e302676..000000000
--- a/compiler/rustc_target/src/spec/apple_sdk_base.rs
+++ /dev/null
@@ -1,72 +0,0 @@
-use crate::spec::{cvs, TargetOptions};
-use std::borrow::Cow;
-
-use Arch::*;
-#[allow(non_camel_case_types)]
-#[derive(Copy, Clone)]
-pub enum Arch {
- Armv7,
- Armv7k,
- Armv7s,
- Arm64,
- Arm64_32,
- I386,
- X86_64,
- X86_64_macabi,
- Arm64_macabi,
- Arm64_sim,
-}
-
-fn target_arch_name(arch: Arch) -> &'static str {
- match arch {
- Armv7 => "armv7",
- Armv7k => "armv7k",
- Armv7s => "armv7s",
- Arm64 | Arm64_macabi | Arm64_sim => "arm64",
- Arm64_32 => "arm64_32",
- I386 => "i386",
- X86_64 | X86_64_macabi => "x86_64",
- }
-}
-
-fn target_abi(arch: Arch) -> &'static str {
- match arch {
- Armv7 | Armv7k | Armv7s | Arm64 | Arm64_32 | I386 | X86_64 => "",
- X86_64_macabi | Arm64_macabi => "macabi",
- Arm64_sim => "sim",
- }
-}
-
-fn target_cpu(arch: Arch) -> &'static str {
- match arch {
- Armv7 => "cortex-a8", // iOS7 is supported on iPhone 4 and higher
- Armv7k => "cortex-a8",
- Armv7s => "cortex-a9",
- Arm64 => "apple-a7",
- Arm64_32 => "apple-s4",
- I386 => "yonah",
- X86_64 => "core2",
- X86_64_macabi => "core2",
- Arm64_macabi => "apple-a12",
- Arm64_sim => "apple-a12",
- }
-}
-
-fn link_env_remove(arch: Arch) -> Cow<'static, [Cow<'static, str>]> {
- match arch {
- Armv7 | Armv7k | Armv7s | Arm64 | Arm64_32 | I386 | X86_64 | Arm64_sim => {
- cvs!["MACOSX_DEPLOYMENT_TARGET"]
- }
- X86_64_macabi | Arm64_macabi => cvs!["IPHONEOS_DEPLOYMENT_TARGET"],
- }
-}
-
-pub fn opts(os: &'static str, arch: Arch) -> TargetOptions {
- TargetOptions {
- abi: target_abi(arch).into(),
- cpu: target_cpu(arch).into(),
- link_env_remove: link_env_remove(arch),
- has_thread_local: false,
- ..super::apple_base::opts(os, target_arch_name(arch), target_abi(arch))
- }
-}
diff --git a/compiler/rustc_target/src/spec/arm64_32_apple_watchos.rs b/compiler/rustc_target/src/spec/arm64_32_apple_watchos.rs
index cb7f5f2a5..52ee68e75 100644
--- a/compiler/rustc_target/src/spec/arm64_32_apple_watchos.rs
+++ b/compiler/rustc_target/src/spec/arm64_32_apple_watchos.rs
@@ -1,4 +1,4 @@
-use super::apple_sdk_base::{opts, Arch};
+use super::apple_base::{opts, Arch};
use crate::spec::{Target, TargetOptions};
pub fn target() -> Target {
@@ -12,6 +12,8 @@ pub fn target() -> Target {
features: "+neon,+fp-armv8,+apple-a7".into(),
max_atomic_width: Some(128),
forces_embed_bitcode: true,
+ dynamic_linking: false,
+ position_independent_executables: true,
// These arguments are not actually invoked - they just have
// to look right to pass App Store validation.
bitcode_llvm_cmdline: "-triple\0\
diff --git a/compiler/rustc_target/src/spec/armv7_apple_ios.rs b/compiler/rustc_target/src/spec/armv7_apple_ios.rs
index 57fd74a36..3259c8547 100644
--- a/compiler/rustc_target/src/spec/armv7_apple_ios.rs
+++ b/compiler/rustc_target/src/spec/armv7_apple_ios.rs
@@ -1,18 +1,21 @@
-use super::apple_sdk_base::{opts, Arch};
+use super::apple_base::{ios_llvm_target, opts, Arch};
use crate::spec::{Target, TargetOptions};
pub fn target() -> Target {
- let llvm_target = super::apple_base::ios_llvm_target("armv7");
-
+ let arch = Arch::Armv7;
Target {
- llvm_target: llvm_target.into(),
+ // Clang automatically chooses a more specific target based on
+ // IPHONEOS_DEPLOYMENT_TARGET.
+ // This is required for the target to pick the right
+ // MACH-O commands, so we do too.
+ llvm_target: ios_llvm_target(arch).into(),
pointer_width: 32,
data_layout: "e-m:o-p:32:32-Fi8-f64:32:64-v64:32:64-v128:32:128-a:0:32-n32-S32".into(),
- arch: "arm".into(),
+ arch: arch.target_arch(),
options: TargetOptions {
features: "+v7,+vfp3,+neon".into(),
max_atomic_width: Some(64),
- ..opts("ios", Arch::Armv7)
+ ..opts("ios", arch)
},
}
}
diff --git a/compiler/rustc_target/src/spec/armv7k_apple_watchos.rs b/compiler/rustc_target/src/spec/armv7k_apple_watchos.rs
index af5d1c2ff..6e1d00d1f 100644
--- a/compiler/rustc_target/src/spec/armv7k_apple_watchos.rs
+++ b/compiler/rustc_target/src/spec/armv7k_apple_watchos.rs
@@ -1,17 +1,19 @@
-use super::apple_sdk_base::{opts, Arch};
+use super::apple_base::{opts, Arch};
use crate::spec::{Target, TargetOptions};
pub fn target() -> Target {
- let base = opts("watchos", Arch::Armv7k);
+ let arch = Arch::Armv7k;
Target {
llvm_target: "armv7k-apple-watchos".into(),
pointer_width: 32,
data_layout: "e-m:o-p:32:32-Fi8-i64:64-a:0:32-n32-S128".into(),
- arch: "arm".into(),
+ arch: arch.target_arch(),
options: TargetOptions {
features: "+v7,+vfp4,+neon".into(),
max_atomic_width: Some(64),
forces_embed_bitcode: true,
+ dynamic_linking: false,
+ position_independent_executables: true,
// These arguments are not actually invoked - they just have
// to look right to pass App Store validation.
bitcode_llvm_cmdline: "-triple\0\
@@ -22,7 +24,7 @@ pub fn target() -> Target {
darwinpcs\0\
-Os\0"
.into(),
- ..base
+ ..opts("watchos", arch)
},
}
}
diff --git a/compiler/rustc_target/src/spec/armv7s_apple_ios.rs b/compiler/rustc_target/src/spec/armv7s_apple_ios.rs
index cc17265b2..be4bc6758 100644
--- a/compiler/rustc_target/src/spec/armv7s_apple_ios.rs
+++ b/compiler/rustc_target/src/spec/armv7s_apple_ios.rs
@@ -1,16 +1,17 @@
-use super::apple_sdk_base::{opts, Arch};
+use super::apple_base::{opts, Arch};
use crate::spec::{Target, TargetOptions};
pub fn target() -> Target {
+ let arch = Arch::Armv7s;
Target {
llvm_target: "armv7s-apple-ios".into(),
pointer_width: 32,
data_layout: "e-m:o-p:32:32-Fi8-f64:32:64-v64:32:64-v128:32:128-a:0:32-n32-S32".into(),
- arch: "arm".into(),
+ arch: arch.target_arch(),
options: TargetOptions {
features: "+v7,+vfp4,+neon".into(),
max_atomic_width: Some(64),
- ..opts("ios", Arch::Armv7s)
+ ..opts("ios", arch)
},
}
}
diff --git a/compiler/rustc_target/src/spec/i386_apple_ios.rs b/compiler/rustc_target/src/spec/i386_apple_ios.rs
index b85214a9c..581998161 100644
--- a/compiler/rustc_target/src/spec/i386_apple_ios.rs
+++ b/compiler/rustc_target/src/spec/i386_apple_ios.rs
@@ -1,21 +1,23 @@
-use super::apple_sdk_base::{opts, Arch};
+use super::apple_base::{ios_sim_llvm_target, opts, Arch};
use crate::spec::{StackProbeType, Target, TargetOptions};
pub fn target() -> Target {
- let base = opts("ios", Arch::I386);
- let llvm_target = super::apple_base::ios_sim_llvm_target("i386");
-
+ let arch = Arch::I386;
Target {
- llvm_target: llvm_target.into(),
+ // Clang automatically chooses a more specific target based on
+ // IPHONEOS_DEPLOYMENT_TARGET.
+ // This is required for the target to pick the right
+ // MACH-O commands, so we do too.
+ llvm_target: ios_sim_llvm_target(arch).into(),
pointer_width: 32,
data_layout: "e-m:o-p:32:32-p270:32:32-p271:32:32-p272:64:64-\
f64:32:64-f80:128-n8:16:32-S128"
.into(),
- arch: "x86".into(),
+ arch: arch.target_arch(),
options: TargetOptions {
max_atomic_width: Some(64),
stack_probes: StackProbeType::X86,
- ..base
+ ..opts("ios", arch)
},
}
}
diff --git a/compiler/rustc_target/src/spec/i686_apple_darwin.rs b/compiler/rustc_target/src/spec/i686_apple_darwin.rs
index 15607c12e..ad22467ba 100644
--- a/compiler/rustc_target/src/spec/i686_apple_darwin.rs
+++ b/compiler/rustc_target/src/spec/i686_apple_darwin.rs
@@ -1,28 +1,27 @@
+use super::apple_base::{macos_llvm_target, opts, Arch};
use crate::spec::{Cc, FramePointer, LinkerFlavor, Lld, StackProbeType, Target, TargetOptions};
pub fn target() -> Target {
- // ld64 only understand i386 and not i686
- let mut base = super::apple_base::opts("macos", "i386", "");
- base.cpu = "yonah".into();
+ // ld64 only understands i386 and not i686
+ let arch = Arch::I386;
+ let mut base = opts("macos", arch);
base.max_atomic_width = Some(64);
base.add_pre_link_args(LinkerFlavor::Darwin(Cc::Yes, Lld::No), &["-m32"]);
- base.link_env_remove.to_mut().extend(super::apple_base::macos_link_env_remove());
base.stack_probes = StackProbeType::X86;
base.frame_pointer = FramePointer::Always;
- // Clang automatically chooses a more specific target based on
- // MACOSX_DEPLOYMENT_TARGET. To enable cross-language LTO to work
- // correctly, we do too.
- let arch = "i686";
- let llvm_target = super::apple_base::macos_llvm_target(&arch);
-
Target {
- llvm_target: llvm_target.into(),
+ // Clang automatically chooses a more specific target based on
+ // MACOSX_DEPLOYMENT_TARGET. To enable cross-language LTO to work
+ // correctly, we do too.
+ //
+ // While ld64 doesn't understand i686, LLVM does.
+ llvm_target: macos_llvm_target(Arch::I686).into(),
pointer_width: 32,
data_layout: "e-m:o-p:32:32-p270:32:32-p271:32:32-p272:64:64-\
f64:32:64-f80:128-n8:16:32-S128"
.into(),
- arch: "x86".into(),
+ arch: arch.target_arch(),
options: TargetOptions { mcount: "\u{1}mcount".into(), ..base },
}
}
diff --git a/compiler/rustc_target/src/spec/linux_kernel_base.rs b/compiler/rustc_target/src/spec/linux_kernel_base.rs
deleted file mode 100644
index f41533a95..000000000
--- a/compiler/rustc_target/src/spec/linux_kernel_base.rs
+++ /dev/null
@@ -1,18 +0,0 @@
-use crate::spec::TargetOptions;
-use crate::spec::{FramePointer, PanicStrategy, RelocModel, RelroLevel, StackProbeType};
-
-pub fn opts() -> TargetOptions {
- TargetOptions {
- env: "gnu".into(),
- disable_redzone: true,
- panic_strategy: PanicStrategy::Abort,
- stack_probes: StackProbeType::X86,
- frame_pointer: FramePointer::Always,
- position_independent_executables: true,
- needs_plt: true,
- relro_level: RelroLevel::Full,
- relocation_model: RelocModel::Static,
-
- ..Default::default()
- }
-}
diff --git a/compiler/rustc_target/src/spec/mipsel_sony_psx.rs b/compiler/rustc_target/src/spec/mipsel_sony_psx.rs
new file mode 100644
index 000000000..12a66efdd
--- /dev/null
+++ b/compiler/rustc_target/src/spec/mipsel_sony_psx.rs
@@ -0,0 +1,37 @@
+use crate::spec::{cvs, Cc, LinkerFlavor, Lld, PanicStrategy, RelocModel, Target, TargetOptions};
+
+pub fn target() -> Target {
+ Target {
+ llvm_target: "mipsel-sony-psx".into(),
+ pointer_width: 32,
+ data_layout: "e-m:m-p:32:32-i8:8:32-i16:16:32-i64:64-n32-S64".into(),
+ arch: "mips".into(),
+
+ options: TargetOptions {
+ os: "none".into(),
+ env: "psx".into(),
+ vendor: "sony".into(),
+ linker_flavor: LinkerFlavor::Gnu(Cc::No, Lld::Yes),
+ cpu: "mips1".into(),
+ executables: true,
+ linker: Some("rust-lld".into()),
+ relocation_model: RelocModel::Static,
+ exe_suffix: ".exe".into(),
+
+ // PSX doesn't natively support floats.
+ features: "+soft-float".into(),
+
+ // This should be 16 bits, but LLVM incorrectly tries emitting MIPS-II SYNC instructions
+ // for atomic loads and stores. This crashes rustc so we have to disable the Atomic* API
+ // until this is fixed upstream. See https://reviews.llvm.org/D122427#3420144 for more
+ // info.
+ max_atomic_width: Some(0),
+
+ // PSX does not support trap-on-condition instructions.
+ llvm_args: cvs!["-mno-check-zero-division"],
+ llvm_abiname: "o32".into(),
+ panic_strategy: PanicStrategy::Abort,
+ ..Default::default()
+ },
+ }
+}
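The `max_atomic_width: Some(0)` workaround above means `cfg(target_has_atomic = "...")` is false for every width on this target, so the `core::sync::atomic` types are simply absent from its sysroot. A minimal sketch of how downstream no_std code can stay portable across this (the function name and the `Cell` fallback are illustrative, not part of this patch):

    #[cfg(target_has_atomic = "16")]
    fn bump(counter: &core::sync::atomic::AtomicU16) {
        counter.fetch_add(1, core::sync::atomic::Ordering::Relaxed);
    }

    #[cfg(not(target_has_atomic = "16"))]
    fn bump(counter: &core::cell::Cell<u16>) {
        // No Atomic* types exist when max_atomic_width is 0; real code would
        // wrap this in a critical section or mask interrupts first.
        counter.set(counter.get().wrapping_add(1));
    }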
diff --git a/compiler/rustc_target/src/spec/mod.rs b/compiler/rustc_target/src/spec/mod.rs
index 8909cf33a..d05b8aa42 100644
--- a/compiler/rustc_target/src/spec/mod.rs
+++ b/compiler/rustc_target/src/spec/mod.rs
@@ -34,7 +34,8 @@
//! the target's settings, though `target-feature` and `link-args` will *add*
//! to the list specified by the target, rather than replace.
-use crate::abi::Endian;
+use crate::abi::call::Conv;
+use crate::abi::{Endian, Integer, Size, TargetDataLayout, TargetDataLayoutErrors};
use crate::json::{Json, ToJson};
use crate::spec::abi::{lookup as lookup_abi, Abi};
use crate::spec::crt_objects::{CrtObjects, LinkSelfContainedDefault};
@@ -57,9 +58,9 @@ use rustc_macros::HashStable_Generic;
pub mod abi;
pub mod crt_objects;
+mod aix_base;
mod android_base;
mod apple_base;
-mod apple_sdk_base;
mod avr_gnu_base;
mod bpf_base;
mod dragonfly_base;
@@ -71,11 +72,11 @@ mod illumos_base;
mod l4re_base;
mod linux_base;
mod linux_gnu_base;
-mod linux_kernel_base;
mod linux_musl_base;
mod linux_uclibc_base;
mod msvc_base;
mod netbsd_base;
+mod nto_qnx_base;
mod openbsd_base;
mod redox_base;
mod solaris_base;
@@ -115,7 +116,7 @@ pub enum Lld {
/// relevant now.
///
/// The second goal is to keep the number of flavors to the minimum if possible.
-/// LLD somewhat forces our hand here because that linker is self-sufficent only if its executable
+/// LLD somewhat forces our hand here because that linker is self-sufficient only if its executable
(`argv[0]`) is named in a specific way, otherwise it doesn't work and requires a
`-flavor LLD_FLAVOR` argument to choose which logic to use. Our shipped `rust-lld` in
particular is not named in such a specific way, so it needs the flavor option, so we make our
@@ -1003,7 +1004,7 @@ macro_rules! supported_targets {
$(
#[test] // `#[test]`
fn $module() {
- tests_impl::test_target(super::$module::target(), $triple);
+ tests_impl::test_target(super::$module::target());
}
)+
}
@@ -1027,6 +1028,7 @@ supported_targets! {
("powerpc-unknown-linux-gnu", powerpc_unknown_linux_gnu),
("powerpc-unknown-linux-gnuspe", powerpc_unknown_linux_gnuspe),
("powerpc-unknown-linux-musl", powerpc_unknown_linux_musl),
+ ("powerpc64-ibm-aix", powerpc64_ibm_aix),
("powerpc64-unknown-linux-gnu", powerpc64_unknown_linux_gnu),
("powerpc64-unknown-linux-musl", powerpc64_unknown_linux_musl),
("powerpc64le-unknown-linux-gnu", powerpc64le_unknown_linux_gnu),
@@ -1071,8 +1073,6 @@ supported_targets! {
("thumbv7neon-linux-androideabi", thumbv7neon_linux_androideabi),
("aarch64-linux-android", aarch64_linux_android),
- ("x86_64-unknown-none-linuxkernel", x86_64_unknown_none_linuxkernel),
-
("aarch64-unknown-freebsd", aarch64_unknown_freebsd),
("armv6-unknown-freebsd", armv6_unknown_freebsd),
("armv7-unknown-freebsd", armv7_unknown_freebsd),
@@ -1222,6 +1222,7 @@ supported_targets! {
("armv7a-kmc-solid_asp3-eabihf", armv7a_kmc_solid_asp3_eabihf),
("mipsel-sony-psp", mipsel_sony_psp),
+ ("mipsel-sony-psx", mipsel_sony_psx),
("mipsel-unknown-none", mipsel_unknown_none),
("thumbv4t-none-eabi", thumbv4t_none_eabi),
("armv4t-none-eabi", armv4t_none_eabi),
@@ -1245,6 +1246,9 @@ supported_targets! {
("x86_64-unknown-none", x86_64_unknown_none),
("mips64-openwrt-linux-musl", mips64_openwrt_linux_musl),
+
+ ("aarch64-unknown-nto-qnx710", aarch64_unknown_nto_qnx_710),
+ ("x86_64-pc-nto-qnx710", x86_64_pc_nto_qnx710),
}
/// Cow-Vec-Str: Cow<'static, [Cow<'static, str>]>
@@ -1313,6 +1317,35 @@ pub struct Target {
pub options: TargetOptions,
}
+impl Target {
+ pub fn parse_data_layout<'a>(&'a self) -> Result<TargetDataLayout, TargetDataLayoutErrors<'a>> {
+ let mut dl = TargetDataLayout::parse_from_llvm_datalayout_string(&self.data_layout)?;
+
+ // Perform consistency checks against the Target information.
+ if dl.endian != self.endian {
+ return Err(TargetDataLayoutErrors::InconsistentTargetArchitecture {
+ dl: dl.endian.as_str(),
+ target: self.endian.as_str(),
+ });
+ }
+
+ let target_pointer_width: u64 = self.pointer_width.into();
+ if dl.pointer_size.bits() != target_pointer_width {
+ return Err(TargetDataLayoutErrors::InconsistentTargetPointerWidth {
+ pointer_size: dl.pointer_size.bits(),
+ target: self.pointer_width,
+ });
+ }
+
+ dl.c_enum_min_size = match Integer::from_size(Size::from_bits(self.c_enum_min_bits)) {
+ Ok(bits) => bits,
+ Err(err) => return Err(TargetDataLayoutErrors::InvalidBitsSize { err }),
+ };
+
+ Ok(dl)
+ }
+}
+
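The new `parse_data_layout` helper moves data-layout validation onto `Target` itself: it parses the LLVM data-layout string and then cross-checks the result against the spec's own endianness, pointer width, and `c_enum_min_bits`. A rough usage sketch, assuming it sits next to the code above where `TargetDataLayoutErrors` is already imported (the wrapper function is hypothetical):

    fn validate_spec(target: &Target) {
        match target.parse_data_layout() {
            Ok(dl) => {
                // On success the parsed layout is known to agree with the spec.
                assert_eq!(dl.pointer_size.bits(), u64::from(target.pointer_width));
            }
            Err(TargetDataLayoutErrors::InconsistentTargetArchitecture { dl, target: spec }) => {
                panic!("endianness mismatch: data layout says {dl}, spec says {spec}");
            }
            Err(_) => panic!("data-layout string rejected for this target"),
        }
    }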
pub trait HasTargetSpec {
fn target_spec(&self) -> &Target;
}
@@ -1452,6 +1485,9 @@ pub struct TargetOptions {
pub families: StaticCow<[StaticCow<str>]>,
/// Whether the target toolchain's ABI supports returning small structs as an integer.
pub abi_return_struct_as_int: bool,
+ /// Whether the target toolchain is like AIX's. Linker options on AIX are special and it uses
+ /// XCOFF as its binary format. Defaults to false.
+ pub is_like_aix: bool,
/// Whether the target toolchain is like macOS's. Only useful for compiling against iOS/macOS,
/// in particular running dsymutil and some other stuff like `-dead_strip`. Defaults to false.
/// Also indicates whether to use Apple-specific ABI changes, such as extending function
@@ -1526,9 +1562,9 @@ pub struct TargetOptions {
/// Flag indicating whether #[thread_local] is available for this target.
pub has_thread_local: bool,
- // This is mainly for easy compatibility with emscripten.
- // If we give emcc .o files that are actually .bc files it
- // will 'just work'.
+ /// This is mainly for easy compatibility with emscripten.
+ /// If we give emcc .o files that are actually .bc files it
+ /// will 'just work'.
pub obj_is_bitcode: bool,
/// Whether the target requires that emitted object code includes bitcode.
pub forces_embed_bitcode: bool,
@@ -1667,6 +1703,14 @@ pub struct TargetOptions {
/// Whether the target supports stack canary checks. `true` by default,
/// since this is most common among tier 1 and tier 2 targets.
pub supports_stack_protector: bool,
+
+ /// The name of the entry function.
+ /// Default value is "main".
+ pub entry_name: StaticCow<str>,
+
+ /// The ABI of the entry function.
+ /// Default value is `Conv::C`, i.e. the C calling convention.
+ pub entry_abi: Conv,
}
/// Add arguments for the given flavor and also for its "twin" flavors
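Both knobs round-trip through custom target JSON further down in this file as `entry-name` and `entry-abi` (the latter parsed with `Conv::from_str`), and in-tree specs set them directly on `TargetOptions`; the wasm32-wasi change later in this diff does exactly that for `__main_void`. A small illustrative fragment, not taken from any real target file:

    fn tweak_entry(options: &mut TargetOptions) {
        // Symbol the synthesized entry shim should call instead of `main`,
        // and the calling convention to use for that call.
        options.entry_name = "__main_void".into();
        options.entry_abi = Conv::C;
    }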
@@ -1807,6 +1851,7 @@ impl Default for TargetOptions {
staticlib_suffix: ".a".into(),
families: cvs![],
abi_return_struct_as_int: false,
+ is_like_aix: false,
is_like_osx: false,
is_like_solaris: false,
is_like_windows: false,
@@ -1883,6 +1928,8 @@ impl Default for TargetOptions {
c_enum_min_bits: 32,
generate_arange_section: true,
supports_stack_protector: true,
+ entry_name: "main".into(),
+ entry_abi: Conv::C,
}
}
}
@@ -1914,6 +1961,7 @@ impl Target {
Abi::Stdcall { unwind }
}
Abi::System { unwind } => Abi::C { unwind },
+ Abi::EfiApi if self.arch == "arm" => Abi::Aapcs { unwind: false },
Abi::EfiApi if self.arch == "x86_64" => Abi::Win64 { unwind: false },
Abi::EfiApi => Abi::C { unwind: false },
@@ -1940,8 +1988,10 @@ impl Target {
| PlatformIntrinsic
| Unadjusted
| Cdecl { .. }
- | EfiApi
| RustCold => true,
+ EfiApi => {
+ ["arm", "aarch64", "riscv32", "riscv64", "x86", "x86_64"].contains(&&self.arch[..])
+ }
X86Interrupt => ["x86", "x86_64"].contains(&&self.arch[..]),
Aapcs { .. } => "arm" == self.arch,
CCmseNonSecureCall => ["arm", "aarch64"].contains(&&self.arch[..]),
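Taken together, the two `EfiApi` hunks mean `extern "efiapi"` now lowers to `Aapcs` on arm, keeps the existing `Win64` mapping on x86_64, falls back to the C ABI elsewhere, and is rejected outside the listed architectures. A minimal illustration of the ABI string these rules govern; the parameter types are placeholders rather than real UEFI definitions, and a nightly of this vintage may still require the `abi_efiapi` feature gate:

    // Crate root may need: #![feature(abi_efiapi)]
    #[no_mangle]
    pub extern "efiapi" fn efi_main(_image_handle: usize, _system_table: usize) -> usize {
        // Lowered to Aapcs on arm, Win64 on x86_64, and the C ABI on the other
        // architectures in the `is_abi_supported` list above.
        0
    }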
@@ -2400,6 +2450,18 @@ impl Target {
}
}
} );
+ ($key_name:ident, Conv) => ( {
+ let name = (stringify!($key_name)).replace("_", "-");
+ obj.remove(&name).and_then(|o| o.as_str().and_then(|s| {
+ match Conv::from_str(s) {
+ Ok(c) => {
+ base.$key_name = c;
+ Some(Ok(()))
+ }
+ Err(e) => Some(Err(e))
+ }
+ })).unwrap_or(Ok(()))
+ } );
}
if let Some(j) = obj.remove("target-endian") {
@@ -2461,6 +2523,7 @@ impl Target {
key!(staticlib_suffix);
key!(families, TargetFamilies);
key!(abi_return_struct_as_int, bool);
+ key!(is_like_aix, bool);
key!(is_like_osx, bool);
key!(is_like_solaris, bool);
key!(is_like_windows, bool);
@@ -2519,6 +2582,8 @@ impl Target {
key!(c_enum_min_bits, u64);
key!(generate_arange_section, bool);
key!(supports_stack_protector, bool);
+ key!(entry_name);
+ key!(entry_abi, Conv)?;
if base.is_builtin {
// This can cause unfortunate ICEs later down the line.
@@ -2593,7 +2658,7 @@ impl Target {
// Additionally look in the sysroot under `lib/rustlib/<triple>/target.json`
// as a fallback.
- let rustlib_path = crate::target_rustlib_path(&sysroot, &target_triple);
+ let rustlib_path = crate::target_rustlib_path(sysroot, target_triple);
let p = PathBuf::from_iter([
Path::new(sysroot),
Path::new(&rustlib_path),
@@ -2712,6 +2777,7 @@ impl ToJson for Target {
target_option_val!(staticlib_suffix);
target_option_val!(families, "target-family");
target_option_val!(abi_return_struct_as_int);
+ target_option_val!(is_like_aix);
target_option_val!(is_like_osx);
target_option_val!(is_like_solaris);
target_option_val!(is_like_windows);
@@ -2769,6 +2835,8 @@ impl ToJson for Target {
target_option_val!(c_enum_min_bits);
target_option_val!(generate_arange_section);
target_option_val!(supports_stack_protector);
+ target_option_val!(entry_name);
+ target_option_val!(entry_abi);
if let Some(abi) = self.default_adjusted_cabi {
d.insert("default-adjusted-cabi".into(), Abi::name(abi).to_json());
diff --git a/compiler/rustc_target/src/spec/nto_qnx_base.rs b/compiler/rustc_target/src/spec/nto_qnx_base.rs
new file mode 100644
index 000000000..6fb581ef5
--- /dev/null
+++ b/compiler/rustc_target/src/spec/nto_qnx_base.rs
@@ -0,0 +1,19 @@
+use crate::spec::{cvs, RelroLevel, TargetOptions};
+
+pub fn opts() -> TargetOptions {
+ TargetOptions {
+ crt_static_respected: true,
+ dynamic_linking: true,
+ env: "nto71".into(),
+ executables: true,
+ families: cvs!["unix"],
+ has_rpath: true,
+ has_thread_local: false,
+ linker: Some("qcc".into()),
+ os: "nto".into(),
+ position_independent_executables: true,
+ static_position_independent_executables: true,
+ relro_level: RelroLevel::Full,
+ ..Default::default()
+ }
+}
diff --git a/compiler/rustc_target/src/spec/powerpc64_ibm_aix.rs b/compiler/rustc_target/src/spec/powerpc64_ibm_aix.rs
new file mode 100644
index 000000000..e3eb9bccd
--- /dev/null
+++ b/compiler/rustc_target/src/spec/powerpc64_ibm_aix.rs
@@ -0,0 +1,23 @@
+use crate::spec::{Cc, LinkerFlavor, Target};
+
+pub fn target() -> Target {
+ let mut base = super::aix_base::opts();
+ base.max_atomic_width = Some(64);
+ base.add_pre_link_args(
+ LinkerFlavor::Unix(Cc::No),
+ &[
+ "-b64".into(),
+ "-bpT:0x100000000".into(),
+ "-bpD:0x110000000".into(),
+ "-bcdtors:all:0:s".into(),
+ ],
+ );
+
+ Target {
+ llvm_target: "powerpc64-ibm-aix".into(),
+ pointer_width: 64,
+ data_layout: "E-m:a-i64:64-n32:64-S128-v256:256:256-v512:512:512".into(),
+ arch: "powerpc64".into(),
+ options: base,
+ }
+}
diff --git a/compiler/rustc_target/src/spec/riscv64gc_unknown_freebsd.rs b/compiler/rustc_target/src/spec/riscv64gc_unknown_freebsd.rs
index 0539eca6c..8281bac10 100644
--- a/compiler/rustc_target/src/spec/riscv64gc_unknown_freebsd.rs
+++ b/compiler/rustc_target/src/spec/riscv64gc_unknown_freebsd.rs
@@ -4,7 +4,7 @@ pub fn target() -> Target {
Target {
llvm_target: "riscv64-unknown-freebsd".into(),
pointer_width: 64,
- data_layout: "e-m:e-p:64:64-i64:64-i128:128-n64-S128".into(),
+ data_layout: "e-m:e-p:64:64-i64:64-i128:128-n32:64-S128".into(),
arch: "riscv64".into(),
options: TargetOptions {
code_model: Some(CodeModel::Medium),
diff --git a/compiler/rustc_target/src/spec/riscv64gc_unknown_linux_gnu.rs b/compiler/rustc_target/src/spec/riscv64gc_unknown_linux_gnu.rs
index 7d1bf228c..90dccb280 100644
--- a/compiler/rustc_target/src/spec/riscv64gc_unknown_linux_gnu.rs
+++ b/compiler/rustc_target/src/spec/riscv64gc_unknown_linux_gnu.rs
@@ -4,7 +4,7 @@ pub fn target() -> Target {
Target {
llvm_target: "riscv64-unknown-linux-gnu".into(),
pointer_width: 64,
- data_layout: "e-m:e-p:64:64-i64:64-i128:128-n64-S128".into(),
+ data_layout: "e-m:e-p:64:64-i64:64-i128:128-n32:64-S128".into(),
arch: "riscv64".into(),
options: TargetOptions {
code_model: Some(CodeModel::Medium),
diff --git a/compiler/rustc_target/src/spec/riscv64gc_unknown_linux_musl.rs b/compiler/rustc_target/src/spec/riscv64gc_unknown_linux_musl.rs
index f04f8a48b..1a56c78e6 100644
--- a/compiler/rustc_target/src/spec/riscv64gc_unknown_linux_musl.rs
+++ b/compiler/rustc_target/src/spec/riscv64gc_unknown_linux_musl.rs
@@ -4,7 +4,7 @@ pub fn target() -> Target {
Target {
llvm_target: "riscv64-unknown-linux-musl".into(),
pointer_width: 64,
- data_layout: "e-m:e-p:64:64-i64:64-i128:128-n64-S128".into(),
+ data_layout: "e-m:e-p:64:64-i64:64-i128:128-n32:64-S128".into(),
arch: "riscv64".into(),
options: TargetOptions {
code_model: Some(CodeModel::Medium),
diff --git a/compiler/rustc_target/src/spec/riscv64gc_unknown_none_elf.rs b/compiler/rustc_target/src/spec/riscv64gc_unknown_none_elf.rs
index 67806d578..409b0b269 100644
--- a/compiler/rustc_target/src/spec/riscv64gc_unknown_none_elf.rs
+++ b/compiler/rustc_target/src/spec/riscv64gc_unknown_none_elf.rs
@@ -3,7 +3,7 @@ use crate::spec::{RelocModel, Target, TargetOptions};
pub fn target() -> Target {
Target {
- data_layout: "e-m:e-p:64:64-i64:64-i128:128-n64-S128".into(),
+ data_layout: "e-m:e-p:64:64-i64:64-i128:128-n32:64-S128".into(),
llvm_target: "riscv64".into(),
pointer_width: 64,
arch: "riscv64".into(),
diff --git a/compiler/rustc_target/src/spec/riscv64gc_unknown_openbsd.rs b/compiler/rustc_target/src/spec/riscv64gc_unknown_openbsd.rs
index cd10f3afa..ade9d7762 100644
--- a/compiler/rustc_target/src/spec/riscv64gc_unknown_openbsd.rs
+++ b/compiler/rustc_target/src/spec/riscv64gc_unknown_openbsd.rs
@@ -4,7 +4,7 @@ pub fn target() -> Target {
Target {
llvm_target: "riscv64-unknown-openbsd".into(),
pointer_width: 64,
- data_layout: "e-m:e-p:64:64-i64:64-i128:128-n64-S128".into(),
+ data_layout: "e-m:e-p:64:64-i64:64-i128:128-n32:64-S128".into(),
arch: "riscv64".into(),
options: TargetOptions {
code_model: Some(CodeModel::Medium),
diff --git a/compiler/rustc_target/src/spec/riscv64imac_unknown_none_elf.rs b/compiler/rustc_target/src/spec/riscv64imac_unknown_none_elf.rs
index f371e09be..87aba9171 100644
--- a/compiler/rustc_target/src/spec/riscv64imac_unknown_none_elf.rs
+++ b/compiler/rustc_target/src/spec/riscv64imac_unknown_none_elf.rs
@@ -3,7 +3,7 @@ use crate::spec::{RelocModel, Target, TargetOptions};
pub fn target() -> Target {
Target {
- data_layout: "e-m:e-p:64:64-i64:64-i128:128-n64-S128".into(),
+ data_layout: "e-m:e-p:64:64-i64:64-i128:128-n32:64-S128".into(),
llvm_target: "riscv64".into(),
pointer_width: 64,
arch: "riscv64".into(),
diff --git a/compiler/rustc_target/src/spec/tests/tests_impl.rs b/compiler/rustc_target/src/spec/tests/tests_impl.rs
index 172da0ed5..e0ecf8037 100644
--- a/compiler/rustc_target/src/spec/tests/tests_impl.rs
+++ b/compiler/rustc_target/src/spec/tests/tests_impl.rs
@@ -2,15 +2,15 @@ use super::super::*;
use std::assert_matches::assert_matches;
// Test target self-consistency and JSON encoding/decoding roundtrip.
-pub(super) fn test_target(mut target: Target, triple: &str) {
+pub(super) fn test_target(mut target: Target) {
let recycled_target = Target::from_json(target.to_json()).map(|(j, _)| j);
target.update_to_cli();
- target.check_consistency(triple);
+ target.check_consistency();
assert_eq!(recycled_target, Ok(target));
}
impl Target {
- fn check_consistency(&self, triple: &str) {
+ fn check_consistency(&self) {
assert_eq!(self.is_like_osx, self.vendor == "apple");
assert_eq!(self.is_like_solaris, self.os == "solaris" || self.os == "illumos");
assert_eq!(self.is_like_windows, self.os == "windows" || self.os == "uefi");
@@ -129,8 +129,7 @@ impl Target {
if self.dynamic_linking && !(self.is_like_wasm && self.os != "emscripten") {
assert_eq!(self.relocation_model, RelocModel::Pic);
}
- // PIEs are supported but not enabled by default with linuxkernel target.
- if self.position_independent_executables && !triple.ends_with("-linuxkernel") {
+ if self.position_independent_executables {
assert_eq!(self.relocation_model, RelocModel::Pic);
}
// The UEFI targets do not support dynamic linking but still require PIC (#101377).
diff --git a/compiler/rustc_target/src/spec/wasm32_unknown_unknown.rs b/compiler/rustc_target/src/spec/wasm32_unknown_unknown.rs
index 8dad941b5..06529c2e4 100644
--- a/compiler/rustc_target/src/spec/wasm32_unknown_unknown.rs
+++ b/compiler/rustc_target/src/spec/wasm32_unknown_unknown.rs
@@ -33,12 +33,6 @@ pub fn target() -> Target {
// For now this target just never has an entry symbol no matter the output
// type, so unconditionally pass this.
"--no-entry",
- // Rust really needs a way for users to specify exports and imports in
- // the source code. --export-dynamic isn't the right tool for this job,
- // however it does have the side effect of automatically exporting a lot
- // of symbols, which approximates what people compiling for
- // wasm32-unknown-unknown expect, so use it for now.
- "--export-dynamic",
],
);
options.add_pre_link_args(
@@ -48,7 +42,6 @@ pub fn target() -> Target {
// otherwise
"--target=wasm32-unknown-unknown",
"-Wl,--no-entry",
- "-Wl,--export-dynamic",
],
);
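With `--export-dynamic` gone from the default link args, wasm32-unknown-unknown no longer exports symbols wholesale; anything that should appear in the module's export list now has to be marked in source (or passed manually, e.g. `-C link-arg=--export=<name>`, a flag wasm-ld still accepts). A hedged sketch of the explicit style, with an illustrative symbol name:

    // Exported by name from the final wasm module.
    #[no_mangle]
    pub extern "C" fn add(a: i32, b: i32) -> i32 {
        a + b
    }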
diff --git a/compiler/rustc_target/src/spec/wasm32_wasi.rs b/compiler/rustc_target/src/spec/wasm32_wasi.rs
index 93a956403..a0476d542 100644
--- a/compiler/rustc_target/src/spec/wasm32_wasi.rs
+++ b/compiler/rustc_target/src/spec/wasm32_wasi.rs
@@ -72,7 +72,8 @@
//! best we can with this target. Don't start relying on too much here unless
//! you know what you're getting in to!
-use super::{crt_objects, wasm_base, Cc, LinkerFlavor, Target};
+use super::crt_objects::{self, LinkSelfContainedDefault};
+use super::{wasm_base, Cc, LinkerFlavor, Target};
pub fn target() -> Target {
let mut options = wasm_base::options();
@@ -83,6 +84,9 @@ pub fn target() -> Target {
options.pre_link_objects_self_contained = crt_objects::pre_wasi_self_contained();
options.post_link_objects_self_contained = crt_objects::post_wasi_self_contained();
+ // FIXME: Figure out cases in which WASM needs to link with a native toolchain.
+ options.link_self_contained = LinkSelfContainedDefault::True;
+
// Right now this is a bit of a workaround but we're currently saying that
// the target by default has a static crt which we're taking as a signal
// for "use the bundled crt". If that's turned off then the system's crt
@@ -100,6 +104,10 @@ pub fn target() -> Target {
// `args::args()` makes the WASI API calls itself.
options.main_needs_argc_argv = false;
+ // And, WASI mangles the name of "main" to distinguish between different
+ // signatures.
+ options.entry_name = "__main_void".into();
+
Target {
llvm_target: "wasm32-wasi".into(),
pointer_width: 32,
diff --git a/compiler/rustc_target/src/spec/wasm_base.rs b/compiler/rustc_target/src/spec/wasm_base.rs
index 528a84a8b..625d3b37c 100644
--- a/compiler/rustc_target/src/spec/wasm_base.rs
+++ b/compiler/rustc_target/src/spec/wasm_base.rs
@@ -1,4 +1,3 @@
-use super::crt_objects::LinkSelfContainedDefault;
use super::{cvs, Cc, LinkerFlavor, PanicStrategy, RelocModel, TargetOptions, TlsModel};
pub fn options() -> TargetOptions {
@@ -95,9 +94,6 @@ pub fn options() -> TargetOptions {
pre_link_args,
- // FIXME: Figure out cases in which WASM needs to link with a native toolchain.
- link_self_contained: LinkSelfContainedDefault::True,
-
// This has no effect in LLVM 8 or prior, but in LLVM 9 and later when
// PIC code is implemented this has quite a drastic effect if it stays
// at the default, `pic`. In an effort to keep wasm binaries as minimal
diff --git a/compiler/rustc_target/src/spec/windows_gnullvm_base.rs b/compiler/rustc_target/src/spec/windows_gnullvm_base.rs
index 58210c75a..cada28652 100644
--- a/compiler/rustc_target/src/spec/windows_gnullvm_base.rs
+++ b/compiler/rustc_target/src/spec/windows_gnullvm_base.rs
@@ -1,4 +1,5 @@
-use crate::spec::{cvs, Cc, LinkerFlavor, Lld, TargetOptions};
+use crate::spec::{cvs, Cc, DebuginfoKind, LinkerFlavor, Lld, SplitDebuginfo, TargetOptions};
+use std::borrow::Cow;
pub fn opts() -> TargetOptions {
// We cannot use `-nodefaultlibs` because compiler-rt has to be passed
@@ -36,7 +37,10 @@ pub fn opts() -> TargetOptions {
eh_frame_header: false,
no_default_libraries: false,
has_thread_local: true,
-
+ // FIXME(davidtwco): Support Split DWARF on Windows GNU - may require LLVM changes to
+ // output DWO; despite using DWARF, Windows GNU doesn't use ELF.
+ debuginfo_kind: DebuginfoKind::Pdb,
+ supported_split_debuginfo: Cow::Borrowed(&[SplitDebuginfo::Off]),
..Default::default()
}
}
diff --git a/compiler/rustc_target/src/spec/x86_64_apple_darwin.rs b/compiler/rustc_target/src/spec/x86_64_apple_darwin.rs
index 087be1b95..9a3e7a805 100644
--- a/compiler/rustc_target/src/spec/x86_64_apple_darwin.rs
+++ b/compiler/rustc_target/src/spec/x86_64_apple_darwin.rs
@@ -1,29 +1,26 @@
+use super::apple_base::{macos_llvm_target, opts, Arch};
use crate::spec::{Cc, FramePointer, LinkerFlavor, Lld, SanitizerSet};
use crate::spec::{StackProbeType, Target, TargetOptions};
pub fn target() -> Target {
- let arch = "x86_64";
- let mut base = super::apple_base::opts("macos", arch, "");
- base.cpu = "core2".into();
- base.max_atomic_width = Some(128); // core2 support cmpxchg16b
+ let arch = Arch::X86_64;
+ let mut base = opts("macos", arch);
+ base.max_atomic_width = Some(128); // core2 supports cmpxchg16b
base.frame_pointer = FramePointer::Always;
base.add_pre_link_args(LinkerFlavor::Darwin(Cc::Yes, Lld::No), &["-m64"]);
- base.link_env_remove.to_mut().extend(super::apple_base::macos_link_env_remove());
base.stack_probes = StackProbeType::X86;
base.supported_sanitizers =
SanitizerSet::ADDRESS | SanitizerSet::CFI | SanitizerSet::LEAK | SanitizerSet::THREAD;
- // Clang automatically chooses a more specific target based on
- // MACOSX_DEPLOYMENT_TARGET. To enable cross-language LTO to work
- // correctly, we do too.
- let llvm_target = super::apple_base::macos_llvm_target(&arch);
-
Target {
- llvm_target: llvm_target.into(),
+ // Clang automatically chooses a more specific target based on
+ // MACOSX_DEPLOYMENT_TARGET. To enable cross-language LTO to work
+ // correctly, we do too.
+ llvm_target: macos_llvm_target(arch).into(),
pointer_width: 64,
data_layout: "e-m:o-p270:32:32-p271:32:32-p272:64:64-i64:64-f80:128-n8:16:32:64-S128"
.into(),
- arch: arch.into(),
+ arch: arch.target_arch(),
options: TargetOptions { mcount: "\u{1}mcount".into(), ..base },
}
}
diff --git a/compiler/rustc_target/src/spec/x86_64_apple_ios.rs b/compiler/rustc_target/src/spec/x86_64_apple_ios.rs
index e6143025d..fbd3ebd4d 100644
--- a/compiler/rustc_target/src/spec/x86_64_apple_ios.rs
+++ b/compiler/rustc_target/src/spec/x86_64_apple_ios.rs
@@ -1,20 +1,18 @@
-use super::apple_sdk_base::{opts, Arch};
+use super::apple_base::{ios_sim_llvm_target, opts, Arch};
use crate::spec::{StackProbeType, Target, TargetOptions};
pub fn target() -> Target {
- let base = opts("ios", Arch::X86_64);
- let llvm_target = super::apple_base::ios_sim_llvm_target("x86_64");
-
+ let arch = Arch::X86_64_sim;
Target {
- llvm_target: llvm_target.into(),
+ llvm_target: ios_sim_llvm_target(arch).into(),
pointer_width: 64,
data_layout: "e-m:o-p270:32:32-p271:32:32-p272:64:64-i64:64-f80:128-n8:16:32:64-S128"
.into(),
- arch: "x86_64".into(),
+ arch: arch.target_arch(),
options: TargetOptions {
max_atomic_width: Some(64),
stack_probes: StackProbeType::X86,
- ..base
+ ..opts("ios", arch)
},
}
}
diff --git a/compiler/rustc_target/src/spec/x86_64_apple_ios_macabi.rs b/compiler/rustc_target/src/spec/x86_64_apple_ios_macabi.rs
index 13259205a..0f3f85199 100644
--- a/compiler/rustc_target/src/spec/x86_64_apple_ios_macabi.rs
+++ b/compiler/rustc_target/src/spec/x86_64_apple_ios_macabi.rs
@@ -1,10 +1,11 @@
-use super::apple_sdk_base::{opts, Arch};
+use super::apple_base::{opts, Arch};
use crate::spec::{Cc, LinkerFlavor, Lld, StackProbeType, Target, TargetOptions};
pub fn target() -> Target {
let llvm_target = "x86_64-apple-ios13.0-macabi";
- let mut base = opts("ios", Arch::X86_64_macabi);
+ let arch = Arch::X86_64_macabi;
+ let mut base = opts("ios", arch);
base.add_pre_link_args(LinkerFlavor::Darwin(Cc::Yes, Lld::No), &["-target", llvm_target]);
Target {
@@ -12,7 +13,7 @@ pub fn target() -> Target {
pointer_width: 64,
data_layout: "e-m:o-p270:32:32-p271:32:32-p272:64:64-i64:64-f80:128-n8:16:32:64-S128"
.into(),
- arch: "x86_64".into(),
+ arch: arch.target_arch(),
options: TargetOptions {
max_atomic_width: Some(64),
stack_probes: StackProbeType::X86,
diff --git a/compiler/rustc_target/src/spec/x86_64_apple_tvos.rs b/compiler/rustc_target/src/spec/x86_64_apple_tvos.rs
index 3d54da086..550ce0b9c 100644
--- a/compiler/rustc_target/src/spec/x86_64_apple_tvos.rs
+++ b/compiler/rustc_target/src/spec/x86_64_apple_tvos.rs
@@ -1,17 +1,17 @@
-use super::apple_sdk_base::{opts, Arch};
+use super::apple_base::{opts, Arch};
use crate::spec::{StackProbeType, Target, TargetOptions};
pub fn target() -> Target {
- let base = opts("tvos", Arch::X86_64);
+ let arch = Arch::X86_64_sim;
Target {
llvm_target: "x86_64-apple-tvos".into(),
pointer_width: 64,
data_layout: "e-m:o-i64:64-f80:128-n8:16:32:64-S128".into(),
- arch: "x86_64".into(),
+ arch: arch.target_arch(),
options: TargetOptions {
max_atomic_width: Some(64),
stack_probes: StackProbeType::X86,
- ..base
+ ..opts("tvos", arch)
},
}
}
diff --git a/compiler/rustc_target/src/spec/x86_64_apple_watchos_sim.rs b/compiler/rustc_target/src/spec/x86_64_apple_watchos_sim.rs
index e499b1985..75ce02cba 100644
--- a/compiler/rustc_target/src/spec/x86_64_apple_watchos_sim.rs
+++ b/compiler/rustc_target/src/spec/x86_64_apple_watchos_sim.rs
@@ -1,18 +1,14 @@
-use super::apple_sdk_base::{opts, Arch};
+use super::apple_base::{opts, watchos_sim_llvm_target, Arch};
use crate::spec::{StackProbeType, Target, TargetOptions};
pub fn target() -> Target {
- let base = opts("watchos", Arch::X86_64);
-
- let arch = "x86_64";
- let llvm_target = super::apple_base::watchos_sim_llvm_target(arch);
-
+ let arch = Arch::X86_64_sim;
Target {
- llvm_target: llvm_target.into(),
+ llvm_target: watchos_sim_llvm_target(arch).into(),
pointer_width: 64,
data_layout: "e-m:o-p270:32:32-p271:32:32-p272:64:64-i64:64-f80:128-n8:16:32:64-S128"
.into(),
- arch: "x86_64".into(),
+ arch: arch.target_arch(),
options: TargetOptions {
max_atomic_width: Some(64),
stack_probes: StackProbeType::X86,
@@ -28,7 +24,7 @@ pub fn target() -> Target {
darwinpcs\0\
-Os\0"
.into(),
- ..base
+ ..opts("watchos", arch)
},
}
}
diff --git a/compiler/rustc_target/src/spec/x86_64_pc_nto_qnx710.rs b/compiler/rustc_target/src/spec/x86_64_pc_nto_qnx710.rs
new file mode 100644
index 000000000..e9b3acee2
--- /dev/null
+++ b/compiler/rustc_target/src/spec/x86_64_pc_nto_qnx710.rs
@@ -0,0 +1,21 @@
+use super::nto_qnx_base;
+use crate::spec::{Cc, LinkerFlavor, Lld, Target, TargetOptions};
+
+pub fn target() -> Target {
+ Target {
+ llvm_target: "x86_64-pc-unknown".into(),
+ pointer_width: 64,
+ data_layout: "e-m:e-p270:32:32-p271:32:32-p272:64:64-i64:64-f80:128-n8:16:32:64-S128"
+ .into(),
+ arch: "x86_64".into(),
+ options: TargetOptions {
+ cpu: "x86-64".into(),
+ max_atomic_width: Some(64),
+ pre_link_args: TargetOptions::link_args(
+ LinkerFlavor::Gnu(Cc::Yes, Lld::No),
+ &["-Vgcc_ntox86_64_cxx"],
+ ),
+ ..nto_qnx_base::opts()
+ },
+ }
+}
diff --git a/compiler/rustc_target/src/spec/x86_64_unknown_none_linuxkernel.rs b/compiler/rustc_target/src/spec/x86_64_unknown_none_linuxkernel.rs
deleted file mode 100644
index ebd9636ff..000000000
--- a/compiler/rustc_target/src/spec/x86_64_unknown_none_linuxkernel.rs
+++ /dev/null
@@ -1,28 +0,0 @@
-// This defines the amd64 target for the Linux Kernel. See the linux-kernel-base module for
-// generic Linux kernel options.
-
-use crate::spec::{Cc, CodeModel, LinkerFlavor, Lld, Target};
-
-pub fn target() -> Target {
- let mut base = super::linux_kernel_base::opts();
- base.cpu = "x86-64".into();
- base.max_atomic_width = Some(64);
- base.features =
- "-mmx,-sse,-sse2,-sse3,-ssse3,-sse4.1,-sse4.2,-3dnow,-3dnowa,-avx,-avx2,+soft-float".into();
- base.code_model = Some(CodeModel::Kernel);
- base.add_pre_link_args(LinkerFlavor::Gnu(Cc::Yes, Lld::No), &["-m64"]);
-
- Target {
- // FIXME: Some dispute, the linux-on-clang folks think this should use
- // "Linux". We disagree because running *on* Linux is nothing like
- // running *as* Linux, and historically the "os" component has always
- // been used to mean the "on" part.
- llvm_target: "x86_64-unknown-none-elf".into(),
- pointer_width: 64,
- data_layout: "e-m:e-p270:32:32-p271:32:32-p272:64:64-i64:64-f80:128-n8:16:32:64-S128"
- .into(),
- arch: "x86_64".into(),
-
- options: base,
- }
-}
diff --git a/compiler/rustc_trait_selection/src/autoderef.rs b/compiler/rustc_trait_selection/src/autoderef.rs
index 61cfeec4b..e988c77a0 100644
--- a/compiler/rustc_trait_selection/src/autoderef.rs
+++ b/compiler/rustc_trait_selection/src/autoderef.rs
@@ -1,10 +1,11 @@
use crate::errors::AutoDerefReachedRecursionLimit;
use crate::traits::query::evaluate_obligation::InferCtxtExt;
-use crate::traits::{self, TraitEngine};
+use crate::traits::NormalizeExt;
+use crate::traits::{self, TraitEngine, TraitEngineExt};
use rustc_hir as hir;
use rustc_infer::infer::InferCtxt;
-use rustc_middle::ty::{self, TraitRef, Ty, TyCtxt};
-use rustc_middle::ty::{ToPredicate, TypeVisitable};
+use rustc_middle::ty::TypeVisitable;
+use rustc_middle::ty::{self, Ty, TyCtxt};
use rustc_session::Limit;
use rustc_span::def_id::LOCAL_CRATE;
use rustc_span::Span;
@@ -27,7 +28,6 @@ pub struct Autoderef<'a, 'tcx> {
// Meta infos:
infcx: &'a InferCtxt<'tcx>,
span: Span,
- overloaded_span: Span,
body_id: hir::HirId,
param_env: ty::ParamEnv<'tcx>,
@@ -99,12 +99,10 @@ impl<'a, 'tcx> Autoderef<'a, 'tcx> {
body_id: hir::HirId,
span: Span,
base_ty: Ty<'tcx>,
- overloaded_span: Span,
) -> Autoderef<'a, 'tcx> {
Autoderef {
infcx,
span,
- overloaded_span,
body_id,
param_env,
state: AutoderefSnapshot {
@@ -125,33 +123,28 @@ impl<'a, 'tcx> Autoderef<'a, 'tcx> {
let tcx = self.infcx.tcx;
// <ty as Deref>
- let trait_ref = TraitRef {
- def_id: tcx.lang_items().deref_trait()?,
- substs: tcx.mk_substs_trait(ty, &[]),
- };
+ let trait_ref = tcx.mk_trait_ref(tcx.lang_items().deref_trait()?, [ty]);
let cause = traits::ObligationCause::misc(self.span, self.body_id);
let obligation = traits::Obligation::new(
+ tcx,
cause.clone(),
self.param_env,
- ty::Binder::dummy(trait_ref).without_const().to_predicate(tcx),
+ ty::Binder::dummy(trait_ref),
);
if !self.infcx.predicate_may_hold(&obligation) {
debug!("overloaded_deref_ty: cannot match obligation");
return None;
}
- let mut fulfillcx = traits::FulfillmentContext::new_in_snapshot();
- let normalized_ty = fulfillcx.normalize_projection_type(
- &self.infcx,
- self.param_env,
- ty::ProjectionTy {
- item_def_id: tcx.lang_items().deref_target()?,
- substs: trait_ref.substs,
- },
- cause,
- );
+ let normalized_ty = self
+ .infcx
+ .at(&cause, self.param_env)
+ .normalize(tcx.mk_projection(tcx.lang_items().deref_target()?, trait_ref.substs));
+ let mut fulfillcx = <dyn TraitEngine<'tcx>>::new_in_snapshot(tcx);
+ let normalized_ty =
+ normalized_ty.into_value_registering_obligations(self.infcx, &mut *fulfillcx);
let errors = fulfillcx.select_where_possible(&self.infcx);
if !errors.is_empty() {
// This shouldn't happen, except for evaluate/fulfill mismatches,
@@ -193,10 +186,6 @@ impl<'a, 'tcx> Autoderef<'a, 'tcx> {
self.span
}
- pub fn overloaded_span(&self) -> Span {
- self.overloaded_span
- }
-
pub fn reached_recursion_limit(&self) -> bool {
self.state.reached_recursion_limit
}
diff --git a/compiler/rustc_trait_selection/src/errors.rs b/compiler/rustc_trait_selection/src/errors.rs
index 7f8705824..19f404cb5 100644
--- a/compiler/rustc_trait_selection/src/errors.rs
+++ b/compiler/rustc_trait_selection/src/errors.rs
@@ -58,24 +58,25 @@ pub struct NoValueInOnUnimplemented {
pub span: Span,
}
-pub struct NegativePositiveConflict<'a> {
+pub struct NegativePositiveConflict<'tcx> {
pub impl_span: Span,
- pub trait_desc: &'a str,
- pub self_desc: &'a Option<String>,
+ pub trait_desc: ty::TraitRef<'tcx>,
+ pub self_ty: Option<Ty<'tcx>>,
pub negative_impl_span: Result<Span, Symbol>,
pub positive_impl_span: Result<Span, Symbol>,
}
impl IntoDiagnostic<'_> for NegativePositiveConflict<'_> {
+ #[track_caller]
fn into_diagnostic(
self,
handler: &Handler,
) -> rustc_errors::DiagnosticBuilder<'_, ErrorGuaranteed> {
let mut diag = handler.struct_err(fluent::trait_selection_negative_positive_conflict);
- diag.set_arg("trait_desc", self.trait_desc);
+ diag.set_arg("trait_desc", self.trait_desc.print_only_trait_path().to_string());
diag.set_arg(
"self_desc",
- self.self_desc.clone().map_or_else(|| String::from("none"), |ty| ty),
+ self.self_ty.map_or_else(|| "none".to_string(), |ty| ty.to_string()),
);
diag.set_span(self.impl_span);
diag.code(rustc_errors::error_code!(E0751));
diff --git a/compiler/rustc_trait_selection/src/infer.rs b/compiler/rustc_trait_selection/src/infer.rs
index a335f8e06..6c70bbf75 100644
--- a/compiler/rustc_trait_selection/src/infer.rs
+++ b/compiler/rustc_trait_selection/src/infer.rs
@@ -1,15 +1,13 @@
use crate::traits::query::evaluate_obligation::InferCtxtExt as _;
-use crate::traits::{self, TraitEngine, TraitEngineExt};
+use crate::traits::{self, ObligationCtxt};
use rustc_hir::def_id::DefId;
use rustc_hir::lang_items::LangItem;
-use rustc_infer::traits::ObligationCause;
use rustc_middle::arena::ArenaAllocatable;
use rustc_middle::infer::canonical::{Canonical, CanonicalizedQueryResponse, QueryResponse};
use rustc_middle::traits::query::Fallible;
-use rustc_middle::ty::subst::SubstsRef;
-use rustc_middle::ty::ToPredicate;
use rustc_middle::ty::{self, Ty, TypeFoldable, TypeVisitable};
+use rustc_middle::ty::{GenericArg, ToPredicate};
use rustc_span::{Span, DUMMY_SP};
use std::fmt::Debug;
@@ -31,21 +29,11 @@ pub trait InferCtxtExt<'tcx> {
span: Span,
) -> bool;
- fn partially_normalize_associated_types_in<T>(
- &self,
- cause: ObligationCause<'tcx>,
- param_env: ty::ParamEnv<'tcx>,
- value: T,
- ) -> InferOk<'tcx, T>
- where
- T: TypeFoldable<'tcx>;
-
/// Check whether a `ty` implements given trait(trait_def_id).
/// The inputs are:
///
/// - the def-id of the trait
- /// - the self type
- /// - the *other* type parameters of the trait, excluding the self-type
+ /// - the type parameters of the trait, including the self-type
/// - the parameter environment
///
/// Invokes `evaluate_obligation`, so in the event that evaluating
@@ -54,8 +42,7 @@ pub trait InferCtxtExt<'tcx> {
fn type_implements_trait(
&self,
trait_def_id: DefId,
- ty: Ty<'tcx>,
- params: SubstsRef<'tcx>,
+ params: impl IntoIterator<Item = impl Into<GenericArg<'tcx>>>,
param_env: ty::ParamEnv<'tcx>,
) -> traits::EvaluationResult;
}
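After this change `type_implements_trait` takes a single iterator of generic arguments, self type first, and builds the trait ref itself via `tcx.mk_trait_ref`. A rough call-site sketch (the `infcx`, `def_id`, `ty`, and `param_env` bindings are assumed to already exist; only the signature comes from this patch):

    // Before: infcx.type_implements_trait(def_id, ty, substs, param_env)
    let implements = infcx
        .type_implements_trait(def_id, [ty], param_env)
        .must_apply_modulo_regions();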
@@ -91,42 +78,14 @@ impl<'tcx> InferCtxtExt<'tcx> for InferCtxt<'tcx> {
traits::type_known_to_meet_bound_modulo_regions(self, param_env, ty, lang_item, span)
}
- /// Normalizes associated types in `value`, potentially returning
- /// new obligations that must further be processed.
- fn partially_normalize_associated_types_in<T>(
- &self,
- cause: ObligationCause<'tcx>,
- param_env: ty::ParamEnv<'tcx>,
- value: T,
- ) -> InferOk<'tcx, T>
- where
- T: TypeFoldable<'tcx>,
- {
- debug!("partially_normalize_associated_types_in(value={:?})", value);
- let mut selcx = traits::SelectionContext::new(self);
- let traits::Normalized { value, obligations } =
- traits::normalize(&mut selcx, param_env, cause, value);
- debug!(
- "partially_normalize_associated_types_in: result={:?} predicates={:?}",
- value, obligations
- );
- InferOk { value, obligations }
- }
-
+ #[instrument(level = "debug", skip(self, params), ret)]
fn type_implements_trait(
&self,
trait_def_id: DefId,
- ty: Ty<'tcx>,
- params: SubstsRef<'tcx>,
+ params: impl IntoIterator<Item = impl Into<GenericArg<'tcx>>>,
param_env: ty::ParamEnv<'tcx>,
) -> traits::EvaluationResult {
- debug!(
- "type_implements_trait: trait_def_id={:?}, type={:?}, params={:?}, param_env={:?}",
- trait_def_id, ty, params, param_env
- );
-
- let trait_ref =
- ty::TraitRef { def_id: trait_def_id, substs: self.tcx.mk_substs_trait(ty, params) };
+ let trait_ref = self.tcx.mk_trait_ref(trait_def_id, params);
let obligation = traits::Obligation {
cause: traits::ObligationCause::dummy(),
@@ -142,7 +101,7 @@ pub trait InferCtxtBuilderExt<'tcx> {
fn enter_canonical_trait_query<K, R>(
&mut self,
canonical_key: &Canonical<'tcx, K>,
- operation: impl FnOnce(&InferCtxt<'tcx>, &mut dyn TraitEngine<'tcx>, K) -> Fallible<R>,
+ operation: impl FnOnce(&ObligationCtxt<'_, 'tcx>, K) -> Fallible<R>,
) -> Fallible<CanonicalizedQueryResponse<'tcx, R>>
where
K: TypeFoldable<'tcx>,
@@ -170,17 +129,17 @@ impl<'tcx> InferCtxtBuilderExt<'tcx> for InferCtxtBuilder<'tcx> {
fn enter_canonical_trait_query<K, R>(
&mut self,
canonical_key: &Canonical<'tcx, K>,
- operation: impl FnOnce(&InferCtxt<'tcx>, &mut dyn TraitEngine<'tcx>, K) -> Fallible<R>,
+ operation: impl FnOnce(&ObligationCtxt<'_, 'tcx>, K) -> Fallible<R>,
) -> Fallible<CanonicalizedQueryResponse<'tcx, R>>
where
K: TypeFoldable<'tcx>,
R: Debug + TypeFoldable<'tcx>,
Canonical<'tcx, QueryResponse<'tcx, R>>: ArenaAllocatable<'tcx>,
{
- let (ref infcx, key, canonical_inference_vars) =
+ let (infcx, key, canonical_inference_vars) =
self.build_with_canonical(DUMMY_SP, canonical_key);
- let mut fulfill_cx = <dyn TraitEngine<'_>>::new(infcx.tcx);
- let value = operation(infcx, &mut *fulfill_cx, key)?;
- infcx.make_canonicalized_query_response(canonical_inference_vars, value, &mut *fulfill_cx)
+ let ocx = ObligationCtxt::new(&infcx);
+ let value = operation(&ocx, key)?;
+ ocx.make_canonicalized_query_response(canonical_inference_vars, value)
}
}
diff --git a/compiler/rustc_trait_selection/src/lib.rs b/compiler/rustc_trait_selection/src/lib.rs
index 5d52aa075..975ff31a6 100644
--- a/compiler/rustc_trait_selection/src/lib.rs
+++ b/compiler/rustc_trait_selection/src/lib.rs
@@ -10,8 +10,8 @@
//!
//! This API is completely unstable and subject to change.
-#![allow(rustc::potential_query_instability)]
#![doc(html_root_url = "https://doc.rust-lang.org/nightly/nightly-rustc/")]
+#![feature(associated_type_bounds)]
#![feature(box_patterns)]
#![feature(control_flow_enum)]
#![feature(drain_filter)]
diff --git a/compiler/rustc_trait_selection/src/traits/auto_trait.rs b/compiler/rustc_trait_selection/src/traits/auto_trait.rs
index ed34ab95a..8e04da4f9 100644
--- a/compiler/rustc_trait_selection/src/traits/auto_trait.rs
+++ b/compiler/rustc_trait_selection/src/traits/auto_trait.rs
@@ -10,9 +10,9 @@ use crate::traits::project::ProjectAndUnifyResult;
use rustc_middle::mir::interpret::ErrorHandled;
use rustc_middle::ty::fold::{TypeFolder, TypeSuperFoldable};
use rustc_middle::ty::visit::TypeVisitable;
-use rustc_middle::ty::{PolyTraitRef, Region, RegionVid};
+use rustc_middle::ty::{ImplPolarity, Region, RegionVid};
-use rustc_data_structures::fx::{FxHashMap, FxHashSet};
+use rustc_data_structures::fx::{FxHashMap, FxHashSet, FxIndexSet};
use std::collections::hash_map::Entry;
use std::collections::VecDeque;
@@ -27,8 +27,8 @@ pub enum RegionTarget<'tcx> {
#[derive(Default, Debug, Clone)]
pub struct RegionDeps<'tcx> {
- larger: FxHashSet<RegionTarget<'tcx>>,
- smaller: FxHashSet<RegionTarget<'tcx>>,
+ larger: FxIndexSet<RegionTarget<'tcx>>,
+ smaller: FxIndexSet<RegionTarget<'tcx>>,
}
pub enum AutoTraitResult<A> {
@@ -86,18 +86,25 @@ impl<'tcx> AutoTraitFinder<'tcx> {
) -> AutoTraitResult<A> {
let tcx = self.tcx;
- let trait_ref = ty::TraitRef { def_id: trait_did, substs: tcx.mk_substs_trait(ty, &[]) };
-
- let trait_pred = ty::Binder::dummy(trait_ref);
+ let trait_ref = tcx.mk_trait_ref(trait_did, [ty]);
let infcx = tcx.infer_ctxt().build();
let mut selcx = SelectionContext::new(&infcx);
- for f in [
- PolyTraitRef::to_poly_trait_predicate,
- PolyTraitRef::to_poly_trait_predicate_negative_polarity,
- ] {
- let result =
- selcx.select(&Obligation::new(ObligationCause::dummy(), orig_env, f(&trait_pred)));
+ for polarity in [true, false] {
+ let result = selcx.select(&Obligation::new(
+ tcx,
+ ObligationCause::dummy(),
+ orig_env,
+ ty::Binder::dummy(ty::TraitPredicate {
+ trait_ref,
+ constness: ty::BoundConstness::NotConst,
+ polarity: if polarity {
+ ImplPolarity::Positive
+ } else {
+ ImplPolarity::Negative
+ },
+ }),
+ ));
if let Ok(Some(ImplSource::UserDefined(_))) = result {
debug!(
"find_auto_trait_generics({:?}): \
@@ -256,17 +263,15 @@ impl<'tcx> AutoTraitFinder<'tcx> {
let mut already_visited = FxHashSet::default();
let mut predicates = VecDeque::new();
predicates.push_back(ty::Binder::dummy(ty::TraitPredicate {
- trait_ref: ty::TraitRef {
- def_id: trait_did,
- substs: infcx.tcx.mk_substs_trait(ty, &[]),
- },
+ trait_ref: infcx.tcx.mk_trait_ref(trait_did, [ty]),
+
constness: ty::BoundConstness::NotConst,
// Auto traits are positive
polarity: ty::ImplPolarity::Positive,
}));
let computed_preds = param_env.caller_bounds().iter();
- let mut user_computed_preds: FxHashSet<_> = user_env.caller_bounds().iter().collect();
+ let mut user_computed_preds: FxIndexSet<_> = user_env.caller_bounds().iter().collect();
let mut new_env = param_env;
let dummy_cause = ObligationCause::dummy();
@@ -280,8 +285,12 @@ impl<'tcx> AutoTraitFinder<'tcx> {
// Call `infcx.resolve_vars_if_possible` to see if we can
// get rid of any inference variables.
- let obligation =
- infcx.resolve_vars_if_possible(Obligation::new(dummy_cause.clone(), new_env, pred));
+ let obligation = infcx.resolve_vars_if_possible(Obligation::new(
+ tcx,
+ dummy_cause.clone(),
+ new_env,
+ pred,
+ ));
let result = select.select(&obligation);
match result {
@@ -389,13 +398,15 @@ impl<'tcx> AutoTraitFinder<'tcx> {
/// not just one specific lifetime (e.g., `'static`).
fn add_user_pred(
&self,
- user_computed_preds: &mut FxHashSet<ty::Predicate<'tcx>>,
+ user_computed_preds: &mut FxIndexSet<ty::Predicate<'tcx>>,
new_pred: ty::Predicate<'tcx>,
) {
let mut should_add_new = true;
user_computed_preds.retain(|&old_pred| {
- if let (ty::PredicateKind::Trait(new_trait), ty::PredicateKind::Trait(old_trait)) =
- (new_pred.kind().skip_binder(), old_pred.kind().skip_binder())
+ if let (
+ ty::PredicateKind::Clause(ty::Clause::Trait(new_trait)),
+ ty::PredicateKind::Clause(ty::Clause::Trait(old_trait)),
+ ) = (new_pred.kind().skip_binder(), old_pred.kind().skip_binder())
{
if new_trait.def_id() == old_trait.def_id() {
let new_substs = new_trait.trait_ref.substs;
@@ -585,20 +596,20 @@ impl<'tcx> AutoTraitFinder<'tcx> {
&self,
ty: Ty<'_>,
nested: impl Iterator<Item = Obligation<'tcx, ty::Predicate<'tcx>>>,
- computed_preds: &mut FxHashSet<ty::Predicate<'tcx>>,
+ computed_preds: &mut FxIndexSet<ty::Predicate<'tcx>>,
fresh_preds: &mut FxHashSet<ty::Predicate<'tcx>>,
predicates: &mut VecDeque<ty::PolyTraitPredicate<'tcx>>,
- select: &mut SelectionContext<'_, 'tcx>,
+ selcx: &mut SelectionContext<'_, 'tcx>,
only_projections: bool,
) -> bool {
let dummy_cause = ObligationCause::dummy();
for obligation in nested {
let is_new_pred =
- fresh_preds.insert(self.clean_pred(select.infcx(), obligation.predicate));
+ fresh_preds.insert(self.clean_pred(selcx.infcx, obligation.predicate));
// Resolve any inference variables that we can, to help selection succeed
- let predicate = select.infcx().resolve_vars_if_possible(obligation.predicate);
+ let predicate = selcx.infcx.resolve_vars_if_possible(obligation.predicate);
// We only add a predicate as a user-displayable bound if
// it involves a generic parameter, and doesn't contain
@@ -615,14 +626,14 @@ impl<'tcx> AutoTraitFinder<'tcx> {
let bound_predicate = predicate.kind();
match bound_predicate.skip_binder() {
- ty::PredicateKind::Trait(p) => {
+ ty::PredicateKind::Clause(ty::Clause::Trait(p)) => {
// Add this to `predicates` so that we end up calling `select`
// with it. If this predicate ends up being unimplemented,
// then `evaluate_predicates` will handle adding it the `ParamEnv`
// if possible.
predicates.push_back(bound_predicate.rebind(p));
}
- ty::PredicateKind::Projection(p) => {
+ ty::PredicateKind::Clause(ty::Clause::Projection(p)) => {
let p = bound_predicate.rebind(p);
debug!(
"evaluate_nested_obligations: examining projection predicate {:?}",
@@ -706,7 +717,8 @@ impl<'tcx> AutoTraitFinder<'tcx> {
// and turn them into an explicit negative impl for our type.
debug!("Projecting and unifying projection predicate {:?}", predicate);
- match project::poly_project_and_unify_type(select, &obligation.with(p)) {
+ match project::poly_project_and_unify_type(selcx, &obligation.with(self.tcx, p))
+ {
ProjectAndUnifyResult::MismatchedProjectionTypes(e) => {
debug!(
"evaluate_nested_obligations: Unable to unify predicate \
@@ -731,7 +743,7 @@ impl<'tcx> AutoTraitFinder<'tcx> {
computed_preds,
fresh_preds,
predicates,
- select,
+ selcx,
only_projections,
) {
return false;
@@ -752,25 +764,25 @@ impl<'tcx> AutoTraitFinder<'tcx> {
}
}
}
- ty::PredicateKind::RegionOutlives(binder) => {
+ ty::PredicateKind::Clause(ty::Clause::RegionOutlives(binder)) => {
let binder = bound_predicate.rebind(binder);
- select.infcx().region_outlives_predicate(&dummy_cause, binder)
+ selcx.infcx.region_outlives_predicate(&dummy_cause, binder)
}
- ty::PredicateKind::TypeOutlives(binder) => {
+ ty::PredicateKind::Clause(ty::Clause::TypeOutlives(binder)) => {
let binder = bound_predicate.rebind(binder);
match (
binder.no_bound_vars(),
binder.map_bound_ref(|pred| pred.0).no_bound_vars(),
) {
(None, Some(t_a)) => {
- select.infcx().register_region_obligation_with_cause(
+ selcx.infcx.register_region_obligation_with_cause(
t_a,
- select.infcx().tcx.lifetimes.re_static,
+ selcx.infcx.tcx.lifetimes.re_static,
&dummy_cause,
);
}
(Some(ty::OutlivesPredicate(t_a, r_b)), _) => {
- select.infcx().register_region_obligation_with_cause(
+ selcx.infcx.register_region_obligation_with_cause(
t_a,
r_b,
&dummy_cause,
@@ -782,14 +794,12 @@ impl<'tcx> AutoTraitFinder<'tcx> {
ty::PredicateKind::ConstEquate(c1, c2) => {
let evaluate = |c: ty::Const<'tcx>| {
if let ty::ConstKind::Unevaluated(unevaluated) = c.kind() {
- match select.infcx().const_eval_resolve(
+ match selcx.infcx.const_eval_resolve(
obligation.param_env,
unevaluated,
Some(obligation.cause.span),
) {
- Ok(Some(valtree)) => {
- Ok(ty::Const::from_value(select.tcx(), valtree, c.ty()))
- }
+ Ok(Some(valtree)) => Ok(selcx.tcx().mk_const(valtree, c.ty())),
Ok(None) => {
let tcx = self.tcx;
let def_id = unevaluated.def.did;
@@ -809,10 +819,7 @@ impl<'tcx> AutoTraitFinder<'tcx> {
match (evaluate(c1), evaluate(c2)) {
(Ok(c1), Ok(c2)) => {
- match select
- .infcx()
- .at(&obligation.cause, obligation.param_env)
- .eq(c1, c2)
+ match selcx.infcx.at(&obligation.cause, obligation.param_env).eq(c1, c2)
{
Ok(_) => (),
Err(_) => return false,
@@ -832,6 +839,7 @@ impl<'tcx> AutoTraitFinder<'tcx> {
| ty::PredicateKind::ConstEvaluatable(..)
| ty::PredicateKind::Coerce(..)
| ty::PredicateKind::TypeWellFormedFromEnv(..) => {}
+ ty::PredicateKind::Ambiguous => return false,
};
}
true
@@ -846,7 +854,7 @@ impl<'tcx> AutoTraitFinder<'tcx> {
}
}
-// Replaces all ReVars in a type with ty::Region's, using the provided map
+/// Replaces all ReVars in a type with ty::Region's, using the provided map
pub struct RegionReplacer<'a, 'tcx> {
vid_to_region: &'a FxHashMap<ty::RegionVid, ty::Region<'tcx>>,
tcx: TyCtxt<'tcx>,
diff --git a/compiler/rustc_trait_selection/src/traits/chalk_fulfill.rs b/compiler/rustc_trait_selection/src/traits/chalk_fulfill.rs
index 81e1d6449..e88950523 100644
--- a/compiler/rustc_trait_selection/src/traits/chalk_fulfill.rs
+++ b/compiler/rustc_trait_selection/src/traits/chalk_fulfill.rs
@@ -4,44 +4,43 @@ use crate::infer::canonical::OriginalQueryValues;
use crate::infer::InferCtxt;
use crate::traits::query::NoSolution;
use crate::traits::{
- ChalkEnvironmentAndGoal, FulfillmentError, FulfillmentErrorCode, ObligationCause,
- PredicateObligation, SelectionError, TraitEngine,
+ ChalkEnvironmentAndGoal, FulfillmentError, FulfillmentErrorCode, PredicateObligation,
+ SelectionError, TraitEngine,
};
use rustc_data_structures::fx::{FxHashMap, FxIndexSet};
-use rustc_middle::ty::{self, Ty, TypeVisitable};
+use rustc_middle::ty::{self, TypeVisitable};
pub struct FulfillmentContext<'tcx> {
obligations: FxIndexSet<PredicateObligation<'tcx>>,
relationships: FxHashMap<ty::TyVid, ty::FoundRelationships>,
+
+ usable_in_snapshot: bool,
}
impl FulfillmentContext<'_> {
- pub(crate) fn new() -> Self {
+ pub(super) fn new() -> Self {
FulfillmentContext {
obligations: FxIndexSet::default(),
relationships: FxHashMap::default(),
+ usable_in_snapshot: false,
}
}
-}
-impl<'tcx> TraitEngine<'tcx> for FulfillmentContext<'tcx> {
- fn normalize_projection_type(
- &mut self,
- infcx: &InferCtxt<'tcx>,
- _param_env: ty::ParamEnv<'tcx>,
- projection_ty: ty::ProjectionTy<'tcx>,
- _cause: ObligationCause<'tcx>,
- ) -> Ty<'tcx> {
- infcx.tcx.mk_ty(ty::Projection(projection_ty))
+ pub(crate) fn new_in_snapshot() -> Self {
+ FulfillmentContext { usable_in_snapshot: true, ..Self::new() }
}
+}
+impl<'tcx> TraitEngine<'tcx> for FulfillmentContext<'tcx> {
fn register_predicate_obligation(
&mut self,
infcx: &InferCtxt<'tcx>,
obligation: PredicateObligation<'tcx>,
) {
- assert!(!infcx.is_in_snapshot());
+ if !self.usable_in_snapshot {
+ assert!(!infcx.is_in_snapshot());
+ }
let obligation = infcx.resolve_vars_if_possible(obligation);
super::relationships::update(self, infcx, &obligation);
@@ -72,7 +71,9 @@ impl<'tcx> TraitEngine<'tcx> for FulfillmentContext<'tcx> {
}
fn select_where_possible(&mut self, infcx: &InferCtxt<'tcx>) -> Vec<FulfillmentError<'tcx>> {
- assert!(!infcx.is_in_snapshot());
+ if !self.usable_in_snapshot {
+ assert!(!infcx.is_in_snapshot());
+ }
let mut errors = Vec::new();
let mut next_round = FxIndexSet::default();
diff --git a/compiler/rustc_trait_selection/src/traits/coherence.rs b/compiler/rustc_trait_selection/src/traits/coherence.rs
index 8aab75490..899e30275 100644
--- a/compiler/rustc_trait_selection/src/traits/coherence.rs
+++ b/compiler/rustc_trait_selection/src/traits/coherence.rs
@@ -11,14 +11,14 @@ use crate::traits::select::IntercrateAmbiguityCause;
use crate::traits::util::impl_subject_and_oblig;
use crate::traits::SkipLeakCheck;
use crate::traits::{
- self, Normalized, Obligation, ObligationCause, ObligationCtxt, PredicateObligation,
- PredicateObligations, SelectionContext,
+ self, Obligation, ObligationCause, ObligationCtxt, PredicateObligation, PredicateObligations,
+ SelectionContext,
};
use rustc_data_structures::fx::FxIndexSet;
use rustc_errors::Diagnostic;
use rustc_hir::def_id::{DefId, CRATE_DEF_ID, LOCAL_CRATE};
use rustc_hir::CRATE_HIR_ID;
-use rustc_infer::infer::{InferCtxt, TyCtxtInferExt};
+use rustc_infer::infer::{DefiningAnchor, InferCtxt, TyCtxtInferExt};
use rustc_infer::traits::util;
use rustc_middle::traits::specialization_graph::OverlapMode;
use rustc_middle::ty::fast_reject::{DeepRejectCtxt, TreatParams};
@@ -30,6 +30,8 @@ use std::fmt::Debug;
use std::iter;
use std::ops::ControlFlow;
+use super::NormalizeExt;
+
/// Whether we do the orphan check relative to this crate or
/// to some remote crate.
#[derive(Copy, Clone, Debug)]
@@ -64,13 +66,13 @@ pub fn add_placeholder_note(err: &mut Diagnostic) {
/// with a suitably-freshened `ImplHeader` with those types
/// substituted. Otherwise, returns `None`.
#[instrument(skip(tcx, skip_leak_check), level = "debug")]
-pub fn overlapping_impls(
- tcx: TyCtxt<'_>,
+pub fn overlapping_impls<'tcx>(
+ tcx: TyCtxt<'tcx>,
impl1_def_id: DefId,
impl2_def_id: DefId,
skip_leak_check: SkipLeakCheck,
overlap_mode: OverlapMode,
-) -> Option<OverlapResult<'_>> {
+) -> Option<OverlapResult<'tcx>> {
// Before doing expensive operations like entering an inference context, do
// a quick check via fast_reject to tell if the impl headers could possibly
// unify.
@@ -94,8 +96,9 @@ pub fn overlapping_impls(
return None;
}
- let infcx = tcx.infer_ctxt().build();
- let selcx = &mut SelectionContext::intercrate(&infcx);
+ let infcx =
+ tcx.infer_ctxt().with_opaque_type_inference(DefiningAnchor::Bubble).intercrate().build();
+ let selcx = &mut SelectionContext::new(&infcx);
let overlaps =
overlap(selcx, skip_leak_check, impl1_def_id, impl2_def_id, overlap_mode).is_some();
if !overlaps {
@@ -105,8 +108,9 @@ pub fn overlapping_impls(
// In the case where we detect an error, run the check again, but
// this time tracking intercrate ambiguity causes for better
// diagnostics. (These take time and can lead to false errors.)
- let infcx = tcx.infer_ctxt().build();
- let selcx = &mut SelectionContext::intercrate(&infcx);
+ let infcx =
+ tcx.infer_ctxt().with_opaque_type_inference(DefiningAnchor::Bubble).intercrate().build();
+ let selcx = &mut SelectionContext::new(&infcx);
selcx.enable_tracking_intercrate_ambiguity_causes();
Some(overlap(selcx, skip_leak_check, impl1_def_id, impl2_def_id, overlap_mode).unwrap())
}
@@ -117,7 +121,7 @@ fn with_fresh_ty_vars<'cx, 'tcx>(
impl_def_id: DefId,
) -> ty::ImplHeader<'tcx> {
let tcx = selcx.tcx();
- let impl_substs = selcx.infcx().fresh_substs_for_item(DUMMY_SP, impl_def_id);
+ let impl_substs = selcx.infcx.fresh_substs_for_item(DUMMY_SP, impl_def_id);
let header = ty::ImplHeader {
impl_def_id,
@@ -126,8 +130,8 @@ fn with_fresh_ty_vars<'cx, 'tcx>(
predicates: tcx.predicates_of(impl_def_id).instantiate(tcx, impl_substs).predicates,
};
- let Normalized { value: mut header, obligations } =
- traits::normalize(selcx, param_env, ObligationCause::dummy(), header);
+ let InferOk { value: mut header, obligations } =
+ selcx.infcx.at(&ObligationCause::dummy(), param_env).normalize(header);
header.predicates.extend(obligations.into_iter().map(|o| o.predicate));
header
@@ -147,7 +151,7 @@ fn overlap<'cx, 'tcx>(
impl1_def_id, impl2_def_id, overlap_mode
);
- selcx.infcx().probe_maybe_skip_leak_check(skip_leak_check.is_yes(), |snapshot| {
+ selcx.infcx.probe_maybe_skip_leak_check(skip_leak_check.is_yes(), |snapshot| {
overlap_within_probe(selcx, impl1_def_id, impl2_def_id, overlap_mode, snapshot)
})
}
@@ -159,11 +163,11 @@ fn overlap_within_probe<'cx, 'tcx>(
overlap_mode: OverlapMode,
snapshot: &CombinedSnapshot<'tcx>,
) -> Option<OverlapResult<'tcx>> {
- let infcx = selcx.infcx();
+ let infcx = selcx.infcx;
if overlap_mode.use_negative_impl() {
- if negative_impl(selcx, impl1_def_id, impl2_def_id)
- || negative_impl(selcx, impl2_def_id, impl1_def_id)
+ if negative_impl(infcx.tcx, impl1_def_id, impl2_def_id)
+ || negative_impl(infcx.tcx, impl2_def_id, impl1_def_id)
{
return None;
}
@@ -198,9 +202,9 @@ fn overlap_within_probe<'cx, 'tcx>(
debug!("overlap: intercrate_ambiguity_causes={:#?}", intercrate_ambiguity_causes);
let involves_placeholder =
- matches!(selcx.infcx().region_constraints_added_in_snapshot(snapshot), Some(true));
+ matches!(selcx.infcx.region_constraints_added_in_snapshot(snapshot), Some(true));
- let impl_header = selcx.infcx().resolve_vars_if_possible(impl1_header);
+ let impl_header = selcx.infcx.resolve_vars_if_possible(impl1_header);
Some(OverlapResult { impl_header, intercrate_ambiguity_causes, involves_placeholder })
}
@@ -212,7 +216,7 @@ fn equate_impl_headers<'cx, 'tcx>(
// Do `a` and `b` unify? If not, no overlap.
debug!("equate_impl_headers(impl1_header={:?}, impl2_header={:?}", impl1_header, impl2_header);
selcx
- .infcx()
+ .infcx
.at(&ObligationCause::dummy(), ty::ParamEnv::empty())
.eq_impl_headers(impl1_header, impl2_header)
.map(|infer_ok| infer_ok.obligations)
@@ -253,7 +257,7 @@ fn implicit_negative<'cx, 'tcx>(
"implicit_negative(impl1_header={:?}, impl2_header={:?}, obligations={:?})",
impl1_header, impl2_header, obligations
);
- let infcx = selcx.infcx();
+ let infcx = selcx.infcx;
let opt_failing_obligation = impl1_header
.predicates
.iter()
@@ -279,13 +283,8 @@ fn implicit_negative<'cx, 'tcx>(
/// Given impl1 and impl2, check if both impls are never satisfied by a common type (including
/// where-clauses). If so, they are disjoint and this returns true; otherwise it returns false.
-fn negative_impl<'cx, 'tcx>(
- selcx: &mut SelectionContext<'cx, 'tcx>,
- impl1_def_id: DefId,
- impl2_def_id: DefId,
-) -> bool {
+fn negative_impl<'tcx>(tcx: TyCtxt<'tcx>, impl1_def_id: DefId, impl2_def_id: DefId) -> bool {
debug!("negative_impl(impl1_def_id={:?}, impl2_def_id={:?})", impl1_def_id, impl2_def_id);
- let tcx = selcx.infcx().tcx;
// Create an infcx, taking the predicates of impl1 as assumptions:
let infcx = tcx.infer_ctxt().build();
@@ -332,11 +331,10 @@ fn equate<'tcx>(
return true;
};
- let selcx = &mut SelectionContext::new(&infcx);
let opt_failing_obligation = obligations
.into_iter()
.chain(more_obligations)
- .find(|o| negative_impl_exists(selcx, o, body_def_id));
+ .find(|o| negative_impl_exists(infcx, o, body_def_id));
if let Some(failing_obligation) = opt_failing_obligation {
debug!("overlap: obligation unsatisfiable {:?}", failing_obligation);
@@ -347,19 +345,19 @@ fn equate<'tcx>(
}
/// Try to prove that a negative impl exists for the given obligation and its super predicates.
-#[instrument(level = "debug", skip(selcx))]
-fn negative_impl_exists<'cx, 'tcx>(
- selcx: &SelectionContext<'cx, 'tcx>,
+#[instrument(level = "debug", skip(infcx))]
+fn negative_impl_exists<'tcx>(
+ infcx: &InferCtxt<'tcx>,
o: &PredicateObligation<'tcx>,
body_def_id: DefId,
) -> bool {
- if resolve_negative_obligation(selcx.infcx().fork(), o, body_def_id) {
+ if resolve_negative_obligation(infcx.fork(), o, body_def_id) {
return true;
}
// Try to prove a negative obligation exists for super predicates
- for o in util::elaborate_predicates(selcx.tcx(), iter::once(o.predicate)) {
- if resolve_negative_obligation(selcx.infcx().fork(), &o, body_def_id) {
+ for o in util::elaborate_predicates(infcx.tcx, iter::once(o.predicate)) {
+ if resolve_negative_obligation(infcx.fork(), &o, body_def_id) {
return true;
}
}
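A standalone sketch (not part of the patch itself) of the disjointness reasoning that `negative_impl` and `negative_impl_exists` implement during coherence. It assumes a nightly toolchain with the `negative_impls` and `with_negative_coherence` features; the trait and type names are invented for illustration.

#![feature(negative_impls)]
#![feature(with_negative_coherence)]

trait Greet {
    fn greet(&self) -> &'static str;
}

// An explicit negative impl: unlike the mere absence of an auto-trait
// implementation, coherence may rely on `NotSend: Send` being provably false.
struct NotSend;
impl !Send for NotSend {}

// Blanket impl for every `Send` type.
impl<T: Send> Greet for T {
    fn greet(&self) -> &'static str {
        "sendable"
    }
}

// Expected to be accepted only because the overlap check can prove the two
// impls disjoint: `NotSend: Send` contradicts the negative impl above.
impl Greet for NotSend {
    fn greet(&self) -> &'static str {
        "not sendable"
    }
}

fn main() {
    assert_eq!(NotSend.greet(), "not sendable");
    assert_eq!(1u8.greet(), "sendable");
}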
diff --git a/compiler/rustc_trait_selection/src/traits/const_evaluatable.rs b/compiler/rustc_trait_selection/src/traits/const_evaluatable.rs
index 84038625f..7c9fde274 100644
--- a/compiler/rustc_trait_selection/src/traits/const_evaluatable.rs
+++ b/compiler/rustc_trait_selection/src/traits/const_evaluatable.rs
@@ -8,164 +8,30 @@
//! In this case we try to build an abstract representation of this constant using
//! `thir_abstract_const` which can then be checked for structural equality with other
//! generic constants mentioned in the `caller_bounds` of the current environment.
-use rustc_errors::ErrorGuaranteed;
+use rustc_hir::def::DefKind;
use rustc_infer::infer::InferCtxt;
use rustc_middle::mir::interpret::ErrorHandled;
-use rustc_middle::ty::abstract_const::{
- walk_abstract_const, AbstractConst, FailureKind, Node, NotConstEvaluatable,
-};
-use rustc_middle::ty::{self, TyCtxt, TypeVisitable};
-use rustc_span::Span;
-
-use std::iter;
-use std::ops::ControlFlow;
-
-pub struct ConstUnifyCtxt<'tcx> {
- pub tcx: TyCtxt<'tcx>,
- pub param_env: ty::ParamEnv<'tcx>,
-}
-
-impl<'tcx> ConstUnifyCtxt<'tcx> {
- // Substitutes generics repeatedly to allow AbstractConsts to unify where a
- // ConstKind::Unevaluated could be turned into an AbstractConst that would unify e.g.
- // Param(N) should unify with Param(T), substs: [Unevaluated("T2", [Unevaluated("T3", [Param(N)])])]
- #[inline]
- #[instrument(skip(self), level = "debug")]
- fn try_replace_substs_in_root(
- &self,
- mut abstr_const: AbstractConst<'tcx>,
- ) -> Option<AbstractConst<'tcx>> {
- while let Node::Leaf(ct) = abstr_const.root(self.tcx) {
- match AbstractConst::from_const(self.tcx, ct) {
- Ok(Some(act)) => abstr_const = act,
- Ok(None) => break,
- Err(_) => return None,
- }
- }
-
- Some(abstr_const)
- }
- /// Tries to unify two abstract constants using structural equality.
- #[instrument(skip(self), level = "debug")]
- pub fn try_unify(&self, a: AbstractConst<'tcx>, b: AbstractConst<'tcx>) -> bool {
- let a = if let Some(a) = self.try_replace_substs_in_root(a) {
- a
- } else {
- return true;
- };
-
- let b = if let Some(b) = self.try_replace_substs_in_root(b) {
- b
- } else {
- return true;
- };
-
- let a_root = a.root(self.tcx);
- let b_root = b.root(self.tcx);
- debug!(?a_root, ?b_root);
-
- match (a_root, b_root) {
- (Node::Leaf(a_ct), Node::Leaf(b_ct)) => {
- let a_ct = a_ct.eval(self.tcx, self.param_env);
- debug!("a_ct evaluated: {:?}", a_ct);
- let b_ct = b_ct.eval(self.tcx, self.param_env);
- debug!("b_ct evaluated: {:?}", b_ct);
-
- if a_ct.ty() != b_ct.ty() {
- return false;
- }
+use rustc_middle::traits::ObligationCause;
+use rustc_middle::ty::abstract_const::NotConstEvaluatable;
+use rustc_middle::ty::{self, TyCtxt, TypeVisitable, TypeVisitor};
- match (a_ct.kind(), b_ct.kind()) {
- // We can just unify errors with everything to reduce the amount of
- // emitted errors here.
- (ty::ConstKind::Error(_), _) | (_, ty::ConstKind::Error(_)) => true,
- (ty::ConstKind::Param(a_param), ty::ConstKind::Param(b_param)) => {
- a_param == b_param
- }
- (ty::ConstKind::Value(a_val), ty::ConstKind::Value(b_val)) => a_val == b_val,
- // If we have `fn a<const N: usize>() -> [u8; N + 1]` and `fn b<const M: usize>() -> [u8; 1 + M]`
- // we do not want to use `assert_eq!(a(), b())` to infer that `N` and `M` have to be `1`. This
- // means that we only allow inference variables if they are equal.
- (ty::ConstKind::Infer(a_val), ty::ConstKind::Infer(b_val)) => a_val == b_val,
- // We expand generic anonymous constants at the start of this function, so this
- // branch should only be taking when dealing with associated constants, at
- // which point directly comparing them seems like the desired behavior.
- //
- // FIXME(generic_const_exprs): This isn't actually the case.
- // We also take this branch for concrete anonymous constants and
- // expand generic anonymous constants with concrete substs.
- (ty::ConstKind::Unevaluated(a_uv), ty::ConstKind::Unevaluated(b_uv)) => {
- a_uv == b_uv
- }
- // FIXME(generic_const_exprs): We may want to either actually try
- // to evaluate `a_ct` and `b_ct` if they are fully concrete or something like
- // this, for now we just return false here.
- _ => false,
- }
- }
- (Node::Binop(a_op, al, ar), Node::Binop(b_op, bl, br)) if a_op == b_op => {
- self.try_unify(a.subtree(al), b.subtree(bl))
- && self.try_unify(a.subtree(ar), b.subtree(br))
- }
- (Node::UnaryOp(a_op, av), Node::UnaryOp(b_op, bv)) if a_op == b_op => {
- self.try_unify(a.subtree(av), b.subtree(bv))
- }
- (Node::FunctionCall(a_f, a_args), Node::FunctionCall(b_f, b_args))
- if a_args.len() == b_args.len() =>
- {
- self.try_unify(a.subtree(a_f), b.subtree(b_f))
- && iter::zip(a_args, b_args)
- .all(|(&an, &bn)| self.try_unify(a.subtree(an), b.subtree(bn)))
- }
- (Node::Cast(a_kind, a_operand, a_ty), Node::Cast(b_kind, b_operand, b_ty))
- if (a_ty == b_ty) && (a_kind == b_kind) =>
- {
- self.try_unify(a.subtree(a_operand), b.subtree(b_operand))
- }
- // use this over `_ => false` to make adding variants to `Node` less error prone
- (Node::Cast(..), _)
- | (Node::FunctionCall(..), _)
- | (Node::UnaryOp(..), _)
- | (Node::Binop(..), _)
- | (Node::Leaf(..), _) => false,
- }
- }
-}
-
-#[instrument(skip(tcx), level = "debug")]
-pub fn try_unify_abstract_consts<'tcx>(
- tcx: TyCtxt<'tcx>,
- (a, b): (ty::UnevaluatedConst<'tcx>, ty::UnevaluatedConst<'tcx>),
- param_env: ty::ParamEnv<'tcx>,
-) -> bool {
- (|| {
- if let Some(a) = AbstractConst::new(tcx, a)? {
- if let Some(b) = AbstractConst::new(tcx, b)? {
- let const_unify_ctxt = ConstUnifyCtxt { tcx, param_env };
- return Ok(const_unify_ctxt.try_unify(a, b));
- }
- }
+use rustc_span::Span;
+use std::ops::ControlFlow;
- Ok(false)
- })()
- .unwrap_or_else(|_: ErrorGuaranteed| true)
- // FIXME(generic_const_exprs): We should instead have this
- // method return the resulting `ty::Const` and return `ConstKind::Error`
- // on `ErrorGuaranteed`.
-}
+use crate::traits::ObligationCtxt;
/// Check if a given constant can be evaluated.
#[instrument(skip(infcx), level = "debug")]
pub fn is_const_evaluatable<'tcx>(
infcx: &InferCtxt<'tcx>,
- ct: ty::Const<'tcx>,
+ unexpanded_ct: ty::Const<'tcx>,
param_env: ty::ParamEnv<'tcx>,
span: Span,
) -> Result<(), NotConstEvaluatable> {
let tcx = infcx.tcx;
- let uv = match ct.kind() {
- ty::ConstKind::Unevaluated(uv) => uv,
+ match tcx.expand_abstract_consts(unexpanded_ct).kind() {
+ ty::ConstKind::Unevaluated(_) | ty::ConstKind::Expr(_) => (),
ty::ConstKind::Param(_)
| ty::ConstKind::Bound(_, _)
| ty::ConstKind::Placeholder(_)
@@ -175,40 +41,59 @@ pub fn is_const_evaluatable<'tcx>(
};
if tcx.features().generic_const_exprs {
- if let Some(ct) = AbstractConst::new(tcx, uv)? {
- if satisfied_from_param_env(tcx, ct, param_env)? {
+ let ct = tcx.expand_abstract_consts(unexpanded_ct);
+
+ let is_anon_ct = if let ty::ConstKind::Unevaluated(uv) = ct.kind() {
+ tcx.def_kind(uv.def.did) == DefKind::AnonConst
+ } else {
+ false
+ };
+
+ if !is_anon_ct {
+ if satisfied_from_param_env(tcx, infcx, ct, param_env) {
return Ok(());
}
- match ct.unify_failure_kind(tcx) {
- FailureKind::MentionsInfer => {
- return Err(NotConstEvaluatable::MentionsInfer);
- }
- FailureKind::MentionsParam => {
- return Err(NotConstEvaluatable::MentionsParam);
- }
- // returned below
- FailureKind::Concrete => {}
+ if ct.has_non_region_infer() {
+ return Err(NotConstEvaluatable::MentionsInfer);
+ } else if ct.has_non_region_param() {
+ return Err(NotConstEvaluatable::MentionsParam);
}
}
- let concrete = infcx.const_eval_resolve(param_env, uv, Some(span));
- match concrete {
- Err(ErrorHandled::TooGeneric) => {
- Err(NotConstEvaluatable::Error(infcx.tcx.sess.delay_span_bug(
+
+ match unexpanded_ct.kind() {
+ ty::ConstKind::Expr(_) => {
+ // FIXME(generic_const_exprs): we have a `ConstKind::Expr` which is fully concrete, but
+ // currently it is not possible to evaluate `ConstKind::Expr` so we are unable to tell if it
+ // is evaluatable or not. For now we just ICE until this is implemented.
+ Err(NotConstEvaluatable::Error(tcx.sess.delay_span_bug(
span,
- format!("Missing value for constant, but no error reported?"),
+ "evaluating `ConstKind::Expr` is not currently supported",
)))
}
- Err(ErrorHandled::Linted) => {
- let reported = infcx
- .tcx
- .sess
- .delay_span_bug(span, "constant in type had error reported as lint");
- Err(NotConstEvaluatable::Error(reported))
+ ty::ConstKind::Unevaluated(uv) => {
+ let concrete = infcx.const_eval_resolve(param_env, uv, Some(span));
+ match concrete {
+ Err(ErrorHandled::TooGeneric) => {
+ Err(NotConstEvaluatable::Error(infcx.tcx.sess.delay_span_bug(
+ span,
+ "Missing value for constant, but no error reported?",
+ )))
+ }
+ Err(ErrorHandled::Reported(e)) => Err(NotConstEvaluatable::Error(e)),
+ Ok(_) => Ok(()),
+ }
}
- Err(ErrorHandled::Reported(e)) => Err(NotConstEvaluatable::Error(e)),
- Ok(_) => Ok(()),
+ _ => bug!("unexpected constkind in `is_const_evalautable: {unexpanded_ct:?}`"),
}
} else {
+ let uv = match unexpanded_ct.kind() {
+ ty::ConstKind::Unevaluated(uv) => uv,
+ ty::ConstKind::Expr(_) => {
+ bug!("`ConstKind::Expr` without `feature(generic_const_exprs)` enabled")
+ }
+ _ => bug!("unexpected constkind in `is_const_evalautable: {unexpanded_ct:?}`"),
+ };
+
// FIXME: We should only try to evaluate a given constant here if it is fully concrete
// as we don't want to allow things like `[u8; std::mem::size_of::<*mut T>()]`.
//
@@ -218,28 +103,33 @@ pub fn is_const_evaluatable<'tcx>(
//
// See #74595 for more details about this.
let concrete = infcx.const_eval_resolve(param_env, uv, Some(span));
-
match concrete {
- // If we're evaluating a foreign constant, under a nightly compiler without generic
- // const exprs, AND it would've passed if that expression had been evaluated with
- // generic const exprs, then suggest using generic const exprs.
- Err(_) if tcx.sess.is_nightly_build()
- && let Ok(Some(ct)) = AbstractConst::new(tcx, uv)
- && satisfied_from_param_env(tcx, ct, param_env) == Ok(true) => {
- tcx.sess
- .struct_span_fatal(
- // Slightly better span than just using `span` alone
- if span == rustc_span::DUMMY_SP { tcx.def_span(uv.def.did) } else { span },
- "failed to evaluate generic const expression",
- )
- .note("the crate this constant originates from uses `#![feature(generic_const_exprs)]`")
- .span_suggestion_verbose(
- rustc_span::DUMMY_SP,
- "consider enabling this feature",
- "#![feature(generic_const_exprs)]\n",
- rustc_errors::Applicability::MaybeIncorrect,
- )
- .emit()
+ // If we're evaluating a generic foreign constant, under a nightly compiler while
+ // the current crate does not enable `feature(generic_const_exprs)`, abort
+ // compilation with a useful error.
+ Err(_)
+ if tcx.sess.is_nightly_build()
+ && satisfied_from_param_env(
+ tcx,
+ infcx,
+ tcx.expand_abstract_consts(unexpanded_ct),
+ param_env,
+ ) =>
+ {
+ tcx.sess
+ .struct_span_fatal(
+ // Slightly better span than just using `span` alone
+ if span == rustc_span::DUMMY_SP { tcx.def_span(uv.def.did) } else { span },
+ "failed to evaluate generic const expression",
+ )
+ .note("the crate this constant originates from uses `#![feature(generic_const_exprs)]`")
+ .span_suggestion_verbose(
+ rustc_span::DUMMY_SP,
+ "consider enabling this feature",
+ "#![feature(generic_const_exprs)]\n",
+ rustc_errors::Applicability::MaybeIncorrect,
+ )
+ .emit()
}
Err(ErrorHandled::TooGeneric) => {
@@ -248,16 +138,14 @@ pub fn is_const_evaluatable<'tcx>(
} else if uv.has_non_region_param() {
NotConstEvaluatable::MentionsParam
} else {
- let guar = infcx.tcx.sess.delay_span_bug(span, format!("Missing value for constant, but no error reported?"));
+ let guar = infcx.tcx.sess.delay_span_bug(
+ span,
+ format!("Missing value for constant, but no error reported?"),
+ );
NotConstEvaluatable::Error(guar)
};
Err(err)
- },
- Err(ErrorHandled::Linted) => {
- let reported =
- infcx.tcx.sess.delay_span_bug(span, "constant in type had error reported as lint");
- Err(NotConstEvaluatable::Error(reported))
}
Err(ErrorHandled::Reported(e)) => Err(NotConstEvaluatable::Error(e)),
Ok(_) => Ok(()),
@@ -265,37 +153,69 @@ pub fn is_const_evaluatable<'tcx>(
}
}
-#[instrument(skip(tcx), level = "debug")]
+#[instrument(skip(infcx, tcx), level = "debug")]
fn satisfied_from_param_env<'tcx>(
tcx: TyCtxt<'tcx>,
- ct: AbstractConst<'tcx>,
+ infcx: &InferCtxt<'tcx>,
+ ct: ty::Const<'tcx>,
param_env: ty::ParamEnv<'tcx>,
-) -> Result<bool, NotConstEvaluatable> {
+) -> bool {
+ // Try to unify with each subtree in the AbstractConst to allow for
+ // `N + 1` being const evaluatable even if there's only a `ConstEvaluatable`
+ // predicate for `(N + 1) * 2`
+ struct Visitor<'a, 'tcx> {
+ ct: ty::Const<'tcx>,
+ param_env: ty::ParamEnv<'tcx>,
+
+ infcx: &'a InferCtxt<'tcx>,
+ }
+ impl<'a, 'tcx> TypeVisitor<'tcx> for Visitor<'a, 'tcx> {
+ type BreakTy = ();
+ fn visit_const(&mut self, c: ty::Const<'tcx>) -> ControlFlow<Self::BreakTy> {
+ debug!("is_const_evaluatable: candidate={:?}", c);
+ if let Ok(()) = self.infcx.commit_if_ok(|_| {
+ let ocx = ObligationCtxt::new_in_snapshot(self.infcx);
+ if let Ok(()) = ocx.eq(&ObligationCause::dummy(), self.param_env, c.ty(), self.ct.ty())
+ && let Ok(()) = ocx.eq(&ObligationCause::dummy(), self.param_env, c, self.ct)
+ && ocx.select_all_or_error().is_empty()
+ {
+ Ok(())
+ } else {
+ Err(())
+ }
+ }) {
+ ControlFlow::BREAK
+ } else if let ty::ConstKind::Expr(e) = c.kind() {
+ e.visit_with(self)
+ } else {
+ // FIXME(generic_const_exprs): This doesn't recurse into `<T as Trait<U>>::ASSOC`'s substs.
+ // This is currently unobservable as `<T as Trait<{ U + 1 }>>::ASSOC` creates an anon const
+ // with its own `ConstEvaluatable` bound in the param env which we will visit separately.
+ //
+ // If we start allowing directly writing `ConstKind::Expr` without an intermediate anon const
+ // this will be incorrect. It might be worth investigating making `predicates_of` elaborate
+ // all of the `ConstEvaluatable` bounds rather than having a visitor here.
+ ControlFlow::CONTINUE
+ }
+ }
+ }
+
for pred in param_env.caller_bounds() {
match pred.kind().skip_binder() {
- ty::PredicateKind::ConstEvaluatable(uv) => {
- if let Some(b_ct) = AbstractConst::from_const(tcx, uv)? {
- let const_unify_ctxt = ConstUnifyCtxt { tcx, param_env };
-
- // Try to unify with each subtree in the AbstractConst to allow for
- // `N + 1` being const evaluatable even if theres only a `ConstEvaluatable`
- // predicate for `(N + 1) * 2`
- let result = walk_abstract_const(tcx, b_ct, |b_ct| {
- match const_unify_ctxt.try_unify(ct, b_ct) {
- true => ControlFlow::BREAK,
- false => ControlFlow::CONTINUE,
- }
- });
-
- if let ControlFlow::Break(()) = result {
- debug!("is_const_evaluatable: abstract_const ~~> ok");
- return Ok(true);
- }
+ ty::PredicateKind::ConstEvaluatable(ce) => {
+ let b_ct = tcx.expand_abstract_consts(ce);
+ let mut v = Visitor { ct, infcx, param_env };
+ let result = b_ct.visit_with(&mut v);
+
+ if let ControlFlow::Break(()) = result {
+ debug!("is_const_evaluatable: yes");
+ return true;
}
}
_ => {} // don't care
}
}
- Ok(false)
+ debug!("is_const_evaluatable: no");
+ false
}
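As a user-facing sketch of what the rewritten `satisfied_from_param_env` checks, the standalone program below (not part of the patch; it needs a nightly toolchain with `generic_const_exprs`) relies on the caller's `ConstEvaluatable` bound for `N + 1` being found when the same expression is used in the body and the return type:

#![feature(generic_const_exprs)]
#![allow(incomplete_features)]

// The `where [(); N + 1]:` clause puts a `ConstEvaluatable(N + 1)` bound into
// the parameter environment; every use of `N + 1` below is satisfied from it.
fn pad<const N: usize>(xs: [u8; N]) -> [u8; N + 1]
where
    [(); N + 1]:,
{
    let mut out = [0u8; N + 1];
    out[..N].copy_from_slice(&xs);
    out
}

fn main() {
    assert_eq!(pad([1, 2, 3]), [1, 2, 3, 0]);
}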
diff --git a/compiler/rustc_trait_selection/src/traits/engine.rs b/compiler/rustc_trait_selection/src/traits/engine.rs
index e0c8deec9..c028e89e4 100644
--- a/compiler/rustc_trait_selection/src/traits/engine.rs
+++ b/compiler/rustc_trait_selection/src/traits/engine.rs
@@ -1,14 +1,21 @@
use std::cell::RefCell;
+use std::fmt::Debug;
use super::TraitEngine;
use super::{ChalkFulfillmentContext, FulfillmentContext};
-use crate::infer::InferCtxtExt;
-use rustc_data_structures::fx::FxHashSet;
+use crate::traits::NormalizeExt;
+use rustc_data_structures::fx::FxIndexSet;
use rustc_hir::def_id::{DefId, LocalDefId};
+use rustc_infer::infer::at::ToTrace;
+use rustc_infer::infer::canonical::{
+ Canonical, CanonicalVarValues, CanonicalizedQueryResponse, QueryResponse,
+};
use rustc_infer::infer::{InferCtxt, InferOk};
+use rustc_infer::traits::query::Fallible;
use rustc_infer::traits::{
FulfillmentError, Obligation, ObligationCause, PredicateObligation, TraitEngineExt as _,
};
+use rustc_middle::arena::ArenaAllocatable;
use rustc_middle::ty::error::TypeError;
use rustc_middle::ty::ToPredicate;
use rustc_middle::ty::TypeFoldable;
@@ -31,7 +38,7 @@ impl<'tcx> TraitEngineExt<'tcx> for dyn TraitEngine<'tcx> {
fn new_in_snapshot(tcx: TyCtxt<'tcx>) -> Box<Self> {
if tcx.sess.opts.unstable_opts.chalk {
- Box::new(ChalkFulfillmentContext::new())
+ Box::new(ChalkFulfillmentContext::new_in_snapshot())
} else {
Box::new(FulfillmentContext::new_in_snapshot())
}
@@ -86,7 +93,7 @@ impl<'a, 'tcx> ObligationCtxt<'a, 'tcx> {
def_id: DefId,
) {
let tcx = self.infcx.tcx;
- let trait_ref = ty::TraitRef { def_id, substs: tcx.mk_substs_trait(ty, &[]) };
+ let trait_ref = tcx.mk_trait_ref(def_id, [ty]);
self.register_obligation(Obligation {
cause,
recursion_depth: 0,
@@ -97,28 +104,75 @@ impl<'a, 'tcx> ObligationCtxt<'a, 'tcx> {
pub fn normalize<T: TypeFoldable<'tcx>>(
&self,
- cause: ObligationCause<'tcx>,
+ cause: &ObligationCause<'tcx>,
param_env: ty::ParamEnv<'tcx>,
value: T,
) -> T {
- let infer_ok = self.infcx.partially_normalize_associated_types_in(cause, param_env, value);
+ let infer_ok = self.infcx.at(&cause, param_env).normalize(value);
self.register_infer_ok_obligations(infer_ok)
}
- pub fn equate_types(
+ /// Makes `a == b`, using `a_is_expected` to decide which side is reported as the expected type.
+ pub fn eq_exp<T>(
+ &self,
+ cause: &ObligationCause<'tcx>,
+ param_env: ty::ParamEnv<'tcx>,
+ a_is_expected: bool,
+ a: T,
+ b: T,
+ ) -> Result<(), TypeError<'tcx>>
+ where
+ T: ToTrace<'tcx>,
+ {
+ self.infcx
+ .at(cause, param_env)
+ .eq_exp(a_is_expected, a, b)
+ .map(|infer_ok| self.register_infer_ok_obligations(infer_ok))
+ }
+
+ pub fn eq<T: ToTrace<'tcx>>(
&self,
cause: &ObligationCause<'tcx>,
param_env: ty::ParamEnv<'tcx>,
- expected: Ty<'tcx>,
- actual: Ty<'tcx>,
+ expected: T,
+ actual: T,
) -> Result<(), TypeError<'tcx>> {
- match self.infcx.at(cause, param_env).eq(expected, actual) {
- Ok(InferOk { obligations, value: () }) => {
- self.register_obligations(obligations);
- Ok(())
- }
- Err(e) => Err(e),
- }
+ self.infcx
+ .at(cause, param_env)
+ .eq(expected, actual)
+ .map(|infer_ok| self.register_infer_ok_obligations(infer_ok))
+ }
+
+ /// Checks whether `expected` is a subtype of `actual`: `expected <: actual`.
+ pub fn sub<T: ToTrace<'tcx>>(
+ &self,
+ cause: &ObligationCause<'tcx>,
+ param_env: ty::ParamEnv<'tcx>,
+ expected: T,
+ actual: T,
+ ) -> Result<(), TypeError<'tcx>> {
+ self.infcx
+ .at(cause, param_env)
+ .sub(expected, actual)
+ .map(|infer_ok| self.register_infer_ok_obligations(infer_ok))
+ }
+
+ /// Checks whether `expected` is a supertype of `actual`: `expected :> actual`.
+ pub fn sup<T: ToTrace<'tcx>>(
+ &self,
+ cause: &ObligationCause<'tcx>,
+ param_env: ty::ParamEnv<'tcx>,
+ expected: T,
+ actual: T,
+ ) -> Result<(), TypeError<'tcx>> {
+ self.infcx
+ .at(cause, param_env)
+ .sup(expected, actual)
+ .map(|infer_ok| self.register_infer_ok_obligations(infer_ok))
+ }
+
+ pub fn select_where_possible(&self) -> Vec<FulfillmentError<'tcx>> {
+ self.engine.borrow_mut().select_where_possible(self.infcx)
}
pub fn select_all_or_error(&self) -> Vec<FulfillmentError<'tcx>> {
@@ -130,10 +184,10 @@ impl<'a, 'tcx> ObligationCtxt<'a, 'tcx> {
param_env: ty::ParamEnv<'tcx>,
span: Span,
def_id: LocalDefId,
- ) -> FxHashSet<Ty<'tcx>> {
+ ) -> FxIndexSet<Ty<'tcx>> {
let tcx = self.infcx.tcx;
let assumed_wf_types = tcx.assumed_wf_types(def_id);
- let mut implied_bounds = FxHashSet::default();
+ let mut implied_bounds = FxIndexSet::default();
let hir_id = tcx.hir().local_def_id_to_hir_id(def_id);
let cause = ObligationCause::misc(span, hir_id);
for ty in assumed_wf_types {
@@ -149,9 +203,25 @@ impl<'a, 'tcx> ObligationCtxt<'a, 'tcx> {
// sound and then uncomment this line again.
// implied_bounds.insert(ty);
- let normalized = self.normalize(cause.clone(), param_env, ty);
+ let normalized = self.normalize(&cause, param_env, ty);
implied_bounds.insert(normalized);
}
implied_bounds
}
+
+ pub fn make_canonicalized_query_response<T>(
+ &self,
+ inference_vars: CanonicalVarValues<'tcx>,
+ answer: T,
+ ) -> Fallible<CanonicalizedQueryResponse<'tcx, T>>
+ where
+ T: Debug + TypeFoldable<'tcx>,
+ Canonical<'tcx, QueryResponse<'tcx, T>>: ArenaAllocatable<'tcx>,
+ {
+ self.infcx.make_canonicalized_query_response(
+ inference_vars,
+ answer,
+ &mut **self.engine.borrow_mut(),
+ )
+ }
}
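One change above swaps `FxHashSet` for `FxIndexSet` when collecting implied bounds. The standalone sketch below (not rustc code; it assumes the `indexmap` crate, which backs `FxIndexSet`) shows the property this buys: insertion-ordered iteration, so anything derived from the set stays stable across runs.

use std::collections::HashSet;

use indexmap::IndexSet; // assumed dependency: indexmap

fn main() {
    let assumed_wf_types = ["&'a T", "Vec<U>", "fn(&'b str)"];

    let hashed: HashSet<&str> = assumed_wf_types.into_iter().collect();
    let ordered: IndexSet<&str> = assumed_wf_types.into_iter().collect();

    // `HashSet` iteration order depends on the hasher and can differ between
    // runs; `IndexSet` always yields insertion order.
    println!("hashed:  {:?}", hashed);
    println!("ordered: {:?}", ordered);
    assert!(ordered.iter().copied().eq(assumed_wf_types));
}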
diff --git a/compiler/rustc_trait_selection/src/traits/error_reporting/ambiguity.rs b/compiler/rustc_trait_selection/src/traits/error_reporting/ambiguity.rs
new file mode 100644
index 000000000..752b53fbc
--- /dev/null
+++ b/compiler/rustc_trait_selection/src/traits/error_reporting/ambiguity.rs
@@ -0,0 +1,52 @@
+use rustc_hir::def_id::DefId;
+use rustc_infer::infer::InferCtxt;
+use rustc_infer::traits::{Obligation, ObligationCause, TraitObligation};
+use rustc_span::DUMMY_SP;
+
+use crate::traits::ObligationCtxt;
+
+pub fn recompute_applicable_impls<'tcx>(
+ infcx: &InferCtxt<'tcx>,
+ obligation: &TraitObligation<'tcx>,
+) -> Vec<DefId> {
+ let tcx = infcx.tcx;
+ let param_env = obligation.param_env;
+ let dummy_cause = ObligationCause::dummy();
+ let impl_may_apply = |impl_def_id| {
+ let ocx = ObligationCtxt::new_in_snapshot(infcx);
+ let placeholder_obligation =
+ infcx.replace_bound_vars_with_placeholders(obligation.predicate);
+ let obligation_trait_ref =
+ ocx.normalize(&dummy_cause, param_env, placeholder_obligation.trait_ref);
+
+ let impl_substs = infcx.fresh_substs_for_item(DUMMY_SP, impl_def_id);
+ let impl_trait_ref = tcx.bound_impl_trait_ref(impl_def_id).unwrap().subst(tcx, impl_substs);
+ let impl_trait_ref = ocx.normalize(&ObligationCause::dummy(), param_env, impl_trait_ref);
+
+ if let Err(_) = ocx.eq(&dummy_cause, param_env, obligation_trait_ref, impl_trait_ref) {
+ return false;
+ }
+
+ let impl_predicates = tcx.predicates_of(impl_def_id).instantiate(tcx, impl_substs);
+ ocx.register_obligations(
+ impl_predicates
+ .predicates
+ .iter()
+ .map(|&predicate| Obligation::new(tcx, dummy_cause.clone(), param_env, predicate)),
+ );
+
+ ocx.select_where_possible().is_empty()
+ };
+
+ let mut impls = Vec::new();
+ tcx.for_each_relevant_impl(
+ obligation.predicate.def_id(),
+ obligation.predicate.skip_binder().trait_ref.self_ty(),
+ |impl_def_id| {
+ if infcx.probe(move |_snapshot| impl_may_apply(impl_def_id)) {
+ impls.push(impl_def_id)
+ }
+ },
+ );
+ impls
+}
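The new `recompute_applicable_impls` above re-runs impl matching so the ambiguity diagnostic can name the candidate impls. The standalone program below (not rustc code; the trait and impls are invented) reproduces the kind of situation it handles:

trait Parse<Out> {
    fn parse_as(self) -> Out;
}

impl Parse<u64> for u32 {
    fn parse_as(self) -> u64 {
        self as u64
    }
}

impl Parse<String> for u32 {
    fn parse_as(self) -> String {
        self.to_string()
    }
}

fn main() {
    // Uncommenting the next line leaves the obligation `u32: Parse<_>`
    // ambiguous ("type annotations needed"): both impls above could apply,
    // and they are exactly what the improved diagnostic now lists.
    // let _ambiguous = 1u32.parse_as();

    // Annotating the output type selects a single impl.
    let _ok: String = 1u32.parse_as();
}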
diff --git a/compiler/rustc_trait_selection/src/traits/error_reporting/mod.rs b/compiler/rustc_trait_selection/src/traits/error_reporting/mod.rs
index 1217d264a..dda7b2b2f 100644
--- a/compiler/rustc_trait_selection/src/traits/error_reporting/mod.rs
+++ b/compiler/rustc_trait_selection/src/traits/error_reporting/mod.rs
@@ -1,34 +1,40 @@
+mod ambiguity;
pub mod on_unimplemented;
pub mod suggestions;
use super::{
- FulfillmentContext, FulfillmentError, FulfillmentErrorCode, MismatchedProjectionTypes,
- Obligation, ObligationCause, ObligationCauseCode, OnUnimplementedDirective,
- OnUnimplementedNote, OutputTypeParameterMismatch, Overflow, PredicateObligation,
- SelectionContext, SelectionError, TraitNotObjectSafe,
+ FulfillmentError, FulfillmentErrorCode, MismatchedProjectionTypes, Obligation, ObligationCause,
+ ObligationCauseCode, ObligationCtxt, OutputTypeParameterMismatch, Overflow,
+ PredicateObligation, SelectionContext, SelectionError, TraitNotObjectSafe,
};
-
use crate::infer::error_reporting::{TyCategory, TypeAnnotationNeeded as ErrorCode};
use crate::infer::type_variable::{TypeVariableOrigin, TypeVariableOriginKind};
-use crate::infer::{self, InferCtxt, TyCtxtInferExt};
-use rustc_data_structures::fx::FxHashMap;
+use crate::infer::{self, InferCtxt};
+use crate::traits::query::evaluate_obligation::InferCtxtExt as _;
+use crate::traits::query::normalize::QueryNormalizeExt as _;
+use crate::traits::specialize::to_pretty_impl_header;
+use crate::traits::NormalizeExt;
+use on_unimplemented::OnUnimplementedNote;
+use on_unimplemented::TypeErrCtxtExt as _;
+use rustc_data_structures::fx::{FxHashMap, FxIndexMap};
use rustc_errors::{
pluralize, struct_span_err, Applicability, Diagnostic, DiagnosticBuilder, ErrorGuaranteed,
MultiSpan, Style,
};
use rustc_hir as hir;
+use rustc_hir::def::Namespace;
use rustc_hir::def_id::DefId;
use rustc_hir::intravisit::Visitor;
use rustc_hir::GenericParam;
use rustc_hir::Item;
use rustc_hir::Node;
use rustc_infer::infer::error_reporting::TypeErrCtxt;
-use rustc_infer::infer::TypeTrace;
-use rustc_infer::traits::TraitEngine;
+use rustc_infer::infer::{InferOk, TypeTrace};
use rustc_middle::traits::select::OverflowError;
use rustc_middle::ty::abstract_const::NotConstEvaluatable;
use rustc_middle::ty::error::ExpectedFound;
use rustc_middle::ty::fold::{TypeFolder, TypeSuperFoldable};
+use rustc_middle::ty::print::{FmtPrinter, Print};
use rustc_middle::ty::{
self, SubtypePredicate, ToPolyTraitRef, ToPredicate, TraitRef, Ty, TyCtxt, TypeFoldable,
TypeVisitable,
@@ -40,11 +46,6 @@ use rustc_span::{ExpnKind, Span, DUMMY_SP};
use std::fmt;
use std::iter;
use std::ops::ControlFlow;
-
-use crate::traits::query::evaluate_obligation::InferCtxtExt as _;
-use crate::traits::query::normalize::AtExt as _;
-use crate::traits::specialize::to_pretty_impl_header;
-use on_unimplemented::TypeErrCtxtExt as _;
use suggestions::TypeErrCtxtExt as _;
pub use rustc_infer::traits::error_reporting::*;
@@ -70,7 +71,7 @@ pub trait InferCtxtExt<'tcx> {
/// returns a span and `ArgKind` information that describes the
/// arguments it expects. This can be supplied to
/// `report_arg_count_mismatch`.
- fn get_fn_like_arguments(&self, node: Node<'_>) -> Option<(Span, Vec<ArgKind>)>;
+ fn get_fn_like_arguments(&self, node: Node<'_>) -> Option<(Span, Option<Span>, Vec<ArgKind>)>;
/// Reports an error when the number of arguments needed by a
/// trait match doesn't match the number that the expression
@@ -82,6 +83,7 @@ pub trait InferCtxtExt<'tcx> {
expected_args: Vec<ArgKind>,
found_args: Vec<ArgKind>,
is_closure: bool,
+ closure_pipe_span: Option<Span>,
) -> DiagnosticBuilder<'tcx, ErrorGuaranteed>;
/// Checks if the type implements one of `Fn`, `FnMut`, or `FnOnce`
@@ -97,24 +99,36 @@ pub trait InferCtxtExt<'tcx> {
}
pub trait TypeErrCtxtExt<'tcx> {
+ fn report_overflow_error<T>(
+ &self,
+ predicate: &T,
+ span: Span,
+ suggest_increasing_limit: bool,
+ mutate: impl FnOnce(&mut Diagnostic),
+ ) -> !
+ where
+ T: fmt::Display
+ + TypeFoldable<'tcx>
+ + Print<'tcx, FmtPrinter<'tcx, 'tcx>, Output = FmtPrinter<'tcx, 'tcx>>,
+ <T as Print<'tcx, FmtPrinter<'tcx, 'tcx>>>::Error: std::fmt::Debug;
+
fn report_fulfillment_errors(
&self,
errors: &[FulfillmentError<'tcx>],
body_id: Option<hir::BodyId>,
- fallback_has_occurred: bool,
) -> ErrorGuaranteed;
- fn report_overflow_error<T>(
+ fn report_overflow_obligation<T>(
&self,
obligation: &Obligation<'tcx, T>,
suggest_increasing_limit: bool,
) -> !
where
- T: fmt::Display + TypeFoldable<'tcx>;
+ T: ToPredicate<'tcx> + Clone;
fn suggest_new_overflow_limit(&self, err: &mut Diagnostic);
- fn report_overflow_error_cycle(&self, cycle: &[PredicateObligation<'tcx>]) -> !;
+ fn report_overflow_obligation_cycle(&self, cycle: &[PredicateObligation<'tcx>]) -> !;
/// The `root_obligation` parameter should be the `root_obligation` field
/// from a `FulfillmentError`. If no `FulfillmentError` is available,
@@ -124,7 +138,6 @@ pub trait TypeErrCtxtExt<'tcx> {
obligation: PredicateObligation<'tcx>,
root_obligation: &PredicateObligation<'tcx>,
error: &SelectionError<'tcx>,
- fallback_has_occurred: bool,
);
}
@@ -133,15 +146,16 @@ impl<'tcx> InferCtxtExt<'tcx> for InferCtxt<'tcx> {
/// returns a span and `ArgKind` information that describes the
/// arguments it expects. This can be supplied to
/// `report_arg_count_mismatch`.
- fn get_fn_like_arguments(&self, node: Node<'_>) -> Option<(Span, Vec<ArgKind>)> {
+ fn get_fn_like_arguments(&self, node: Node<'_>) -> Option<(Span, Option<Span>, Vec<ArgKind>)> {
let sm = self.tcx.sess.source_map();
let hir = self.tcx.hir();
Some(match node {
Node::Expr(&hir::Expr {
- kind: hir::ExprKind::Closure(&hir::Closure { body, fn_decl_span, .. }),
+ kind: hir::ExprKind::Closure(&hir::Closure { body, fn_decl_span, fn_arg_span, .. }),
..
}) => (
fn_decl_span,
+ fn_arg_span,
hir.body(body)
.params
.iter()
@@ -172,6 +186,7 @@ impl<'tcx> InferCtxtExt<'tcx> for InferCtxt<'tcx> {
kind: hir::TraitItemKind::Fn(ref sig, _), ..
}) => (
sig.span,
+ None,
sig.decl
.inputs
.iter()
@@ -186,7 +201,7 @@ impl<'tcx> InferCtxtExt<'tcx> for InferCtxt<'tcx> {
),
Node::Ctor(ref variant_data) => {
let span = variant_data.ctor_hir_id().map_or(DUMMY_SP, |id| hir.span(id));
- (span, vec![ArgKind::empty(); variant_data.fields().len()])
+ (span, None, vec![ArgKind::empty(); variant_data.fields().len()])
}
_ => panic!("non-FnLike node found: {:?}", node),
})
@@ -202,6 +217,7 @@ impl<'tcx> InferCtxtExt<'tcx> for InferCtxt<'tcx> {
expected_args: Vec<ArgKind>,
found_args: Vec<ArgKind>,
is_closure: bool,
+ closure_arg_span: Option<Span>,
) -> DiagnosticBuilder<'tcx, ErrorGuaranteed> {
let kind = if is_closure { "closure" } else { "function" };
@@ -239,24 +255,13 @@ impl<'tcx> InferCtxtExt<'tcx> for InferCtxt<'tcx> {
if let Some(found_span) = found_span {
err.span_label(found_span, format!("takes {}", found_str));
- // move |_| { ... }
- // ^^^^^^^^-- def_span
- //
- // move |_| { ... }
- // ^^^^^-- prefix
- let prefix_span = self.tcx.sess.source_map().span_until_non_whitespace(found_span);
- // move |_| { ... }
- // ^^^-- pipe_span
- let pipe_span =
- if let Some(span) = found_span.trim_start(prefix_span) { span } else { found_span };
-
// Suggest taking and ignoring the arguments with expected_args_length `_`s if
// the found arguments are empty (assume the user just wants to ignore args in this case).
// For example, if `expected_args_length` is 2, suggest `|_, _|`.
if found_args.is_empty() && is_closure {
let underscores = vec!["_"; expected_args.len()].join(", ");
err.span_suggestion_verbose(
- pipe_span,
+ closure_arg_span.unwrap_or(found_span),
&format!(
"consider changing the closure to take and ignore the expected argument{}",
pluralize!(expected_args.len())
@@ -344,22 +349,19 @@ impl<'tcx> InferCtxtExt<'tcx> for InferCtxt<'tcx> {
span: DUMMY_SP,
kind: TypeVariableOriginKind::MiscVariable,
});
- let substs = self.tcx.mk_substs_trait(ty.skip_binder(), &[var.into()]);
+ let trait_ref = self.tcx.mk_trait_ref(trait_def_id, [ty.skip_binder(), var]);
let obligation = Obligation::new(
+ self.tcx,
ObligationCause::dummy(),
param_env,
- ty.rebind(ty::TraitPredicate {
- trait_ref: ty::TraitRef::new(trait_def_id, substs),
- constness,
- polarity,
- })
- .to_predicate(self.tcx),
+ ty.rebind(ty::TraitPredicate { trait_ref, constness, polarity }),
);
- let mut fulfill_cx = FulfillmentContext::new_in_snapshot();
- fulfill_cx.register_predicate_obligation(self, obligation);
- if fulfill_cx.select_all_or_error(self).is_empty() {
+ let ocx = ObligationCtxt::new_in_snapshot(self);
+ ocx.register_obligation(obligation);
+ if ocx.select_all_or_error().is_empty() {
return Ok((
- ty::ClosureKind::from_def_id(self.tcx, trait_def_id)
+ self.tcx
+ .fn_trait_kind_from_def_id(trait_def_id)
.expect("expected to map DefId to ClosureKind"),
ty.rebind(self.resolve_vars_if_possible(var)),
));
@@ -375,7 +377,6 @@ impl<'tcx> TypeErrCtxtExt<'tcx> for TypeErrCtxt<'_, 'tcx> {
&self,
errors: &[FulfillmentError<'tcx>],
body_id: Option<hir::BodyId>,
- fallback_has_occurred: bool,
) -> ErrorGuaranteed {
#[derive(Debug)]
struct ErrorDescriptor<'tcx> {
@@ -383,7 +384,7 @@ impl<'tcx> TypeErrCtxtExt<'tcx> for TypeErrCtxt<'_, 'tcx> {
index: Option<usize>, // None if this is an old error
}
- let mut error_map: FxHashMap<_, Vec<_>> = self
+ let mut error_map: FxIndexMap<_, Vec<_>> = self
.reported_trait_errors
.borrow()
.iter()
@@ -452,7 +453,7 @@ impl<'tcx> TypeErrCtxtExt<'tcx> for TypeErrCtxt<'_, 'tcx> {
for (error, suppressed) in iter::zip(errors, is_suppressed) {
if !suppressed {
- self.report_fulfillment_error(error, body_id, fallback_has_occurred);
+ self.report_fulfillment_error(error, body_id);
}
}
@@ -467,39 +468,84 @@ impl<'tcx> TypeErrCtxtExt<'tcx> for TypeErrCtxt<'_, 'tcx> {
/// occurrences in any case.
fn report_overflow_error<T>(
&self,
- obligation: &Obligation<'tcx, T>,
+ predicate: &T,
+ span: Span,
suggest_increasing_limit: bool,
+ mutate: impl FnOnce(&mut Diagnostic),
) -> !
where
- T: fmt::Display + TypeFoldable<'tcx>,
+ T: fmt::Display
+ + TypeFoldable<'tcx>
+ + Print<'tcx, FmtPrinter<'tcx, 'tcx>, Output = FmtPrinter<'tcx, 'tcx>>,
+ <T as Print<'tcx, FmtPrinter<'tcx, 'tcx>>>::Error: std::fmt::Debug,
{
- let predicate = self.resolve_vars_if_possible(obligation.predicate.clone());
+ let predicate = self.resolve_vars_if_possible(predicate.clone());
+ let mut pred_str = predicate.to_string();
+
+ if pred_str.len() > 50 {
+ // We don't need to save the type to a file; we will be talking about this type already
+ // in a separate note when we explain the obligation, so it will be available that way.
+ pred_str = predicate
+ .print(FmtPrinter::new_with_limit(
+ self.tcx,
+ Namespace::TypeNS,
+ rustc_session::Limit(6),
+ ))
+ .unwrap()
+ .into_buffer();
+ }
let mut err = struct_span_err!(
self.tcx.sess,
- obligation.cause.span,
+ span,
E0275,
"overflow evaluating the requirement `{}`",
- predicate
+ pred_str,
);
if suggest_increasing_limit {
self.suggest_new_overflow_limit(&mut err);
}
- self.note_obligation_cause_code(
- &mut err,
- &obligation.predicate,
- obligation.param_env,
- obligation.cause.code(),
- &mut vec![],
- &mut Default::default(),
- );
+ mutate(&mut err);
err.emit();
self.tcx.sess.abort_if_errors();
bug!();
}
+ /// Reports that an overflow has occurred and halts compilation. We
+ /// halt compilation unconditionally because it is important that
+ /// overflows never be masked -- they basically represent computations
+ /// whose result could not be truly determined and thus we can't say
+ /// if the program type checks or not -- and they are unusual
+ /// occurrences in any case.
+ fn report_overflow_obligation<T>(
+ &self,
+ obligation: &Obligation<'tcx, T>,
+ suggest_increasing_limit: bool,
+ ) -> !
+ where
+ T: ToPredicate<'tcx> + Clone,
+ {
+ let predicate = obligation.predicate.clone().to_predicate(self.tcx);
+ let predicate = self.resolve_vars_if_possible(predicate);
+ self.report_overflow_error(
+ &predicate,
+ obligation.cause.span,
+ suggest_increasing_limit,
+ |err| {
+ self.note_obligation_cause_code(
+ err,
+ &predicate,
+ obligation.param_env,
+ obligation.cause.code(),
+ &mut vec![],
+ &mut Default::default(),
+ );
+ },
+ );
+ }
+
fn suggest_new_overflow_limit(&self, err: &mut Diagnostic) {
let suggested_limit = match self.tcx.recursion_limit() {
Limit(0) => Limit(2),
@@ -514,11 +560,11 @@ impl<'tcx> TypeErrCtxtExt<'tcx> for TypeErrCtxt<'_, 'tcx> {
}
/// Reports that a cycle was detected which led to overflow and halts
- /// compilation. This is equivalent to `report_overflow_error` except
+ /// compilation. This is equivalent to `report_overflow_obligation` except
/// that we can give a more helpful error message (and, in particular,
/// we do not suggest increasing the overflow limit, which is not
/// going to help).
- fn report_overflow_error_cycle(&self, cycle: &[PredicateObligation<'tcx>]) -> ! {
+ fn report_overflow_obligation_cycle(&self, cycle: &[PredicateObligation<'tcx>]) -> ! {
let cycle = self.resolve_vars_if_possible(cycle.to_owned());
assert!(!cycle.is_empty());
@@ -526,7 +572,10 @@ impl<'tcx> TypeErrCtxtExt<'tcx> for TypeErrCtxt<'_, 'tcx> {
// The 'deepest' obligation is most likely to have a useful
// cause 'backtrace'
- self.report_overflow_error(cycle.iter().max_by_key(|p| p.recursion_depth).unwrap(), false);
+ self.report_overflow_obligation(
+ cycle.iter().max_by_key(|p| p.recursion_depth).unwrap(),
+ false,
+ );
}
fn report_selection_error(
@@ -534,22 +583,15 @@ impl<'tcx> TypeErrCtxtExt<'tcx> for TypeErrCtxt<'_, 'tcx> {
mut obligation: PredicateObligation<'tcx>,
root_obligation: &PredicateObligation<'tcx>,
error: &SelectionError<'tcx>,
- fallback_has_occurred: bool,
) {
- self.set_tainted_by_errors();
let tcx = self.tcx;
let mut span = obligation.cause.span;
+ // FIXME: statically guarantee this by tainting after the diagnostic is emitted
+ self.set_tainted_by_errors(
+ tcx.sess.delay_span_bug(span, "`report_selection_error` did not emit an error"),
+ );
let mut err = match *error {
- SelectionError::Ambiguous(ref impls) => {
- let mut err = self.tcx.sess.struct_span_err(
- obligation.cause.span,
- &format!("multiple applicable `impl`s for `{}`", obligation.predicate),
- );
- self.annotate_source_of_ambiguity(&mut err, impls, obligation.predicate);
- err.emit();
- return;
- }
SelectionError::Unimplemented => {
// If this obligation was generated as a result of well-formedness checking, see if we
// can get a better error message by performing HIR-based well-formedness checking.
@@ -582,7 +624,7 @@ impl<'tcx> TypeErrCtxtExt<'tcx> for TypeErrCtxt<'_, 'tcx> {
let bound_predicate = obligation.predicate.kind();
match bound_predicate.skip_binder() {
- ty::PredicateKind::Trait(trait_predicate) => {
+ ty::PredicateKind::Clause(ty::Clause::Trait(trait_predicate)) => {
let trait_predicate = bound_predicate.rebind(trait_predicate);
let mut trait_predicate = self.resolve_vars_if_possible(trait_predicate);
@@ -663,40 +705,32 @@ impl<'tcx> TypeErrCtxtExt<'tcx> for TypeErrCtxt<'_, 'tcx> {
))
);
- if is_try_conversion {
- let none_error = self
- .tcx
- .get_diagnostic_item(sym::none_error)
- .map(|def_id| tcx.type_of(def_id));
- let should_convert_option_to_result =
- Some(trait_ref.skip_binder().substs.type_at(1)) == none_error;
- let should_convert_result_to_option =
- Some(trait_ref.self_ty().skip_binder()) == none_error;
- if should_convert_option_to_result {
- err.span_suggestion_verbose(
- span.shrink_to_lo(),
- "consider converting the `Option<T>` into a `Result<T, _>` \
- using `Option::ok_or` or `Option::ok_or_else`",
- ".ok_or_else(|| /* error value */)",
- Applicability::HasPlaceholders,
- );
- } else if should_convert_result_to_option {
- err.span_suggestion_verbose(
- span.shrink_to_lo(),
- "consider converting the `Result<T, _>` into an `Option<T>` \
- using `Result::ok`",
- ".ok()",
- Applicability::MachineApplicable,
- );
- }
- if let Some(ret_span) = self.return_type_span(&obligation) {
- err.span_label(
- ret_span,
- &format!(
- "expected `{}` because of this",
- trait_ref.skip_binder().self_ty()
- ),
- );
+ if is_try_conversion && let Some(ret_span) = self.return_type_span(&obligation) {
+ err.span_label(
+ ret_span,
+ &format!(
+ "expected `{}` because of this",
+ trait_ref.skip_binder().self_ty()
+ ),
+ );
+ }
+
+ if Some(trait_ref.def_id()) == tcx.lang_items().tuple_trait() {
+ match obligation.cause.code().peel_derives() {
+ ObligationCauseCode::RustCall => {
+ err.set_primary_message("functions with the \"rust-call\" ABI must take a single non-self tuple argument");
+ }
+ ObligationCauseCode::BindingObligation(def_id, _)
+ | ObligationCauseCode::ItemObligation(def_id)
+ if tcx.is_fn_trait(*def_id) =>
+ {
+ err.code(rustc_errors::error_code!(E0059));
+ err.set_primary_message(format!(
+ "type parameter to bare `{}` trait must be a tuple",
+ tcx.def_path_str(*def_id)
+ ));
+ }
+ _ => {}
}
}
@@ -848,12 +882,7 @@ impl<'tcx> TypeErrCtxtExt<'tcx> for TypeErrCtxt<'_, 'tcx> {
);
}
- let is_fn_trait = [
- self.tcx.lang_items().fn_trait(),
- self.tcx.lang_items().fn_mut_trait(),
- self.tcx.lang_items().fn_once_trait(),
- ]
- .contains(&Some(trait_ref.def_id()));
+ let is_fn_trait = tcx.is_fn_trait(trait_ref.def_id());
let is_target_feature_fn = if let ty::FnDef(def_id, _) =
*trait_ref.skip_binder().self_ty().kind()
{
@@ -883,7 +912,7 @@ impl<'tcx> TypeErrCtxtExt<'tcx> for TypeErrCtxt<'_, 'tcx> {
// Note if the `FnMut` or `FnOnce` is less general than the trait we're trying
// to implement.
let selected_kind =
- ty::ClosureKind::from_def_id(self.tcx, trait_ref.def_id())
+ self.tcx.fn_trait_kind_from_def_id(trait_ref.def_id())
.expect("expected to map DefId to ClosureKind");
if !implemented_kind.extends(selected_kind) {
err.note(
@@ -974,7 +1003,7 @@ impl<'tcx> TypeErrCtxtExt<'tcx> for TypeErrCtxt<'_, 'tcx> {
// useful for less general traits.
if peeled
&& !self.tcx.trait_is_auto(def_id)
- && !self.tcx.lang_items().items().contains(&Some(def_id))
+ && !self.tcx.lang_items().iter().any(|(_, id)| id == def_id)
{
let trait_ref = trait_pred.to_poly_trait_ref();
let impl_candidates =
@@ -1000,16 +1029,12 @@ impl<'tcx> TypeErrCtxtExt<'tcx> for TypeErrCtxt<'_, 'tcx> {
// variable that used to fallback to `()` now falling back to `!`. Issue a
// note informing about the change in behaviour.
if trait_predicate.skip_binder().self_ty().is_never()
- && fallback_has_occurred
+ && self.fallback_has_occurred
{
- let predicate = trait_predicate.map_bound(|mut trait_pred| {
- trait_pred.trait_ref.substs = self.tcx.mk_substs_trait(
- self.tcx.mk_unit(),
- &trait_pred.trait_ref.substs[1..],
- );
- trait_pred
+ let predicate = trait_predicate.map_bound(|trait_pred| {
+ trait_pred.with_self_type(self.tcx, self.tcx.mk_unit())
});
- let unit_obligation = obligation.with(predicate.to_predicate(tcx));
+ let unit_obligation = obligation.with(tcx, predicate);
if self.predicate_may_hold(&unit_obligation) {
err.note(
"this error might have been caused by changes to \
@@ -1062,9 +1087,9 @@ impl<'tcx> TypeErrCtxtExt<'tcx> for TypeErrCtxt<'_, 'tcx> {
span_bug!(span, "coerce requirement gave wrong error: `{:?}`", predicate)
}
- ty::PredicateKind::RegionOutlives(..)
- | ty::PredicateKind::Projection(..)
- | ty::PredicateKind::TypeOutlives(..) => {
+ ty::PredicateKind::Clause(ty::Clause::RegionOutlives(..))
+ | ty::PredicateKind::Clause(ty::Clause::Projection(..))
+ | ty::PredicateKind::Clause(ty::Clause::TypeOutlives(..)) => {
let predicate = self.resolve_vars_if_possible(obligation.predicate);
struct_span_err!(
self.tcx.sess,
@@ -1177,6 +1202,8 @@ impl<'tcx> TypeErrCtxtExt<'tcx> for TypeErrCtxt<'_, 'tcx> {
)
}
+ ty::PredicateKind::Ambiguous => span_bug!(span, "ambiguous"),
+
ty::PredicateKind::TypeWellFormedFromEnv(..) => span_bug!(
span,
"TypeWellFormedFromEnv predicate should only exist in the environment"
@@ -1260,13 +1287,14 @@ impl<'tcx> TypeErrCtxtExt<'tcx> for TypeErrCtxt<'_, 'tcx> {
obligation.cause.code(),
)
} else {
- let (closure_span, found) = found_did
+ let (closure_span, closure_arg_span, found) = found_did
.and_then(|did| {
let node = self.tcx.hir().get_if_local(did)?;
- let (found_span, found) = self.get_fn_like_arguments(node)?;
- Some((Some(found_span), found))
+ let (found_span, closure_arg_span, found) =
+ self.get_fn_like_arguments(node)?;
+ Some((Some(found_span), closure_arg_span, found))
})
- .unwrap_or((found_span, found));
+ .unwrap_or((found_span, None, found));
self.report_arg_count_mismatch(
span,
@@ -1274,6 +1302,7 @@ impl<'tcx> TypeErrCtxtExt<'tcx> for TypeErrCtxt<'_, 'tcx> {
expected,
found,
found_trait_ty.is_closure(),
+ closure_arg_span,
)
}
}
@@ -1366,7 +1395,6 @@ trait InferCtxtPrivExt<'tcx> {
&self,
error: &FulfillmentError<'tcx>,
body_id: Option<hir::BodyId>,
- fallback_has_occurred: bool,
);
fn report_projection_error(
@@ -1483,9 +1511,10 @@ impl<'tcx> InferCtxtPrivExt<'tcx> for TypeErrCtxt<'_, 'tcx> {
// FIXME: It should be possible to deal with `ForAll` in a cleaner way.
let bound_error = error.kind();
let (cond, error) = match (cond.kind().skip_binder(), bound_error.skip_binder()) {
- (ty::PredicateKind::Trait(..), ty::PredicateKind::Trait(error)) => {
- (cond, bound_error.rebind(error))
- }
+ (
+ ty::PredicateKind::Clause(ty::Clause::Trait(..)),
+ ty::PredicateKind::Clause(ty::Clause::Trait(error)),
+ ) => (cond, bound_error.rebind(error)),
_ => {
// FIXME: make this work in other cases too.
return false;
@@ -1494,7 +1523,9 @@ impl<'tcx> InferCtxtPrivExt<'tcx> for TypeErrCtxt<'_, 'tcx> {
for obligation in super::elaborate_predicates(self.tcx, std::iter::once(cond)) {
let bound_predicate = obligation.predicate.kind();
- if let ty::PredicateKind::Trait(implication) = bound_predicate.skip_binder() {
+ if let ty::PredicateKind::Clause(ty::Clause::Trait(implication)) =
+ bound_predicate.skip_binder()
+ {
let error = error.to_poly_trait_ref();
let implication = bound_predicate.rebind(implication.trait_ref);
// FIXME: I'm just not taking associated types at all here.
@@ -1516,7 +1547,6 @@ impl<'tcx> InferCtxtPrivExt<'tcx> for TypeErrCtxt<'_, 'tcx> {
&self,
error: &FulfillmentError<'tcx>,
body_id: Option<hir::BodyId>,
- fallback_has_occurred: bool,
) {
match error.code {
FulfillmentErrorCode::CodeSelectionError(ref selection_error) => {
@@ -1524,7 +1554,6 @@ impl<'tcx> InferCtxtPrivExt<'tcx> for TypeErrCtxt<'_, 'tcx> {
error.obligation.clone(),
&error.root_obligation,
selection_error,
- fallback_has_occurred,
);
}
FulfillmentErrorCode::CodeProjectionError(ref e) => {
@@ -1567,7 +1596,7 @@ impl<'tcx> InferCtxtPrivExt<'tcx> for TypeErrCtxt<'_, 'tcx> {
diag.emit();
}
FulfillmentErrorCode::CodeCycle(ref cycle) => {
- self.report_overflow_error_cycle(cycle);
+ self.report_overflow_obligation_cycle(cycle);
}
}
}
@@ -1585,29 +1614,26 @@ impl<'tcx> InferCtxtPrivExt<'tcx> for TypeErrCtxt<'_, 'tcx> {
}
self.probe(|_| {
- let mut err = error.err;
- let mut values = None;
+ let ocx = ObligationCtxt::new_in_snapshot(self);
// try to find the mismatched types to report the error with.
//
// this can fail if the problem was higher-ranked, in which
// cause I have no idea for a good error message.
let bound_predicate = predicate.kind();
- if let ty::PredicateKind::Projection(data) = bound_predicate.skip_binder() {
- let mut selcx = SelectionContext::new(self);
+ let (values, err) = if let ty::PredicateKind::Clause(ty::Clause::Projection(data)) =
+ bound_predicate.skip_binder()
+ {
let data = self.replace_bound_vars_with_fresh_vars(
obligation.cause.span,
infer::LateBoundRegionConversionTime::HigherRankedType,
bound_predicate.rebind(data),
);
- let mut obligations = vec![];
- let normalized_ty = super::normalize_projection_type(
- &mut selcx,
+ let normalized_ty = ocx.normalize(
+ &obligation.cause,
obligation.param_env,
- data.projection_ty,
- obligation.cause.clone(),
- 0,
- &mut obligations,
+ self.tcx
+ .mk_projection(data.projection_ty.item_def_id, data.projection_ty.substs),
);
debug!(?obligation.cause, ?obligation.param_env);
@@ -1623,25 +1649,40 @@ impl<'tcx> InferCtxtPrivExt<'tcx> for TypeErrCtxt<'_, 'tcx> {
| ObligationCauseCode::ObjectCastObligation(..)
| ObligationCauseCode::OpaqueType
);
- if let Err(new_err) = self.at(&obligation.cause, obligation.param_env).eq_exp(
+ let expected_ty = data.term.ty().unwrap_or_else(|| self.tcx.ty_error());
+
+ // Constrain inference variables a bit more using the nested obligations from normalization,
+ // so we can report more helpful errors.
+ ocx.select_where_possible();
+
+ if let Err(new_err) = ocx.eq_exp(
+ &obligation.cause,
+ obligation.param_env,
is_normalized_ty_expected,
normalized_ty,
- data.term,
+ expected_ty,
) {
- values = Some((data, is_normalized_ty_expected, normalized_ty, data.term));
- err = new_err;
+ (Some((data, is_normalized_ty_expected, normalized_ty, expected_ty)), new_err)
+ } else {
+ (None, error.err)
}
- }
+ } else {
+ (None, error.err)
+ };
let msg = values
.and_then(|(predicate, _, normalized_ty, expected_ty)| {
- self.maybe_detailed_projection_msg(predicate, normalized_ty, expected_ty)
+ self.maybe_detailed_projection_msg(
+ predicate,
+ normalized_ty.into(),
+ expected_ty.into(),
+ )
})
.unwrap_or_else(|| format!("type mismatch resolving `{}`", predicate));
let mut diag = struct_span_err!(self.tcx.sess, obligation.cause.span, E0271, "{msg}");
let secondary_span = match predicate.kind().skip_binder() {
- ty::PredicateKind::Projection(proj) => self
+ ty::PredicateKind::Clause(ty::Clause::Projection(proj)) => self
.tcx
.opt_associated_item(proj.projection_ty.item_def_id)
.and_then(|trait_assoc_item| {
@@ -1677,11 +1718,11 @@ impl<'tcx> InferCtxtPrivExt<'tcx> for TypeErrCtxt<'_, 'tcx> {
&mut diag,
&obligation.cause,
secondary_span,
- values.map(|(_, is_normalized_ty_expected, normalized_ty, term)| {
+ values.map(|(_, is_normalized_ty_expected, normalized_ty, expected_ty)| {
infer::ValuePairs::Terms(ExpectedFound::new(
is_normalized_ty_expected,
- normalized_ty,
- term,
+ normalized_ty.into(),
+ expected_ty.into(),
))
}),
err,
@@ -1733,7 +1774,7 @@ impl<'tcx> InferCtxtPrivExt<'tcx> for TypeErrCtxt<'_, 'tcx> {
ty::Bool => Some(0),
ty::Char => Some(1),
ty::Str => Some(2),
- ty::Adt(def, _) if tcx.is_diagnostic_item(sym::String, def.did()) => Some(2),
+ ty::Adt(def, _) if Some(def.did()) == tcx.lang_items().string() => Some(2),
ty::Int(..)
| ty::Uint(..)
| ty::Float(..)
@@ -1811,7 +1852,8 @@ impl<'tcx> InferCtxtPrivExt<'tcx> for TypeErrCtxt<'_, 'tcx> {
&self,
trait_pred: ty::PolyTraitPredicate<'tcx>,
) -> Vec<ImplCandidate<'tcx>> {
- self.tcx
+ let mut candidates: Vec<_> = self
+ .tcx
.all_impls(trait_pred.def_id())
.filter_map(|def_id| {
if self.tcx.impl_polarity(def_id) == ty::ImplPolarity::Negative
@@ -1827,7 +1869,14 @@ impl<'tcx> InferCtxtPrivExt<'tcx> for TypeErrCtxt<'_, 'tcx> {
self.fuzzy_match_tys(trait_pred.skip_binder().self_ty(), imp.self_ty(), false)
.map(|similarity| ImplCandidate { trait_ref: imp, similarity })
})
- .collect()
+ .collect();
+ if candidates.iter().any(|c| matches!(c.similarity, CandidateSimilarity::Exact { .. })) {
+ // If any of the candidates is a perfect match, we don't want to show all of them.
+ // This is particularly relevant for the case of numeric types (as they all have the
+ // same category).
+ candidates.retain(|c| matches!(c.similarity, CandidateSimilarity::Exact { .. }));
+ }
+ candidates
}
fn report_similar_impl_candidates(
@@ -1898,7 +1947,7 @@ impl<'tcx> InferCtxtPrivExt<'tcx> for TypeErrCtxt<'_, 'tcx> {
let def_id = trait_ref.def_id();
if impl_candidates.is_empty() {
if self.tcx.trait_is_auto(def_id)
- || self.tcx.lang_items().items().contains(&Some(def_id))
+ || self.tcx.lang_items().iter().any(|(_, id)| id == def_id)
|| self.tcx.get_diagnostic_name(def_id).is_some()
{
// Mentioning implementers of `Copy`, `Debug` and friends is not useful.
@@ -1935,14 +1984,6 @@ impl<'tcx> InferCtxtPrivExt<'tcx> for TypeErrCtxt<'_, 'tcx> {
return report(normalized_impl_candidates, err);
}
- let normalize = |candidate| {
- let infcx = self.tcx.infer_ctxt().build();
- infcx
- .at(&ObligationCause::dummy(), ty::ParamEnv::empty())
- .normalize(candidate)
- .map_or(candidate, |normalized| normalized.value)
- };
-
// Sort impl candidates so that ordering is consistent for UI tests.
// because the ordering of `impl_candidates` may not be deterministic:
// https://github.com/rust-lang/rust/pull/57475#issuecomment-455519507
@@ -1952,7 +1993,11 @@ impl<'tcx> InferCtxtPrivExt<'tcx> for TypeErrCtxt<'_, 'tcx> {
let mut normalized_impl_candidates_and_similarities = impl_candidates
.into_iter()
.map(|ImplCandidate { trait_ref, similarity }| {
- let normalized = normalize(trait_ref);
+ // FIXME(compiler-errors): This should be using `NormalizeExt::normalize`
+ let normalized = self
+ .at(&ObligationCause::dummy(), ty::ParamEnv::empty())
+ .query_normalize(trait_ref)
+ .map_or(trait_ref, |normalized| normalized.value);
(similarity, normalized)
})
.collect::<Vec<_>>();
@@ -2032,15 +2077,10 @@ impl<'tcx> InferCtxtPrivExt<'tcx> for TypeErrCtxt<'_, 'tcx> {
param_env: ty::ParamEnv<'tcx>,
trait_ref_and_ty: ty::Binder<'tcx, (ty::TraitPredicate<'tcx>, Ty<'tcx>)>,
) -> PredicateObligation<'tcx> {
- let trait_pred = trait_ref_and_ty.map_bound_ref(|(tr, new_self_ty)| ty::TraitPredicate {
- trait_ref: ty::TraitRef {
- substs: self.tcx.mk_substs_trait(*new_self_ty, &tr.trait_ref.substs[1..]),
- ..tr.trait_ref
- },
- ..*tr
- });
+ let trait_pred = trait_ref_and_ty
+ .map_bound(|(tr, new_self_ty)| tr.with_self_type(self.tcx, new_self_ty));
- Obligation::new(ObligationCause::dummy(), param_env, trait_pred.to_predicate(self.tcx))
+ Obligation::new(self.tcx, ObligationCause::dummy(), param_env, trait_pred)
}
#[instrument(skip(self), level = "debug")]
@@ -2064,7 +2104,7 @@ impl<'tcx> InferCtxtPrivExt<'tcx> for TypeErrCtxt<'_, 'tcx> {
let bound_predicate = predicate.kind();
let mut err = match bound_predicate.skip_binder() {
- ty::PredicateKind::Trait(data) => {
+ ty::PredicateKind::Clause(ty::Clause::Trait(data)) => {
let trait_ref = bound_predicate.rebind(data.trait_ref);
debug!(?trait_ref);
@@ -2088,7 +2128,7 @@ impl<'tcx> InferCtxtPrivExt<'tcx> for TypeErrCtxt<'_, 'tcx> {
// check upstream for type errors and don't add the obligations to
// begin with in those cases.
if self.tcx.lang_items().sized_trait() == Some(trait_ref.def_id()) {
- if !self.is_tainted_by_errors() {
+ if let None = self.tainted_by_errors() {
self.emit_inference_failure_err(
body_id,
span,
@@ -2128,21 +2168,27 @@ impl<'tcx> InferCtxtPrivExt<'tcx> for TypeErrCtxt<'_, 'tcx> {
)
};
- let obligation = Obligation::new(
- obligation.cause.clone(),
- obligation.param_env,
- trait_ref.to_poly_trait_predicate(),
- );
- let mut selcx = SelectionContext::with_query_mode(
- &self,
- crate::traits::TraitQueryMode::Standard,
- );
+ let obligation = obligation.with(self.tcx, trait_ref);
+ let mut selcx = SelectionContext::new(&self);
match selcx.select_from_obligation(&obligation) {
- Err(SelectionError::Ambiguous(impls)) if impls.len() > 1 => {
- self.annotate_source_of_ambiguity(&mut err, &impls, predicate);
+ Ok(None) => {
+ let impls = ambiguity::recompute_applicable_impls(self.infcx, &obligation);
+ let has_non_region_infer =
+ trait_ref.skip_binder().substs.types().any(|t| !t.is_ty_infer());
+ // It doesn't make sense to talk about applicable impls if there are more
+ // than a handful of them.
+ if impls.len() > 1 && impls.len() < 5 && has_non_region_infer {
+ self.annotate_source_of_ambiguity(&mut err, &impls, predicate);
+ } else {
+ if self.tainted_by_errors().is_some() {
+ err.cancel();
+ return;
+ }
+ err.note(&format!("cannot satisfy `{}`", predicate));
+ }
}
_ => {
- if self.is_tainted_by_errors() {
+ if self.tainted_by_errors().is_some() {
err.cancel();
return;
}
@@ -2160,7 +2206,7 @@ impl<'tcx> InferCtxtPrivExt<'tcx> for TypeErrCtxt<'_, 'tcx> {
if generics.params.iter().any(|p| p.name != kw::SelfUpper)
&& !snippet.ends_with('>')
&& !generics.has_impl_trait()
- && !self.tcx.fn_trait_kind_from_lang_item(def_id).is_some()
+ && !self.tcx.is_fn_trait(def_id)
{
// FIXME: To avoid spurious suggestions in functions where type arguments
// were already supplied, we check the snippet to make sure it doesn't
@@ -2245,7 +2291,7 @@ impl<'tcx> InferCtxtPrivExt<'tcx> for TypeErrCtxt<'_, 'tcx> {
] = path.segments
&& data.trait_ref.def_id == *trait_id
&& self.tcx.trait_of_item(*item_id) == Some(*trait_id)
- && !self.is_tainted_by_errors()
+ && let None = self.tainted_by_errors()
{
let (verb, noun) = match self.tcx.associated_item(item_id).kind {
ty::AssocKind::Const => ("refer to the", "constant"),
@@ -2314,7 +2360,7 @@ impl<'tcx> InferCtxtPrivExt<'tcx> for TypeErrCtxt<'_, 'tcx> {
// with error messages.
if arg.references_error()
|| self.tcx.sess.has_errors().is_some()
- || self.is_tainted_by_errors()
+ || self.tainted_by_errors().is_some()
{
return;
}
@@ -2325,7 +2371,7 @@ impl<'tcx> InferCtxtPrivExt<'tcx> for TypeErrCtxt<'_, 'tcx> {
ty::PredicateKind::Subtype(data) => {
if data.references_error()
|| self.tcx.sess.has_errors().is_some()
- || self.is_tainted_by_errors()
+ || self.tainted_by_errors().is_some()
{
// no need to overload the user in such cases
return;
@@ -2335,8 +2381,8 @@ impl<'tcx> InferCtxtPrivExt<'tcx> for TypeErrCtxt<'_, 'tcx> {
assert!(a.is_ty_var() && b.is_ty_var());
self.emit_inference_failure_err(body_id, span, a.into(), ErrorCode::E0282, true)
}
- ty::PredicateKind::Projection(data) => {
- if predicate.references_error() || self.is_tainted_by_errors() {
+ ty::PredicateKind::Clause(ty::Clause::Projection(data)) => {
+ if predicate.references_error() || self.tainted_by_errors().is_some() {
return;
}
let subst = data
@@ -2370,7 +2416,7 @@ impl<'tcx> InferCtxtPrivExt<'tcx> for TypeErrCtxt<'_, 'tcx> {
}
ty::PredicateKind::ConstEvaluatable(data) => {
- if predicate.references_error() || self.is_tainted_by_errors() {
+ if predicate.references_error() || self.tainted_by_errors().is_some() {
return;
}
let subst = data.walk().find(|g| g.is_non_region_infer());
@@ -2397,7 +2443,7 @@ impl<'tcx> InferCtxtPrivExt<'tcx> for TypeErrCtxt<'_, 'tcx> {
}
}
_ => {
- if self.tcx.sess.has_errors().is_some() || self.is_tainted_by_errors() {
+ if self.tcx.sess.has_errors().is_some() || self.tainted_by_errors().is_some() {
return;
}
let mut err = struct_span_err!(
@@ -2435,14 +2481,13 @@ impl<'tcx> InferCtxtPrivExt<'tcx> for TypeErrCtxt<'_, 'tcx> {
}
}
}
- let msg = format!("multiple `impl`s satisfying `{}` found", predicate);
let mut crate_names: Vec<_> = crates.iter().map(|n| format!("`{}`", n)).collect();
crate_names.sort();
crate_names.dedup();
post.sort();
post.dedup();
- if self.is_tainted_by_errors()
+ if self.tainted_by_errors().is_some()
&& (crate_names.len() == 1
&& spans.len() == 0
&& ["`core`", "`alloc`", "`std`"].contains(&crate_names[0].as_str())
@@ -2456,13 +2501,9 @@ impl<'tcx> InferCtxtPrivExt<'tcx> for TypeErrCtxt<'_, 'tcx> {
err.downgrade_to_delayed_bug();
return;
}
- let post = if post.len() > 4 {
- format!(
- ":\n{}\nand {} more",
- post.iter().map(|p| format!("- {}", p)).take(4).collect::<Vec<_>>().join("\n"),
- post.len() - 4,
- )
- } else if post.len() > 1 || (post.len() == 1 && post[0].contains('\n')) {
+
+ let msg = format!("multiple `impl`s satisfying `{}` found", predicate);
+ let post = if post.len() > 1 || (post.len() == 1 && post[0].contains('\n')) {
format!(":\n{}", post.iter().map(|p| format!("- {}", p)).collect::<Vec<_>>().join("\n"),)
} else if post.len() == 1 {
format!(": `{}`", post[0])
@@ -2541,24 +2582,14 @@ impl<'tcx> InferCtxtPrivExt<'tcx> for TypeErrCtxt<'_, 'tcx> {
}
self.probe(|_| {
- let mut selcx = SelectionContext::new(self);
-
let cleaned_pred =
pred.fold_with(&mut ParamToVarFolder { infcx: self, var_map: Default::default() });
- let cleaned_pred = super::project::normalize(
- &mut selcx,
- param_env,
- ObligationCause::dummy(),
- cleaned_pred,
- )
- .value;
-
- let obligation = Obligation::new(
- ObligationCause::dummy(),
- param_env,
- cleaned_pred.to_predicate(selcx.tcx()),
- );
+ let InferOk { value: cleaned_pred, .. } =
+ self.infcx.at(&ObligationCause::dummy(), param_env).normalize(cleaned_pred);
+
+ let obligation =
+ Obligation::new(self.tcx, ObligationCause::dummy(), param_env, cleaned_pred);
self.predicate_may_hold(&obligation)
})
@@ -2586,7 +2617,7 @@ impl<'tcx> InferCtxtPrivExt<'tcx> for TypeErrCtxt<'_, 'tcx> {
err: &mut Diagnostic,
obligation: &PredicateObligation<'tcx>,
) {
- let ty::PredicateKind::Trait(pred) = obligation.predicate.kind().skip_binder() else { return; };
+ let ty::PredicateKind::Clause(ty::Clause::Trait(pred)) = obligation.predicate.kind().skip_binder() else { return; };
let (ObligationCauseCode::BindingObligation(item_def_id, span)
| ObligationCauseCode::ExprBindingObligation(item_def_id, span, ..))
= *obligation.cause.code().peel_derives() else { return; };
@@ -2612,11 +2643,10 @@ impl<'tcx> InferCtxtPrivExt<'tcx> for TypeErrCtxt<'_, 'tcx> {
let Some(param) = generics.params.iter().find(|param| param.span == span) else {
return;
};
- let param_def_id = self.tcx.hir().local_def_id(param.hir_id);
// Check that none of the explicit trait bounds is `Sized`. Assume that an explicit
// `Sized` bound is there intentionally and we don't need to suggest relaxing it.
let explicitly_sized = generics
- .bounds_for_param(param_def_id)
+ .bounds_for_param(param.def_id)
.flat_map(|bp| bp.bounds)
.any(|bound| bound.trait_ref().and_then(|tr| tr.trait_def_id()) == sized_trait);
if explicitly_sized {
@@ -2639,7 +2669,7 @@ impl<'tcx> InferCtxtPrivExt<'tcx> for TypeErrCtxt<'_, 'tcx> {
_ => {}
};
// Didn't add an indirection suggestion, so add a general suggestion to relax `Sized`.
- let (span, separator) = if let Some(s) = generics.bounds_span_for_suggestions(param_def_id)
+ let (span, separator) = if let Some(s) = generics.bounds_span_for_suggestions(param.def_id)
{
(s, " +")
} else {
diff --git a/compiler/rustc_trait_selection/src/traits/error_reporting/on_unimplemented.rs b/compiler/rustc_trait_selection/src/traits/error_reporting/on_unimplemented.rs
index 5eef54c63..9bfe52764 100644
--- a/compiler/rustc_trait_selection/src/traits/error_reporting/on_unimplemented.rs
+++ b/compiler/rustc_trait_selection/src/traits/error_reporting/on_unimplemented.rs
@@ -1,14 +1,22 @@
-use super::{
- ObligationCauseCode, OnUnimplementedDirective, OnUnimplementedNote, PredicateObligation,
-};
+use super::{ObligationCauseCode, PredicateObligation};
use crate::infer::error_reporting::TypeErrCtxt;
+use rustc_ast::{MetaItem, NestedMetaItem};
+use rustc_attr as attr;
+use rustc_data_structures::fx::FxHashMap;
+use rustc_errors::{struct_span_err, ErrorGuaranteed};
use rustc_hir as hir;
use rustc_hir::def_id::DefId;
use rustc_middle::ty::SubstsRef;
-use rustc_middle::ty::{self, GenericParamDefKind};
-use rustc_span::symbol::sym;
+use rustc_middle::ty::{self, GenericParamDefKind, TyCtxt};
+use rustc_parse_format::{ParseMode, Parser, Piece, Position};
+use rustc_span::symbol::{kw, sym, Symbol};
+use rustc_span::{Span, DUMMY_SP};
use std::iter;
+use crate::errors::{
+ EmptyOnClauseInOnUnimplemented, InvalidOnClauseInOnUnimplemented, NoValueInOnUnimplemented,
+};
+
use super::InferCtxtPrivExt;
pub trait TypeErrCtxtExt<'tcx> {
@@ -276,3 +284,383 @@ impl<'tcx> TypeErrCtxtExt<'tcx> for TypeErrCtxt<'_, 'tcx> {
}
}
}
+
+#[derive(Clone, Debug)]
+pub struct OnUnimplementedFormatString(Symbol);
+
+#[derive(Debug)]
+pub struct OnUnimplementedDirective {
+ pub condition: Option<MetaItem>,
+ pub subcommands: Vec<OnUnimplementedDirective>,
+ pub message: Option<OnUnimplementedFormatString>,
+ pub label: Option<OnUnimplementedFormatString>,
+ pub note: Option<OnUnimplementedFormatString>,
+ pub parent_label: Option<OnUnimplementedFormatString>,
+ pub append_const_msg: Option<Option<Symbol>>,
+}
+
+/// For the `#[rustc_on_unimplemented]` attribute
+#[derive(Default)]
+pub struct OnUnimplementedNote {
+ pub message: Option<String>,
+ pub label: Option<String>,
+ pub note: Option<String>,
+ pub parent_label: Option<String>,
+ /// Append a message for `~const Trait` errors. `None` means not requested and
+ /// should fall back to a generic message, `Some(None)` suggests using the default
+ /// appended message, and `Some(Some(s))` suggests using the `s` message instead of
+ /// the default one.
+ pub append_const_msg: Option<Option<Symbol>>,
+}
+
+impl<'tcx> OnUnimplementedDirective {
+ fn parse(
+ tcx: TyCtxt<'tcx>,
+ item_def_id: DefId,
+ items: &[NestedMetaItem],
+ span: Span,
+ is_root: bool,
+ ) -> Result<Self, ErrorGuaranteed> {
+ let mut errored = None;
+ let mut item_iter = items.iter();
+
+ let parse_value = |value_str| {
+ OnUnimplementedFormatString::try_parse(tcx, item_def_id, value_str, span).map(Some)
+ };
+
+ let condition = if is_root {
+ None
+ } else {
+ let cond = item_iter
+ .next()
+ .ok_or_else(|| tcx.sess.emit_err(EmptyOnClauseInOnUnimplemented { span }))?
+ .meta_item()
+ .ok_or_else(|| tcx.sess.emit_err(InvalidOnClauseInOnUnimplemented { span }))?;
+ attr::eval_condition(cond, &tcx.sess.parse_sess, Some(tcx.features()), &mut |cfg| {
+ if let Some(value) = cfg.value && let Err(guar) = parse_value(value) {
+ errored = Some(guar);
+ }
+ true
+ });
+ Some(cond.clone())
+ };
+
+ let mut message = None;
+ let mut label = None;
+ let mut note = None;
+ let mut parent_label = None;
+ let mut subcommands = vec![];
+ let mut append_const_msg = None;
+
+ for item in item_iter {
+ if item.has_name(sym::message) && message.is_none() {
+ if let Some(message_) = item.value_str() {
+ message = parse_value(message_)?;
+ continue;
+ }
+ } else if item.has_name(sym::label) && label.is_none() {
+ if let Some(label_) = item.value_str() {
+ label = parse_value(label_)?;
+ continue;
+ }
+ } else if item.has_name(sym::note) && note.is_none() {
+ if let Some(note_) = item.value_str() {
+ note = parse_value(note_)?;
+ continue;
+ }
+ } else if item.has_name(sym::parent_label) && parent_label.is_none() {
+ if let Some(parent_label_) = item.value_str() {
+ parent_label = parse_value(parent_label_)?;
+ continue;
+ }
+ } else if item.has_name(sym::on)
+ && is_root
+ && message.is_none()
+ && label.is_none()
+ && note.is_none()
+ {
+ if let Some(items) = item.meta_item_list() {
+ match Self::parse(tcx, item_def_id, &items, item.span(), false) {
+ Ok(subcommand) => subcommands.push(subcommand),
+ Err(reported) => errored = Some(reported),
+ };
+ continue;
+ }
+ } else if item.has_name(sym::append_const_msg) && append_const_msg.is_none() {
+ if let Some(msg) = item.value_str() {
+ append_const_msg = Some(Some(msg));
+ continue;
+ } else if item.is_word() {
+ append_const_msg = Some(None);
+ continue;
+ }
+ }
+
+ // nothing found
+ tcx.sess.emit_err(NoValueInOnUnimplemented { span: item.span() });
+ }
+
+ if let Some(reported) = errored {
+ Err(reported)
+ } else {
+ Ok(OnUnimplementedDirective {
+ condition,
+ subcommands,
+ message,
+ label,
+ note,
+ parent_label,
+ append_const_msg,
+ })
+ }
+ }
+
+ pub fn of_item(tcx: TyCtxt<'tcx>, item_def_id: DefId) -> Result<Option<Self>, ErrorGuaranteed> {
+ let Some(attr) = tcx.get_attr(item_def_id, sym::rustc_on_unimplemented) else {
+ return Ok(None);
+ };
+
+ let result = if let Some(items) = attr.meta_item_list() {
+ Self::parse(tcx, item_def_id, &items, attr.span, true).map(Some)
+ } else if let Some(value) = attr.value_str() {
+ Ok(Some(OnUnimplementedDirective {
+ condition: None,
+ message: None,
+ subcommands: vec![],
+ label: Some(OnUnimplementedFormatString::try_parse(
+ tcx,
+ item_def_id,
+ value,
+ attr.span,
+ )?),
+ note: None,
+ parent_label: None,
+ append_const_msg: None,
+ }))
+ } else {
+ let reported =
+ tcx.sess.delay_span_bug(DUMMY_SP, "of_item: neither meta_item_list nor value_str");
+ return Err(reported);
+ };
+ debug!("of_item({:?}) = {:?}", item_def_id, result);
+ result
+ }
+
+ pub fn evaluate(
+ &self,
+ tcx: TyCtxt<'tcx>,
+ trait_ref: ty::TraitRef<'tcx>,
+ options: &[(Symbol, Option<String>)],
+ ) -> OnUnimplementedNote {
+ let mut message = None;
+ let mut label = None;
+ let mut note = None;
+ let mut parent_label = None;
+ let mut append_const_msg = None;
+ info!("evaluate({:?}, trait_ref={:?}, options={:?})", self, trait_ref, options);
+
+ let options_map: FxHashMap<Symbol, String> =
+ options.iter().filter_map(|(k, v)| v.clone().map(|v| (*k, v))).collect();
+
+ for command in self.subcommands.iter().chain(Some(self)).rev() {
+ if let Some(ref condition) = command.condition && !attr::eval_condition(
+ condition,
+ &tcx.sess.parse_sess,
+ Some(tcx.features()),
+ &mut |cfg| {
+ let value = cfg.value.map(|v| {
+ OnUnimplementedFormatString(v).format(tcx, trait_ref, &options_map)
+ });
+
+ options.contains(&(cfg.name, value))
+ },
+ ) {
+ debug!("evaluate: skipping {:?} due to condition", command);
+ continue;
+ }
+ debug!("evaluate: {:?} succeeded", command);
+ if let Some(ref message_) = command.message {
+ message = Some(message_.clone());
+ }
+
+ if let Some(ref label_) = command.label {
+ label = Some(label_.clone());
+ }
+
+ if let Some(ref note_) = command.note {
+ note = Some(note_.clone());
+ }
+
+ if let Some(ref parent_label_) = command.parent_label {
+ parent_label = Some(parent_label_.clone());
+ }
+
+ append_const_msg = command.append_const_msg;
+ }
+
+ OnUnimplementedNote {
+ label: label.map(|l| l.format(tcx, trait_ref, &options_map)),
+ message: message.map(|m| m.format(tcx, trait_ref, &options_map)),
+ note: note.map(|n| n.format(tcx, trait_ref, &options_map)),
+ parent_label: parent_label.map(|e_s| e_s.format(tcx, trait_ref, &options_map)),
+ append_const_msg,
+ }
+ }
+}
+
+impl<'tcx> OnUnimplementedFormatString {
+ fn try_parse(
+ tcx: TyCtxt<'tcx>,
+ item_def_id: DefId,
+ from: Symbol,
+ err_sp: Span,
+ ) -> Result<Self, ErrorGuaranteed> {
+ let result = OnUnimplementedFormatString(from);
+ result.verify(tcx, item_def_id, err_sp)?;
+ Ok(result)
+ }
+
+ fn verify(
+ &self,
+ tcx: TyCtxt<'tcx>,
+ item_def_id: DefId,
+ span: Span,
+ ) -> Result<(), ErrorGuaranteed> {
+ let trait_def_id = if tcx.is_trait(item_def_id) {
+ item_def_id
+ } else {
+ tcx.trait_id_of_impl(item_def_id)
+ .expect("expected `on_unimplemented` to correspond to a trait")
+ };
+ let trait_name = tcx.item_name(trait_def_id);
+ let generics = tcx.generics_of(item_def_id);
+ let s = self.0.as_str();
+ let parser = Parser::new(s, None, None, false, ParseMode::Format);
+ let mut result = Ok(());
+ for token in parser {
+ match token {
+ Piece::String(_) => (), // Normal string, no need to check it
+ Piece::NextArgument(a) => match a.position {
+ Position::ArgumentNamed(s) => {
+ match Symbol::intern(s) {
+ // `{Self}` is allowed
+ kw::SelfUpper => (),
+ // `{ThisTraitsName}` is allowed
+ s if s == trait_name => (),
+ // `{from_method}` is allowed
+ sym::from_method => (),
+ // `{from_desugaring}` is allowed
+ sym::from_desugaring => (),
+ // `{ItemContext}` is allowed
+ sym::ItemContext => (),
+ // `{integral}` and `{integer}` and `{float}` are allowed
+ sym::integral | sym::integer_ | sym::float => (),
+ // So is `{A}` if A is a type parameter
+ s => match generics.params.iter().find(|param| param.name == s) {
+ Some(_) => (),
+ None => {
+ let reported = struct_span_err!(
+ tcx.sess,
+ span,
+ E0230,
+ "there is no parameter `{}` on {}",
+ s,
+ if trait_def_id == item_def_id {
+ format!("trait `{}`", trait_name)
+ } else {
+ "impl".to_string()
+ }
+ )
+ .emit();
+ result = Err(reported);
+ }
+ },
+ }
+ }
+ // `{:1}` and `{}` are not to be used
+ Position::ArgumentIs(..) | Position::ArgumentImplicitlyIs(_) => {
+ let reported = struct_span_err!(
+ tcx.sess,
+ span,
+ E0231,
+ "only named substitution parameters are allowed"
+ )
+ .emit();
+ result = Err(reported);
+ }
+ },
+ }
+ }
+
+ result
+ }
+
+ pub fn format(
+ &self,
+ tcx: TyCtxt<'tcx>,
+ trait_ref: ty::TraitRef<'tcx>,
+ options: &FxHashMap<Symbol, String>,
+ ) -> String {
+ let name = tcx.item_name(trait_ref.def_id);
+ let trait_str = tcx.def_path_str(trait_ref.def_id);
+ let generics = tcx.generics_of(trait_ref.def_id);
+ let generic_map = generics
+ .params
+ .iter()
+ .filter_map(|param| {
+ let value = match param.kind {
+ GenericParamDefKind::Type { .. } | GenericParamDefKind::Const { .. } => {
+ trait_ref.substs[param.index as usize].to_string()
+ }
+ GenericParamDefKind::Lifetime => return None,
+ };
+ let name = param.name;
+ Some((name, value))
+ })
+ .collect::<FxHashMap<Symbol, String>>();
+ let empty_string = String::new();
+
+ let s = self.0.as_str();
+ let parser = Parser::new(s, None, None, false, ParseMode::Format);
+ let item_context = (options.get(&sym::ItemContext)).unwrap_or(&empty_string);
+ parser
+ .map(|p| match p {
+ Piece::String(s) => s,
+ Piece::NextArgument(a) => match a.position {
+ Position::ArgumentNamed(s) => {
+ let s = Symbol::intern(s);
+ match generic_map.get(&s) {
+ Some(val) => val,
+ None if s == name => &trait_str,
+ None => {
+ if let Some(val) = options.get(&s) {
+ val
+ } else if s == sym::from_desugaring || s == sym::from_method {
+ // don't break messages using these two arguments incorrectly
+ &empty_string
+ } else if s == sym::ItemContext {
+ &item_context
+ } else if s == sym::integral {
+ "{integral}"
+ } else if s == sym::integer_ {
+ "{integer}"
+ } else if s == sym::float {
+ "{float}"
+ } else {
+ bug!(
+ "broken on_unimplemented {:?} for {:?}: \
+ no argument matching {:?}",
+ self.0,
+ trait_ref,
+ s
+ )
+ }
+ }
+ }
+ }
+ _ => bug!("broken on_unimplemented {:?} - bad format arg", self.0),
+ },
+ })
+ .collect()
+ }
+}
diff --git a/compiler/rustc_trait_selection/src/traits/error_reporting/suggestions.rs b/compiler/rustc_trait_selection/src/traits/error_reporting/suggestions.rs
index 8c41d9d24..6ea54b625 100644
--- a/compiler/rustc_trait_selection/src/traits/error_reporting/suggestions.rs
+++ b/compiler/rustc_trait_selection/src/traits/error_reporting/suggestions.rs
@@ -1,11 +1,8 @@
-use super::{
- DefIdOrName, Obligation, ObligationCause, ObligationCauseCode, PredicateObligation,
- SelectionContext,
-};
+use super::{DefIdOrName, Obligation, ObligationCause, ObligationCauseCode, PredicateObligation};
use crate::autoderef::Autoderef;
use crate::infer::InferCtxt;
-use crate::traits::normalize_to;
+use crate::traits::NormalizeExt;
use hir::def::CtorOf;
use hir::HirId;
@@ -23,7 +20,7 @@ use rustc_hir::lang_items::LangItem;
use rustc_hir::{AsyncGeneratorKind, GeneratorKind, Node};
use rustc_infer::infer::error_reporting::TypeErrCtxt;
use rustc_infer::infer::type_variable::{TypeVariableOrigin, TypeVariableOriginKind};
-use rustc_infer::infer::LateBoundRegionConversionTime;
+use rustc_infer::infer::{InferOk, LateBoundRegionConversionTime};
use rustc_middle::hir::map;
use rustc_middle::ty::{
self, suggest_arbitrary_trait_bound, suggest_constraining_type_param, AdtKind, DefIdTree,
@@ -44,7 +41,7 @@ use rustc_middle::ty::print::with_no_trimmed_paths;
#[derive(Debug)]
pub enum GeneratorInteriorOrUpvar {
// span of interior type
- Interior(Span),
+ Interior(Span, Option<(Option<Span>, Span, Option<hir::HirId>, Option<Span>)>),
// span of upvar
Upvar(Span),
}
@@ -283,7 +280,6 @@ pub trait TypeErrCtxtExt<'tcx> {
&self,
err: &mut Diagnostic,
interior_or_upvar_span: GeneratorInteriorOrUpvar,
- interior_extra_info: Option<(Option<Span>, Span, Option<hir::HirId>, Option<Span>)>,
is_async: bool,
outer_generator: Option<DefId>,
trait_pred: ty::TraitPredicate<'tcx>,
@@ -302,7 +298,7 @@ pub trait TypeErrCtxtExt<'tcx> {
obligated_types: &mut Vec<Ty<'tcx>>,
seen_requirements: &mut FxHashSet<DefId>,
) where
- T: fmt::Display;
+ T: fmt::Display + ToPredicate<'tcx>;
/// Suggest to await before try: future? => future.await?
fn suggest_await_before_try(
@@ -335,7 +331,7 @@ pub trait TypeErrCtxtExt<'tcx> {
);
}
-fn predicate_constraint(generics: &hir::Generics<'_>, pred: String) -> (Span, String) {
+fn predicate_constraint(generics: &hir::Generics<'_>, pred: ty::Predicate<'_>) -> (Span, String) {
(
generics.tail_span_for_predicate_suggestion(),
format!("{} {}", generics.add_where_or_trailing_comma(), pred),
@@ -417,7 +413,7 @@ fn suggest_restriction<'tcx>(
},
// `fn foo(t: impl Trait)`
// ^ suggest `where <T as Trait>::A: Bound`
- predicate_constraint(hir_generics, trait_pred.to_predicate(tcx).to_string()),
+ predicate_constraint(hir_generics, trait_pred.to_predicate(tcx)),
];
sugg.extend(ty_spans.into_iter().map(|s| (s, type_param_name.to_string())));
@@ -441,9 +437,7 @@ fn suggest_restriction<'tcx>(
.find(|p| !matches!(p.kind, hir::GenericParamKind::Type { synthetic: true, .. })),
super_traits,
) {
- (_, None) => {
- predicate_constraint(hir_generics, trait_pred.to_predicate(tcx).to_string())
- }
+ (_, None) => predicate_constraint(hir_generics, trait_pred.to_predicate(tcx)),
(None, Some((ident, []))) => (
ident.span.shrink_to_hi(),
format!(": {}", trait_pred.print_modifiers_and_trait_path()),
@@ -714,7 +708,6 @@ impl<'tcx> TypeErrCtxtExt<'tcx> for TypeErrCtxt<'_, 'tcx> {
obligation.cause.body_id,
span,
base_ty,
- span,
);
if let Some(steps) = autoderef.find_map(|(ty, steps)| {
// Re-add the `&`
@@ -814,7 +807,7 @@ impl<'tcx> TypeErrCtxtExt<'tcx> for TypeErrCtxt<'_, 'tcx> {
err: &mut Diagnostic,
trait_pred: ty::PolyTraitPredicate<'tcx>,
) -> bool {
- if let ty::PredicateKind::Trait(trait_pred) = obligation.predicate.kind().skip_binder()
+ if let ty::PredicateKind::Clause(ty::Clause::Trait(trait_pred)) = obligation.predicate.kind().skip_binder()
&& Some(trait_pred.def_id()) == self.tcx.lang_items().sized_trait()
{
// Don't suggest calling to turn an unsized type into a sized type
@@ -843,7 +836,7 @@ impl<'tcx> TypeErrCtxtExt<'tcx> for TypeErrCtxt<'_, 'tcx> {
}
ty::Opaque(def_id, substs) => {
self.tcx.bound_item_bounds(def_id).subst(self.tcx, substs).iter().find_map(|pred| {
- if let ty::PredicateKind::Projection(proj) = pred.kind().skip_binder()
+ if let ty::PredicateKind::Clause(ty::Clause::Projection(proj)) = pred.kind().skip_binder()
&& Some(proj.projection_ty.item_def_id) == self.tcx.lang_items().fn_once_output()
// args tuple will always be substs[1]
&& let ty::Tuple(args) = proj.projection_ty.substs.type_at(1).kind()
@@ -877,7 +870,7 @@ impl<'tcx> TypeErrCtxtExt<'tcx> for TypeErrCtxt<'_, 'tcx> {
}
ty::Param(_) => {
obligation.param_env.caller_bounds().iter().find_map(|pred| {
- if let ty::PredicateKind::Projection(proj) = pred.kind().skip_binder()
+ if let ty::PredicateKind::Clause(ty::Clause::Projection(proj)) = pred.kind().skip_binder()
&& Some(proj.projection_ty.item_def_id) == self.tcx.lang_items().fn_once_output()
&& proj.projection_ty.self_ty() == found
// args tuple will always be substs[1]
@@ -1019,7 +1012,7 @@ impl<'tcx> TypeErrCtxtExt<'tcx> for TypeErrCtxt<'_, 'tcx> {
let mut never_suggest_borrow: Vec<_> =
[LangItem::Copy, LangItem::Clone, LangItem::Unpin, LangItem::Sized]
.iter()
- .filter_map(|lang_item| self.tcx.lang_items().require(*lang_item).ok())
+ .filter_map(|lang_item| self.tcx.lang_items().get(*lang_item))
.collect();
if let Some(def_id) = self.tcx.get_diagnostic_item(sym::Send) {
@@ -1063,7 +1056,7 @@ impl<'tcx> TypeErrCtxtExt<'tcx> for TypeErrCtxt<'_, 'tcx> {
{
(
mk_result(old_pred.map_bound(|trait_pred| (trait_pred, *ty))),
- matches!(mutability, hir::Mutability::Mut),
+ mutability.is_mut(),
)
} else {
(false, false)
@@ -1117,7 +1110,7 @@ impl<'tcx> TypeErrCtxtExt<'tcx> for TypeErrCtxt<'_, 'tcx> {
err.span_suggestions(
span.shrink_to_lo(),
"consider borrowing here",
- ["&".to_string(), "&mut ".to_string()].into_iter(),
+ ["&".to_string(), "&mut ".to_string()],
Applicability::MaybeIncorrect,
);
} else {
@@ -1164,7 +1157,7 @@ impl<'tcx> TypeErrCtxtExt<'tcx> for TypeErrCtxt<'_, 'tcx> {
for predicate in predicates.iter() {
if !self.predicate_must_hold_modulo_regions(
- &obligation.with(predicate.with_self_ty(self.tcx, self_ref_ty)),
+ &obligation.with(self.tcx, predicate.with_self_ty(self.tcx, self_ref_ty)),
) {
return;
}
@@ -1260,7 +1253,7 @@ impl<'tcx> TypeErrCtxtExt<'tcx> for TypeErrCtxt<'_, 'tcx> {
);
// FIXME: account for associated `async fn`s.
if let hir::Expr { span, kind: hir::ExprKind::Call(base, _), .. } = expr {
- if let ty::PredicateKind::Trait(pred) =
+ if let ty::PredicateKind::Clause(ty::Clause::Trait(pred)) =
obligation.predicate.kind().skip_binder()
{
err.span_label(
@@ -1353,7 +1346,7 @@ impl<'tcx> TypeErrCtxtExt<'tcx> for TypeErrCtxt<'_, 'tcx> {
.sess
.source_map()
.span_take_while(span, |c| c.is_whitespace() || *c == '&');
- if points_at_arg && mutability == hir::Mutability::Not && refs_number > 0 {
+ if points_at_arg && mutability.is_not() && refs_number > 0 {
err.span_suggestion_verbose(
sp,
"consider changing this borrow's mutability",
@@ -1525,7 +1518,7 @@ impl<'tcx> TypeErrCtxtExt<'tcx> for TypeErrCtxt<'_, 'tcx> {
let self_ty_satisfies_dyn_predicates = |self_ty| {
predicates.iter().all(|predicate| {
let pred = predicate.with_self_ty(self.tcx, self_ty);
- let obl = Obligation::new(cause.clone(), param_env, pred);
+ let obl = Obligation::new(self.tcx, cause.clone(), param_env, pred);
self.predicate_may_hold(&obl)
})
};
@@ -1688,9 +1681,7 @@ impl<'tcx> TypeErrCtxtExt<'tcx> for TypeErrCtxt<'_, 'tcx> {
) -> Ty<'tcx> {
let inputs = trait_ref.skip_binder().substs.type_at(1);
let sig = match inputs.kind() {
- ty::Tuple(inputs)
- if infcx.tcx.fn_trait_kind_from_lang_item(trait_ref.def_id()).is_some() =>
- {
+ ty::Tuple(inputs) if infcx.tcx.is_fn_trait(trait_ref.def_id()) => {
infcx.tcx.mk_fn_sig(
inputs.iter(),
infcx.next_ty_var(TypeVariableOrigin {
@@ -1760,8 +1751,8 @@ impl<'tcx> TypeErrCtxtExt<'tcx> for TypeErrCtxt<'_, 'tcx> {
if let ObligationCauseCode::ExprBindingObligation(def_id, _, _, idx) = cause
&& let predicates = self.tcx.predicates_of(def_id).instantiate_identity(self.tcx)
&& let Some(pred) = predicates.predicates.get(*idx)
- && let ty::PredicateKind::Trait(trait_pred) = pred.kind().skip_binder()
- && ty::ClosureKind::from_def_id(self.tcx, trait_pred.def_id()).is_some()
+ && let ty::PredicateKind::Clause(ty::Clause::Trait(trait_pred)) = pred.kind().skip_binder()
+ && self.tcx.is_fn_trait(trait_pred.def_id())
{
let expected_self =
self.tcx.anonymize_late_bound_regions(pred.kind().rebind(trait_pred.self_ty()));
@@ -1774,9 +1765,8 @@ impl<'tcx> TypeErrCtxtExt<'tcx> for TypeErrCtxt<'_, 'tcx> {
let other_pred = std::iter::zip(&predicates.predicates, &predicates.spans)
.enumerate()
.find(|(other_idx, (pred, _))| match pred.kind().skip_binder() {
- ty::PredicateKind::Trait(trait_pred)
- if ty::ClosureKind::from_def_id(self.tcx, trait_pred.def_id())
- .is_some()
+ ty::PredicateKind::Clause(ty::Clause::Trait(trait_pred))
+ if self.tcx.is_fn_trait(trait_pred.def_id())
&& other_idx != idx
// Make sure that the self type matches
// (i.e. constraining this closure)
@@ -1890,13 +1880,9 @@ impl<'tcx> TypeErrCtxtExt<'tcx> for TypeErrCtxt<'_, 'tcx> {
//
// - `BuiltinDerivedObligation` with a generator witness (B)
// - `BuiltinDerivedObligation` with a generator (B)
- // - `BuiltinDerivedObligation` with `std::future::GenFuture` (B)
- // - `BuiltinDerivedObligation` with `impl std::future::Future` (B)
// - `BuiltinDerivedObligation` with `impl std::future::Future` (B)
// - `BuiltinDerivedObligation` with a generator witness (A)
// - `BuiltinDerivedObligation` with a generator (A)
- // - `BuiltinDerivedObligation` with `std::future::GenFuture` (A)
- // - `BuiltinDerivedObligation` with `impl std::future::Future` (A)
// - `BuiltinDerivedObligation` with `impl std::future::Future` (A)
// - `BindingObligation` with `impl_send` (Send requirement)
//
@@ -1905,7 +1891,7 @@ impl<'tcx> TypeErrCtxtExt<'tcx> for TypeErrCtxt<'_, 'tcx> {
// bound was introduced. At least one generator should be present for this diagnostic to be
// modified.
let (mut trait_ref, mut target_ty) = match obligation.predicate.kind().skip_binder() {
- ty::PredicateKind::Trait(p) => (Some(p), Some(p.self_ty())),
+ ty::PredicateKind::Clause(ty::Clause::Trait(p)) => (Some(p), Some(p.self_ty())),
_ => (None, None),
};
let mut generator = None;
@@ -2004,17 +1990,6 @@ impl<'tcx> TypeErrCtxtExt<'tcx> for TypeErrCtxt<'_, 'tcx> {
.as_local()
.and_then(|def_id| hir.maybe_body_owned_by(def_id))
.map(|body_id| hir.body(body_id));
- let is_async = match generator_did.as_local() {
- Some(_) => generator_body
- .and_then(|body| body.generator_kind())
- .map(|generator_kind| matches!(generator_kind, hir::GeneratorKind::Async(..)))
- .unwrap_or(false),
- None => self
- .tcx
- .generator_kind(generator_did)
- .map(|generator_kind| matches!(generator_kind, hir::GeneratorKind::Async(..)))
- .unwrap_or(false),
- };
let mut visitor = AwaitsVisitor::default();
if let Some(body) = generator_body {
visitor.visit_body(body);
@@ -2044,61 +2019,61 @@ impl<'tcx> TypeErrCtxtExt<'tcx> for TypeErrCtxt<'_, 'tcx> {
eq
};
- let mut interior_or_upvar_span = None;
- let mut interior_extra_info = None;
-
// Get the typeck results from the infcx if the generator is the function we are currently
// type-checking; otherwise, get them by performing a query. This is needed to avoid
// cycles. If we can't use resolved types because the generator comes from another crate,
// we still provide a targeted error but without all the relevant spans.
- let generator_data: Option<GeneratorData<'tcx, '_>> = match &self.typeck_results {
- Some(t) if t.hir_owner.to_def_id() == generator_did_root => {
- Some(GeneratorData::Local(&t))
- }
+ let generator_data = match &self.typeck_results {
+ Some(t) if t.hir_owner.to_def_id() == generator_did_root => GeneratorData::Local(&t),
_ if generator_did.is_local() => {
- Some(GeneratorData::Local(self.tcx.typeck(generator_did.expect_local())))
+ GeneratorData::Local(self.tcx.typeck(generator_did.expect_local()))
}
- _ => self
- .tcx
- .generator_diagnostic_data(generator_did)
- .as_ref()
- .map(|generator_diag_data| GeneratorData::Foreign(generator_diag_data)),
+ _ if let Some(generator_diag_data) = self.tcx.generator_diagnostic_data(generator_did) => {
+ GeneratorData::Foreign(generator_diag_data)
+ }
+ _ => return false,
};
- if let Some(generator_data) = generator_data.as_ref() {
- interior_or_upvar_span =
- generator_data.try_get_upvar_span(&self, generator_did, ty_matches);
+ let mut interior_or_upvar_span = None;
- // The generator interior types share the same binders
- if let Some(cause) =
- generator_data.get_generator_interior_types().skip_binder().iter().find(
- |ty::GeneratorInteriorTypeCause { ty, .. }| {
- ty_matches(generator_data.get_generator_interior_types().rebind(*ty))
- },
- )
- {
- let from_awaited_ty = generator_data.get_from_await_ty(visitor, hir, ty_matches);
- let ty::GeneratorInteriorTypeCause { span, scope_span, yield_span, expr, .. } =
- cause;
+ let from_awaited_ty = generator_data.get_from_await_ty(visitor, hir, ty_matches);
+ debug!(?from_awaited_ty);
- interior_or_upvar_span = Some(GeneratorInteriorOrUpvar::Interior(*span));
- interior_extra_info = Some((*scope_span, *yield_span, *expr, from_awaited_ty));
- }
+ // The generator interior types share the same binders
+ if let Some(cause) =
+ generator_data.get_generator_interior_types().skip_binder().iter().find(
+ |ty::GeneratorInteriorTypeCause { ty, .. }| {
+ ty_matches(generator_data.get_generator_interior_types().rebind(*ty))
+ },
+ )
+ {
+ let ty::GeneratorInteriorTypeCause { span, scope_span, yield_span, expr, .. } = cause;
- if interior_or_upvar_span.is_none() && generator_data.is_foreign() {
- interior_or_upvar_span = Some(GeneratorInteriorOrUpvar::Interior(span));
- }
+ interior_or_upvar_span = Some(GeneratorInteriorOrUpvar::Interior(
+ *span,
+ Some((*scope_span, *yield_span, *expr, from_awaited_ty)),
+ ));
}
+ if interior_or_upvar_span.is_none() {
+ interior_or_upvar_span =
+ generator_data.try_get_upvar_span(&self, generator_did, ty_matches);
+ }
+
+ if interior_or_upvar_span.is_none() && generator_data.is_foreign() {
+ interior_or_upvar_span = Some(GeneratorInteriorOrUpvar::Interior(span, None));
+ }
+
+ debug!(?interior_or_upvar_span);
if let Some(interior_or_upvar_span) = interior_or_upvar_span {
- let typeck_results = generator_data.and_then(|generator_data| match generator_data {
+ let is_async = self.tcx.generator_is_async(generator_did);
+ let typeck_results = match generator_data {
GeneratorData::Local(typeck_results) => Some(typeck_results),
GeneratorData::Foreign(_) => None,
- });
+ };
self.note_obligation_cause_for_async_await(
err,
interior_or_upvar_span,
- interior_extra_info,
is_async,
outer_generator,
trait_ref,
@@ -2120,7 +2095,6 @@ impl<'tcx> TypeErrCtxtExt<'tcx> for TypeErrCtxt<'_, 'tcx> {
&self,
err: &mut Diagnostic,
interior_or_upvar_span: GeneratorInteriorOrUpvar,
- interior_extra_info: Option<(Option<Span>, Span, Option<hir::HirId>, Option<Span>)>,
is_async: bool,
outer_generator: Option<DefId>,
trait_pred: ty::TraitPredicate<'tcx>,
@@ -2242,7 +2216,7 @@ impl<'tcx> TypeErrCtxtExt<'tcx> for TypeErrCtxt<'_, 'tcx> {
}
};
match interior_or_upvar_span {
- GeneratorInteriorOrUpvar::Interior(interior_span) => {
+ GeneratorInteriorOrUpvar::Interior(interior_span, interior_extra_info) => {
if let Some((scope_span, yield_span, expr, from_awaited_ty)) = interior_extra_info {
if let Some(await_span) = from_awaited_ty {
// The type causing this obligation is one being awaited at await_span.
@@ -2379,7 +2353,7 @@ impl<'tcx> TypeErrCtxtExt<'tcx> for TypeErrCtxt<'_, 'tcx> {
obligated_types: &mut Vec<Ty<'tcx>>,
seen_requirements: &mut FxHashSet<DefId>,
) where
- T: fmt::Display,
+ T: fmt::Display + ToPredicate<'tcx>,
{
let tcx = self.tcx;
match *cause_code {
@@ -2407,7 +2381,8 @@ impl<'tcx> TypeErrCtxtExt<'tcx> for TypeErrCtxt<'_, 'tcx> {
| ObligationCauseCode::CheckAssociatedTypeBounds { .. }
| ObligationCauseCode::LetElse
| ObligationCauseCode::BinOp { .. }
- | ObligationCauseCode::AscribeUserTypeProvePredicate(..) => {}
+ | ObligationCauseCode::AscribeUserTypeProvePredicate(..)
+ | ObligationCauseCode::RustCall => {}
ObligationCauseCode::SliceOrArrayElem => {
err.note("slice and array elements must have `Sized` type");
}
@@ -2445,12 +2420,12 @@ impl<'tcx> TypeErrCtxtExt<'tcx> for TypeErrCtxt<'_, 'tcx> {
(Ok(l), Ok(r)) => l.line == r.line,
_ => true,
};
- if !ident.span.overlaps(span) && !same_line {
+ if !ident.span.is_dummy() && !ident.span.overlaps(span) && !same_line {
multispan.push_span_label(ident.span, "required by a bound in this");
}
}
let descr = format!("required by a bound in `{}`", item_name);
- if span != DUMMY_SP {
+ if !span.is_dummy() {
let msg = format!("required by this bound in `{}`", item_name);
multispan.push_span_label(span, msg);
err.span_note(multispan, &descr);
@@ -2636,30 +2611,24 @@ impl<'tcx> TypeErrCtxtExt<'tcx> for TypeErrCtxt<'_, 'tcx> {
}
};
- let from_generator = tcx.lang_items().from_generator_fn().unwrap();
+ let identity_future = tcx.require_lang_item(LangItem::IdentityFuture, None);
// Don't print the tuple of capture types
'print: {
if !is_upvar_tys_infer_tuple {
let msg = format!("required because it appears within the type `{}`", ty);
match ty.kind() {
- ty::Adt(def, _) => {
- // `gen_future` is used in all async functions; it doesn't add any additional info.
- if self.tcx.is_diagnostic_item(sym::gen_future, def.did()) {
- break 'print;
- }
- match self.tcx.opt_item_ident(def.did()) {
- Some(ident) => err.span_note(ident.span, &msg),
- None => err.note(&msg),
- }
- }
+ ty::Adt(def, _) => match self.tcx.opt_item_ident(def.did()) {
+ Some(ident) => err.span_note(ident.span, &msg),
+ None => err.note(&msg),
+ },
ty::Opaque(def_id, _) => {
- // Avoid printing the future from `core::future::from_generator`, it's not helpful
- if tcx.parent(*def_id) == from_generator {
+ // Avoid printing the future from `core::future::identity_future`, it's not helpful
+ if tcx.parent(*def_id) == identity_future {
break 'print;
}
- // If the previous type is `from_generator`, this is the future generated by the body of an async function.
+ // If the previous type is `identity_future`, this is the future generated by the body of an async function.
// Avoid printing it twice (it was already printed in the `ty::Generator` arm below).
let is_future = tcx.ty_is_opaque_future(ty);
debug!(
@@ -2669,8 +2638,8 @@ impl<'tcx> TypeErrCtxtExt<'tcx> for TypeErrCtxt<'_, 'tcx> {
);
if is_future
&& obligated_types.last().map_or(false, |ty| match ty.kind() {
- ty::Opaque(last_def_id, _) => {
- tcx.parent(*last_def_id) == from_generator
+ ty::Generator(last_def_id, ..) => {
+ tcx.generator_is_async(*last_def_id)
}
_ => false,
})
@@ -2696,7 +2665,7 @@ impl<'tcx> TypeErrCtxtExt<'tcx> for TypeErrCtxt<'_, 'tcx> {
let sp = self.tcx.def_span(def_id);
// Special-case this to say "async block" instead of `[static generator]`.
- let kind = tcx.generator_kind(def_id).unwrap();
+ let kind = tcx.generator_kind(def_id).unwrap().descr();
err.span_note(
sp,
&format!("required because it's used within this {}", kind),
@@ -2713,7 +2682,7 @@ impl<'tcx> TypeErrCtxtExt<'tcx> for TypeErrCtxt<'_, 'tcx> {
obligated_types.push(ty);
- let parent_predicate = parent_trait_ref.to_predicate(tcx);
+ let parent_predicate = parent_trait_ref;
if !self.is_recursive_obligation(obligated_types, &data.parent_code) {
// #74711: avoid a stack overflow
ensure_sufficient_stack(|| {
@@ -2744,9 +2713,10 @@ impl<'tcx> TypeErrCtxtExt<'tcx> for TypeErrCtxt<'_, 'tcx> {
self.resolve_vars_if_possible(data.derived.parent_trait_pred);
parent_trait_pred.remap_constness_diag(param_env);
let parent_def_id = parent_trait_pred.def_id();
+ let (self_ty, file) =
+ self.tcx.short_ty_string(parent_trait_pred.skip_binder().self_ty());
let msg = format!(
- "required for `{}` to implement `{}`",
- parent_trait_pred.skip_binder().self_ty(),
+ "required for `{self_ty}` to implement `{}`",
parent_trait_pred.print_modifiers_and_trait_path()
);
let mut is_auto_trait = false;
@@ -2775,7 +2745,13 @@ impl<'tcx> TypeErrCtxtExt<'tcx> for TypeErrCtxt<'_, 'tcx> {
_ => err.note(&msg),
};
- let mut parent_predicate = parent_trait_pred.to_predicate(tcx);
+ if let Some(file) = file {
+ err.note(&format!(
+ "the full type name has been written to '{}'",
+ file.display(),
+ ));
+ }
+ let mut parent_predicate = parent_trait_pred;
let mut data = &data.derived;
let mut count = 0;
seen_requirements.insert(parent_def_id);
@@ -2815,11 +2791,18 @@ impl<'tcx> TypeErrCtxtExt<'tcx> for TypeErrCtxt<'_, 'tcx> {
count,
pluralize!(count)
));
+ let (self_ty, file) =
+ self.tcx.short_ty_string(parent_trait_pred.skip_binder().self_ty());
err.note(&format!(
- "required for `{}` to implement `{}`",
- parent_trait_pred.skip_binder().self_ty(),
+ "required for `{self_ty}` to implement `{}`",
parent_trait_pred.print_modifiers_and_trait_path()
));
+ if let Some(file) = file {
+ err.note(&format!(
+ "the full type name has been written to '{}'",
+ file.display(),
+ ));
+ }
}
// #74711: avoid a stack overflow
ensure_sufficient_stack(|| {
@@ -2835,7 +2818,7 @@ impl<'tcx> TypeErrCtxtExt<'tcx> for TypeErrCtxt<'_, 'tcx> {
}
ObligationCauseCode::DerivedObligation(ref data) => {
let parent_trait_ref = self.resolve_vars_if_possible(data.parent_trait_pred);
- let parent_predicate = parent_trait_ref.to_predicate(tcx);
+ let parent_predicate = parent_trait_ref;
// #74711: avoid a stack overflow
ensure_sufficient_stack(|| {
self.note_obligation_cause_code(
@@ -2968,8 +2951,7 @@ impl<'tcx> TypeErrCtxtExt<'tcx> for TypeErrCtxt<'_, 'tcx> {
let self_ty = self.resolve_vars_if_possible(trait_pred.self_ty());
let impls_future = self.type_implements_trait(
future_trait,
- self.tcx.erase_late_bound_regions(self_ty),
- ty::List::empty(),
+ [self.tcx.erase_late_bound_regions(self_ty)],
obligation.param_env,
);
if !impls_future.must_apply_modulo_regions() {
@@ -2982,16 +2964,11 @@ impl<'tcx> TypeErrCtxtExt<'tcx> for TypeErrCtxt<'_, 'tcx> {
self.tcx.mk_projection(
item_def_id,
// Future::Output has no substs
- self.tcx.mk_substs_trait(trait_pred.self_ty(), &[]),
+ self.tcx.mk_substs_trait(trait_pred.self_ty(), []),
)
});
- let projection_ty = normalize_to(
- &mut SelectionContext::new(self),
- obligation.param_env,
- obligation.cause.clone(),
- projection_ty,
- &mut vec![],
- );
+ let InferOk { value: projection_ty, .. } =
+ self.at(&obligation.cause, obligation.param_env).normalize(projection_ty);
debug!(
normalized_projection_type = ?self.resolve_vars_if_possible(projection_ty)
@@ -3067,21 +3044,22 @@ impl<'tcx> TypeErrCtxtExt<'tcx> for TypeErrCtxt<'_, 'tcx> {
let field_ty = field.ty(self.tcx, substs);
let trait_substs = match diagnostic_name {
sym::PartialEq | sym::PartialOrd => {
- self.tcx.mk_substs_trait(field_ty, &[field_ty.into()])
+ Some(field_ty)
}
- _ => self.tcx.mk_substs_trait(field_ty, &[]),
+ _ => None,
};
let trait_pred = trait_pred.map_bound_ref(|tr| ty::TraitPredicate {
- trait_ref: ty::TraitRef {
- substs: trait_substs,
- ..trait_pred.skip_binder().trait_ref
- },
+ trait_ref: self.tcx.mk_trait_ref(
+ trait_pred.def_id(),
+ [field_ty].into_iter().chain(trait_substs),
+ ),
..*tr
});
let field_obl = Obligation::new(
+ self.tcx,
obligation.cause.clone(),
obligation.param_env,
- trait_pred.to_predicate(self.tcx),
+ trait_pred,
);
self.predicate_must_hold_modulo_regions(&field_obl)
})
diff --git a/compiler/rustc_trait_selection/src/traits/fulfill.rs b/compiler/rustc_trait_selection/src/traits/fulfill.rs
index a417e1440..76a755ed9 100644
--- a/compiler/rustc_trait_selection/src/traits/fulfill.rs
+++ b/compiler/rustc_trait_selection/src/traits/fulfill.rs
@@ -4,13 +4,12 @@ use rustc_data_structures::obligation_forest::ProcessResult;
use rustc_data_structures::obligation_forest::{Error, ForestObligation, Outcome};
use rustc_data_structures::obligation_forest::{ObligationForest, ObligationProcessor};
use rustc_infer::traits::ProjectionCacheKey;
-use rustc_infer::traits::{SelectionError, TraitEngine, TraitEngineExt as _, TraitObligation};
+use rustc_infer::traits::{SelectionError, TraitEngine, TraitObligation};
use rustc_middle::mir::interpret::ErrorHandled;
use rustc_middle::ty::abstract_const::NotConstEvaluatable;
use rustc_middle::ty::error::{ExpectedFound, TypeError};
use rustc_middle::ty::subst::SubstsRef;
-use rustc_middle::ty::ToPredicate;
-use rustc_middle::ty::{self, Binder, Const, Ty, TypeVisitable};
+use rustc_middle::ty::{self, Binder, Const, TypeVisitable};
use std::marker::PhantomData;
use super::const_evaluatable;
@@ -21,9 +20,9 @@ use super::CodeAmbiguity;
use super::CodeProjectionError;
use super::CodeSelectionError;
use super::EvaluationResult;
+use super::PredicateObligation;
use super::Unimplemented;
use super::{FulfillmentError, FulfillmentErrorCode};
-use super::{ObligationCause, PredicateObligation};
use crate::traits::project::PolyProjectionObligation;
use crate::traits::project::ProjectionCacheKeyExt as _;
@@ -85,7 +84,7 @@ static_assert_size!(PendingPredicateObligation<'_>, 72);
impl<'a, 'tcx> FulfillmentContext<'tcx> {
/// Creates a new fulfillment context.
- pub fn new() -> FulfillmentContext<'tcx> {
+ pub(super) fn new() -> FulfillmentContext<'tcx> {
FulfillmentContext {
predicates: ObligationForest::new(),
relationships: FxHashMap::default(),
@@ -93,7 +92,7 @@ impl<'a, 'tcx> FulfillmentContext<'tcx> {
}
}
- pub fn new_in_snapshot() -> FulfillmentContext<'tcx> {
+ pub(super) fn new_in_snapshot() -> FulfillmentContext<'tcx> {
FulfillmentContext {
predicates: ObligationForest::new(),
relationships: FxHashMap::default(),
@@ -127,42 +126,6 @@ impl<'a, 'tcx> FulfillmentContext<'tcx> {
}
impl<'tcx> TraitEngine<'tcx> for FulfillmentContext<'tcx> {
- /// "Normalize" a projection type `<SomeType as SomeTrait>::X` by
- /// creating a fresh type variable `$0` as well as a projection
- /// predicate `<SomeType as SomeTrait>::X == $0`. When the
- /// inference engine runs, it will attempt to find an impl of
- /// `SomeTrait` or a where-clause that lets us unify `$0` with
- /// something concrete. If this fails, we'll unify `$0` with
- /// `projection_ty` again.
- #[instrument(level = "debug", skip(self, infcx, param_env, cause))]
- fn normalize_projection_type(
- &mut self,
- infcx: &InferCtxt<'tcx>,
- param_env: ty::ParamEnv<'tcx>,
- projection_ty: ty::ProjectionTy<'tcx>,
- cause: ObligationCause<'tcx>,
- ) -> Ty<'tcx> {
- debug_assert!(!projection_ty.has_escaping_bound_vars());
-
- // FIXME(#20304) -- cache
-
- let mut selcx = SelectionContext::new(infcx);
- let mut obligations = vec![];
- let normalized_ty = project::normalize_projection_type(
- &mut selcx,
- param_env,
- projection_ty,
- cause,
- 0,
- &mut obligations,
- );
- self.register_predicate_obligations(infcx, obligations);
-
- debug!(?normalized_ty);
-
- normalized_ty.ty().unwrap()
- }
-
fn register_predicate_obligation(
&mut self,
infcx: &InferCtxt<'tcx>,
@@ -236,7 +199,7 @@ impl<'a, 'tcx> ObligationProcessor for FulfillProcessor<'a, 'tcx> {
// code is so hot. 1 and 0 dominate; 2+ is fairly rare.
1 => {
let infer_var = pending_obligation.stalled_on[0];
- self.selcx.infcx().ty_or_const_infer_var_changed(infer_var)
+ self.selcx.infcx.ty_or_const_infer_var_changed(infer_var)
}
0 => {
// In this case we haven't changed, but wish to make a change.
@@ -247,7 +210,7 @@ impl<'a, 'tcx> ObligationProcessor for FulfillProcessor<'a, 'tcx> {
// form was a perf win. See #64545 for details.
(|| {
for &infer_var in &pending_obligation.stalled_on {
- if self.selcx.infcx().ty_or_const_infer_var_changed(infer_var) {
+ if self.selcx.infcx.ty_or_const_infer_var_changed(infer_var) {
return true;
}
}
@@ -277,13 +240,12 @@ impl<'a, 'tcx> ObligationProcessor for FulfillProcessor<'a, 'tcx> {
debug!(?obligation, "pre-resolve");
if obligation.predicate.has_non_region_infer() {
- obligation.predicate =
- self.selcx.infcx().resolve_vars_if_possible(obligation.predicate);
+ obligation.predicate = self.selcx.infcx.resolve_vars_if_possible(obligation.predicate);
}
let obligation = &pending_obligation.obligation;
- let infcx = self.selcx.infcx();
+ let infcx = self.selcx.infcx;
if obligation.predicate.has_projections() {
let mut obligations = Vec::new();
@@ -296,7 +258,7 @@ impl<'a, 'tcx> ObligationProcessor for FulfillProcessor<'a, 'tcx> {
&mut obligations,
);
if predicate != obligation.predicate {
- obligations.push(obligation.with(predicate));
+ obligations.push(obligation.with(infcx.tcx, predicate));
return ProcessResult::Changed(mk_pending(obligations));
}
}
@@ -306,8 +268,8 @@ impl<'a, 'tcx> ObligationProcessor for FulfillProcessor<'a, 'tcx> {
// Evaluation will discard candidates using the leak check.
// This means we need to pass it the bound version of our
// predicate.
- ty::PredicateKind::Trait(trait_ref) => {
- let trait_obligation = obligation.with(binder.rebind(trait_ref));
+ ty::PredicateKind::Clause(ty::Clause::Trait(trait_ref)) => {
+ let trait_obligation = obligation.with(infcx.tcx, binder.rebind(trait_ref));
self.process_trait_obligation(
obligation,
@@ -315,8 +277,8 @@ impl<'a, 'tcx> ObligationProcessor for FulfillProcessor<'a, 'tcx> {
&mut pending_obligation.stalled_on,
)
}
- ty::PredicateKind::Projection(data) => {
- let project_obligation = obligation.with(binder.rebind(data));
+ ty::PredicateKind::Clause(ty::Clause::Projection(data)) => {
+ let project_obligation = obligation.with(infcx.tcx, binder.rebind(data));
self.process_projection_obligation(
obligation,
@@ -324,8 +286,8 @@ impl<'a, 'tcx> ObligationProcessor for FulfillProcessor<'a, 'tcx> {
&mut pending_obligation.stalled_on,
)
}
- ty::PredicateKind::RegionOutlives(_)
- | ty::PredicateKind::TypeOutlives(_)
+ ty::PredicateKind::Clause(ty::Clause::RegionOutlives(_))
+ | ty::PredicateKind::Clause(ty::Clause::TypeOutlives(_))
| ty::PredicateKind::WellFormed(_)
| ty::PredicateKind::ObjectSafe(_)
| ty::PredicateKind::ClosureKind(..)
@@ -335,17 +297,16 @@ impl<'a, 'tcx> ObligationProcessor for FulfillProcessor<'a, 'tcx> {
| ty::PredicateKind::ConstEquate(..) => {
let pred =
ty::Binder::dummy(infcx.replace_bound_vars_with_placeholders(binder));
- ProcessResult::Changed(mk_pending(vec![
- obligation.with(pred.to_predicate(self.selcx.tcx())),
- ]))
+ ProcessResult::Changed(mk_pending(vec![obligation.with(infcx.tcx, pred)]))
}
+ ty::PredicateKind::Ambiguous => ProcessResult::Unchanged,
ty::PredicateKind::TypeWellFormedFromEnv(..) => {
bug!("TypeWellFormedFromEnv is only used for Chalk")
}
},
Some(pred) => match pred {
- ty::PredicateKind::Trait(data) => {
- let trait_obligation = obligation.with(Binder::dummy(data));
+ ty::PredicateKind::Clause(ty::Clause::Trait(data)) => {
+ let trait_obligation = obligation.with(infcx.tcx, Binder::dummy(data));
self.process_trait_obligation(
obligation,
@@ -354,7 +315,7 @@ impl<'a, 'tcx> ObligationProcessor for FulfillProcessor<'a, 'tcx> {
)
}
- ty::PredicateKind::RegionOutlives(data) => {
+ ty::PredicateKind::Clause(ty::Clause::RegionOutlives(data)) => {
if infcx.considering_regions {
infcx.region_outlives_predicate(&obligation.cause, Binder::dummy(data));
}
@@ -362,15 +323,18 @@ impl<'a, 'tcx> ObligationProcessor for FulfillProcessor<'a, 'tcx> {
ProcessResult::Changed(vec![])
}
- ty::PredicateKind::TypeOutlives(ty::OutlivesPredicate(t_a, r_b)) => {
+ ty::PredicateKind::Clause(ty::Clause::TypeOutlives(ty::OutlivesPredicate(
+ t_a,
+ r_b,
+ ))) => {
if infcx.considering_regions {
infcx.register_region_obligation_with_cause(t_a, r_b, &obligation.cause);
}
ProcessResult::Changed(vec![])
}
- ty::PredicateKind::Projection(ref data) => {
- let project_obligation = obligation.with(Binder::dummy(*data));
+ ty::PredicateKind::Clause(ty::Clause::Projection(ref data)) => {
+ let project_obligation = obligation.with(infcx.tcx, Binder::dummy(*data));
self.process_projection_obligation(
obligation,
@@ -388,7 +352,7 @@ impl<'a, 'tcx> ObligationProcessor for FulfillProcessor<'a, 'tcx> {
}
ty::PredicateKind::ClosureKind(_, closure_substs, kind) => {
- match self.selcx.infcx().closure_kind(closure_substs) {
+ match self.selcx.infcx.closure_kind(closure_substs) {
Some(closure_kind) => {
if closure_kind.extends(kind) {
ProcessResult::Changed(vec![])
@@ -402,7 +366,7 @@ impl<'a, 'tcx> ObligationProcessor for FulfillProcessor<'a, 'tcx> {
ty::PredicateKind::WellFormed(arg) => {
match wf::obligations(
- self.selcx.infcx(),
+ self.selcx.infcx,
obligation.param_env,
obligation.cause.body_id,
obligation.recursion_depth + 1,
@@ -419,7 +383,7 @@ impl<'a, 'tcx> ObligationProcessor for FulfillProcessor<'a, 'tcx> {
}
ty::PredicateKind::Subtype(subtype) => {
- match self.selcx.infcx().subtype_predicate(
+ match self.selcx.infcx.subtype_predicate(
&obligation.cause,
obligation.param_env,
Binder::dummy(subtype),
@@ -443,7 +407,7 @@ impl<'a, 'tcx> ObligationProcessor for FulfillProcessor<'a, 'tcx> {
}
ty::PredicateKind::Coerce(coerce) => {
- match self.selcx.infcx().coerce_predicate(
+ match self.selcx.infcx.coerce_predicate(
&obligation.cause,
obligation.param_env,
Binder::dummy(coerce),
@@ -467,7 +431,7 @@ impl<'a, 'tcx> ObligationProcessor for FulfillProcessor<'a, 'tcx> {
ty::PredicateKind::ConstEvaluatable(uv) => {
match const_evaluatable::is_const_evaluatable(
- self.selcx.infcx(),
+ self.selcx.infcx,
uv,
obligation.param_env,
obligation.cause.span,
@@ -490,20 +454,47 @@ impl<'a, 'tcx> ObligationProcessor for FulfillProcessor<'a, 'tcx> {
}
ty::PredicateKind::ConstEquate(c1, c2) => {
+ let tcx = self.selcx.tcx();
assert!(
- self.selcx.tcx().features().generic_const_exprs,
+ tcx.features().generic_const_exprs,
"`ConstEquate` without a feature gate: {c1:?} {c2:?}",
);
- debug!(?c1, ?c2, "equating consts");
// FIXME: we probably should only try to unify abstract constants
// if the constants depend on generic parameters.
//
// Let's just see where this breaks :shrug:
- if let (ty::ConstKind::Unevaluated(a), ty::ConstKind::Unevaluated(b)) =
- (c1.kind(), c2.kind())
{
- if infcx.try_unify_abstract_consts(a, b, obligation.param_env) {
- return ProcessResult::Changed(vec![]);
+ let c1 = tcx.expand_abstract_consts(c1);
+ let c2 = tcx.expand_abstract_consts(c2);
+ debug!("equating consts:\nc1= {:?}\nc2= {:?}", c1, c2);
+
+ use rustc_hir::def::DefKind;
+ use ty::ConstKind::Unevaluated;
+ match (c1.kind(), c2.kind()) {
+ (Unevaluated(a), Unevaluated(b))
+ if a.def.did == b.def.did
+ && tcx.def_kind(a.def.did) == DefKind::AssocConst =>
+ {
+ if let Ok(new_obligations) = infcx
+ .at(&obligation.cause, obligation.param_env)
+ .trace(c1, c2)
+ .eq(a.substs, b.substs)
+ {
+ return ProcessResult::Changed(mk_pending(
+ new_obligations.into_obligations(),
+ ));
+ }
+ }
+ (_, Unevaluated(_)) | (Unevaluated(_), _) => (),
+ (_, _) => {
+ if let Ok(new_obligations) =
+ infcx.at(&obligation.cause, obligation.param_env).eq(c1, c2)
+ {
+ return ProcessResult::Changed(mk_pending(
+ new_obligations.into_obligations(),
+ ));
+ }
+ }
}
}
@@ -511,7 +502,7 @@ impl<'a, 'tcx> ObligationProcessor for FulfillProcessor<'a, 'tcx> {
let mut evaluate = |c: Const<'tcx>| {
if let ty::ConstKind::Unevaluated(unevaluated) = c.kind() {
- match self.selcx.infcx().try_const_eval_resolve(
+ match self.selcx.infcx.try_const_eval_resolve(
obligation.param_env,
unevaluated,
c.ty(),
@@ -539,11 +530,13 @@ impl<'a, 'tcx> ObligationProcessor for FulfillProcessor<'a, 'tcx> {
(Ok(c1), Ok(c2)) => {
match self
.selcx
- .infcx()
+ .infcx
.at(&obligation.cause, obligation.param_env)
.eq(c1, c2)
{
- Ok(_) => ProcessResult::Changed(vec![]),
+ Ok(inf_ok) => {
+ ProcessResult::Changed(mk_pending(inf_ok.into_obligations()))
+ }
Err(err) => ProcessResult::Error(
FulfillmentErrorCode::CodeConstEquateError(
ExpectedFound::new(true, c1, c2),
@@ -558,12 +551,6 @@ impl<'a, 'tcx> ObligationProcessor for FulfillProcessor<'a, 'tcx> {
NotConstEvaluatable::Error(reported),
)),
),
- (Err(ErrorHandled::Linted), _) | (_, Err(ErrorHandled::Linted)) => {
- span_bug!(
- obligation.cause.span(),
- "ConstEquate: const_eval_resolve returned an unexpected error"
- )
- }
(Err(ErrorHandled::TooGeneric), _) | (_, Err(ErrorHandled::TooGeneric)) => {
if c1.has_non_region_infer() || c2.has_non_region_infer() {
ProcessResult::Unchanged
@@ -578,6 +565,7 @@ impl<'a, 'tcx> ObligationProcessor for FulfillProcessor<'a, 'tcx> {
}
}
}
+ ty::PredicateKind::Ambiguous => ProcessResult::Unchanged,
ty::PredicateKind::TypeWellFormedFromEnv(..) => {
bug!("TypeWellFormedFromEnv is only used for Chalk")
}
@@ -612,7 +600,7 @@ impl<'a, 'tcx> FulfillProcessor<'a, 'tcx> {
trait_obligation: TraitObligation<'tcx>,
stalled_on: &mut Vec<TyOrConstInferVar<'tcx>>,
) -> ProcessResult<PendingPredicateObligation<'tcx>, FulfillmentErrorCode<'tcx>> {
- let infcx = self.selcx.infcx();
+ let infcx = self.selcx.infcx;
if obligation.predicate.is_global() {
// no type variables present, can use evaluation for better caching.
// FIXME: consider caching errors too.
@@ -670,7 +658,7 @@ impl<'a, 'tcx> FulfillProcessor<'a, 'tcx> {
if obligation.predicate.is_global() {
// no type variables present, can use evaluation for better caching.
// FIXME: consider caching errors too.
- if self.selcx.infcx().predicate_must_hold_considering_regions(obligation) {
+ if self.selcx.infcx.predicate_must_hold_considering_regions(obligation) {
if let Some(key) = ProjectionCacheKey::from_poly_projection_predicate(
&mut self.selcx,
project_obligation.predicate,
@@ -679,7 +667,7 @@ impl<'a, 'tcx> FulfillProcessor<'a, 'tcx> {
// evaluated all sub-obligations. We can therefore mark the 'root'
// obligation as complete, and skip evaluating sub-obligations.
self.selcx
- .infcx()
+ .infcx
.inner
.borrow_mut()
.projection_cache()
@@ -703,7 +691,7 @@ impl<'a, 'tcx> FulfillProcessor<'a, 'tcx> {
}
// Let the caller handle the recursion
ProjectAndUnifyResult::Recursive => ProcessResult::Changed(mk_pending(vec![
- project_obligation.with(project_obligation.predicate.to_predicate(tcx)),
+ project_obligation.with(tcx, project_obligation.predicate),
])),
ProjectAndUnifyResult::MismatchedProjectionTypes(e) => {
ProcessResult::Error(CodeProjectionError(e))
@@ -718,7 +706,7 @@ fn substs_infer_vars<'a, 'tcx>(
substs: ty::Binder<'tcx, SubstsRef<'tcx>>,
) -> impl Iterator<Item = TyOrConstInferVar<'tcx>> {
selcx
- .infcx()
+ .infcx
.resolve_vars_if_possible(substs)
.skip_binder() // ok because this check doesn't care about regions
.iter()
diff --git a/compiler/rustc_trait_selection/src/traits/misc.rs b/compiler/rustc_trait_selection/src/traits/misc.rs
index be603c609..b6ded4ce5 100644
--- a/compiler/rustc_trait_selection/src/traits/misc.rs
+++ b/compiler/rustc_trait_selection/src/traits/misc.rs
@@ -70,7 +70,7 @@ pub fn can_type_implement_copy<'tcx>(
}
}
Err(errors) => {
- infcx.err_ctxt().report_fulfillment_errors(&errors, None, false);
+ infcx.err_ctxt().report_fulfillment_errors(&errors, None);
}
};
}
diff --git a/compiler/rustc_trait_selection/src/traits/mod.rs b/compiler/rustc_trait_selection/src/traits/mod.rs
index 0bf54c096..ea4bf42c5 100644
--- a/compiler/rustc_trait_selection/src/traits/mod.rs
+++ b/compiler/rustc_trait_selection/src/traits/mod.rs
@@ -4,7 +4,6 @@
pub mod auto_trait;
mod chalk_fulfill;
-pub mod codegen;
mod coherence;
pub mod const_evaluatable;
mod engine;
@@ -12,7 +11,6 @@ pub mod error_reporting;
mod fulfill;
pub mod misc;
mod object_safety;
-mod on_unimplemented;
pub mod outlives_bounds;
mod project;
pub mod query;
@@ -21,9 +19,9 @@ mod select;
mod specialize;
mod structural_match;
mod util;
+mod vtable;
pub mod wf;
-use crate::errors::DumpVTableEntries;
use crate::infer::outlives::env::OutlivesEnvironment;
use crate::infer::{InferCtxt, TyCtxtInferExt};
use crate::traits::error_reporting::TypeErrCtxtExt as _;
@@ -31,16 +29,11 @@ use crate::traits::query::evaluate_obligation::InferCtxtExt as _;
use rustc_errors::ErrorGuaranteed;
use rustc_hir as hir;
use rustc_hir::def_id::DefId;
-use rustc_hir::lang_items::LangItem;
-use rustc_infer::traits::TraitEngineExt as _;
use rustc_middle::ty::fold::TypeFoldable;
use rustc_middle::ty::visit::TypeVisitable;
-use rustc_middle::ty::{
- self, DefIdTree, GenericParamDefKind, ToPredicate, Ty, TyCtxt, TypeSuperVisitable, VtblEntry,
-};
+use rustc_middle::ty::{self, DefIdTree, ToPredicate, Ty, TyCtxt, TypeSuperVisitable};
use rustc_middle::ty::{InternalSubsts, SubstsRef};
-use rustc_span::{sym, Span};
-use smallvec::SmallVec;
+use rustc_span::Span;
use std::fmt::Debug;
use std::ops::ControlFlow;
@@ -58,8 +51,7 @@ pub use self::object_safety::astconv_object_safety_violations;
pub use self::object_safety::is_vtable_safe_method;
pub use self::object_safety::MethodViolationCode;
pub use self::object_safety::ObjectSafetyViolation;
-pub use self::on_unimplemented::{OnUnimplementedDirective, OnUnimplementedNote};
-pub use self::project::{normalize, normalize_projection_type, normalize_to};
+pub use self::project::{normalize_projection_type, NormalizeExt};
pub use self::select::{EvaluationCache, SelectionCache, SelectionContext};
pub use self::select::{EvaluationResult, IntercrateAmbiguityCause, OverflowError};
pub use self::specialize::specialization_graph::FutureCompatOverlapError;
@@ -117,14 +109,12 @@ pub enum TraitQueryMode {
}
/// Creates predicate obligations from the generic bounds.
+#[instrument(level = "debug", skip(cause, param_env))]
pub fn predicates_for_generics<'tcx>(
cause: impl Fn(usize, Span) -> ObligationCause<'tcx>,
param_env: ty::ParamEnv<'tcx>,
generic_bounds: ty::InstantiatedPredicates<'tcx>,
) -> impl Iterator<Item = PredicateObligation<'tcx>> {
- let generic_bounds = generic_bounds;
- debug!("predicates_for_generics(generic_bounds={:?})", generic_bounds);
-
std::iter::zip(generic_bounds.predicates, generic_bounds.spans).enumerate().map(
move |(idx, (predicate, span))| Obligation {
cause: cause(idx, span),
@@ -147,64 +137,50 @@ pub fn type_known_to_meet_bound_modulo_regions<'tcx>(
def_id: DefId,
span: Span,
) -> bool {
- debug!(
- "type_known_to_meet_bound_modulo_regions(ty={:?}, bound={:?})",
- ty,
- infcx.tcx.def_path_str(def_id)
- );
+ let trait_ref = ty::Binder::dummy(infcx.tcx.mk_trait_ref(def_id, [ty]));
+ pred_known_to_hold_modulo_regions(infcx, param_env, trait_ref.without_const(), span)
+}
- let trait_ref =
- ty::Binder::dummy(ty::TraitRef { def_id, substs: infcx.tcx.mk_substs_trait(ty, &[]) });
+#[instrument(level = "debug", skip(infcx, param_env, span, pred), ret)]
+fn pred_known_to_hold_modulo_regions<'tcx>(
+ infcx: &InferCtxt<'tcx>,
+ param_env: ty::ParamEnv<'tcx>,
+ pred: impl ToPredicate<'tcx> + TypeVisitable<'tcx>,
+ span: Span,
+) -> bool {
+ let has_non_region_infer = pred.has_non_region_infer();
let obligation = Obligation {
param_env,
+ // We can use a dummy node-id here because we won't pay any mind
+ // to region obligations that arise (there shouldn't really be any
+ // anyhow).
cause: ObligationCause::misc(span, hir::CRATE_HIR_ID),
recursion_depth: 0,
- predicate: trait_ref.without_const().to_predicate(infcx.tcx),
+ predicate: pred.to_predicate(infcx.tcx),
};
let result = infcx.predicate_must_hold_modulo_regions(&obligation);
- debug!(
- "type_known_to_meet_ty={:?} bound={} => {:?}",
- ty,
- infcx.tcx.def_path_str(def_id),
- result
- );
+ debug!(?result);
- if result && ty.has_non_region_infer() {
+ if result && has_non_region_infer {
// Because of inference "guessing", selection can sometimes claim
// to succeed while the success requires a guess. To ensure
// this function's result remains infallible, we must confirm
// that guess. While imperfect, I believe this is sound.
- // We can use a dummy node-id here because we won't pay any mind
- // to region obligations that arise (there shouldn't really be any
- // anyhow).
- let cause = ObligationCause::misc(span, hir::CRATE_HIR_ID);
-
+ // FIXME(@lcnr): this function doesn't seem right.
// The handling of regions in this area of the code is terrible,
// see issue #29149. We should be able to improve on this with
// NLL.
- let errors = fully_solve_bound(infcx, cause, param_env, ty, def_id);
+ let errors = fully_solve_obligation(infcx, obligation);
// Note: we only assume something is `Copy` if we can
// *definitively* show that it implements `Copy`. Otherwise,
// assume it is move; linear is always ok.
match &errors[..] {
- [] => {
- debug!(
- "type_known_to_meet_bound_modulo_regions: ty={:?} bound={} success",
- ty,
- infcx.tcx.def_path_str(def_id)
- );
- true
- }
+ [] => true,
errors => {
- debug!(
- ?ty,
- bound = %infcx.tcx.def_path_str(def_id),
- ?errors,
- "type_known_to_meet_bound_modulo_regions"
- );
+ debug!(?errors);
false
}
}
@@ -238,7 +214,7 @@ fn do_normalize_predicates<'tcx>(
let predicates = match fully_normalize(&infcx, cause, elaborated_env, predicates) {
Ok(predicates) => predicates,
Err(errors) => {
- let reported = infcx.err_ctxt().report_fulfillment_errors(&errors, None, false);
+ let reported = infcx.err_ctxt().report_fulfillment_errors(&errors, None);
return Err(reported);
}
};
@@ -336,7 +312,10 @@ pub fn normalize_param_env_or_error<'tcx>(
// TypeOutlives predicates - these are normally used by regionck.
let outlives_predicates: Vec<_> = predicates
.drain_filter(|predicate| {
- matches!(predicate.kind().skip_binder(), ty::PredicateKind::TypeOutlives(..))
+ matches!(
+ predicate.kind().skip_binder(),
+ ty::PredicateKind::Clause(ty::Clause::TypeOutlives(..))
+ )
})
.collect();
@@ -390,6 +369,7 @@ pub fn normalize_param_env_or_error<'tcx>(
}
/// Normalize a type and process all resulting obligations, returning any errors
+#[instrument(skip_all)]
pub fn fully_normalize<'tcx, T>(
infcx: &InferCtxt<'tcx>,
cause: ObligationCause<'tcx>,
@@ -399,28 +379,18 @@ pub fn fully_normalize<'tcx, T>(
where
T: TypeFoldable<'tcx>,
{
- debug!("fully_normalize_with_fulfillcx(value={:?})", value);
- let selcx = &mut SelectionContext::new(infcx);
- let Normalized { value: normalized_value, obligations } =
- project::normalize(selcx, param_env, cause, value);
- debug!(
- "fully_normalize: normalized_value={:?} obligations={:?}",
- normalized_value, obligations
- );
-
- let mut fulfill_cx = FulfillmentContext::new();
- for obligation in obligations {
- fulfill_cx.register_predicate_obligation(infcx, obligation);
- }
-
- debug!("fully_normalize: select_all_or_error start");
- let errors = fulfill_cx.select_all_or_error(infcx);
+ let ocx = ObligationCtxt::new(infcx);
+ debug!(?value);
+ let normalized_value = ocx.normalize(&cause, param_env, value);
+ debug!(?normalized_value);
+ debug!("select_all_or_error start");
+ let errors = ocx.select_all_or_error();
if !errors.is_empty() {
return Err(errors);
}
- debug!("fully_normalize: select_all_or_error complete");
+ debug!("select_all_or_error complete");
let resolved_value = infcx.resolve_vars_if_possible(normalized_value);
- debug!("fully_normalize: resolved_value={:?}", resolved_value);
+ debug!(?resolved_value);
Ok(resolved_value)
}
@@ -430,9 +400,7 @@ pub fn fully_solve_obligation<'tcx>(
infcx: &InferCtxt<'tcx>,
obligation: PredicateObligation<'tcx>,
) -> Vec<FulfillmentError<'tcx>> {
- let mut engine = <dyn TraitEngine<'tcx>>::new(infcx.tcx);
- engine.register_predicate_obligation(infcx, obligation);
- engine.select_all_or_error(infcx)
+ fully_solve_obligations(infcx, [obligation])
}
/// Process a set of obligations (and any nested obligations that come from them)
@@ -441,9 +409,9 @@ pub fn fully_solve_obligations<'tcx>(
infcx: &InferCtxt<'tcx>,
obligations: impl IntoIterator<Item = PredicateObligation<'tcx>>,
) -> Vec<FulfillmentError<'tcx>> {
- let mut engine = <dyn TraitEngine<'tcx>>::new(infcx.tcx);
- engine.register_predicate_obligations(infcx, obligations);
- engine.select_all_or_error(infcx)
+ let ocx = ObligationCtxt::new(infcx);
+ ocx.register_obligations(obligations);
+ ocx.select_all_or_error()
}
/// Process a bound (and any nested obligations that come from it) to completion.
@@ -456,9 +424,16 @@ pub fn fully_solve_bound<'tcx>(
ty: Ty<'tcx>,
bound: DefId,
) -> Vec<FulfillmentError<'tcx>> {
- let mut engine = <dyn TraitEngine<'tcx>>::new(infcx.tcx);
- engine.register_bound(infcx, param_env, ty, bound, cause);
- engine.select_all_or_error(infcx)
+ let tcx = infcx.tcx;
+ let trait_ref = ty::TraitRef { def_id: bound, substs: tcx.mk_substs_trait(ty, []) };
+ let obligation = Obligation {
+ cause,
+ recursion_depth: 0,
+ param_env,
+ predicate: ty::Binder::dummy(trait_ref).without_const().to_predicate(tcx),
+ };
+
+ fully_solve_obligation(infcx, obligation)
}
/// Normalizes the predicates and checks whether they hold in an empty environment. If this
@@ -473,9 +448,9 @@ pub fn impossible_predicates<'tcx>(
let infcx = tcx.infer_ctxt().build();
let param_env = ty::ParamEnv::reveal_all();
let ocx = ObligationCtxt::new(&infcx);
- let predicates = ocx.normalize(ObligationCause::dummy(), param_env, predicates);
+ let predicates = ocx.normalize(&ObligationCause::dummy(), param_env, predicates);
for predicate in predicates {
- let obligation = Obligation::new(ObligationCause::dummy(), param_env, predicate);
+ let obligation = Obligation::new(tcx, ObligationCause::dummy(), param_env, predicate);
ocx.register_obligation(obligation);
}
let errors = ocx.select_all_or_error();
@@ -500,7 +475,7 @@ fn subst_and_check_impossible_predicates<'tcx>(
// associated items.
if let Some(trait_def_id) = tcx.trait_of_item(key.0) {
let trait_ref = ty::TraitRef::from_method(tcx, trait_def_id, key.1);
- predicates.push(ty::Binder::dummy(trait_ref).to_poly_trait_predicate().to_predicate(tcx));
+ predicates.push(ty::Binder::dummy(trait_ref).to_predicate(tcx));
}
predicates.retain(|predicate| !predicate.needs_subst());
@@ -565,6 +540,7 @@ fn is_impossible_method<'tcx>(
let predicates_for_trait = predicates.predicates.iter().filter_map(|(pred, span)| {
if pred.visit_with(&mut visitor).is_continue() {
Some(Obligation::new(
+ tcx,
ObligationCause::dummy_with_span(*span),
param_env,
ty::EarlyBinder(*pred).subst(tcx, impl_trait_ref.substs),
@@ -586,390 +562,14 @@ fn is_impossible_method<'tcx>(
false
}
-#[derive(Clone, Debug)]
-enum VtblSegment<'tcx> {
- MetadataDSA,
- TraitOwnEntries { trait_ref: ty::PolyTraitRef<'tcx>, emit_vptr: bool },
-}
-
-/// Prepare the segments for a vtable
-fn prepare_vtable_segments<'tcx, T>(
- tcx: TyCtxt<'tcx>,
- trait_ref: ty::PolyTraitRef<'tcx>,
- mut segment_visitor: impl FnMut(VtblSegment<'tcx>) -> ControlFlow<T>,
-) -> Option<T> {
- // The following constraints holds for the final arrangement.
- // 1. The whole virtual table of the first direct super trait is included as the
- // the prefix. If this trait doesn't have any super traits, then this step
- // consists of the dsa metadata.
- // 2. Then comes the proper pointer metadata(vptr) and all own methods for all
- // other super traits except those already included as part of the first
- // direct super trait virtual table.
- // 3. finally, the own methods of this trait.
-
- // This has the advantage that trait upcasting to the first direct super trait on each level
- // is zero cost, and to another trait includes only replacing the pointer with one level indirection,
- // while not using too much extra memory.
-
- // For a single inheritance relationship like this,
- // D --> C --> B --> A
- // The resulting vtable will consists of these segments:
- // DSA, A, B, C, D
-
- // For a multiple inheritance relationship like this,
- // D --> C --> A
- // \-> B
- // The resulting vtable will consists of these segments:
- // DSA, A, B, B-vptr, C, D
-
- // For a diamond inheritance relationship like this,
- // D --> B --> A
- // \-> C -/
- // The resulting vtable will consists of these segments:
- // DSA, A, B, C, C-vptr, D
-
- // For a more complex inheritance relationship like this:
- // O --> G --> C --> A
- // \ \ \-> B
- // | |-> F --> D
- // | \-> E
- // |-> N --> J --> H
- // \ \-> I
- // |-> M --> K
- // \-> L
- // The resulting vtable will consists of these segments:
- // DSA, A, B, B-vptr, C, D, D-vptr, E, E-vptr, F, F-vptr, G,
- // H, H-vptr, I, I-vptr, J, J-vptr, K, K-vptr, L, L-vptr, M, M-vptr,
- // N, N-vptr, O
-
- // emit dsa segment first.
- if let ControlFlow::Break(v) = (segment_visitor)(VtblSegment::MetadataDSA) {
- return Some(v);
- }
-
- let mut emit_vptr_on_new_entry = false;
- let mut visited = util::PredicateSet::new(tcx);
- let predicate = trait_ref.without_const().to_predicate(tcx);
- let mut stack: SmallVec<[(ty::PolyTraitRef<'tcx>, _, _); 5]> =
- smallvec![(trait_ref, emit_vptr_on_new_entry, None)];
- visited.insert(predicate);
-
- // the main traversal loop:
- // basically we want to cut the inheritance directed graph into a few non-overlapping slices of nodes
- // that each node is emitted after all its descendents have been emitted.
- // so we convert the directed graph into a tree by skipping all previously visited nodes using a visited set.
- // this is done on the fly.
- // Each loop run emits a slice - it starts by find a "childless" unvisited node, backtracking upwards, and it
- // stops after it finds a node that has a next-sibling node.
- // This next-sibling node will used as the starting point of next slice.
-
- // Example:
- // For a diamond inheritance relationship like this,
- // D#1 --> B#0 --> A#0
- // \-> C#1 -/
-
- // Starting point 0 stack [D]
- // Loop run #0: Stack after diving in is [D B A], A is "childless"
- // after this point, all newly visited nodes won't have a vtable that equals to a prefix of this one.
- // Loop run #0: Emitting the slice [B A] (in reverse order), B has a next-sibling node, so this slice stops here.
- // Loop run #0: Stack after exiting out is [D C], C is the next starting point.
- // Loop run #1: Stack after diving in is [D C], C is "childless", since its child A is skipped(already emitted).
- // Loop run #1: Emitting the slice [D C] (in reverse order). No one has a next-sibling node.
- // Loop run #1: Stack after exiting out is []. Now the function exits.
-
- loop {
- // dive deeper into the stack, recording the path
- 'diving_in: loop {
- if let Some((inner_most_trait_ref, _, _)) = stack.last() {
- let inner_most_trait_ref = *inner_most_trait_ref;
- let mut direct_super_traits_iter = tcx
- .super_predicates_of(inner_most_trait_ref.def_id())
- .predicates
- .into_iter()
- .filter_map(move |(pred, _)| {
- pred.subst_supertrait(tcx, &inner_most_trait_ref).to_opt_poly_trait_pred()
- });
-
- 'diving_in_skip_visited_traits: loop {
- if let Some(next_super_trait) = direct_super_traits_iter.next() {
- if visited.insert(next_super_trait.to_predicate(tcx)) {
- // We're throwing away potential constness of super traits here.
- // FIXME: handle ~const super traits
- let next_super_trait = next_super_trait.map_bound(|t| t.trait_ref);
- stack.push((
- next_super_trait,
- emit_vptr_on_new_entry,
- Some(direct_super_traits_iter),
- ));
- break 'diving_in_skip_visited_traits;
- } else {
- continue 'diving_in_skip_visited_traits;
- }
- } else {
- break 'diving_in;
- }
- }
- }
- }
-
- // Other than the left-most path, vptr should be emitted for each trait.
- emit_vptr_on_new_entry = true;
-
- // emit innermost item, move to next sibling and stop there if possible, otherwise jump to outer level.
- 'exiting_out: loop {
- if let Some((inner_most_trait_ref, emit_vptr, siblings_opt)) = stack.last_mut() {
- if let ControlFlow::Break(v) = (segment_visitor)(VtblSegment::TraitOwnEntries {
- trait_ref: *inner_most_trait_ref,
- emit_vptr: *emit_vptr,
- }) {
- return Some(v);
- }
-
- 'exiting_out_skip_visited_traits: loop {
- if let Some(siblings) = siblings_opt {
- if let Some(next_inner_most_trait_ref) = siblings.next() {
- if visited.insert(next_inner_most_trait_ref.to_predicate(tcx)) {
- // We're throwing away potential constness of super traits here.
- // FIXME: handle ~const super traits
- let next_inner_most_trait_ref =
- next_inner_most_trait_ref.map_bound(|t| t.trait_ref);
- *inner_most_trait_ref = next_inner_most_trait_ref;
- *emit_vptr = emit_vptr_on_new_entry;
- break 'exiting_out;
- } else {
- continue 'exiting_out_skip_visited_traits;
- }
- }
- }
- stack.pop();
- continue 'exiting_out;
- }
- }
- // all done
- return None;
- }
- }
-}
-
-fn dump_vtable_entries<'tcx>(
- tcx: TyCtxt<'tcx>,
- sp: Span,
- trait_ref: ty::PolyTraitRef<'tcx>,
- entries: &[VtblEntry<'tcx>],
-) {
- tcx.sess.emit_err(DumpVTableEntries {
- span: sp,
- trait_ref,
- entries: format!("{:#?}", entries),
- });
-}
-
-fn own_existential_vtable_entries<'tcx>(tcx: TyCtxt<'tcx>, trait_def_id: DefId) -> &'tcx [DefId] {
- let trait_methods = tcx
- .associated_items(trait_def_id)
- .in_definition_order()
- .filter(|item| item.kind == ty::AssocKind::Fn);
- // Now list each method's DefId (for within its trait).
- let own_entries = trait_methods.filter_map(move |trait_method| {
- debug!("own_existential_vtable_entry: trait_method={:?}", trait_method);
- let def_id = trait_method.def_id;
-
- // Some methods cannot be called on an object; skip those.
- if !is_vtable_safe_method(tcx, trait_def_id, &trait_method) {
- debug!("own_existential_vtable_entry: not vtable safe");
- return None;
- }
-
- Some(def_id)
- });
-
- tcx.arena.alloc_from_iter(own_entries.into_iter())
-}
-
-/// Given a trait `trait_ref`, iterates the vtable entries
-/// that come from `trait_ref`, including its supertraits.
-fn vtable_entries<'tcx>(
- tcx: TyCtxt<'tcx>,
- trait_ref: ty::PolyTraitRef<'tcx>,
-) -> &'tcx [VtblEntry<'tcx>] {
- debug!("vtable_entries({:?})", trait_ref);
-
- let mut entries = vec![];
-
- let vtable_segment_callback = |segment| -> ControlFlow<()> {
- match segment {
- VtblSegment::MetadataDSA => {
- entries.extend(TyCtxt::COMMON_VTABLE_ENTRIES);
- }
- VtblSegment::TraitOwnEntries { trait_ref, emit_vptr } => {
- let existential_trait_ref = trait_ref
- .map_bound(|trait_ref| ty::ExistentialTraitRef::erase_self_ty(tcx, trait_ref));
-
- // Lookup the shape of vtable for the trait.
- let own_existential_entries =
- tcx.own_existential_vtable_entries(existential_trait_ref.def_id());
-
- let own_entries = own_existential_entries.iter().copied().map(|def_id| {
- debug!("vtable_entries: trait_method={:?}", def_id);
-
- // The method may have some early-bound lifetimes; add regions for those.
- let substs = trait_ref.map_bound(|trait_ref| {
- InternalSubsts::for_item(tcx, def_id, |param, _| match param.kind {
- GenericParamDefKind::Lifetime => tcx.lifetimes.re_erased.into(),
- GenericParamDefKind::Type { .. }
- | GenericParamDefKind::Const { .. } => {
- trait_ref.substs[param.index as usize]
- }
- })
- });
-
- // The trait type may have higher-ranked lifetimes in it;
- // erase them if they appear, so that we get the type
- // at some particular call site.
- let substs = tcx
- .normalize_erasing_late_bound_regions(ty::ParamEnv::reveal_all(), substs);
-
- // It's possible that the method relies on where-clauses that
- // do not hold for this particular set of type parameters.
- // Note that this method could then never be called, so we
- // do not want to try and codegen it, in that case (see #23435).
- let predicates = tcx.predicates_of(def_id).instantiate_own(tcx, substs);
- if impossible_predicates(tcx, predicates.predicates) {
- debug!("vtable_entries: predicates do not hold");
- return VtblEntry::Vacant;
- }
-
- let instance = ty::Instance::resolve_for_vtable(
- tcx,
- ty::ParamEnv::reveal_all(),
- def_id,
- substs,
- )
- .expect("resolution failed during building vtable representation");
- VtblEntry::Method(instance)
- });
-
- entries.extend(own_entries);
-
- if emit_vptr {
- entries.push(VtblEntry::TraitVPtr(trait_ref));
- }
- }
- }
-
- ControlFlow::Continue(())
- };
-
- let _ = prepare_vtable_segments(tcx, trait_ref, vtable_segment_callback);
-
- if tcx.has_attr(trait_ref.def_id(), sym::rustc_dump_vtable) {
- let sp = tcx.def_span(trait_ref.def_id());
- dump_vtable_entries(tcx, sp, trait_ref, &entries);
- }
-
- tcx.arena.alloc_from_iter(entries.into_iter())
-}
-
-/// Find slot base for trait methods within vtable entries of another trait
-fn vtable_trait_first_method_offset<'tcx>(
- tcx: TyCtxt<'tcx>,
- key: (
- ty::PolyTraitRef<'tcx>, // trait_to_be_found
- ty::PolyTraitRef<'tcx>, // trait_owning_vtable
- ),
-) -> usize {
- let (trait_to_be_found, trait_owning_vtable) = key;
-
- // #90177
- let trait_to_be_found_erased = tcx.erase_regions(trait_to_be_found);
-
- let vtable_segment_callback = {
- let mut vtable_base = 0;
-
- move |segment| {
- match segment {
- VtblSegment::MetadataDSA => {
- vtable_base += TyCtxt::COMMON_VTABLE_ENTRIES.len();
- }
- VtblSegment::TraitOwnEntries { trait_ref, emit_vptr } => {
- if tcx.erase_regions(trait_ref) == trait_to_be_found_erased {
- return ControlFlow::Break(vtable_base);
- }
- vtable_base += util::count_own_vtable_entries(tcx, trait_ref);
- if emit_vptr {
- vtable_base += 1;
- }
- }
- }
- ControlFlow::Continue(())
- }
- };
-
- if let Some(vtable_base) =
- prepare_vtable_segments(tcx, trait_owning_vtable, vtable_segment_callback)
- {
- vtable_base
- } else {
- bug!("Failed to find info for expected trait in vtable");
- }
-}
-
-/// Find slot offset for trait vptr within vtable entries of another trait
-pub fn vtable_trait_upcasting_coercion_new_vptr_slot<'tcx>(
- tcx: TyCtxt<'tcx>,
- key: (
- Ty<'tcx>, // trait object type whose trait owning vtable
- Ty<'tcx>, // trait object for supertrait
- ),
-) -> Option<usize> {
- let (source, target) = key;
- assert!(matches!(&source.kind(), &ty::Dynamic(..)) && !source.needs_infer());
- assert!(matches!(&target.kind(), &ty::Dynamic(..)) && !target.needs_infer());
-
- // this has been typecked-before, so diagnostics is not really needed.
- let unsize_trait_did = tcx.require_lang_item(LangItem::Unsize, None);
-
- let trait_ref = ty::TraitRef {
- def_id: unsize_trait_did,
- substs: tcx.mk_substs_trait(source, &[target.into()]),
- };
- let obligation = Obligation::new(
- ObligationCause::dummy(),
- ty::ParamEnv::reveal_all(),
- ty::Binder::dummy(ty::TraitPredicate {
- trait_ref,
- constness: ty::BoundConstness::NotConst,
- polarity: ty::ImplPolarity::Positive,
- }),
- );
-
- let infcx = tcx.infer_ctxt().build();
- let mut selcx = SelectionContext::new(&infcx);
- let implsrc = selcx.select(&obligation).unwrap();
-
- let Some(ImplSource::TraitUpcasting(implsrc_traitcasting)) = implsrc else {
- bug!();
- };
-
- implsrc_traitcasting.vtable_vptr_slot
-}
-
pub fn provide(providers: &mut ty::query::Providers) {
object_safety::provide(providers);
- structural_match::provide(providers);
+ vtable::provide(providers);
*providers = ty::query::Providers {
specialization_graph_of: specialize::specialization_graph_provider,
specializes: specialize::specializes,
- codegen_select_candidate: codegen::codegen_select_candidate,
- own_existential_vtable_entries,
- vtable_entries,
- vtable_trait_upcasting_coercion_new_vptr_slot,
subst_and_check_impossible_predicates,
is_impossible_method,
- try_unify_abstract_consts: |tcx, param_env_and| {
- let (param_env, (a, b)) = param_env_and.into_parts();
- const_evaluatable::try_unify_abstract_consts(tcx, (a, b), param_env)
- },
..*providers
};
}
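
Annotation: the mod.rs diff above replaces hand-rolled `TraitEngine` setup (create an engine, register each obligation, call `select_all_or_error`) with helpers built on `ObligationCtxt`, and routes `fully_solve_obligation` through `fully_solve_obligations`. The sketch below shows that shape with toy `Obligation`, `FulfillmentError` and `ObligationCtxt` types standing in for the rustc ones; it is an illustration of the pattern, not the compiler's implementation.

    // Toy sketch: wrap "register everything, then solve" behind one helper.

    #[derive(Debug, Clone)]
    struct Obligation(&'static str);

    #[derive(Debug)]
    struct FulfillmentError(String);

    #[derive(Default)]
    struct ObligationCtxt {
        pending: Vec<Obligation>,
    }

    impl ObligationCtxt {
        fn register_obligations(&mut self, obligations: impl IntoIterator<Item = Obligation>) {
            self.pending.extend(obligations);
        }

        // Stand-in for `select_all_or_error`: here every obligation named "bad"
        // fails and everything else succeeds.
        fn select_all_or_error(self) -> Vec<FulfillmentError> {
            self.pending
                .into_iter()
                .filter(|ob| ob.0 == "bad")
                .map(|ob| FulfillmentError(format!("could not prove {:?}", ob)))
                .collect()
        }
    }

    // Shape of the new `fully_solve_obligations` helper: build the context,
    // hand it the whole iterator, and return whatever errors remain.
    fn fully_solve_obligations(
        obligations: impl IntoIterator<Item = Obligation>,
    ) -> Vec<FulfillmentError> {
        let mut ocx = ObligationCtxt::default();
        ocx.register_obligations(obligations);
        ocx.select_all_or_error()
    }

    fn main() {
        let errors = fully_solve_obligations([Obligation("ok"), Obligation("bad")]);
        assert_eq!(errors.len(), 1);
    }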
diff --git a/compiler/rustc_trait_selection/src/traits/object_safety.rs b/compiler/rustc_trait_selection/src/traits/object_safety.rs
index 0bb25a74d..a45749fe4 100644
--- a/compiler/rustc_trait_selection/src/traits/object_safety.rs
+++ b/compiler/rustc_trait_selection/src/traits/object_safety.rs
@@ -17,11 +17,10 @@ use hir::def::DefKind;
use rustc_errors::{DelayDm, FatalError, MultiSpan};
use rustc_hir as hir;
use rustc_hir::def_id::DefId;
-use rustc_middle::ty::abstract_const::{walk_abstract_const, AbstractConst};
+use rustc_middle::ty::subst::{GenericArg, InternalSubsts};
use rustc_middle::ty::{
self, EarlyBinder, Ty, TyCtxt, TypeSuperVisitable, TypeVisitable, TypeVisitor,
};
-use rustc_middle::ty::{GenericArg, InternalSubsts};
use rustc_middle::ty::{Predicate, ToPredicate};
use rustc_session::lint::builtin::WHERE_CLAUSES_OBJECT_SAFETY;
use rustc_span::symbol::Symbol;
@@ -288,11 +287,11 @@ fn predicate_references_self<'tcx>(
let self_ty = tcx.types.self_param;
let has_self_ty = |arg: &GenericArg<'tcx>| arg.walk().any(|arg| arg == self_ty.into());
match predicate.kind().skip_binder() {
- ty::PredicateKind::Trait(ref data) => {
+ ty::PredicateKind::Clause(ty::Clause::Trait(ref data)) => {
// In the case of a trait predicate, we can skip the "self" type.
if data.trait_ref.substs[1..].iter().any(has_self_ty) { Some(sp) } else { None }
}
- ty::PredicateKind::Projection(ref data) => {
+ ty::PredicateKind::Clause(ty::Clause::Projection(ref data)) => {
// And similarly for projections. This should be redundant with
// the previous check because any projection should have a
// matching `Trait` predicate with the same inputs, but we do
@@ -312,13 +311,14 @@ fn predicate_references_self<'tcx>(
}
ty::PredicateKind::WellFormed(..)
| ty::PredicateKind::ObjectSafe(..)
- | ty::PredicateKind::TypeOutlives(..)
- | ty::PredicateKind::RegionOutlives(..)
+ | ty::PredicateKind::Clause(ty::Clause::TypeOutlives(..))
+ | ty::PredicateKind::Clause(ty::Clause::RegionOutlives(..))
| ty::PredicateKind::ClosureKind(..)
| ty::PredicateKind::Subtype(..)
| ty::PredicateKind::Coerce(..)
| ty::PredicateKind::ConstEvaluatable(..)
| ty::PredicateKind::ConstEquate(..)
+ | ty::PredicateKind::Ambiguous
| ty::PredicateKind::TypeWellFormedFromEnv(..) => None,
}
}
@@ -337,19 +337,20 @@ fn generics_require_sized_self(tcx: TyCtxt<'_>, def_id: DefId) -> bool {
let predicates = predicates.instantiate_identity(tcx).predicates;
elaborate_predicates(tcx, predicates.into_iter()).any(|obligation| {
match obligation.predicate.kind().skip_binder() {
- ty::PredicateKind::Trait(ref trait_pred) => {
+ ty::PredicateKind::Clause(ty::Clause::Trait(ref trait_pred)) => {
trait_pred.def_id() == sized_def_id && trait_pred.self_ty().is_param(0)
}
- ty::PredicateKind::Projection(..)
+ ty::PredicateKind::Clause(ty::Clause::Projection(..))
| ty::PredicateKind::Subtype(..)
| ty::PredicateKind::Coerce(..)
- | ty::PredicateKind::RegionOutlives(..)
+ | ty::PredicateKind::Clause(ty::Clause::RegionOutlives(..))
| ty::PredicateKind::WellFormed(..)
| ty::PredicateKind::ObjectSafe(..)
| ty::PredicateKind::ClosureKind(..)
- | ty::PredicateKind::TypeOutlives(..)
+ | ty::PredicateKind::Clause(ty::Clause::TypeOutlives(..))
| ty::PredicateKind::ConstEvaluatable(..)
| ty::PredicateKind::ConstEquate(..)
+ | ty::PredicateKind::Ambiguous
| ty::PredicateKind::TypeWellFormedFromEnv(..) => false,
}
})
@@ -375,6 +376,7 @@ fn object_safety_violation_for_method(
let span = match (&v, node) {
(MethodViolationCode::ReferencesSelfInput(Some(span)), _) => *span,
(MethodViolationCode::UndispatchableReceiver(Some(span)), _) => *span,
+ (MethodViolationCode::ReferencesImplTraitInTrait(span), _) => *span,
(MethodViolationCode::ReferencesSelfOutput, Some(node)) => {
node.fn_decl().map_or(method.ident(tcx).span, |decl| decl.output.span())
}
@@ -437,8 +439,8 @@ fn virtual_call_violation_for_method<'tcx>(
if contains_illegal_self_type_reference(tcx, trait_def_id, sig.output()) {
return Some(MethodViolationCode::ReferencesSelfOutput);
}
- if contains_illegal_impl_trait_in_trait(tcx, sig.output()) {
- return Some(MethodViolationCode::ReferencesImplTraitInTrait);
+ if let Some(code) = contains_illegal_impl_trait_in_trait(tcx, method.def_id, sig.output()) {
+ return Some(code);
}
// We can't monomorphize things like `fn foo<A>(...)`.
@@ -684,10 +686,9 @@ fn receiver_is_dispatchable<'tcx>(
let param_env = tcx.param_env(method.def_id);
// Self: Unsize<U>
- let unsize_predicate = ty::Binder::dummy(ty::TraitRef {
- def_id: unsize_did,
- substs: tcx.mk_substs_trait(tcx.types.self_param, &[unsized_self_ty.into()]),
- })
+ let unsize_predicate = ty::Binder::dummy(
+ tcx.mk_trait_ref(unsize_did, [tcx.types.self_param, unsized_self_ty]),
+ )
.without_const()
.to_predicate(tcx);
@@ -719,14 +720,11 @@ fn receiver_is_dispatchable<'tcx>(
// Receiver: DispatchFromDyn<Receiver[Self => U]>
let obligation = {
- let predicate = ty::Binder::dummy(ty::TraitRef {
- def_id: dispatch_from_dyn_did,
- substs: tcx.mk_substs_trait(receiver_ty, &[unsized_receiver_ty.into()]),
- })
- .without_const()
- .to_predicate(tcx);
+ let predicate = ty::Binder::dummy(
+ tcx.mk_trait_ref(dispatch_from_dyn_did, [receiver_ty, unsized_receiver_ty]),
+ );
- Obligation::new(ObligationCause::dummy(), param_env, predicate)
+ Obligation::new(tcx, ObligationCause::dummy(), param_env, predicate)
};
let infcx = tcx.infer_ctxt().build();
@@ -838,23 +836,9 @@ fn contains_illegal_self_type_reference<'tcx, T: TypeVisitable<'tcx>>(
}
fn visit_const(&mut self, ct: ty::Const<'tcx>) -> ControlFlow<Self::BreakTy> {
- // Constants can only influence object safety if they reference `Self`.
+ // Constants can only influence object safety if they are generic and reference `Self`.
// This is only possible for unevaluated constants, so we walk these here.
- //
- // If `AbstractConst::from_const` returned an error we already failed compilation
- // so we don't have to emit an additional error here.
- use rustc_middle::ty::abstract_const::Node;
- if let Ok(Some(ct)) = AbstractConst::from_const(self.tcx, ct) {
- walk_abstract_const(self.tcx, ct, |node| match node.root(self.tcx) {
- Node::Leaf(leaf) => self.visit_const(leaf),
- Node::Cast(_, _, ty) => self.visit_ty(ty),
- Node::Binop(..) | Node::UnaryOp(..) | Node::FunctionCall(_, _) => {
- ControlFlow::CONTINUE
- }
- })
- } else {
- ct.super_visit_with(self)
- }
+ self.tcx.expand_abstract_consts(ct).super_visit_with(self)
}
}
@@ -865,16 +849,24 @@ fn contains_illegal_self_type_reference<'tcx, T: TypeVisitable<'tcx>>(
pub fn contains_illegal_impl_trait_in_trait<'tcx>(
tcx: TyCtxt<'tcx>,
+ fn_def_id: DefId,
ty: ty::Binder<'tcx, Ty<'tcx>>,
-) -> bool {
+) -> Option<MethodViolationCode> {
+ // This would be caught below, but rendering the error as a separate
+ // `async-specific` message is better.
+ if tcx.asyncness(fn_def_id).is_async() {
+ return Some(MethodViolationCode::AsyncFn);
+ }
+
// FIXME(RPITIT): Perhaps we should use a visitor here?
- ty.skip_binder().walk().any(|arg| {
+ ty.skip_binder().walk().find_map(|arg| {
if let ty::GenericArgKind::Type(ty) = arg.unpack()
&& let ty::Projection(proj) = ty.kind()
+ && tcx.def_kind(proj.item_def_id) == DefKind::ImplTraitPlaceholder
{
- tcx.def_kind(proj.item_def_id) == DefKind::ImplTraitPlaceholder
+ Some(MethodViolationCode::ReferencesImplTraitInTrait(tcx.def_span(proj.item_def_id)))
} else {
- false
+ None
}
})
}
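
Annotation: in object_safety.rs, `contains_illegal_impl_trait_in_trait` changes from answering a yes/no question (`walk().any(..)` returning `bool`) to returning the first offending item together with its span (`walk().find_map(..)` returning `Option<MethodViolationCode>`). A small standalone sketch of that `any` to `find_map` move, with a `usize` standing in for the span and string slices standing in for the walked types:

    // Sketch of the bool -> Option<..> change: `find_map` returns the first
    // violation with its payload instead of just reporting that one exists.

    #[derive(Debug, PartialEq)]
    enum MethodViolationCode {
        // Toy "span": the index of the offending argument.
        ReferencesImplTraitInTrait(usize),
    }

    // Toy stand-in for walking the types in a signature.
    fn check(args: &[&str]) -> Option<MethodViolationCode> {
        args.iter().enumerate().find_map(|(idx, arg)| {
            if *arg == "impl Trait" {
                Some(MethodViolationCode::ReferencesImplTraitInTrait(idx))
            } else {
                None
            }
        })
    }

    fn main() {
        assert_eq!(check(&["i32", "String"]), None);
        assert_eq!(
            check(&["i32", "impl Trait"]),
            Some(MethodViolationCode::ReferencesImplTraitInTrait(1))
        );
    }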
diff --git a/compiler/rustc_trait_selection/src/traits/on_unimplemented.rs b/compiler/rustc_trait_selection/src/traits/on_unimplemented.rs
deleted file mode 100644
index 4a4f34b76..000000000
--- a/compiler/rustc_trait_selection/src/traits/on_unimplemented.rs
+++ /dev/null
@@ -1,392 +0,0 @@
-use rustc_ast::{MetaItem, NestedMetaItem};
-use rustc_attr as attr;
-use rustc_data_structures::fx::FxHashMap;
-use rustc_errors::{struct_span_err, ErrorGuaranteed};
-use rustc_hir::def_id::DefId;
-use rustc_middle::ty::{self, GenericParamDefKind, TyCtxt};
-use rustc_parse_format::{ParseMode, Parser, Piece, Position};
-use rustc_span::symbol::{kw, sym, Symbol};
-use rustc_span::{Span, DUMMY_SP};
-
-use crate::errors::{
- EmptyOnClauseInOnUnimplemented, InvalidOnClauseInOnUnimplemented, NoValueInOnUnimplemented,
-};
-
-#[derive(Clone, Debug)]
-pub struct OnUnimplementedFormatString(Symbol);
-
-#[derive(Debug)]
-pub struct OnUnimplementedDirective {
- pub condition: Option<MetaItem>,
- pub subcommands: Vec<OnUnimplementedDirective>,
- pub message: Option<OnUnimplementedFormatString>,
- pub label: Option<OnUnimplementedFormatString>,
- pub note: Option<OnUnimplementedFormatString>,
- pub parent_label: Option<OnUnimplementedFormatString>,
- pub append_const_msg: Option<Option<Symbol>>,
-}
-
-#[derive(Default)]
-pub struct OnUnimplementedNote {
- pub message: Option<String>,
- pub label: Option<String>,
- pub note: Option<String>,
- pub parent_label: Option<String>,
- /// Append a message for `~const Trait` errors. `None` means not requested and
- /// should fallback to a generic message, `Some(None)` suggests using the default
- /// appended message, `Some(Some(s))` suggests use the `s` message instead of the
- /// default one..
- pub append_const_msg: Option<Option<Symbol>>,
-}
-
-impl<'tcx> OnUnimplementedDirective {
- fn parse(
- tcx: TyCtxt<'tcx>,
- item_def_id: DefId,
- items: &[NestedMetaItem],
- span: Span,
- is_root: bool,
- ) -> Result<Self, ErrorGuaranteed> {
- let mut errored = None;
- let mut item_iter = items.iter();
-
- let parse_value = |value_str| {
- OnUnimplementedFormatString::try_parse(tcx, item_def_id, value_str, span).map(Some)
- };
-
- let condition = if is_root {
- None
- } else {
- let cond = item_iter
- .next()
- .ok_or_else(|| tcx.sess.emit_err(EmptyOnClauseInOnUnimplemented { span }))?
- .meta_item()
- .ok_or_else(|| tcx.sess.emit_err(InvalidOnClauseInOnUnimplemented { span }))?;
- attr::eval_condition(cond, &tcx.sess.parse_sess, Some(tcx.features()), &mut |cfg| {
- if let Some(value) = cfg.value && let Err(guar) = parse_value(value) {
- errored = Some(guar);
- }
- true
- });
- Some(cond.clone())
- };
-
- let mut message = None;
- let mut label = None;
- let mut note = None;
- let mut parent_label = None;
- let mut subcommands = vec![];
- let mut append_const_msg = None;
-
- for item in item_iter {
- if item.has_name(sym::message) && message.is_none() {
- if let Some(message_) = item.value_str() {
- message = parse_value(message_)?;
- continue;
- }
- } else if item.has_name(sym::label) && label.is_none() {
- if let Some(label_) = item.value_str() {
- label = parse_value(label_)?;
- continue;
- }
- } else if item.has_name(sym::note) && note.is_none() {
- if let Some(note_) = item.value_str() {
- note = parse_value(note_)?;
- continue;
- }
- } else if item.has_name(sym::parent_label) && parent_label.is_none() {
- if let Some(parent_label_) = item.value_str() {
- parent_label = parse_value(parent_label_)?;
- continue;
- }
- } else if item.has_name(sym::on)
- && is_root
- && message.is_none()
- && label.is_none()
- && note.is_none()
- {
- if let Some(items) = item.meta_item_list() {
- match Self::parse(tcx, item_def_id, &items, item.span(), false) {
- Ok(subcommand) => subcommands.push(subcommand),
- Err(reported) => errored = Some(reported),
- };
- continue;
- }
- } else if item.has_name(sym::append_const_msg) && append_const_msg.is_none() {
- if let Some(msg) = item.value_str() {
- append_const_msg = Some(Some(msg));
- continue;
- } else if item.is_word() {
- append_const_msg = Some(None);
- continue;
- }
- }
-
- // nothing found
- tcx.sess.emit_err(NoValueInOnUnimplemented { span: item.span() });
- }
-
- if let Some(reported) = errored {
- Err(reported)
- } else {
- Ok(OnUnimplementedDirective {
- condition,
- subcommands,
- message,
- label,
- note,
- parent_label,
- append_const_msg,
- })
- }
- }
-
- pub fn of_item(tcx: TyCtxt<'tcx>, item_def_id: DefId) -> Result<Option<Self>, ErrorGuaranteed> {
- let Some(attr) = tcx.get_attr(item_def_id, sym::rustc_on_unimplemented) else {
- return Ok(None);
- };
-
- let result = if let Some(items) = attr.meta_item_list() {
- Self::parse(tcx, item_def_id, &items, attr.span, true).map(Some)
- } else if let Some(value) = attr.value_str() {
- Ok(Some(OnUnimplementedDirective {
- condition: None,
- message: None,
- subcommands: vec![],
- label: Some(OnUnimplementedFormatString::try_parse(
- tcx,
- item_def_id,
- value,
- attr.span,
- )?),
- note: None,
- parent_label: None,
- append_const_msg: None,
- }))
- } else {
- let reported =
- tcx.sess.delay_span_bug(DUMMY_SP, "of_item: neither meta_item_list nor value_str");
- return Err(reported);
- };
- debug!("of_item({:?}) = {:?}", item_def_id, result);
- result
- }
-
- pub fn evaluate(
- &self,
- tcx: TyCtxt<'tcx>,
- trait_ref: ty::TraitRef<'tcx>,
- options: &[(Symbol, Option<String>)],
- ) -> OnUnimplementedNote {
- let mut message = None;
- let mut label = None;
- let mut note = None;
- let mut parent_label = None;
- let mut append_const_msg = None;
- info!("evaluate({:?}, trait_ref={:?}, options={:?})", self, trait_ref, options);
-
- let options_map: FxHashMap<Symbol, String> =
- options.iter().filter_map(|(k, v)| v.as_ref().map(|v| (*k, v.to_owned()))).collect();
-
- for command in self.subcommands.iter().chain(Some(self)).rev() {
- if let Some(ref condition) = command.condition && !attr::eval_condition(
- condition,
- &tcx.sess.parse_sess,
- Some(tcx.features()),
- &mut |cfg| {
- let value = cfg.value.map(|v| {
- OnUnimplementedFormatString(v).format(tcx, trait_ref, &options_map)
- });
-
- options.contains(&(cfg.name, value))
- },
- ) {
- debug!("evaluate: skipping {:?} due to condition", command);
- continue;
- }
- debug!("evaluate: {:?} succeeded", command);
- if let Some(ref message_) = command.message {
- message = Some(message_.clone());
- }
-
- if let Some(ref label_) = command.label {
- label = Some(label_.clone());
- }
-
- if let Some(ref note_) = command.note {
- note = Some(note_.clone());
- }
-
- if let Some(ref parent_label_) = command.parent_label {
- parent_label = Some(parent_label_.clone());
- }
-
- append_const_msg = command.append_const_msg;
- }
-
- OnUnimplementedNote {
- label: label.map(|l| l.format(tcx, trait_ref, &options_map)),
- message: message.map(|m| m.format(tcx, trait_ref, &options_map)),
- note: note.map(|n| n.format(tcx, trait_ref, &options_map)),
- parent_label: parent_label.map(|e_s| e_s.format(tcx, trait_ref, &options_map)),
- append_const_msg,
- }
- }
-}
-
-impl<'tcx> OnUnimplementedFormatString {
- fn try_parse(
- tcx: TyCtxt<'tcx>,
- item_def_id: DefId,
- from: Symbol,
- err_sp: Span,
- ) -> Result<Self, ErrorGuaranteed> {
- let result = OnUnimplementedFormatString(from);
- result.verify(tcx, item_def_id, err_sp)?;
- Ok(result)
- }
-
- fn verify(
- &self,
- tcx: TyCtxt<'tcx>,
- item_def_id: DefId,
- span: Span,
- ) -> Result<(), ErrorGuaranteed> {
- let trait_def_id = if tcx.is_trait(item_def_id) {
- item_def_id
- } else {
- tcx.trait_id_of_impl(item_def_id)
- .expect("expected `on_unimplemented` to correspond to a trait")
- };
- let trait_name = tcx.item_name(trait_def_id);
- let generics = tcx.generics_of(item_def_id);
- let s = self.0.as_str();
- let parser = Parser::new(s, None, None, false, ParseMode::Format);
- let mut result = Ok(());
- for token in parser {
- match token {
- Piece::String(_) => (), // Normal string, no need to check it
- Piece::NextArgument(a) => match a.position {
- Position::ArgumentNamed(s) => {
- match Symbol::intern(s) {
- // `{Self}` is allowed
- kw::SelfUpper => (),
- // `{ThisTraitsName}` is allowed
- s if s == trait_name => (),
- // `{from_method}` is allowed
- sym::from_method => (),
- // `{from_desugaring}` is allowed
- sym::from_desugaring => (),
- // `{ItemContext}` is allowed
- sym::ItemContext => (),
- // `{integral}` and `{integer}` and `{float}` are allowed
- sym::integral | sym::integer_ | sym::float => (),
- // So is `{A}` if A is a type parameter
- s => match generics.params.iter().find(|param| param.name == s) {
- Some(_) => (),
- None => {
- let reported = struct_span_err!(
- tcx.sess,
- span,
- E0230,
- "there is no parameter `{}` on {}",
- s,
- if trait_def_id == item_def_id {
- format!("trait `{}`", trait_name)
- } else {
- "impl".to_string()
- }
- )
- .emit();
- result = Err(reported);
- }
- },
- }
- }
- // `{:1}` and `{}` are not to be used
- Position::ArgumentIs(..) | Position::ArgumentImplicitlyIs(_) => {
- let reported = struct_span_err!(
- tcx.sess,
- span,
- E0231,
- "only named substitution parameters are allowed"
- )
- .emit();
- result = Err(reported);
- }
- },
- }
- }
-
- result
- }
-
- pub fn format(
- &self,
- tcx: TyCtxt<'tcx>,
- trait_ref: ty::TraitRef<'tcx>,
- options: &FxHashMap<Symbol, String>,
- ) -> String {
- let name = tcx.item_name(trait_ref.def_id);
- let trait_str = tcx.def_path_str(trait_ref.def_id);
- let generics = tcx.generics_of(trait_ref.def_id);
- let generic_map = generics
- .params
- .iter()
- .filter_map(|param| {
- let value = match param.kind {
- GenericParamDefKind::Type { .. } | GenericParamDefKind::Const { .. } => {
- trait_ref.substs[param.index as usize].to_string()
- }
- GenericParamDefKind::Lifetime => return None,
- };
- let name = param.name;
- Some((name, value))
- })
- .collect::<FxHashMap<Symbol, String>>();
- let empty_string = String::new();
-
- let s = self.0.as_str();
- let parser = Parser::new(s, None, None, false, ParseMode::Format);
- let item_context = (options.get(&sym::ItemContext)).unwrap_or(&empty_string);
- parser
- .map(|p| match p {
- Piece::String(s) => s,
- Piece::NextArgument(a) => match a.position {
- Position::ArgumentNamed(s) => {
- let s = Symbol::intern(s);
- match generic_map.get(&s) {
- Some(val) => val,
- None if s == name => &trait_str,
- None => {
- if let Some(val) = options.get(&s) {
- val
- } else if s == sym::from_desugaring || s == sym::from_method {
- // don't break messages using these two arguments incorrectly
- &empty_string
- } else if s == sym::ItemContext {
- &item_context
- } else if s == sym::integral {
- "{integral}"
- } else if s == sym::integer_ {
- "{integer}"
- } else if s == sym::float {
- "{float}"
- } else {
- bug!(
- "broken on_unimplemented {:?} for {:?}: \
- no argument matching {:?}",
- self.0,
- trait_ref,
- s
- )
- }
- }
- }
- }
- _ => bug!("broken on_unimplemented {:?} - bad format arg", self.0),
- },
- })
- .collect()
- }
-}
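
Annotation: on_unimplemented.rs is deleted here (its `mod` declaration and re-exports were already dropped in the mod.rs hunks above). For readers skimming the removed code, the core of `OnUnimplementedDirective::evaluate` was: walk the directives, skip those whose condition does not match the current options, and let each matching directive override the note fields collected so far. The toy sketch below shows only that override-on-match idea; it does not reproduce the exact precedence order or condition grammar of the removed code, and all names in it are illustrative.

    // Toy sketch of "matching directives override fields"; not rustc's types.

    #[derive(Default, Debug, PartialEq)]
    struct Note {
        message: Option<String>,
        label: Option<String>,
    }

    struct Directive {
        condition: Option<&'static str>, // must appear in `options` to apply
        message: Option<&'static str>,
        label: Option<&'static str>,
    }

    fn evaluate(directives: &[Directive], options: &[&str]) -> Note {
        let mut note = Note::default();
        for d in directives {
            if let Some(cond) = d.condition {
                if !options.contains(&cond) {
                    continue; // condition not met, skip this directive
                }
            }
            // A matching directive overrides any field it sets.
            if let Some(m) = d.message {
                note.message = Some(m.to_string());
            }
            if let Some(l) = d.label {
                note.label = Some(l.to_string());
            }
        }
        note
    }

    fn main() {
        let directives = [
            Directive { condition: None, message: Some("generic message"), label: None },
            Directive {
                condition: Some("from_desugaring"),
                message: Some("desugaring-specific"),
                label: Some("here"),
            },
        ];
        let note = evaluate(&directives, &["from_desugaring"]);
        assert_eq!(note.message.as_deref(), Some("desugaring-specific"));
        assert_eq!(note.label.as_deref(), Some("here"));
    }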
diff --git a/compiler/rustc_trait_selection/src/traits/outlives_bounds.rs b/compiler/rustc_trait_selection/src/traits/outlives_bounds.rs
index 108dae092..e1092a788 100644
--- a/compiler/rustc_trait_selection/src/traits/outlives_bounds.rs
+++ b/compiler/rustc_trait_selection/src/traits/outlives_bounds.rs
@@ -1,8 +1,8 @@
use crate::infer::InferCtxt;
use crate::traits::query::type_op::{self, TypeOp, TypeOpOutput};
use crate::traits::query::NoSolution;
-use crate::traits::{ObligationCause, TraitEngine, TraitEngineExt};
-use rustc_data_structures::fx::FxHashSet;
+use crate::traits::ObligationCause;
+use rustc_data_structures::fx::FxIndexSet;
use rustc_hir as hir;
use rustc_hir::HirId;
use rustc_middle::ty::{self, ParamEnv, Ty};
@@ -22,7 +22,7 @@ pub trait InferCtxtExt<'a, 'tcx> {
&'a self,
param_env: ty::ParamEnv<'tcx>,
body_id: hir::HirId,
- tys: FxHashSet<Ty<'tcx>>,
+ tys: FxIndexSet<Ty<'tcx>>,
) -> Bounds<'a, 'tcx>;
}
@@ -74,20 +74,20 @@ impl<'a, 'tcx: 'a> InferCtxtExt<'a, 'tcx> for InferCtxt<'tcx> {
debug!(?constraints);
// Instantiation may have produced new inference variables and constraints on those
// variables. Process these constraints.
- let mut fulfill_cx = <dyn TraitEngine<'tcx>>::new(self.tcx);
let cause = ObligationCause::misc(span, body_id);
- for &constraint in &constraints.outlives {
- let obligation = self.query_outlives_constraint_to_obligation(
- constraint,
- cause.clone(),
- param_env,
- );
- fulfill_cx.register_predicate_obligation(self, obligation);
- }
+ let errors = super::fully_solve_obligations(
+ self,
+ constraints.outlives.iter().map(|constraint| {
+ self.query_outlives_constraint_to_obligation(
+ *constraint,
+ cause.clone(),
+ param_env,
+ )
+ }),
+ );
if !constraints.member_constraints.is_empty() {
span_bug!(span, "{:#?}", constraints.member_constraints);
}
- let errors = fulfill_cx.select_all_or_error(self);
if !errors.is_empty() {
self.tcx.sess.delay_span_bug(
span,
@@ -103,7 +103,7 @@ impl<'a, 'tcx: 'a> InferCtxtExt<'a, 'tcx> for InferCtxt<'tcx> {
&'a self,
param_env: ParamEnv<'tcx>,
body_id: HirId,
- tys: FxHashSet<Ty<'tcx>>,
+ tys: FxIndexSet<Ty<'tcx>>,
) -> Bounds<'a, 'tcx> {
tys.into_iter()
.map(move |ty| {
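
Annotation: the outlives_bounds.rs change does two things: the `InferCtxtExt` method now takes `FxIndexSet` instead of `FxHashSet`, and the manual fulfillment loop is replaced with `fully_solve_obligations` (see the sketch after the mod.rs diff). The set swap is about deterministic iteration order. The sketch below uses a hand-rolled insertion-ordered set over std types as a stand-in for `FxIndexSet` (which, as far as I know, is an insertion-ordered set), just to show the property that matters.

    // Sketch: an insertion-ordered set yields elements in a stable order,
    // unlike a plain hash set whose iteration order is unspecified. The tiny
    // `IndexSet` here is a stand-in built only on std.

    use std::collections::HashSet;

    // Minimal insertion-ordered set: a Vec for order plus a HashSet for membership.
    struct IndexSet<T> {
        order: Vec<T>,
        seen: HashSet<T>,
    }

    impl<T: std::hash::Hash + Eq + Clone> IndexSet<T> {
        fn new() -> Self {
            Self { order: Vec::new(), seen: HashSet::new() }
        }
        fn insert(&mut self, value: T) {
            if self.seen.insert(value.clone()) {
                self.order.push(value);
            }
        }
        fn iter(&self) -> impl Iterator<Item = &T> {
            self.order.iter()
        }
    }

    fn main() {
        let mut tys = IndexSet::new();
        for ty in ["&'a str", "Vec<T>", "u32", "&'a str"] {
            tys.insert(ty);
        }
        // Always iterates in insertion order, with the duplicate collapsed.
        let collected: Vec<_> = tys.iter().copied().collect();
        assert_eq!(collected, ["&'a str", "Vec<T>", "u32"]);
    }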
diff --git a/compiler/rustc_trait_selection/src/traits/project.rs b/compiler/rustc_trait_selection/src/traits/project.rs
index e4284b9d3..5789754e4 100644
--- a/compiler/rustc_trait_selection/src/traits/project.rs
+++ b/compiler/rustc_trait_selection/src/traits/project.rs
@@ -11,8 +11,8 @@ use super::Selection;
use super::SelectionContext;
use super::SelectionError;
use super::{
- ImplSourceClosureData, ImplSourceDiscriminantKindData, ImplSourceFnPointerData,
- ImplSourceGeneratorData, ImplSourcePointeeData, ImplSourceUserDefinedData,
+ ImplSourceClosureData, ImplSourceFnPointerData, ImplSourceFutureData, ImplSourceGeneratorData,
+ ImplSourceUserDefinedData,
};
use super::{Normalized, NormalizedTy, ProjectionCacheEntry, ProjectionCacheKey};
@@ -27,7 +27,9 @@ use rustc_errors::ErrorGuaranteed;
use rustc_hir::def::DefKind;
use rustc_hir::def_id::DefId;
use rustc_hir::lang_items::LangItem;
+use rustc_infer::infer::at::At;
use rustc_infer::infer::resolve::OpportunisticRegionResolver;
+use rustc_infer::traits::ImplSourceBuiltinData;
use rustc_middle::traits::select::OverflowError;
use rustc_middle::ty::fold::{TypeFoldable, TypeFolder, TypeSuperFoldable};
use rustc_middle::ty::visit::{MaxUniverse, TypeVisitable};
@@ -47,6 +49,23 @@ pub type ProjectionTyObligation<'tcx> = Obligation<'tcx, ty::ProjectionTy<'tcx>>
pub(super) struct InProgress;
+pub trait NormalizeExt<'tcx> {
+ /// Normalize a value using the `AssocTypeNormalizer`.
+ ///
+ /// This normalization should be used when the type contains inference variables or the
+ /// projection may be fallible.
+ fn normalize<T: TypeFoldable<'tcx>>(&self, t: T) -> InferOk<'tcx, T>;
+}
+
+impl<'tcx> NormalizeExt<'tcx> for At<'_, 'tcx> {
+ fn normalize<T: TypeFoldable<'tcx>>(&self, value: T) -> InferOk<'tcx, T> {
+ let mut selcx = SelectionContext::new(self.infcx);
+ let Normalized { value, obligations } =
+ normalize_with_depth(&mut selcx, self.param_env, self.cause.clone(), 0, value);
+ InferOk { value, obligations }
+ }
+}
+
/// When attempting to resolve `<T as TraitRef>::Name` ...
#[derive(Debug)]
pub enum ProjectionError<'tcx> {
@@ -193,14 +212,14 @@ pub(super) fn poly_project_and_unify_type<'cx, 'tcx>(
selcx: &mut SelectionContext<'cx, 'tcx>,
obligation: &PolyProjectionObligation<'tcx>,
) -> ProjectAndUnifyResult<'tcx> {
- let infcx = selcx.infcx();
+ let infcx = selcx.infcx;
let r = infcx.commit_if_ok(|_snapshot| {
let old_universe = infcx.universe();
let placeholder_predicate =
infcx.replace_bound_vars_with_placeholders(obligation.predicate);
let new_universe = infcx.universe();
- let placeholder_obligation = obligation.with(placeholder_predicate);
+ let placeholder_obligation = obligation.with(infcx.tcx, placeholder_predicate);
match project_and_unify_type(selcx, &placeholder_obligation) {
ProjectAndUnifyResult::MismatchedProjectionTypes(e) => Err(e),
ProjectAndUnifyResult::Holds(obligations)
@@ -249,7 +268,7 @@ fn project_and_unify_type<'cx, 'tcx>(
) -> ProjectAndUnifyResult<'tcx> {
let mut obligations = vec![];
- let infcx = selcx.infcx();
+ let infcx = selcx.infcx;
let normalized = match opt_normalize_projection_type(
selcx,
obligation.param_env,
@@ -268,7 +287,7 @@ fn project_and_unify_type<'cx, 'tcx>(
// This allows users to omit re-mentioning all bounds on an associated type and just use an
// `impl Trait` for the assoc type to add more bounds.
let InferOk { value: actual, obligations: new } =
- selcx.infcx().replace_opaque_types_with_inference_vars(
+ selcx.infcx.replace_opaque_types_with_inference_vars(
actual,
obligation.cause.body_id,
obligation.cause.span,
@@ -288,39 +307,8 @@ fn project_and_unify_type<'cx, 'tcx>(
}
}
-/// Normalizes any associated type projections in `value`, replacing
-/// them with a fully resolved type where possible. The return value
-/// combines the normalized result and any additional obligations that
-/// were incurred as result.
-pub fn normalize<'a, 'b, 'tcx, T>(
- selcx: &'a mut SelectionContext<'b, 'tcx>,
- param_env: ty::ParamEnv<'tcx>,
- cause: ObligationCause<'tcx>,
- value: T,
-) -> Normalized<'tcx, T>
-where
- T: TypeFoldable<'tcx>,
-{
- let mut obligations = Vec::new();
- let value = normalize_to(selcx, param_env, cause, value, &mut obligations);
- Normalized { value, obligations }
-}
-
-pub fn normalize_to<'a, 'b, 'tcx, T>(
- selcx: &'a mut SelectionContext<'b, 'tcx>,
- param_env: ty::ParamEnv<'tcx>,
- cause: ObligationCause<'tcx>,
- value: T,
- obligations: &mut Vec<PredicateObligation<'tcx>>,
-) -> T
-where
- T: TypeFoldable<'tcx>,
-{
- normalize_with_depth_to(selcx, param_env, cause, 0, value, obligations)
-}
-
/// As `normalize`, but with a custom depth.
-pub fn normalize_with_depth<'a, 'b, 'tcx, T>(
+pub(crate) fn normalize_with_depth<'a, 'b, 'tcx, T>(
selcx: &'a mut SelectionContext<'b, 'tcx>,
param_env: ty::ParamEnv<'tcx>,
cause: ObligationCause<'tcx>,
@@ -336,7 +324,7 @@ where
}
#[instrument(level = "info", skip(selcx, param_env, cause, obligations))]
-pub fn normalize_with_depth_to<'a, 'b, 'tcx, T>(
+pub(crate) fn normalize_with_depth_to<'a, 'b, 'tcx, T>(
selcx: &'a mut SelectionContext<'b, 'tcx>,
param_env: ty::ParamEnv<'tcx>,
cause: ObligationCause<'tcx>,
@@ -356,7 +344,7 @@ where
}
#[instrument(level = "info", skip(selcx, param_env, cause, obligations))]
-pub fn try_normalize_with_depth_to<'a, 'b, 'tcx, T>(
+pub(crate) fn try_normalize_with_depth_to<'a, 'b, 'tcx, T>(
selcx: &'a mut SelectionContext<'b, 'tcx>,
param_env: ty::ParamEnv<'tcx>,
cause: ObligationCause<'tcx>,
@@ -444,7 +432,7 @@ impl<'a, 'b, 'tcx> AssocTypeNormalizer<'a, 'b, 'tcx> {
}
fn fold<T: TypeFoldable<'tcx>>(&mut self, value: T) -> T {
- let value = self.selcx.infcx().resolve_vars_if_possible(value);
+ let value = self.selcx.infcx.resolve_vars_if_possible(value);
debug!(?value);
assert!(
@@ -516,13 +504,12 @@ impl<'a, 'b, 'tcx> TypeFolder<'tcx> for AssocTypeNormalizer<'a, 'b, 'tcx> {
Reveal::All => {
let recursion_limit = self.tcx().recursion_limit();
if !recursion_limit.value_within_limit(self.depth) {
- let obligation = Obligation::with_depth(
- self.cause.clone(),
- recursion_limit.0,
- self.param_env,
- ty,
+ self.selcx.infcx.err_ctxt().report_overflow_error(
+ &ty,
+ self.cause.span,
+ true,
+ |_| {},
);
- self.selcx.infcx().err_ctxt().report_overflow_error(&obligation, true);
}
let substs = substs.fold_with(self);
@@ -588,7 +575,7 @@ impl<'a, 'b, 'tcx> TypeFolder<'tcx> for AssocTypeNormalizer<'a, 'b, 'tcx> {
// want to figure out how to register obligations with escaping vars
// or handle this some other way.
- let infcx = self.selcx.infcx();
+ let infcx = self.selcx.infcx;
let (data, mapped_regions, mapped_types, mapped_consts) =
BoundVarReplacer::replace_bound_vars(infcx, &mut self.universes, data);
let data = data.fold_with(self);
@@ -632,13 +619,13 @@ impl<'a, 'b, 'tcx> TypeFolder<'tcx> for AssocTypeNormalizer<'a, 'b, 'tcx> {
#[instrument(skip(self), level = "debug")]
fn fold_const(&mut self, constant: ty::Const<'tcx>) -> ty::Const<'tcx> {
let tcx = self.selcx.tcx();
- if tcx.lazy_normalization() {
+ if tcx.lazy_normalization() || !needs_normalization(&constant, self.param_env.reveal()) {
constant
} else {
let constant = constant.super_fold_with(self);
debug!(?constant, ?self.param_env);
with_replaced_escaping_bound_vars(
- self.selcx.infcx(),
+ self.selcx.infcx,
&mut self.universes,
constant,
|constant| constant.eval(tcx, self.param_env),
@@ -816,9 +803,7 @@ impl<'tcx> TypeFolder<'tcx> for BoundVarReplacer<'_, 'tcx> {
let universe = self.universe_for(debruijn);
let p = ty::PlaceholderConst { universe, name: bound_const };
self.mapped_consts.insert(p, bound_const);
- self.infcx
- .tcx
- .mk_const(ty::ConstS { kind: ty::ConstKind::Placeholder(p), ty: ct.ty() })
+ self.infcx.tcx.mk_const(p, ct.ty())
}
_ => ct.super_fold_with(self),
}
@@ -829,7 +814,7 @@ impl<'tcx> TypeFolder<'tcx> for BoundVarReplacer<'_, 'tcx> {
}
}
-// The inverse of `BoundVarReplacer`: replaces placeholders with the bound vars from which they came.
+/// The inverse of [`BoundVarReplacer`]: replaces placeholders with the bound vars from which they came.
pub struct PlaceholderReplacer<'me, 'tcx> {
infcx: &'me InferCtxt<'tcx>,
mapped_regions: BTreeMap<ty::PlaceholderRegion, ty::BoundRegion>,
@@ -953,10 +938,7 @@ impl<'tcx> TypeFolder<'tcx> for PlaceholderReplacer<'_, 'tcx> {
let db = ty::DebruijnIndex::from_usize(
self.universe_indices.len() - index + self.current_index.as_usize() - 1,
);
- self.tcx().mk_const(ty::ConstS {
- kind: ty::ConstKind::Bound(db, *replace_var),
- ty: ct.ty(),
- })
+ self.tcx().mk_const(ty::ConstKind::Bound(db, *replace_var), ct.ty())
}
None => ct,
}
@@ -995,10 +977,7 @@ pub fn normalize_projection_type<'a, 'b, 'tcx>(
// and a deferred predicate to resolve this when more type
// information is available.
- selcx
- .infcx()
- .infer_projection(param_env, projection_ty, cause, depth + 1, obligations)
- .into()
+ selcx.infcx.infer_projection(param_env, projection_ty, cause, depth + 1, obligations).into()
})
}
@@ -1021,7 +1000,7 @@ fn opt_normalize_projection_type<'a, 'b, 'tcx>(
depth: usize,
obligations: &mut Vec<PredicateObligation<'tcx>>,
) -> Result<Option<Term<'tcx>>, InProgress> {
- let infcx = selcx.infcx();
+ let infcx = selcx.infcx;
// Don't use the projection cache in intercrate mode -
// the `infcx` may be re-used between intercrate in non-intercrate
// mode, which could lead to using incorrect cache results.
@@ -1100,7 +1079,8 @@ fn opt_normalize_projection_type<'a, 'b, 'tcx>(
}
}
- let obligation = Obligation::with_depth(cause.clone(), depth, param_env, projection_ty);
+ let obligation =
+ Obligation::with_depth(selcx.tcx(), cause.clone(), depth, param_env, projection_ty);
match project(selcx, &obligation) {
Ok(Projected::Progress(Progress {
@@ -1112,7 +1092,7 @@ fn opt_normalize_projection_type<'a, 'b, 'tcx>(
// an impl, where-clause etc) and hence we must
// re-normalize it
- let projected_term = selcx.infcx().resolve_vars_if_possible(projected_term);
+ let projected_term = selcx.infcx.resolve_vars_if_possible(projected_term);
let mut result = if projected_term.has_projections() {
let mut normalizer = AssocTypeNormalizer::new(
@@ -1208,9 +1188,9 @@ fn normalize_to_error<'a, 'tcx>(
param_env,
predicate: trait_ref.without_const().to_predicate(selcx.tcx()),
};
- let tcx = selcx.infcx().tcx;
+ let tcx = selcx.infcx.tcx;
let def_id = projection_ty.item_def_id;
- let new_value = selcx.infcx().next_ty_var(TypeVariableOrigin {
+ let new_value = selcx.infcx.next_ty_var(TypeVariableOrigin {
kind: TypeVariableOriginKind::NormalizeProjectionType,
span: tcx.def_span(def_id),
});
@@ -1330,11 +1310,10 @@ fn assemble_candidate_for_impl_trait_in_trait<'cx, 'tcx>(
obligation.predicate.substs.truncate_to(tcx, tcx.generics_of(trait_def_id));
// FIXME(named-returns): Binders
let trait_predicate =
- ty::Binder::dummy(ty::TraitRef { def_id: trait_def_id, substs: trait_substs })
- .to_poly_trait_predicate();
+ ty::Binder::dummy(ty::TraitRef { def_id: trait_def_id, substs: trait_substs });
- let _ =
- selcx.infcx().commit_if_ok(|_| match selcx.select(&obligation.with(trait_predicate)) {
+ let _ = selcx.infcx.commit_if_ok(|_| {
+ match selcx.select(&obligation.with(tcx, trait_predicate)) {
Ok(Some(super::ImplSource::UserDefined(data))) => {
candidate_set.push_candidate(ProjectionCandidate::ImplTraitInTrait(
ImplTraitInTraitCandidate::Impl(data),
@@ -1354,7 +1333,8 @@ fn assemble_candidate_for_impl_trait_in_trait<'cx, 'tcx>(
candidate_set.mark_error(e);
return Err(());
}
- });
+ }
+ });
}
}
@@ -1437,7 +1417,7 @@ fn assemble_candidates_from_object_ty<'cx, 'tcx>(
let tcx = selcx.tcx();
let self_ty = obligation.predicate.self_ty();
- let object_ty = selcx.infcx().shallow_resolve(self_ty);
+ let object_ty = selcx.infcx.shallow_resolve(self_ty);
let data = match object_ty.kind() {
ty::Dynamic(data, ..) => data,
ty::Infer(ty::TyVar(_)) => {
@@ -1475,10 +1455,12 @@ fn assemble_candidates_from_predicates<'cx, 'tcx>(
env_predicates: impl Iterator<Item = ty::Predicate<'tcx>>,
potentially_unnormalized_candidates: bool,
) {
- let infcx = selcx.infcx();
+ let infcx = selcx.infcx;
for predicate in env_predicates {
let bound_predicate = predicate.kind();
- if let ty::PredicateKind::Projection(data) = predicate.kind().skip_binder() {
+ if let ty::PredicateKind::Clause(ty::Clause::Projection(data)) =
+ predicate.kind().skip_binder()
+ {
let data = bound_predicate.rebind(data);
if data.projection_def_id() != obligation.predicate.item_def_id {
continue;
@@ -1528,8 +1510,8 @@ fn assemble_candidates_from_impls<'cx, 'tcx>(
// If we are resolving `<T as TraitRef<...>>::Item == Type`,
// start out by selecting the predicate `T as TraitRef<...>`:
let poly_trait_ref = ty::Binder::dummy(obligation.predicate.trait_ref(selcx.tcx()));
- let trait_obligation = obligation.with(poly_trait_ref.to_poly_trait_predicate());
- let _ = selcx.infcx().commit_if_ok(|_| {
+ let trait_obligation = obligation.with(selcx.tcx(), poly_trait_ref);
+ let _ = selcx.infcx.commit_if_ok(|_| {
let impl_source = match selcx.select(&trait_obligation) {
Ok(Some(impl_source)) => impl_source,
Ok(None) => {
@@ -1546,6 +1528,7 @@ fn assemble_candidates_from_impls<'cx, 'tcx>(
let eligible = match &impl_source {
super::ImplSource::Closure(_)
| super::ImplSource::Generator(_)
+ | super::ImplSource::Future(_)
| super::ImplSource::FnPointer(_)
| super::ImplSource::TraitAlias(_) => true,
super::ImplSource::UserDefined(impl_data) => {
@@ -1586,7 +1569,7 @@ fn assemble_candidates_from_impls<'cx, 'tcx>(
if obligation.param_env.reveal() == Reveal::All {
// NOTE(eddyb) inference variables can resolve to parameters, so
// assume `poly_trait_ref` isn't monomorphic, if it contains any.
- let poly_trait_ref = selcx.infcx().resolve_vars_if_possible(poly_trait_ref);
+ let poly_trait_ref = selcx.infcx.resolve_vars_if_possible(poly_trait_ref);
!poly_trait_ref.still_further_specializable()
} else {
debug!(
@@ -1598,128 +1581,126 @@ fn assemble_candidates_from_impls<'cx, 'tcx>(
}
}
}
- super::ImplSource::DiscriminantKind(..) => {
- // While `DiscriminantKind` is automatically implemented for every type,
- // the concrete discriminant may not be known yet.
- //
- // Any type with multiple potential discriminant types is therefore not eligible.
- let self_ty = selcx.infcx().shallow_resolve(obligation.predicate.self_ty());
-
- match self_ty.kind() {
- ty::Bool
- | ty::Char
- | ty::Int(_)
- | ty::Uint(_)
- | ty::Float(_)
- | ty::Adt(..)
- | ty::Foreign(_)
- | ty::Str
- | ty::Array(..)
- | ty::Slice(_)
- | ty::RawPtr(..)
- | ty::Ref(..)
- | ty::FnDef(..)
- | ty::FnPtr(..)
- | ty::Dynamic(..)
- | ty::Closure(..)
- | ty::Generator(..)
- | ty::GeneratorWitness(..)
- | ty::Never
- | ty::Tuple(..)
- // Integers and floats always have `u8` as their discriminant.
- | ty::Infer(ty::InferTy::IntVar(_) | ty::InferTy::FloatVar(..)) => true,
-
- ty::Projection(..)
- | ty::Opaque(..)
- | ty::Param(..)
- | ty::Bound(..)
- | ty::Placeholder(..)
- | ty::Infer(..)
- | ty::Error(_) => false,
- }
- }
- super::ImplSource::Pointee(..) => {
- // While `Pointee` is automatically implemented for every type,
- // the concrete metadata type may not be known yet.
- //
- // Any type with multiple potential metadata types is therefore not eligible.
- let self_ty = selcx.infcx().shallow_resolve(obligation.predicate.self_ty());
-
- let tail = selcx.tcx().struct_tail_with_normalize(
- self_ty,
- |ty| {
- // We throw away any obligations we get from this, since we normalize
- // and confirm these obligations once again during confirmation
- normalize_with_depth(
- selcx,
- obligation.param_env,
- obligation.cause.clone(),
- obligation.recursion_depth + 1,
- ty,
- )
- .value
- },
- || {},
- );
-
- match tail.kind() {
- ty::Bool
- | ty::Char
- | ty::Int(_)
- | ty::Uint(_)
- | ty::Float(_)
- | ty::Str
- | ty::Array(..)
- | ty::Slice(_)
- | ty::RawPtr(..)
- | ty::Ref(..)
- | ty::FnDef(..)
- | ty::FnPtr(..)
- | ty::Dynamic(..)
- | ty::Closure(..)
- | ty::Generator(..)
- | ty::GeneratorWitness(..)
- | ty::Never
- // Extern types have unit metadata, according to RFC 2850
- | ty::Foreign(_)
- // If returned by `struct_tail_without_normalization` this is a unit struct
- // without any fields, or not a struct, and therefore is Sized.
- | ty::Adt(..)
- // If returned by `struct_tail_without_normalization` this is the empty tuple.
- | ty::Tuple(..)
- // Integers and floats are always Sized, and so have unit type metadata.
- | ty::Infer(ty::InferTy::IntVar(_) | ty::InferTy::FloatVar(..)) => true,
-
- // type parameters, opaques, and unnormalized projections have pointer
- // metadata if they're known (e.g. by the param_env) to be sized
- ty::Param(_) | ty::Projection(..) | ty::Opaque(..)
- if selcx.infcx().predicate_must_hold_modulo_regions(
- &obligation.with(
- ty::Binder::dummy(ty::TraitRef::new(
- selcx.tcx().require_lang_item(LangItem::Sized, None),
- selcx.tcx().mk_substs_trait(self_ty, &[]),
- ))
- .without_const()
- .to_predicate(selcx.tcx()),
- ),
- ) =>
- {
- true
+ super::ImplSource::Builtin(..) => {
+ // While a builtin impl may be known to exist, the associated type may not yet
+ // be known. Any type with multiple potential associated types is therefore
+ // not eligible.
+ let self_ty = selcx.infcx.shallow_resolve(obligation.predicate.self_ty());
+
+ let lang_items = selcx.tcx().lang_items();
+ if lang_items.discriminant_kind_trait() == Some(poly_trait_ref.def_id()) {
+ match self_ty.kind() {
+ ty::Bool
+ | ty::Char
+ | ty::Int(_)
+ | ty::Uint(_)
+ | ty::Float(_)
+ | ty::Adt(..)
+ | ty::Foreign(_)
+ | ty::Str
+ | ty::Array(..)
+ | ty::Slice(_)
+ | ty::RawPtr(..)
+ | ty::Ref(..)
+ | ty::FnDef(..)
+ | ty::FnPtr(..)
+ | ty::Dynamic(..)
+ | ty::Closure(..)
+ | ty::Generator(..)
+ | ty::GeneratorWitness(..)
+ | ty::Never
+ | ty::Tuple(..)
+ // Integers and floats always have `u8` as their discriminant.
+ | ty::Infer(ty::InferTy::IntVar(_) | ty::InferTy::FloatVar(..)) => true,
+
+ // Non-concrete types, whose discriminant type cannot be known yet,
+ // are not eligible.
+ ty::Param(_)
+ | ty::Projection(..)
+ | ty::Opaque(..)
+ | ty::Bound(..)
+ | ty::Placeholder(..)
+ | ty::Infer(..)
+ | ty::Error(_) => false,
}
+ } else if lang_items.pointee_trait() == Some(poly_trait_ref.def_id()) {
+ let tail = selcx.tcx().struct_tail_with_normalize(
+ self_ty,
+ |ty| {
+ // We throw away any obligations we get from this, since we normalize
+ // and confirm these obligations once again during confirmation
+ normalize_with_depth(
+ selcx,
+ obligation.param_env,
+ obligation.cause.clone(),
+ obligation.recursion_depth + 1,
+ ty,
+ )
+ .value
+ },
+ || {},
+ );
- // FIXME(compiler-errors): are Bound and Placeholder types ever known sized?
- ty::Param(_)
- | ty::Projection(..)
- | ty::Opaque(..)
- | ty::Bound(..)
- | ty::Placeholder(..)
- | ty::Infer(..)
- | ty::Error(_) => {
- if tail.has_infer_types() {
- candidate_set.mark_ambiguous();
+ match tail.kind() {
+ ty::Bool
+ | ty::Char
+ | ty::Int(_)
+ | ty::Uint(_)
+ | ty::Float(_)
+ | ty::Str
+ | ty::Array(..)
+ | ty::Slice(_)
+ | ty::RawPtr(..)
+ | ty::Ref(..)
+ | ty::FnDef(..)
+ | ty::FnPtr(..)
+ | ty::Dynamic(..)
+ | ty::Closure(..)
+ | ty::Generator(..)
+ | ty::GeneratorWitness(..)
+ | ty::Never
+ // Extern types have unit metadata, according to RFC 2850
+ | ty::Foreign(_)
+ // If returned by `struct_tail_without_normalization` this is a unit struct
+ // without any fields, or not a struct, and therefore is Sized.
+ | ty::Adt(..)
+ // If returned by `struct_tail_without_normalization` this is the empty tuple.
+ | ty::Tuple(..)
+ // Integers and floats are always Sized, and so have unit type metadata.
+ | ty::Infer(ty::InferTy::IntVar(_) | ty::InferTy::FloatVar(..)) => true,
+
+ // type parameters, opaques, and unnormalized projections have pointer
+ // metadata if they're known (e.g. by the param_env) to be sized
+ ty::Param(_) | ty::Projection(..) | ty::Opaque(..)
+ if selcx.infcx.predicate_must_hold_modulo_regions(
+ &obligation.with(
+ selcx.tcx(),
+ ty::Binder::dummy(
+ selcx.tcx().at(obligation.cause.span()).mk_trait_ref(LangItem::Sized, [self_ty]),
+ )
+ .without_const(),
+ ),
+ ) =>
+ {
+ true
+ }
+
+ // FIXME(compiler-errors): are Bound and Placeholder types ever known sized?
+ ty::Param(_)
+ | ty::Projection(..)
+ | ty::Opaque(..)
+ | ty::Bound(..)
+ | ty::Placeholder(..)
+ | ty::Infer(..)
+ | ty::Error(_) => {
+ if tail.has_infer_types() {
+ candidate_set.mark_ambiguous();
+ }
+ false
}
- false
}
+ } else {
+ bug!("unexpected builtin trait with associated type: {poly_trait_ref:?}")
}
}
super::ImplSource::Param(..) => {
@@ -1757,7 +1738,6 @@ fn assemble_candidates_from_impls<'cx, 'tcx>(
false
}
super::ImplSource::AutoImpl(..)
- | super::ImplSource::Builtin(..)
| super::ImplSource::TraitUpcasting(_)
| super::ImplSource::ConstDestruct(_) => {
// These traits have no associated types.
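For context (an illustrative example, not from the patch): the `DiscriminantKind` eligibility check above decides when `<Self as DiscriminantKind>::Discriminant` can be projected, which is what `std::mem::discriminant` relies on. A minimal stable-Rust sketch:

use std::mem;

#[allow(dead_code)] // payloads are irrelevant to the discriminant
enum Shape {
    Circle(f64),
    Square(f64),
}

fn same_variant(a: &Shape, b: &Shape) -> bool {
    // `Discriminant<Shape>` wraps `<Shape as DiscriminantKind>::Discriminant`.
    mem::discriminant(a) == mem::discriminant(b)
}

fn main() {
    assert!(same_variant(&Shape::Circle(1.0), &Shape::Circle(2.0)));
    assert!(!same_variant(&Shape::Circle(1.0), &Shape::Square(1.0)));
}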
@@ -1820,8 +1800,7 @@ fn confirm_candidate<'cx, 'tcx>(
// when possible for this to work. See `auto-trait-projection-recursion.rs`
// for a case where this matters.
if progress.term.has_infer_regions() {
- progress.term =
- progress.term.fold_with(&mut OpportunisticRegionResolver::new(selcx.infcx()));
+ progress.term = progress.term.fold_with(&mut OpportunisticRegionResolver::new(selcx.infcx));
}
progress
}
@@ -1834,16 +1813,13 @@ fn confirm_select_candidate<'cx, 'tcx>(
match impl_source {
super::ImplSource::UserDefined(data) => confirm_impl_candidate(selcx, obligation, data),
super::ImplSource::Generator(data) => confirm_generator_candidate(selcx, obligation, data),
+ super::ImplSource::Future(data) => confirm_future_candidate(selcx, obligation, data),
super::ImplSource::Closure(data) => confirm_closure_candidate(selcx, obligation, data),
super::ImplSource::FnPointer(data) => confirm_fn_pointer_candidate(selcx, obligation, data),
- super::ImplSource::DiscriminantKind(data) => {
- confirm_discriminant_kind_candidate(selcx, obligation, data)
- }
- super::ImplSource::Pointee(data) => confirm_pointee_candidate(selcx, obligation, data),
+ super::ImplSource::Builtin(data) => confirm_builtin_candidate(selcx, obligation, data),
super::ImplSource::Object(_)
| super::ImplSource::AutoImpl(..)
| super::ImplSource::Param(..)
- | super::ImplSource::Builtin(..)
| super::ImplSource::TraitUpcasting(_)
| super::ImplSource::TraitAlias(..)
| super::ImplSource::ConstDestruct(_) => {
@@ -1907,74 +1883,97 @@ fn confirm_generator_candidate<'cx, 'tcx>(
.with_addl_obligations(obligations)
}
-fn confirm_discriminant_kind_candidate<'cx, 'tcx>(
+fn confirm_future_candidate<'cx, 'tcx>(
selcx: &mut SelectionContext<'cx, 'tcx>,
obligation: &ProjectionTyObligation<'tcx>,
- _: ImplSourceDiscriminantKindData,
+ impl_source: ImplSourceFutureData<'tcx, PredicateObligation<'tcx>>,
) -> Progress<'tcx> {
- let tcx = selcx.tcx();
+ let gen_sig = impl_source.substs.as_generator().poly_sig();
+ let Normalized { value: gen_sig, obligations } = normalize_with_depth(
+ selcx,
+ obligation.param_env,
+ obligation.cause.clone(),
+ obligation.recursion_depth + 1,
+ gen_sig,
+ );
- let self_ty = selcx.infcx().shallow_resolve(obligation.predicate.self_ty());
- // We get here from `poly_project_and_unify_type` which replaces bound vars
- // with placeholders
- debug_assert!(!self_ty.has_escaping_bound_vars());
- let substs = tcx.mk_substs([self_ty.into()].iter());
+ debug!(?obligation, ?gen_sig, ?obligations, "confirm_future_candidate");
- let discriminant_def_id = tcx.require_lang_item(LangItem::Discriminant, None);
+ let tcx = selcx.tcx();
+ let fut_def_id = tcx.require_lang_item(LangItem::Future, None);
- let predicate = ty::ProjectionPredicate {
- projection_ty: ty::ProjectionTy { substs, item_def_id: discriminant_def_id },
- term: self_ty.discriminant_ty(tcx).into(),
- };
+ let predicate = super::util::future_trait_ref_and_outputs(
+ tcx,
+ fut_def_id,
+ obligation.predicate.self_ty(),
+ gen_sig,
+ )
+ .map_bound(|(trait_ref, return_ty)| {
+ debug_assert_eq!(tcx.associated_item(obligation.predicate.item_def_id).name, sym::Output);
- // We get here from `poly_project_and_unify_type` which replaces bound vars
- // with placeholders, so dummy is okay here.
- confirm_param_env_candidate(selcx, obligation, ty::Binder::dummy(predicate), false)
+ ty::ProjectionPredicate {
+ projection_ty: ty::ProjectionTy {
+ substs: trait_ref.substs,
+ item_def_id: obligation.predicate.item_def_id,
+ },
+ term: return_ty.into(),
+ }
+ });
+
+ confirm_param_env_candidate(selcx, obligation, predicate, false)
+ .with_addl_obligations(impl_source.nested)
+ .with_addl_obligations(obligations)
}
-fn confirm_pointee_candidate<'cx, 'tcx>(
+fn confirm_builtin_candidate<'cx, 'tcx>(
selcx: &mut SelectionContext<'cx, 'tcx>,
obligation: &ProjectionTyObligation<'tcx>,
- _: ImplSourcePointeeData,
+ data: ImplSourceBuiltinData<PredicateObligation<'tcx>>,
) -> Progress<'tcx> {
let tcx = selcx.tcx();
- let self_ty = selcx.infcx().shallow_resolve(obligation.predicate.self_ty());
-
- let mut obligations = vec![];
- let (metadata_ty, check_is_sized) = self_ty.ptr_metadata_ty(tcx, |ty| {
- normalize_with_depth_to(
- selcx,
- obligation.param_env,
- obligation.cause.clone(),
- obligation.recursion_depth + 1,
- ty,
- &mut obligations,
- )
- });
- if check_is_sized {
- let sized_predicate = ty::Binder::dummy(ty::TraitRef::new(
- tcx.require_lang_item(LangItem::Sized, None),
- tcx.mk_substs_trait(self_ty, &[]),
- ))
- .without_const()
- .to_predicate(tcx);
- obligations.push(Obligation::new(
- obligation.cause.clone(),
- obligation.param_env,
- sized_predicate,
- ));
- }
-
+ let self_ty = obligation.predicate.self_ty();
let substs = tcx.mk_substs([self_ty.into()].iter());
- let metadata_def_id = tcx.require_lang_item(LangItem::Metadata, None);
-
- let predicate = ty::ProjectionPredicate {
- projection_ty: ty::ProjectionTy { substs, item_def_id: metadata_def_id },
- term: metadata_ty.into(),
+ let lang_items = tcx.lang_items();
+ let item_def_id = obligation.predicate.item_def_id;
+ let trait_def_id = tcx.trait_of_item(item_def_id).unwrap();
+ let (term, obligations) = if lang_items.discriminant_kind_trait() == Some(trait_def_id) {
+ let discriminant_def_id = tcx.require_lang_item(LangItem::Discriminant, None);
+ assert_eq!(discriminant_def_id, item_def_id);
+
+ (self_ty.discriminant_ty(tcx).into(), Vec::new())
+ } else if lang_items.pointee_trait() == Some(trait_def_id) {
+ let metadata_def_id = tcx.require_lang_item(LangItem::Metadata, None);
+ assert_eq!(metadata_def_id, item_def_id);
+
+ let mut obligations = Vec::new();
+ let (metadata_ty, check_is_sized) = self_ty.ptr_metadata_ty(tcx, |ty| {
+ normalize_with_depth_to(
+ selcx,
+ obligation.param_env,
+ obligation.cause.clone(),
+ obligation.recursion_depth + 1,
+ ty,
+ &mut obligations,
+ )
+ });
+ if check_is_sized {
+ let sized_predicate = ty::Binder::dummy(
+ tcx.at(obligation.cause.span()).mk_trait_ref(LangItem::Sized, [self_ty]),
+ )
+ .without_const();
+ obligations.push(obligation.with(tcx, sized_predicate));
+ }
+ (metadata_ty.into(), obligations)
+ } else {
+ bug!("unexpected builtin trait with associated type: {:?}", obligation.predicate);
};
+ let predicate =
+ ty::ProjectionPredicate { projection_ty: ty::ProjectionTy { substs, item_def_id }, term };
+
confirm_param_env_candidate(selcx, obligation, ty::Binder::dummy(predicate), false)
.with_addl_obligations(obligations)
+ .with_addl_obligations(data.nested)
}
fn confirm_fn_pointer_candidate<'cx, 'tcx>(
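Illustrative aside, not part of the patch: `confirm_builtin_candidate` resolves the `Pointee::Metadata` projection via `ptr_metadata_ty`, which is what `core::ptr::metadata` exposes. A nightly-only sketch, assuming the `ptr_metadata` feature gate is still required:

#![feature(ptr_metadata)]

fn main() {
    // `<str as Pointee>::Metadata` normalizes to `usize` (the length in bytes)...
    assert_eq!(std::ptr::metadata("foo"), 3_usize);
    // ...while every `Sized` type, such as `u32`, has `()` metadata.
    let () = std::ptr::metadata(&7_u32);
}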
@@ -1982,7 +1981,7 @@ fn confirm_fn_pointer_candidate<'cx, 'tcx>(
obligation: &ProjectionTyObligation<'tcx>,
fn_pointer_impl_source: ImplSourceFnPointerData<'tcx, PredicateObligation<'tcx>>,
) -> Progress<'tcx> {
- let fn_type = selcx.infcx().shallow_resolve(fn_pointer_impl_source.fn_ty);
+ let fn_type = selcx.infcx.shallow_resolve(fn_pointer_impl_source.fn_ty);
let sig = fn_type.fn_sig(selcx.tcx());
let Normalized { value: sig, obligations } = normalize_with_depth(
selcx,
@@ -2055,7 +2054,7 @@ fn confirm_param_env_candidate<'cx, 'tcx>(
poly_cache_entry: ty::PolyProjectionPredicate<'tcx>,
potentially_unnormalized_candidate: bool,
) -> Progress<'tcx> {
- let infcx = selcx.infcx();
+ let infcx = selcx.infcx;
let cause = &obligation.cause;
let param_env = obligation.param_env;
@@ -2150,7 +2149,7 @@ fn confirm_impl_candidate<'cx, 'tcx>(
// * `substs` ends up as `[u32, S]`
let substs = obligation.predicate.substs.rebase_onto(tcx, trait_def_id, substs);
let substs =
- translate_substs(selcx.infcx(), param_env, impl_def_id, substs, assoc_ty.defining_node);
+ translate_substs(selcx.infcx, param_env, impl_def_id, substs, assoc_ty.defining_node);
let ty = tcx.bound_type_of(assoc_ty.item.def_id);
let is_const = matches!(tcx.def_kind(assoc_ty.item.def_id), DefKind::AssocConst);
let term: ty::EarlyBinder<ty::Term<'tcx>> = if is_const {
@@ -2158,7 +2157,7 @@ fn confirm_impl_candidate<'cx, 'tcx>(
crate::traits::InternalSubsts::identity_for_item(tcx, assoc_ty.item.def_id);
let did = ty::WithOptConstParam::unknown(assoc_ty.item.def_id);
let kind = ty::ConstKind::Unevaluated(ty::UnevaluatedConst::new(did, identity_substs));
- ty.map_bound(|ty| tcx.mk_const(ty::ConstS { ty, kind }).into())
+ ty.map_bound(|ty| tcx.mk_const(kind, ty).into())
} else {
ty.map_bound(|ty| ty.into())
};
@@ -2177,7 +2176,7 @@ fn confirm_impl_candidate<'cx, 'tcx>(
// Verify that the trait item and its implementation have compatible substs lists
fn check_substs_compatible<'tcx>(
tcx: TyCtxt<'tcx>,
- assoc_ty: &ty::AssocItem,
+ assoc_item: &ty::AssocItem,
substs: ty::SubstsRef<'tcx>,
) -> bool {
fn check_substs_compatible_inner<'tcx>(
@@ -2209,7 +2208,10 @@ fn check_substs_compatible<'tcx>(
true
}
- check_substs_compatible_inner(tcx, tcx.generics_of(assoc_ty.def_id), substs.as_slice())
+ let generics = tcx.generics_of(assoc_item.def_id);
+ // Chop off any additional (RPITIT) substs
+ let substs = &substs[0..generics.count().min(substs.len())];
+ check_substs_compatible_inner(tcx, generics, substs)
}
fn confirm_impl_trait_in_trait_candidate<'tcx>(
@@ -2238,11 +2240,27 @@ fn confirm_impl_trait_in_trait_candidate<'tcx>(
};
}
- let impl_fn_def_id = leaf_def.item.def_id;
// Rebase from {trait}::{fn}::{opaque} to {impl}::{fn}::{opaque},
// since `data.substs` are the impl substs.
let impl_fn_substs =
obligation.predicate.substs.rebase_onto(tcx, tcx.parent(trait_fn_def_id), data.substs);
+ let impl_fn_substs = translate_substs(
+ selcx.infcx,
+ obligation.param_env,
+ data.impl_def_id,
+ impl_fn_substs,
+ leaf_def.defining_node,
+ );
+
+ if !check_substs_compatible(tcx, &leaf_def.item, impl_fn_substs) {
+ let err = tcx.ty_error_with_message(
+ obligation.cause.span,
+ "impl method and trait method have different parameters",
+ );
+ return Progress { term: err.into(), obligations };
+ }
+
+ let impl_fn_def_id = leaf_def.item.def_id;
let cause = ObligationCause::new(
obligation.cause.span,
@@ -2260,6 +2278,7 @@ fn confirm_impl_trait_in_trait_candidate<'tcx>(
obligations.extend(std::iter::zip(predicates.predicates, predicates.spans).map(
|(pred, span)| {
Obligation::with_depth(
+ tcx,
ObligationCause::new(
obligation.cause.span,
obligation.cause.body_id,
@@ -2276,10 +2295,11 @@ fn confirm_impl_trait_in_trait_candidate<'tcx>(
},
));
- let ty = super::normalize_to(
+ let ty = normalize_with_depth_to(
selcx,
obligation.param_env,
cause.clone(),
+ obligation.recursion_depth + 1,
tcx.bound_trait_impl_trait_tys(impl_fn_def_id)
.map_bound(|tys| {
tys.map_or_else(|_| tcx.ty_error(), |tys| tys[&obligation.predicate.item_def_id])
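As background (editorial example, not from the patch): `confirm_impl_trait_in_trait_candidate` projects the hidden type of a return-position `impl Trait` in a trait. A nightly-only sketch, assuming the `return_position_impl_trait_in_trait` feature gate of this period:

#![feature(return_position_impl_trait_in_trait)]
#![allow(incomplete_features)]

trait Container {
    fn items(&self) -> impl Iterator<Item = u32>;
}

impl Container for Vec<u32> {
    fn items(&self) -> impl Iterator<Item = u32> {
        // Return an owned iterator so the hidden type captures no lifetimes.
        self.clone().into_iter()
    }
}

fn main() {
    assert_eq!(vec![1_u32, 2, 3].items().sum::<u32>(), 6);
}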
@@ -2299,11 +2319,10 @@ fn assoc_ty_own_obligations<'cx, 'tcx>(
nested: &mut Vec<PredicateObligation<'tcx>>,
) {
let tcx = selcx.tcx();
- for predicate in tcx
+ let own = tcx
.predicates_of(obligation.predicate.item_def_id)
- .instantiate_own(tcx, obligation.predicate.substs)
- .predicates
- {
+ .instantiate_own(tcx, obligation.predicate.substs);
+ for (predicate, span) in std::iter::zip(own.predicates, own.spans) {
let normalized = normalize_with_depth_to(
selcx,
obligation.param_env,
@@ -2312,8 +2331,30 @@ fn assoc_ty_own_obligations<'cx, 'tcx>(
predicate,
nested,
);
+
+ let nested_cause = if matches!(
+ obligation.cause.code(),
+ super::CompareImplItemObligation { .. }
+ | super::CheckAssociatedTypeBounds { .. }
+ | super::AscribeUserTypeProvePredicate(..)
+ ) {
+ obligation.cause.clone()
+ } else if span.is_dummy() {
+ ObligationCause::new(
+ obligation.cause.span,
+ obligation.cause.body_id,
+ super::ItemObligation(obligation.predicate.item_def_id),
+ )
+ } else {
+ ObligationCause::new(
+ obligation.cause.span,
+ obligation.cause.body_id,
+ super::BindingObligation(obligation.predicate.item_def_id, span),
+ )
+ };
nested.push(Obligation::with_depth(
- obligation.cause.clone(),
+ tcx,
+ nested_cause,
obligation.recursion_depth + 1,
obligation.param_env,
normalized,
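Editorial example, not part of the patch: the "own obligations" instantiated above are the bounds declared on the associated type itself, and the new `nested_cause` points failures of such bounds at the bound's own span via `BindingObligation`. A minimal stable-Rust sketch:

trait Store {
    type Key: Ord; // this bound is an "own obligation" of `Store::Key`
}

impl Store for Vec<u8> {
    type Key = usize;
}

fn max_key<S: Store>(keys: Vec<S::Key>) -> Option<S::Key> {
    // `S::Key: Ord` is usable here because it is declared on the associated type.
    keys.into_iter().max()
}

fn main() {
    assert_eq!(max_key::<Vec<u8>>(vec![3_usize, 1, 2]), Some(3));
}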
@@ -2385,7 +2426,7 @@ impl<'cx, 'tcx> ProjectionCacheKeyExt<'cx, 'tcx> for ProjectionCacheKey<'tcx> {
selcx: &mut SelectionContext<'cx, 'tcx>,
predicate: ty::PolyProjectionPredicate<'tcx>,
) -> Option<Self> {
- let infcx = selcx.infcx();
+ let infcx = selcx.infcx;
// We don't do cross-snapshot caching of obligations with escaping regions,
// so there's no cache key to use
predicate.no_bound_vars().map(|predicate| {
diff --git a/compiler/rustc_trait_selection/src/traits/query/evaluate_obligation.rs b/compiler/rustc_trait_selection/src/traits/query/evaluate_obligation.rs
index c84f128dd..09b58894d 100644
--- a/compiler/rustc_trait_selection/src/traits/query/evaluate_obligation.rs
+++ b/compiler/rustc_trait_selection/src/traits/query/evaluate_obligation.rs
@@ -2,9 +2,7 @@ use rustc_middle::ty;
use crate::infer::canonical::OriginalQueryValues;
use crate::infer::InferCtxt;
-use crate::traits::{
- EvaluationResult, OverflowError, PredicateObligation, SelectionContext, TraitQueryMode,
-};
+use crate::traits::{EvaluationResult, OverflowError, PredicateObligation, SelectionContext};
pub trait InferCtxtExt<'tcx> {
fn predicate_may_hold(&self, obligation: &PredicateObligation<'tcx>) -> bool;
@@ -68,7 +66,7 @@ impl<'tcx> InferCtxtExt<'tcx> for InferCtxt<'tcx> {
let mut _orig_values = OriginalQueryValues::default();
let param_env = match obligation.predicate.kind().skip_binder() {
- ty::PredicateKind::Trait(pred) => {
+ ty::PredicateKind::Clause(ty::Clause::Trait(pred)) => {
// we ignore the value set to it.
let mut _constness = pred.constness;
obligation
@@ -97,7 +95,7 @@ impl<'tcx> InferCtxtExt<'tcx> for InferCtxt<'tcx> {
match self.evaluate_obligation(obligation) {
Ok(result) => result,
Err(OverflowError::Canonical) => {
- let mut selcx = SelectionContext::with_query_mode(&self, TraitQueryMode::Standard);
+ let mut selcx = SelectionContext::new(&self);
selcx.evaluate_root_obligation(obligation).unwrap_or_else(|r| match r {
OverflowError::Canonical => {
span_bug!(
diff --git a/compiler/rustc_trait_selection/src/traits/query/normalize.rs b/compiler/rustc_trait_selection/src/traits/query/normalize.rs
index 58e4597b7..7ad532d8a 100644
--- a/compiler/rustc_trait_selection/src/traits/query/normalize.rs
+++ b/compiler/rustc_trait_selection/src/traits/query/normalize.rs
@@ -7,7 +7,7 @@ use crate::infer::canonical::OriginalQueryValues;
use crate::infer::{InferCtxt, InferOk};
use crate::traits::error_reporting::TypeErrCtxtExt;
use crate::traits::project::{needs_normalization, BoundVarReplacer, PlaceholderReplacer};
-use crate::traits::{Obligation, ObligationCause, PredicateObligation, Reveal};
+use crate::traits::{ObligationCause, PredicateObligation, Reveal};
use rustc_data_structures::sso::SsoHashMap;
use rustc_data_structures::stack::ensure_sufficient_stack;
use rustc_infer::traits::Normalized;
@@ -22,13 +22,20 @@ use super::NoSolution;
pub use rustc_middle::traits::query::NormalizationResult;
-pub trait AtExt<'tcx> {
- fn normalize<T>(&self, value: T) -> Result<Normalized<'tcx, T>, NoSolution>
+pub trait QueryNormalizeExt<'tcx> {
+ /// Normalize a value using the `QueryNormalizer`.
+ ///
+ /// This normalization should *only* be used when the projection does not
+ /// have possible ambiguity or may not be well-formed.
+ ///
+ /// After codegen, when lifetimes do not matter, it is preferable to instead
+ /// use [`TyCtxt::normalize_erasing_regions`], which wraps this procedure.
+ fn query_normalize<T>(&self, value: T) -> Result<Normalized<'tcx, T>, NoSolution>
where
T: TypeFoldable<'tcx>;
}
-impl<'cx, 'tcx> AtExt<'tcx> for At<'cx, 'tcx> {
+impl<'cx, 'tcx> QueryNormalizeExt<'tcx> for At<'cx, 'tcx> {
/// Normalize `value` in the context of the inference context,
/// yielding a resulting type, or an error if `value` cannot be
/// normalized. If you don't care about regions, you should prefer
@@ -42,7 +49,7 @@ impl<'cx, 'tcx> AtExt<'tcx> for At<'cx, 'tcx> {
/// normalizing, but for now should be used only when we actually
/// know that normalization will succeed, since error reporting
/// and other details are still "under development".
- fn normalize<T>(&self, value: T) -> Result<Normalized<'tcx, T>, NoSolution>
+ fn query_normalize<T>(&self, value: T) -> Result<Normalized<'tcx, T>, NoSolution>
where
T: TypeFoldable<'tcx>,
{
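As an aside (illustration only, not from the patch): the projections that `query_normalize` resolves are ordinary associated-type references; at the language level, normalization is what turns `<Vec<u8> as IntoIterator>::Item` into `u8`. A minimal stable-Rust sketch:

fn first<I: IntoIterator>(iter: I) -> Option<<I as IntoIterator>::Item> {
    iter.into_iter().next()
}

fn main() {
    // `<Vec<u8> as IntoIterator>::Item` normalizes to `u8`.
    let first_byte: Option<u8> = first(vec![1_u8, 2, 3]);
    assert_eq!(first_byte, Some(1));
}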
@@ -207,13 +214,12 @@ impl<'cx, 'tcx> FallibleTypeFolder<'tcx> for QueryNormalizer<'cx, 'tcx> {
let substs = substs.try_fold_with(self)?;
let recursion_limit = self.tcx().recursion_limit();
if !recursion_limit.value_within_limit(self.anon_depth) {
- let obligation = Obligation::with_depth(
- self.cause.clone(),
- recursion_limit.0,
- self.param_env,
- ty,
+ self.infcx.err_ctxt().report_overflow_error(
+ &ty,
+ self.cause.span,
+ true,
+ |_| {},
);
- self.infcx.err_ctxt().report_overflow_error(&obligation, true);
}
let generic_ty = self.tcx().bound_type_of(def_id);
@@ -353,6 +359,10 @@ impl<'cx, 'tcx> FallibleTypeFolder<'tcx> for QueryNormalizer<'cx, 'tcx> {
&mut self,
constant: ty::Const<'tcx>,
) -> Result<ty::Const<'tcx>, Self::Error> {
+ if !needs_normalization(&constant, self.param_env.reveal()) {
+ return Ok(constant);
+ }
+
let constant = constant.try_super_fold_with(self)?;
debug!(?constant, ?self.param_env);
Ok(crate::traits::project::with_replaced_escaping_bound_vars(
diff --git a/compiler/rustc_trait_selection/src/traits/query/type_op/prove_predicate.rs b/compiler/rustc_trait_selection/src/traits/query/type_op/prove_predicate.rs
index 081308ac7..68434c2b6 100644
--- a/compiler/rustc_trait_selection/src/traits/query/type_op/prove_predicate.rs
+++ b/compiler/rustc_trait_selection/src/traits/query/type_op/prove_predicate.rs
@@ -15,7 +15,9 @@ impl<'tcx> super::QueryTypeOp<'tcx> for ProvePredicate<'tcx> {
// `&T`, accounts for about 60% of the predicates
// we have to prove. No need to canonicalize and all that for
// such cases.
- if let ty::PredicateKind::Trait(trait_ref) = key.value.predicate.kind().skip_binder() {
+ if let ty::PredicateKind::Clause(ty::Clause::Trait(trait_ref)) =
+ key.value.predicate.kind().skip_binder()
+ {
if let Some(sized_def_id) = tcx.lang_items().sized_trait() {
if trait_ref.def_id() == sized_def_id {
if trait_ref.self_ty().is_trivially_sized(tcx) {
@@ -33,7 +35,7 @@ impl<'tcx> super::QueryTypeOp<'tcx> for ProvePredicate<'tcx> {
mut canonicalized: Canonicalized<'tcx, ParamEnvAnd<'tcx, Self>>,
) -> Fallible<CanonicalizedQueryResponse<'tcx, ()>> {
match canonicalized.value.value.predicate.kind().skip_binder() {
- ty::PredicateKind::Trait(pred) => {
+ ty::PredicateKind::Clause(ty::Clause::Trait(pred)) => {
canonicalized.value.param_env.remap_constness_with(pred.constness);
}
_ => canonicalized.value.param_env = canonicalized.value.param_env.without_const(),
diff --git a/compiler/rustc_trait_selection/src/traits/relationships.rs b/compiler/rustc_trait_selection/src/traits/relationships.rs
index 8cf500a46..bfa318787 100644
--- a/compiler/rustc_trait_selection/src/traits/relationships.rs
+++ b/compiler/rustc_trait_selection/src/traits/relationships.rs
@@ -1,8 +1,8 @@
use crate::infer::InferCtxt;
use crate::traits::query::evaluate_obligation::InferCtxtExt;
-use crate::traits::{ObligationCause, PredicateObligation};
+use crate::traits::PredicateObligation;
use rustc_infer::traits::TraitEngine;
-use rustc_middle::ty::{self, ToPredicate};
+use rustc_middle::ty;
pub(crate) fn update<'tcx, T>(
engine: &mut T,
@@ -12,34 +12,22 @@ pub(crate) fn update<'tcx, T>(
T: TraitEngine<'tcx>,
{
// (*) binder skipped
- if let ty::PredicateKind::Trait(tpred) = obligation.predicate.kind().skip_binder()
+ if let ty::PredicateKind::Clause(ty::Clause::Trait(tpred)) = obligation.predicate.kind().skip_binder()
&& let Some(ty) = infcx.shallow_resolve(tpred.self_ty()).ty_vid().map(|t| infcx.root_var(t))
&& infcx.tcx.lang_items().sized_trait().map_or(false, |st| st != tpred.trait_ref.def_id)
{
let new_self_ty = infcx.tcx.types.unit;
- let trait_ref = ty::TraitRef {
- substs: infcx.tcx.mk_substs_trait(new_self_ty, &tpred.trait_ref.substs[1..]),
- ..tpred.trait_ref
- };
-
// Then construct a new obligation with Self = () added
// to the ParamEnv, and see if it holds.
- let o = rustc_infer::traits::Obligation::new(
- ObligationCause::dummy(),
- obligation.param_env,
+ let o = obligation.with(infcx.tcx,
obligation
.predicate
.kind()
.rebind(
// (*) binder moved here
- ty::PredicateKind::Trait(ty::TraitPredicate {
- trait_ref,
- constness: tpred.constness,
- polarity: tpred.polarity,
- })
- )
- .to_predicate(infcx.tcx),
+ ty::PredicateKind::Clause(ty::Clause::Trait(tpred.with_self_type(infcx.tcx, new_self_ty)))
+ ),
);
// Don't report overflow errors. Otherwise equivalent to may_hold.
if let Ok(result) = infcx.probe(|_| infcx.evaluate_obligation(&o)) && result.may_apply() {
@@ -47,7 +35,9 @@ pub(crate) fn update<'tcx, T>(
}
}
- if let ty::PredicateKind::Projection(predicate) = obligation.predicate.kind().skip_binder() {
+ if let ty::PredicateKind::Clause(ty::Clause::Projection(predicate)) =
+ obligation.predicate.kind().skip_binder()
+ {
// If the projection predicate (Foo::Bar == X) has X as a non-TyVid,
// we need to make it into one.
if let Some(vid) = predicate.term.ty().and_then(|ty| ty.ty_vid()) {
diff --git a/compiler/rustc_trait_selection/src/traits/select/candidate_assembly.rs b/compiler/rustc_trait_selection/src/traits/select/candidate_assembly.rs
index 4c5bc3339..e4b70f0d2 100644
--- a/compiler/rustc_trait_selection/src/traits/select/candidate_assembly.rs
+++ b/compiler/rustc_trait_selection/src/traits/select/candidate_assembly.rs
@@ -6,239 +6,21 @@
//!
//! [rustc dev guide]:https://rustc-dev-guide.rust-lang.org/traits/resolution.html#candidate-assembly
use hir::LangItem;
-use rustc_errors::DelayDm;
use rustc_hir as hir;
-use rustc_hir::def_id::DefId;
use rustc_infer::traits::ObligationCause;
use rustc_infer::traits::{Obligation, SelectionError, TraitObligation};
-use rustc_lint_defs::builtin::DEREF_INTO_DYN_SUPERTRAIT;
-use rustc_middle::ty::print::with_no_trimmed_paths;
-use rustc_middle::ty::{self, ToPredicate, Ty, TypeVisitable};
+use rustc_middle::ty::{self, Ty, TypeVisitable};
use rustc_target::spec::abi::Abi;
use crate::traits;
-use crate::traits::coherence::Conflict;
use crate::traits::query::evaluate_obligation::InferCtxtExt;
-use crate::traits::{util, SelectionResult};
-use crate::traits::{Ambiguous, ErrorReporting, Overflow, Unimplemented};
+use crate::traits::util;
use super::BuiltinImplConditions;
-use super::IntercrateAmbiguityCause;
-use super::OverflowError;
-use super::SelectionCandidate::{self, *};
-use super::{EvaluatedCandidate, SelectionCandidateSet, SelectionContext, TraitObligationStack};
+use super::SelectionCandidate::*;
+use super::{SelectionCandidateSet, SelectionContext, TraitObligationStack};
impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> {
- #[instrument(level = "debug", skip(self), ret)]
- pub(super) fn candidate_from_obligation<'o>(
- &mut self,
- stack: &TraitObligationStack<'o, 'tcx>,
- ) -> SelectionResult<'tcx, SelectionCandidate<'tcx>> {
- // Watch out for overflow. This intentionally bypasses (and does
- // not update) the cache.
- self.check_recursion_limit(&stack.obligation, &stack.obligation)?;
-
- // Check the cache. Note that we freshen the trait-ref
- // separately rather than using `stack.fresh_trait_ref` --
- // this is because we want the unbound variables to be
- // replaced with fresh types starting from index 0.
- let cache_fresh_trait_pred = self.infcx.freshen(stack.obligation.predicate);
- debug!(?cache_fresh_trait_pred);
- debug_assert!(!stack.obligation.predicate.has_escaping_bound_vars());
-
- if let Some(c) =
- self.check_candidate_cache(stack.obligation.param_env, cache_fresh_trait_pred)
- {
- debug!("CACHE HIT");
- return c;
- }
-
- // If no match, compute result and insert into cache.
- //
- // FIXME(nikomatsakis) -- this cache is not taking into
- // account cycles that may have occurred in forming the
- // candidate. I don't know of any specific problems that
- // result but it seems awfully suspicious.
- let (candidate, dep_node) =
- self.in_task(|this| this.candidate_from_obligation_no_cache(stack));
-
- debug!("CACHE MISS");
- self.insert_candidate_cache(
- stack.obligation.param_env,
- cache_fresh_trait_pred,
- dep_node,
- candidate.clone(),
- );
- candidate
- }
-
- fn candidate_from_obligation_no_cache<'o>(
- &mut self,
- stack: &TraitObligationStack<'o, 'tcx>,
- ) -> SelectionResult<'tcx, SelectionCandidate<'tcx>> {
- if let Err(conflict) = self.is_knowable(stack) {
- debug!("coherence stage: not knowable");
- if self.intercrate_ambiguity_causes.is_some() {
- debug!("evaluate_stack: intercrate_ambiguity_causes is some");
- // Heuristics: show the diagnostics when there are no candidates in crate.
- if let Ok(candidate_set) = self.assemble_candidates(stack) {
- let mut no_candidates_apply = true;
-
- for c in candidate_set.vec.iter() {
- if self.evaluate_candidate(stack, &c)?.may_apply() {
- no_candidates_apply = false;
- break;
- }
- }
-
- if !candidate_set.ambiguous && no_candidates_apply {
- let trait_ref = stack.obligation.predicate.skip_binder().trait_ref;
- let self_ty = trait_ref.self_ty();
- let (trait_desc, self_desc) = with_no_trimmed_paths!({
- let trait_desc = trait_ref.print_only_trait_path().to_string();
- let self_desc = if self_ty.has_concrete_skeleton() {
- Some(self_ty.to_string())
- } else {
- None
- };
- (trait_desc, self_desc)
- });
- let cause = if let Conflict::Upstream = conflict {
- IntercrateAmbiguityCause::UpstreamCrateUpdate { trait_desc, self_desc }
- } else {
- IntercrateAmbiguityCause::DownstreamCrate { trait_desc, self_desc }
- };
- debug!(?cause, "evaluate_stack: pushing cause");
- self.intercrate_ambiguity_causes.as_mut().unwrap().insert(cause);
- }
- }
- }
- return Ok(None);
- }
-
- let candidate_set = self.assemble_candidates(stack)?;
-
- if candidate_set.ambiguous {
- debug!("candidate set contains ambig");
- return Ok(None);
- }
-
- let candidates = candidate_set.vec;
-
- debug!(?stack, ?candidates, "assembled {} candidates", candidates.len());
-
- // At this point, we know that each of the entries in the
- // candidate set is *individually* applicable. Now we have to
- // figure out if they contain mutual incompatibilities. This
- // frequently arises if we have an unconstrained input type --
- // for example, we are looking for `$0: Eq` where `$0` is some
- // unconstrained type variable. In that case, we'll get a
- // candidate which assumes $0 == int, one that assumes `$0 ==
- // usize`, etc. This spells an ambiguity.
-
- let mut candidates = self.filter_impls(candidates, stack.obligation);
-
- // If there is more than one candidate, first winnow them down
- // by considering extra conditions (nested obligations and so
- // forth). We don't winnow if there is exactly one
- // candidate. This is a relatively minor distinction but it
- // can lead to better inference and error-reporting. An
- // example would be if there was an impl:
- //
- // impl<T:Clone> Vec<T> { fn push_clone(...) { ... } }
- //
- // and we were to see some code `foo.push_clone()` where `foo`
- // is a `Vec<Bar>` and `Bar` does not implement `Clone`. If
- // we were to winnow, we'd wind up with zero candidates.
- // Instead, we select the right impl now but report "`Bar` does
- // not implement `Clone`".
- if candidates.len() == 1 {
- return self.filter_reservation_impls(candidates.pop().unwrap(), stack.obligation);
- }
-
- // Winnow, but record the exact outcome of evaluation, which
- // is needed for specialization. Propagate overflow if it occurs.
- let mut candidates = candidates
- .into_iter()
- .map(|c| match self.evaluate_candidate(stack, &c) {
- Ok(eval) if eval.may_apply() => {
- Ok(Some(EvaluatedCandidate { candidate: c, evaluation: eval }))
- }
- Ok(_) => Ok(None),
- Err(OverflowError::Canonical) => Err(Overflow(OverflowError::Canonical)),
- Err(OverflowError::ErrorReporting) => Err(ErrorReporting),
- Err(OverflowError::Error(e)) => Err(Overflow(OverflowError::Error(e))),
- })
- .flat_map(Result::transpose)
- .collect::<Result<Vec<_>, _>>()?;
-
- debug!(?stack, ?candidates, "winnowed to {} candidates", candidates.len());
-
- let needs_infer = stack.obligation.predicate.has_non_region_infer();
-
- // If there are STILL multiple candidates, we can further
- // reduce the list by dropping duplicates -- including
- // resolving specializations.
- if candidates.len() > 1 {
- let mut i = 0;
- while i < candidates.len() {
- let is_dup = (0..candidates.len()).filter(|&j| i != j).any(|j| {
- self.candidate_should_be_dropped_in_favor_of(
- &candidates[i],
- &candidates[j],
- needs_infer,
- )
- });
- if is_dup {
- debug!(candidate = ?candidates[i], "Dropping candidate #{}/{}", i, candidates.len());
- candidates.swap_remove(i);
- } else {
- debug!(candidate = ?candidates[i], "Retaining candidate #{}/{}", i, candidates.len());
- i += 1;
-
- // If there are *STILL* multiple candidates, give up
- // and report ambiguity.
- if i > 1 {
- debug!("multiple matches, ambig");
- return Err(Ambiguous(
- candidates
- .into_iter()
- .filter_map(|c| match c.candidate {
- SelectionCandidate::ImplCandidate(def_id) => Some(def_id),
- _ => None,
- })
- .collect(),
- ));
- }
- }
- }
- }
-
- // If there are *NO* candidates, then there are no impls --
- // that we know of, anyway. Note that in the case where there
- // are unbound type variables within the obligation, it might
- // be the case that you could still satisfy the obligation
- // from another crate by instantiating the type variables with
- // a type from another crate that does have an impl. This case
- // is checked for in `evaluate_stack` (and hence users
- // who might care about this case, like coherence, should use
- // that function).
- if candidates.is_empty() {
- // If there's an error type, 'downgrade' our result from
- // `Err(Unimplemented)` to `Ok(None)`. This helps us avoid
- // emitting additional spurious errors, since we're guaranteed
- // to have emitted at least one.
- if stack.obligation.predicate.references_error() {
- debug!(?stack.obligation.predicate, "found error type in predicate, treating as ambiguous");
- return Ok(None);
- }
- return Err(Unimplemented);
- }
-
- // Just one candidate left.
- self.filter_reservation_impls(candidates.pop().unwrap().candidate, stack.obligation)
- }
-
#[instrument(skip(self, stack), level = "debug")]
pub(super) fn assemble_candidates<'o>(
&mut self,
@@ -249,7 +31,7 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> {
param_env: obligation.param_env,
cause: obligation.cause.clone(),
recursion_depth: obligation.recursion_depth,
- predicate: self.infcx().resolve_vars_if_possible(obligation.predicate),
+ predicate: self.infcx.resolve_vars_if_possible(obligation.predicate),
};
if obligation.predicate.skip_binder().self_ty().is_ty_var() {
@@ -293,10 +75,10 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> {
self.assemble_builtin_bound_candidates(copy_conditions, &mut candidates);
} else if lang_items.discriminant_kind_trait() == Some(def_id) {
// `DiscriminantKind` is automatically implemented for every type.
- candidates.vec.push(DiscriminantKindCandidate);
+ candidates.vec.push(BuiltinCandidate { has_nested: false });
} else if lang_items.pointee_trait() == Some(def_id) {
// `Pointee` is automatically implemented for every type.
- candidates.vec.push(PointeeCandidate);
+ candidates.vec.push(BuiltinCandidate { has_nested: false });
} else if lang_items.sized_trait() == Some(def_id) {
// Sized is never implementable by end-users, it is
// always automatically computed.
@@ -312,6 +94,8 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> {
self.assemble_candidates_for_transmutability(obligation, &mut candidates);
} else if lang_items.tuple_trait() == Some(def_id) {
self.assemble_candidate_for_tuple(obligation, &mut candidates);
+ } else if lang_items.pointer_sized() == Some(def_id) {
+ self.assemble_candidate_for_ptr_sized(obligation, &mut candidates);
} else {
if lang_items.clone_trait() == Some(def_id) {
// Same builtin conditions as `Copy`, i.e., every type which has builtin support
@@ -321,7 +105,12 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> {
self.assemble_builtin_bound_candidates(clone_conditions, &mut candidates);
}
- self.assemble_generator_candidates(obligation, &mut candidates);
+ if lang_items.gen_trait() == Some(def_id) {
+ self.assemble_generator_candidates(obligation, &mut candidates);
+ } else if lang_items.future_trait() == Some(def_id) {
+ self.assemble_future_candidates(obligation, &mut candidates);
+ }
+
self.assemble_closure_candidates(obligation, &mut candidates);
self.assemble_fn_pointer_candidates(obligation, &mut candidates);
self.assemble_candidates_from_impls(obligation, &mut candidates);
@@ -409,16 +198,14 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> {
obligation: &TraitObligation<'tcx>,
candidates: &mut SelectionCandidateSet<'tcx>,
) {
- if self.tcx().lang_items().gen_trait() != Some(obligation.predicate.def_id()) {
- return;
- }
-
// Okay to skip binder because the substs on generator types never
// touch bound regions, they just capture the in-scope
// type/region parameters.
let self_ty = obligation.self_ty().skip_binder();
match self_ty.kind() {
- ty::Generator(..) => {
+ // async constructs get lowered to a special kind of generator that
+ // should *not* `impl Generator`.
+ ty::Generator(did, ..) if !self.tcx().generator_is_async(*did) => {
debug!(?self_ty, ?obligation, "assemble_generator_candidates",);
candidates.vec.push(GeneratorCandidate);
@@ -431,6 +218,23 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> {
}
}
+ fn assemble_future_candidates(
+ &mut self,
+ obligation: &TraitObligation<'tcx>,
+ candidates: &mut SelectionCandidateSet<'tcx>,
+ ) {
+ let self_ty = obligation.self_ty().skip_binder();
+ if let ty::Generator(did, ..) = self_ty.kind() {
+ // async constructs get lowered to a special kind of generator that
+ // should directly `impl Future`.
+ if self.tcx().generator_is_async(*did) {
+ debug!(?self_ty, ?obligation, "assemble_future_candidates",);
+
+ candidates.vec.push(FutureCandidate);
+ }
+ }
+ }
+
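For context (editorial sketch, not part of the patch): `assemble_future_candidates` covers the generators produced by `async` lowering, which per the comments above implement `Future` directly rather than `Generator`. A minimal stable-Rust sketch of the `Future::Output` projection this feeds:

use std::future::Future;

async fn add(a: u32, b: u32) -> u32 {
    a + b
}

// `<impl Future as Future>::Output` is projected from the async generator's return type.
fn assert_output<F: Future<Output = u32>>(_f: F) {}

fn main() {
    assert_output(add(1, 2));
}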
/// Checks for the artificial impl that the compiler will create for an obligation like `X :
/// FnMut<..>` where `X` is a closure type.
///
@@ -442,7 +246,7 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> {
obligation: &TraitObligation<'tcx>,
candidates: &mut SelectionCandidateSet<'tcx>,
) {
- let Some(kind) = self.tcx().fn_trait_kind_from_lang_item(obligation.predicate.def_id()) else {
+ let Some(kind) = self.tcx().fn_trait_kind_from_def_id(obligation.predicate.def_id()) else {
return;
};
@@ -480,7 +284,7 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> {
candidates: &mut SelectionCandidateSet<'tcx>,
) {
// We provide impl of all fn traits for fn pointers.
- if self.tcx().fn_trait_kind_from_lang_item(obligation.predicate.def_id()).is_none() {
+ if !self.tcx().is_fn_trait(obligation.predicate.def_id()) {
return;
}
@@ -680,9 +484,9 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> {
debug!(?poly_trait_ref, "assemble_candidates_from_object_ty");
- let poly_trait_predicate = self.infcx().resolve_vars_if_possible(obligation.predicate);
+ let poly_trait_predicate = self.infcx.resolve_vars_if_possible(obligation.predicate);
let placeholder_trait_predicate =
- self.infcx().replace_bound_vars_with_placeholders(poly_trait_predicate);
+ self.infcx.replace_bound_vars_with_placeholders(poly_trait_predicate);
// Count only those upcast versions that match the trait-ref
// we are looking for. Specifically, do not only check for the
@@ -713,48 +517,40 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> {
ty: Ty<'tcx>,
param_env: ty::ParamEnv<'tcx>,
cause: &ObligationCause<'tcx>,
- ) -> Option<(Ty<'tcx>, DefId)> {
+ ) -> Option<ty::PolyExistentialTraitRef<'tcx>> {
let tcx = self.tcx();
if tcx.features().trait_upcasting {
return None;
}
// <ty as Deref>
- let trait_ref = ty::TraitRef {
- def_id: tcx.lang_items().deref_trait()?,
- substs: tcx.mk_substs_trait(ty, &[]),
- };
+ let trait_ref = tcx.mk_trait_ref(tcx.lang_items().deref_trait()?, [ty]);
- let obligation = traits::Obligation::new(
- cause.clone(),
- param_env,
- ty::Binder::dummy(trait_ref).without_const().to_predicate(tcx),
- );
+ let obligation =
+ traits::Obligation::new(tcx, cause.clone(), param_env, ty::Binder::dummy(trait_ref));
if !self.infcx.predicate_may_hold(&obligation) {
return None;
}
- let ty = traits::normalize_projection_type(
- self,
- param_env,
- ty::ProjectionTy {
- item_def_id: tcx.lang_items().deref_target()?,
- substs: trait_ref.substs,
- },
- cause.clone(),
- 0,
- // We're *intentionally* throwing these away,
- // since we don't actually use them.
- &mut vec![],
- )
- .ty()
- .unwrap();
-
- if let ty::Dynamic(data, ..) = ty.kind() {
- Some((ty, data.principal_def_id()?))
- } else {
- None
- }
+ self.infcx.probe(|_| {
+ let ty = traits::normalize_projection_type(
+ self,
+ param_env,
+ ty::ProjectionTy {
+ item_def_id: tcx.lang_items().deref_target()?,
+ substs: trait_ref.substs,
+ },
+ cause.clone(),
+ 0,
+ // We're *intentionally* throwing these away,
+ // since we don't actually use them.
+ &mut vec![],
+ )
+ .ty()
+ .unwrap();
+
+ if let ty::Dynamic(data, ..) = ty.kind() { data.principal() } else { None }
+ })
}
/// Searches for unsizing that might apply to `obligation`.
@@ -787,7 +583,7 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> {
match (source.kind(), target.kind()) {
// Trait+Kx+'a -> Trait+Ky+'b (upcasts).
- (&ty::Dynamic(ref data_a, ..), &ty::Dynamic(ref data_b, ..)) => {
+ (&ty::Dynamic(ref data_a, _, ty::Dyn), &ty::Dynamic(ref data_b, _, ty::Dyn)) => {
// Upcast coercions permit several things:
//
// 1. Dropping auto traits, e.g., `Foo + Send` to `Foo`
@@ -814,24 +610,12 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> {
let principal_a = data_a.principal().unwrap();
let target_trait_did = principal_def_id_b.unwrap();
let source_trait_ref = principal_a.with_self_ty(self.tcx(), source);
- if let Some((deref_output_ty, deref_output_trait_did)) = self
- .need_migrate_deref_output_trait_object(
- source,
- obligation.param_env,
- &obligation.cause,
- )
- {
- if deref_output_trait_did == target_trait_did {
- self.tcx().struct_span_lint_hir(
- DEREF_INTO_DYN_SUPERTRAIT,
- obligation.cause.body_id,
- obligation.cause.span,
- DelayDm(|| format!(
- "`{}` implements `Deref` with supertrait `{}` as output",
- source, deref_output_ty
- )),
- |lint| lint,
- );
+ if let Some(deref_trait_ref) = self.need_migrate_deref_output_trait_object(
+ source,
+ obligation.param_env,
+ &obligation.cause,
+ ) {
+ if deref_trait_ref.def_id() == target_trait_did {
return;
}
}
@@ -848,7 +632,7 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> {
}
// `T` -> `Trait`
- (_, &ty::Dynamic(..)) => {
+ (_, &ty::Dynamic(_, _, ty::Dyn)) => {
candidates.vec.push(BuiltinUnsizeCandidate);
}
@@ -951,7 +735,7 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> {
return;
}
- let self_ty = self.infcx().shallow_resolve(obligation.self_ty());
+ let self_ty = self.infcx.shallow_resolve(obligation.self_ty());
match self_ty.skip_binder().kind() {
ty::Opaque(..)
| ty::Dynamic(..)
@@ -1018,7 +802,7 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> {
obligation: &TraitObligation<'tcx>,
candidates: &mut SelectionCandidateSet<'tcx>,
) {
- let self_ty = self.infcx().shallow_resolve(obligation.self_ty().skip_binder());
+ let self_ty = self.infcx.shallow_resolve(obligation.self_ty().skip_binder());
match self_ty.kind() {
ty::Tuple(_) => {
candidates.vec.push(BuiltinCandidate { has_nested: false });
@@ -1054,4 +838,30 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> {
| ty::Placeholder(_) => {}
}
}
+
+ fn assemble_candidate_for_ptr_sized(
+ &mut self,
+ obligation: &TraitObligation<'tcx>,
+ candidates: &mut SelectionCandidateSet<'tcx>,
+ ) {
+ // The regions of a type don't affect the size of the type
+ let self_ty = self
+ .tcx()
+ .erase_regions(self.tcx().erase_late_bound_regions(obligation.predicate.self_ty()));
+
+ // But if there are inference variables, we have to wait until it's resolved.
+ if self_ty.has_non_region_infer() {
+ candidates.ambiguous = true;
+ return;
+ }
+
+ let usize_layout =
+ self.tcx().layout_of(ty::ParamEnv::empty().and(self.tcx().types.usize)).unwrap().layout;
+ if let Ok(layout) = self.tcx().layout_of(obligation.param_env.and(self_ty))
+ && layout.layout.size() == usize_layout.size()
+ && layout.layout.align().abi == usize_layout.align().abi
+ {
+ candidates.vec.push(BuiltinCandidate { has_nested: false });
+ }
+ }
}
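Illustrative aside, not part of the patch: the new `assemble_candidate_for_ptr_sized` accepts a type only when its layout matches `usize` in both size and ABI alignment. A sketch of that check in plain Rust, using a hypothetical `is_pointer_sized` helper:

use std::mem::{align_of, size_of};

fn is_pointer_sized<T>() -> bool {
    size_of::<T>() == size_of::<usize>() && align_of::<T>() == align_of::<usize>()
}

fn main() {
    assert!(is_pointer_sized::<&u8>()); // thin pointers are one word
    assert!(!is_pointer_sized::<&[u8]>()); // fat pointers are pointer + length
}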
diff --git a/compiler/rustc_trait_selection/src/traits/select/confirmation.rs b/compiler/rustc_trait_selection/src/traits/select/confirmation.rs
index ed22058c6..fda415155 100644
--- a/compiler/rustc_trait_selection/src/traits/select/confirmation.rs
+++ b/compiler/rustc_trait_selection/src/traits/select/confirmation.rs
@@ -19,14 +19,18 @@ use rustc_span::def_id::DefId;
use crate::traits::project::{normalize_with_depth, normalize_with_depth_to};
use crate::traits::util::{self, closure_trait_ref_and_return_type, predicate_for_trait_def};
+use crate::traits::vtable::{
+ count_own_vtable_entries, prepare_vtable_segments, vtable_trait_first_method_offset,
+ VtblSegment,
+};
use crate::traits::{
BuiltinDerivedObligation, ImplDerivedObligation, ImplDerivedObligationCause, ImplSource,
ImplSourceAutoImplData, ImplSourceBuiltinData, ImplSourceClosureData,
- ImplSourceConstDestructData, ImplSourceDiscriminantKindData, ImplSourceFnPointerData,
- ImplSourceGeneratorData, ImplSourceObjectData, ImplSourcePointeeData, ImplSourceTraitAliasData,
+ ImplSourceConstDestructData, ImplSourceFnPointerData, ImplSourceFutureData,
+ ImplSourceGeneratorData, ImplSourceObjectData, ImplSourceTraitAliasData,
ImplSourceTraitUpcastingData, ImplSourceUserDefinedData, Normalized, ObjectCastObligation,
Obligation, ObligationCause, OutputTypeParameterMismatch, PredicateObligation, Selection,
- SelectionError, TraitNotObjectSafe, TraitObligation, Unimplemented, VtblSegment,
+ SelectionError, TraitNotObjectSafe, TraitObligation, Unimplemented,
};
use super::BuiltinImplConditions;
@@ -89,17 +93,16 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> {
ImplSource::Generator(vtable_generator)
}
+ FutureCandidate => {
+ let vtable_future = self.confirm_future_candidate(obligation)?;
+ ImplSource::Future(vtable_future)
+ }
+
FnPointerCandidate { .. } => {
let data = self.confirm_fn_pointer_candidate(obligation)?;
ImplSource::FnPointer(data)
}
- DiscriminantKindCandidate => {
- ImplSource::DiscriminantKind(ImplSourceDiscriminantKindData)
- }
-
- PointeeCandidate => ImplSource::Pointee(ImplSourcePointeeData),
-
TraitAliasCandidate => {
let data = self.confirm_trait_alias_candidate(obligation);
ImplSource::TraitAlias(data)
@@ -148,7 +151,7 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> {
let trait_predicate = self.infcx.shallow_resolve(obligation.predicate);
let placeholder_trait_predicate =
- self.infcx().replace_bound_vars_with_placeholders(trait_predicate).trait_ref;
+ self.infcx.replace_bound_vars_with_placeholders(trait_predicate).trait_ref;
let placeholder_self_ty = placeholder_trait_predicate.self_ty();
let placeholder_trait_predicate = ty::Binder::dummy(placeholder_trait_predicate);
let (def_id, substs) = match *placeholder_self_ty.kind() {
@@ -194,6 +197,7 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> {
&mut obligations,
);
obligations.push(Obligation::with_depth(
+ self.tcx(),
obligation.cause.clone(),
obligation.recursion_depth + 1,
obligation.param_env,
@@ -482,11 +486,7 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> {
super_trait,
&mut nested,
);
- nested.push(Obligation::new(
- obligation.cause.clone(),
- obligation.param_env,
- normalized_super_trait,
- ));
+ nested.push(obligation.with(tcx, normalized_super_trait));
}
let assoc_types: Vec<_> = tcx
@@ -555,13 +555,13 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> {
GenericParamDefKind::Const { .. } => {
let bound_var = ty::BoundVariableKind::Const;
bound_vars.push(bound_var);
- tcx.mk_const(ty::ConstS {
- ty: tcx.type_of(param.def_id),
- kind: ty::ConstKind::Bound(
+ tcx.mk_const(
+ ty::ConstKind::Bound(
ty::INNERMOST,
ty::BoundVar::from_usize(bound_vars.len() - 1),
),
- })
+ tcx.type_of(param.def_id),
+ )
.into()
}
});
@@ -581,17 +581,13 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> {
subst_bound,
&mut nested,
);
- nested.push(Obligation::new(
- obligation.cause.clone(),
- obligation.param_env,
- normalized_bound,
- ));
+ nested.push(obligation.with(tcx, normalized_bound));
}
}
debug!(?nested, "object nested obligations");
- let vtable_base = super::super::vtable_trait_first_method_offset(
+ let vtable_base = vtable_trait_first_method_offset(
tcx,
(unnormalized_upcast_trait_ref, ty::Binder::dummy(object_trait_ref)),
);
@@ -606,8 +602,10 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> {
{
debug!(?obligation, "confirm_fn_pointer_candidate");
- // Okay to skip binder; it is reintroduced below.
- let self_ty = self.infcx.shallow_resolve(obligation.self_ty().skip_binder());
+ let self_ty = self
+ .infcx
+ .shallow_resolve(obligation.self_ty().no_bound_vars())
+ .expect("fn pointer should not capture bound vars from predicate");
let sig = self_ty.fn_sig(self.tcx());
let trait_ref = closure_trait_ref_and_return_type(
self.tcx(),
@@ -622,15 +620,7 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> {
// Confirm the `type Output: Sized;` bound that is present on `FnOnce`
let cause = obligation.derived_cause(BuiltinDerivedObligation);
- // The binder on the Fn obligation is "less" important than the one on
- // the signature, as evidenced by how we treat it during projection.
- // The safe thing to do here is to liberate it, though, which should
- // have no worse effect than skipping the binder here.
- let liberated_fn_ty =
- self.infcx.replace_bound_vars_with_placeholders(obligation.predicate.rebind(self_ty));
- let output_ty = self
- .infcx
- .replace_bound_vars_with_placeholders(liberated_fn_ty.fn_sig(self.tcx()).output());
+ let output_ty = self.infcx.replace_bound_vars_with_placeholders(sig.output());
let output_ty = normalize_with_depth_to(
self,
obligation.param_env,
@@ -639,15 +629,9 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> {
output_ty,
&mut nested,
);
- let tr = ty::Binder::dummy(ty::TraitRef::new(
- self.tcx().require_lang_item(LangItem::Sized, None),
- self.tcx().mk_substs_trait(output_ty, &[]),
- ));
- nested.push(Obligation::new(
- cause,
- obligation.param_env,
- tr.to_poly_trait_predicate().to_predicate(self.tcx()),
- ));
+ let tr =
+ ty::Binder::dummy(self.tcx().at(cause.span).mk_trait_ref(LangItem::Sized, [output_ty]));
+ nested.push(Obligation::new(self.infcx.tcx, cause, obligation.param_env, tr));
Ok(ImplSourceFnPointerData { fn_ty: self_ty, nested })
}
@@ -659,7 +643,7 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> {
debug!(?obligation, "confirm_trait_alias_candidate");
let alias_def_id = obligation.predicate.def_id();
- let predicate = self.infcx().replace_bound_vars_with_placeholders(obligation.predicate);
+ let predicate = self.infcx.replace_bound_vars_with_placeholders(obligation.predicate);
let trait_ref = predicate.trait_ref;
let trait_def_id = trait_ref.def_id;
let substs = trait_ref.substs;
@@ -693,7 +677,24 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> {
debug!(?obligation, ?generator_def_id, ?substs, "confirm_generator_candidate");
- let trait_ref = self.generator_trait_ref_unnormalized(obligation, substs);
+ let gen_sig = substs.as_generator().poly_sig();
+
+ // NOTE: The self-type is a generator type and hence is
+ // in fact unparameterized (or at least does not reference any
+ // regions bound in the obligation).
+ let self_ty = obligation
+ .predicate
+ .self_ty()
+ .no_bound_vars()
+ .expect("unboxed closure type should not capture bound vars from the predicate");
+
+ let trait_ref = super::util::generator_trait_ref_and_outputs(
+ self.tcx(),
+ obligation.predicate.def_id(),
+ self_ty,
+ gen_sig,
+ )
+ .map_bound(|(trait_ref, ..)| trait_ref);
let nested = self.confirm_poly_trait_refs(obligation, trait_ref)?;
debug!(?trait_ref, ?nested, "generator candidate obligations");
@@ -701,6 +702,36 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> {
Ok(ImplSourceGeneratorData { generator_def_id, substs, nested })
}
+ fn confirm_future_candidate(
+ &mut self,
+ obligation: &TraitObligation<'tcx>,
+ ) -> Result<ImplSourceFutureData<'tcx, PredicateObligation<'tcx>>, SelectionError<'tcx>> {
+ // Okay to skip binder because the substs on generator types never
+ // touch bound regions, they just capture the in-scope
+ // type/region parameters.
+ let self_ty = self.infcx.shallow_resolve(obligation.self_ty().skip_binder());
+ let ty::Generator(generator_def_id, substs, _) = *self_ty.kind() else {
+ bug!("closure candidate for non-closure {:?}", obligation);
+ };
+
+ debug!(?obligation, ?generator_def_id, ?substs, "confirm_future_candidate");
+
+ let gen_sig = substs.as_generator().poly_sig();
+
+ let trait_ref = super::util::future_trait_ref_and_outputs(
+ self.tcx(),
+ obligation.predicate.def_id(),
+ obligation.predicate.no_bound_vars().expect("future has no bound vars").self_ty(),
+ gen_sig,
+ )
+ .map_bound(|(trait_ref, ..)| trait_ref);
+
+ let nested = self.confirm_poly_trait_refs(obligation, trait_ref)?;
+ debug!(?trait_ref, ?nested, "future candidate obligations");
+
+ Ok(ImplSourceFutureData { generator_def_id, substs, nested })
+ }
+
#[instrument(skip(self), level = "debug")]
fn confirm_closure_candidate(
&mut self,
@@ -708,7 +739,7 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> {
) -> Result<ImplSourceClosureData<'tcx, PredicateObligation<'tcx>>, SelectionError<'tcx>> {
let kind = self
.tcx()
- .fn_trait_kind_from_lang_item(obligation.predicate.def_id())
+ .fn_trait_kind_from_def_id(obligation.predicate.def_id())
.unwrap_or_else(|| bug!("closure candidate for non-fn trait {:?}", obligation));
// Okay to skip binder because the substs on closure types never
@@ -727,11 +758,9 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> {
// FIXME: Chalk
if !self.tcx().sess.opts.unstable_opts.chalk {
- nested.push(Obligation::new(
- obligation.cause.clone(),
- obligation.param_env,
- ty::Binder::dummy(ty::PredicateKind::ClosureKind(closure_def_id, substs, kind))
- .to_predicate(self.tcx()),
+ nested.push(obligation.with(
+ self.tcx(),
+ ty::Binder::dummy(ty::PredicateKind::ClosureKind(closure_def_id, substs, kind)),
));
}
@@ -813,9 +842,10 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> {
let upcast_trait_ref;
match (source.kind(), target.kind()) {
// TraitA+Kx+'a -> TraitB+Ky+'b (trait upcasting coercion).
- (&ty::Dynamic(ref data_a, r_a, repr_a), &ty::Dynamic(ref data_b, r_b, repr_b))
- if repr_a == repr_b =>
- {
+ (
+ &ty::Dynamic(ref data_a, r_a, repr_a @ ty::Dyn),
+ &ty::Dynamic(ref data_b, r_b, ty::Dyn),
+ ) => {
// See `assemble_candidates_for_unsizing` for more info.
// We already checked the compatibility of auto traits within `assemble_candidates_for_unsizing`.
let principal_a = data_a.principal().unwrap();
@@ -841,7 +871,7 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> {
.map(ty::Binder::dummy),
);
let existential_predicates = tcx.mk_poly_existential_predicates(iter);
- let source_trait = tcx.mk_dynamic(existential_predicates, r_b, repr_b);
+ let source_trait = tcx.mk_dynamic(existential_predicates, r_b, repr_a);
// Require that the traits involved in this upcast are **equal**;
// only the **lifetime bound** is changed.
@@ -860,10 +890,11 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> {
);
let outlives = ty::OutlivesPredicate(r_a, r_b);
nested.push(Obligation::with_depth(
+ tcx,
cause,
obligation.recursion_depth + 1,
obligation.param_env,
- obligation.predicate.rebind(outlives).to_predicate(tcx),
+ obligation.predicate.rebind(outlives),
));
}
_ => bug!(),
@@ -877,7 +908,7 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> {
vptr_offset += TyCtxt::COMMON_VTABLE_ENTRIES.len();
}
VtblSegment::TraitOwnEntries { trait_ref, emit_vptr } => {
- vptr_offset += util::count_own_vtable_entries(tcx, trait_ref);
+ vptr_offset += count_own_vtable_entries(tcx, trait_ref);
if trait_ref == upcast_trait_ref {
if emit_vptr {
return ControlFlow::Break(Some(vptr_offset));
@@ -896,8 +927,7 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> {
};
let vtable_vptr_slot =
- super::super::prepare_vtable_segments(tcx, source_trait_ref, vtable_segment_callback)
- .unwrap();
+ prepare_vtable_segments(tcx, source_trait_ref, vtable_segment_callback).unwrap();
Ok(ImplSourceTraitUpcastingData { upcast_trait_ref, vtable_vptr_slot, nested })
}
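The vptr-slot search above folds over vtable segments with `ControlFlow`, breaking once the upcast target trait is reached. A standalone sketch of that traversal, with simplified stand-ins for the segment and trait types (a stored `own_entries` count replaces `count_own_vtable_entries`):

use std::ops::ControlFlow;

#[derive(PartialEq)]
struct TraitId(u32);

enum VtblSegment {
    // Header entries shared by every vtable (drop-in-place, size, align).
    MetadataDsa { entries: usize },
    // Entries owned by one trait in the supertrait chain, plus an optional
    // extra slot holding a pointer to that trait's own vtable.
    TraitOwnEntries { trait_id: TraitId, own_entries: usize, emit_vptr: bool },
}

// Walk the segments in order, adding up slots until the trait we are
// upcasting to is found; Break(Some(off)) means "the vptr lives at slot off".
fn vptr_slot_for_upcast(segments: &[VtblSegment], upcast: &TraitId) -> Option<usize> {
    let mut vptr_offset = 0usize;
    let walk = segments.iter().try_for_each(|seg| match seg {
        VtblSegment::MetadataDsa { entries } => {
            vptr_offset += entries;
            ControlFlow::Continue(())
        }
        VtblSegment::TraitOwnEntries { trait_id, own_entries, emit_vptr } => {
            vptr_offset += own_entries;
            if trait_id == upcast {
                // The upcast target: its vptr (if one is emitted) sits right
                // after its own method entries.
                return ControlFlow::Break(if *emit_vptr { Some(vptr_offset) } else { None });
            }
            if *emit_vptr {
                vptr_offset += 1;
            }
            ControlFlow::Continue(())
        }
    });
    match walk {
        ControlFlow::Break(slot) => slot,
        ControlFlow::Continue(()) => unreachable!("upcast target not found in the vtable"),
    }
}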
@@ -919,7 +949,9 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> {
let mut nested = vec![];
match (source.kind(), target.kind()) {
// Trait+Kx+'a -> Trait+Ky+'b (auto traits and lifetime subtyping).
- (&ty::Dynamic(ref data_a, r_a, ty::Dyn), &ty::Dynamic(ref data_b, r_b, ty::Dyn)) => {
+ (&ty::Dynamic(ref data_a, r_a, dyn_a), &ty::Dynamic(ref data_b, r_b, dyn_b))
+ if dyn_a == dyn_b =>
+ {
// See `assemble_candidates_for_unsizing` for more info.
// We already checked the compatibility of auto traits within `assemble_candidates_for_unsizing`.
let iter = data_a
@@ -938,7 +970,7 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> {
.map(ty::Binder::dummy),
);
let existential_predicates = tcx.mk_poly_existential_predicates(iter);
- let source_trait = tcx.mk_dynamic(existential_predicates, r_b, ty::Dyn);
+ let source_trait = tcx.mk_dynamic(existential_predicates, r_b, dyn_a);
// Require that the traits involved in this upcast are **equal**;
// only the **lifetime bound** is changed.
@@ -957,10 +989,11 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> {
);
let outlives = ty::OutlivesPredicate(r_a, r_b);
nested.push(Obligation::with_depth(
+ tcx,
cause,
obligation.recursion_depth + 1,
obligation.param_env,
- obligation.predicate.rebind(outlives).to_predicate(tcx),
+ obligation.predicate.rebind(outlives),
));
}
@@ -979,6 +1012,7 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> {
let predicate_to_obligation = |predicate| {
Obligation::with_depth(
+ tcx,
cause.clone(),
obligation.recursion_depth + 1,
obligation.param_env,
@@ -999,10 +1033,8 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> {
);
// We can only make objects from sized types.
- let tr = ty::Binder::dummy(ty::TraitRef::new(
- tcx.require_lang_item(LangItem::Sized, None),
- tcx.mk_substs_trait(source, &[]),
- ));
+ let tr =
+ ty::Binder::dummy(tcx.at(cause.span).mk_trait_ref(LangItem::Sized, [source]));
nested.push(predicate_to_obligation(tr.without_const().to_predicate(tcx)));
// If the type is `Foo + 'a`, ensure that the type
@@ -1108,8 +1140,7 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> {
obligation.cause.clone(),
obligation.predicate.def_id(),
obligation.recursion_depth + 1,
- source_tail,
- &[target_tail.into()],
+ [source_tail, target_tail],
));
}
@@ -1139,13 +1170,12 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> {
obligation.cause.clone(),
obligation.predicate.def_id(),
obligation.recursion_depth + 1,
- a_last,
- &[b_last.into()],
+ [a_last, b_last],
)
}));
}
- _ => bug!(),
+ _ => bug!("source: {source}, target: {target}"),
};
Ok(ImplSourceBuiltinData { nested })
@@ -1255,20 +1285,19 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> {
obligation.param_env,
cause.clone(),
obligation.recursion_depth + 1,
- self_ty
- .rebind(ty::TraitPredicate {
- trait_ref: ty::TraitRef {
- def_id: self.tcx().require_lang_item(LangItem::Destruct, None),
- substs: self.tcx().mk_substs_trait(nested_ty, &[]),
- },
- constness: ty::BoundConstness::ConstIfConst,
- polarity: ty::ImplPolarity::Positive,
- })
- .to_predicate(tcx),
+ self_ty.rebind(ty::TraitPredicate {
+ trait_ref: self
+ .tcx()
+ .at(cause.span)
+ .mk_trait_ref(LangItem::Destruct, [nested_ty]),
+ constness: ty::BoundConstness::ConstIfConst,
+ polarity: ty::ImplPolarity::Positive,
+ }),
&mut nested,
);
nested.push(Obligation::with_depth(
+ tcx,
cause.clone(),
obligation.recursion_depth + 1,
obligation.param_env,
@@ -1280,18 +1309,17 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> {
// since it's either not `const Drop` (and we raise an error during selection),
// or it's an ADT (and we need to check for a custom impl during selection)
_ => {
- let predicate = self_ty
- .rebind(ty::TraitPredicate {
- trait_ref: ty::TraitRef {
- def_id: self.tcx().require_lang_item(LangItem::Destruct, None),
- substs: self.tcx().mk_substs_trait(nested_ty, &[]),
- },
- constness: ty::BoundConstness::ConstIfConst,
- polarity: ty::ImplPolarity::Positive,
- })
- .to_predicate(tcx);
+ let predicate = self_ty.rebind(ty::TraitPredicate {
+ trait_ref: self
+ .tcx()
+ .at(cause.span)
+ .mk_trait_ref(LangItem::Destruct, [nested_ty]),
+ constness: ty::BoundConstness::ConstIfConst,
+ polarity: ty::ImplPolarity::Positive,
+ });
nested.push(Obligation::with_depth(
+ tcx,
cause.clone(),
obligation.recursion_depth + 1,
obligation.param_env,
diff --git a/compiler/rustc_trait_selection/src/traits/select/mod.rs b/compiler/rustc_trait_selection/src/traits/select/mod.rs
index 9ebff4892..035deb616 100644
--- a/compiler/rustc_trait_selection/src/traits/select/mod.rs
+++ b/compiler/rustc_trait_selection/src/traits/select/mod.rs
@@ -2,6 +2,12 @@
//!
//! [rustc dev guide]: https://rustc-dev-guide.rust-lang.org/traits/resolution.html#selection
+// FIXME: The `map` field in ProvisionalEvaluationCache should be changed to
+// a `FxIndexMap` to avoid query instability, but right now it causes a perf regression. This would be
+// fixed or at least lightened by the addition of the `drain_filter` method to `FxIndexMap`
+// Relevant: https://github.com/rust-lang/rust/pull/103723 and https://github.com/bluss/indexmap/issues/242
+#![allow(rustc::potential_query_instability)]
+
use self::EvaluationResult::*;
use self::SelectionCandidate::*;
@@ -24,9 +30,11 @@ use crate::traits::error_reporting::TypeErrCtxtExt;
use crate::traits::project::ProjectAndUnifyResult;
use crate::traits::project::ProjectionCacheKeyExt;
use crate::traits::ProjectionCacheKey;
-use rustc_data_structures::fx::{FxHashMap, FxHashSet, FxIndexSet};
+use crate::traits::Unimplemented;
+use rustc_data_structures::fx::FxHashMap;
+use rustc_data_structures::fx::{FxHashSet, FxIndexSet};
use rustc_data_structures::stack::ensure_sufficient_stack;
-use rustc_errors::{Diagnostic, ErrorGuaranteed};
+use rustc_errors::Diagnostic;
use rustc_hir as hir;
use rustc_hir::def_id::DefId;
use rustc_infer::infer::LateBoundRegionConversionTime;
@@ -47,6 +55,7 @@ use std::fmt::{self, Display};
use std::iter;
pub use rustc_middle::traits::select::*;
+use rustc_middle::ty::print::with_no_trimmed_paths;
mod candidate_assembly;
mod confirmation;
@@ -93,7 +102,7 @@ impl IntercrateAmbiguityCause {
}
pub struct SelectionContext<'cx, 'tcx> {
- infcx: &'cx InferCtxt<'tcx>,
+ pub infcx: &'cx InferCtxt<'tcx>,
/// Freshener used specifically for entries on the obligation
/// stack. This ensures that all entries on the stack at one time
@@ -102,25 +111,6 @@ pub struct SelectionContext<'cx, 'tcx> {
/// require themselves.
freshener: TypeFreshener<'cx, 'tcx>,
- /// During coherence we have to assume that other crates may add
- /// additional impls which we currently don't know about.
- ///
- /// To deal with this evaluation should be conservative
- /// and consider the possibility of impls from outside this crate.
- /// This comes up primarily when resolving ambiguity. Imagine
- /// there is some trait reference `$0: Bar` where `$0` is an
- /// inference variable. If `intercrate` is true, then we can never
- /// say for sure that this reference is not implemented, even if
- /// there are *no impls at all for `Bar`*, because `$0` could be
- /// bound to some type that in a downstream crate that implements
- /// `Bar`.
- ///
- /// Outside of coherence we set this to false because we are only
- /// interested in types that the user could actually have written.
- /// In other words, we consider `$0: Bar` to be unimplemented if
- /// there is no type that the user could *actually name* that
- /// would satisfy it. This avoids crippling inference, basically.
- intercrate: bool,
/// If `intercrate` is set, we remember predicates which were
/// considered ambiguous because of impls potentially added in other crates.
/// This is used in coherence to give improved diagnostics.
@@ -218,16 +208,11 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> {
SelectionContext {
infcx,
freshener: infcx.freshener_keep_static(),
- intercrate: false,
intercrate_ambiguity_causes: None,
query_mode: TraitQueryMode::Standard,
}
}
- pub fn intercrate(infcx: &'cx InferCtxt<'tcx>) -> SelectionContext<'cx, 'tcx> {
- SelectionContext { intercrate: true, ..SelectionContext::new(infcx) }
- }
-
pub fn with_query_mode(
infcx: &'cx InferCtxt<'tcx>,
query_mode: TraitQueryMode,
@@ -239,7 +224,7 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> {
/// Enables tracking of intercrate ambiguity causes. See
/// the documentation of [`Self::intercrate_ambiguity_causes`] for more.
pub fn enable_tracking_intercrate_ambiguity_causes(&mut self) {
- assert!(self.intercrate);
+ assert!(self.is_intercrate());
assert!(self.intercrate_ambiguity_causes.is_none());
self.intercrate_ambiguity_causes = Some(FxIndexSet::default());
debug!("selcx: enable_tracking_intercrate_ambiguity_causes");
@@ -249,20 +234,16 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> {
/// was enabled and disables tracking at the same time. If
/// tracking is not enabled, just returns an empty vector.
pub fn take_intercrate_ambiguity_causes(&mut self) -> FxIndexSet<IntercrateAmbiguityCause> {
- assert!(self.intercrate);
+ assert!(self.is_intercrate());
self.intercrate_ambiguity_causes.take().unwrap_or_default()
}
- pub fn infcx(&self) -> &'cx InferCtxt<'tcx> {
- self.infcx
- }
-
pub fn tcx(&self) -> TyCtxt<'tcx> {
self.infcx.tcx
}
pub fn is_intercrate(&self) -> bool {
- self.intercrate
+ self.infcx.intercrate
}
///////////////////////////////////////////////////////////////////////////
@@ -294,9 +275,6 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> {
assert!(self.query_mode == TraitQueryMode::Canonical);
return Err(SelectionError::Overflow(OverflowError::Canonical));
}
- Err(SelectionError::Ambiguous(_)) => {
- return Ok(None);
- }
Err(e) => {
return Err(e);
}
@@ -328,6 +306,213 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> {
self.candidate_from_obligation(&stack)
}
+ #[instrument(level = "debug", skip(self), ret)]
+ fn candidate_from_obligation<'o>(
+ &mut self,
+ stack: &TraitObligationStack<'o, 'tcx>,
+ ) -> SelectionResult<'tcx, SelectionCandidate<'tcx>> {
+ // Watch out for overflow. This intentionally bypasses (and does
+ // not update) the cache.
+ self.check_recursion_limit(&stack.obligation, &stack.obligation)?;
+
+ // Check the cache. Note that we freshen the trait-ref
+ // separately rather than using `stack.fresh_trait_ref` --
+ // this is because we want the unbound variables to be
+ // replaced with fresh types starting from index 0.
+ let cache_fresh_trait_pred = self.infcx.freshen(stack.obligation.predicate);
+ debug!(?cache_fresh_trait_pred);
+ debug_assert!(!stack.obligation.predicate.has_escaping_bound_vars());
+
+ if let Some(c) =
+ self.check_candidate_cache(stack.obligation.param_env, cache_fresh_trait_pred)
+ {
+ debug!("CACHE HIT");
+ return c;
+ }
+
+ // If no match, compute result and insert into cache.
+ //
+ // FIXME(nikomatsakis) -- this cache is not taking into
+ // account cycles that may have occurred in forming the
+ // candidate. I don't know of any specific problems that
+ // result but it seems awfully suspicious.
+ let (candidate, dep_node) =
+ self.in_task(|this| this.candidate_from_obligation_no_cache(stack));
+
+ debug!("CACHE MISS");
+ self.insert_candidate_cache(
+ stack.obligation.param_env,
+ cache_fresh_trait_pred,
+ dep_node,
+ candidate.clone(),
+ );
+ candidate
+ }
+
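`candidate_from_obligation` above is a freshen-then-memoize wrapper around the uncached search. A minimal standalone sketch of that shape, with toy types standing in for the freshened predicate and the `SelectionResult`:

use std::collections::HashMap;

// Toy stand-ins: in rustc the key is the freshened trait predicate and the
// value a SelectionResult.
#[derive(Clone, Hash, PartialEq, Eq)]
struct Key(String);

#[derive(Clone, Debug)]
struct Candidate(u32);

struct Cache {
    map: HashMap<Key, Candidate>,
}

impl Cache {
    // Mirrors the shape of `candidate_from_obligation`: canonicalize the key
    // first (rustc "freshens" inference variables so equivalent obligations
    // land on the same entry), consult the cache, and only on a miss run the
    // real search and record its result.
    fn candidate(&mut self, raw_key: &str, compute: impl FnOnce() -> Candidate) -> Candidate {
        let key = Key(raw_key.trim().to_string()); // stand-in for freshening
        if let Some(hit) = self.map.get(&key) {
            return hit.clone(); // CACHE HIT
        }
        let value = compute(); // CACHE MISS: run the uncached search
        self.map.insert(key, value.clone());
        value
    }
}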
+ fn candidate_from_obligation_no_cache<'o>(
+ &mut self,
+ stack: &TraitObligationStack<'o, 'tcx>,
+ ) -> SelectionResult<'tcx, SelectionCandidate<'tcx>> {
+ if let Err(conflict) = self.is_knowable(stack) {
+ debug!("coherence stage: not knowable");
+ if self.intercrate_ambiguity_causes.is_some() {
+ debug!("evaluate_stack: intercrate_ambiguity_causes is some");
+ // Heuristics: show the diagnostics when there are no candidates in crate.
+ if let Ok(candidate_set) = self.assemble_candidates(stack) {
+ let mut no_candidates_apply = true;
+
+ for c in candidate_set.vec.iter() {
+ if self.evaluate_candidate(stack, &c)?.may_apply() {
+ no_candidates_apply = false;
+ break;
+ }
+ }
+
+ if !candidate_set.ambiguous && no_candidates_apply {
+ let trait_ref = stack.obligation.predicate.skip_binder().trait_ref;
+ if !trait_ref.references_error() {
+ let self_ty = trait_ref.self_ty();
+ let (trait_desc, self_desc) = with_no_trimmed_paths!({
+ let trait_desc = trait_ref.print_only_trait_path().to_string();
+ let self_desc = if self_ty.has_concrete_skeleton() {
+ Some(self_ty.to_string())
+ } else {
+ None
+ };
+ (trait_desc, self_desc)
+ });
+ let cause = if let Conflict::Upstream = conflict {
+ IntercrateAmbiguityCause::UpstreamCrateUpdate {
+ trait_desc,
+ self_desc,
+ }
+ } else {
+ IntercrateAmbiguityCause::DownstreamCrate { trait_desc, self_desc }
+ };
+ debug!(?cause, "evaluate_stack: pushing cause");
+ self.intercrate_ambiguity_causes.as_mut().unwrap().insert(cause);
+ }
+ }
+ }
+ }
+ return Ok(None);
+ }
+
+ let candidate_set = self.assemble_candidates(stack)?;
+
+ if candidate_set.ambiguous {
+ debug!("candidate set contains ambig");
+ return Ok(None);
+ }
+
+ let candidates = candidate_set.vec;
+
+ debug!(?stack, ?candidates, "assembled {} candidates", candidates.len());
+
+ // At this point, we know that each of the entries in the
+ // candidate set is *individually* applicable. Now we have to
+ // figure out if they contain mutual incompatibilities. This
+ // frequently arises if we have an unconstrained input type --
+ // for example, we are looking for `$0: Eq` where `$0` is some
+ // unconstrained type variable. In that case, we'll get a
+ // candidate which assumes $0 == int, one that assumes `$0 ==
+ // usize`, etc. This spells an ambiguity.
+
+ let mut candidates = self.filter_impls(candidates, stack.obligation);
+
+ // If there is more than one candidate, first winnow them down
+ // by considering extra conditions (nested obligations and so
+ // forth). We don't winnow if there is exactly one
+ // candidate. This is a relatively minor distinction but it
+ // can lead to better inference and error-reporting. An
+ // example would be if there was an impl:
+ //
+ // impl<T:Clone> Vec<T> { fn push_clone(...) { ... } }
+ //
+        // and we were to see some code `foo.push_clone()` where `foo`
+ // is a `Vec<Bar>` and `Bar` does not implement `Clone`. If
+ // we were to winnow, we'd wind up with zero candidates.
+ // Instead, we select the right impl now but report "`Bar` does
+ // not implement `Clone`".
+ if candidates.len() == 1 {
+ return self.filter_reservation_impls(candidates.pop().unwrap(), stack.obligation);
+ }
+
+ // Winnow, but record the exact outcome of evaluation, which
+ // is needed for specialization. Propagate overflow if it occurs.
+ let mut candidates = candidates
+ .into_iter()
+ .map(|c| match self.evaluate_candidate(stack, &c) {
+ Ok(eval) if eval.may_apply() => {
+ Ok(Some(EvaluatedCandidate { candidate: c, evaluation: eval }))
+ }
+ Ok(_) => Ok(None),
+ Err(OverflowError::Canonical) => Err(Overflow(OverflowError::Canonical)),
+ Err(OverflowError::ErrorReporting) => Err(ErrorReporting),
+ Err(OverflowError::Error(e)) => Err(Overflow(OverflowError::Error(e))),
+ })
+ .flat_map(Result::transpose)
+ .collect::<Result<Vec<_>, _>>()?;
+
+ debug!(?stack, ?candidates, "winnowed to {} candidates", candidates.len());
+
+ let needs_infer = stack.obligation.predicate.has_non_region_infer();
+
+ // If there are STILL multiple candidates, we can further
+ // reduce the list by dropping duplicates -- including
+ // resolving specializations.
+ if candidates.len() > 1 {
+ let mut i = 0;
+ while i < candidates.len() {
+ let is_dup = (0..candidates.len()).filter(|&j| i != j).any(|j| {
+ self.candidate_should_be_dropped_in_favor_of(
+ &candidates[i],
+ &candidates[j],
+ needs_infer,
+ )
+ });
+ if is_dup {
+ debug!(candidate = ?candidates[i], "Dropping candidate #{}/{}", i, candidates.len());
+ candidates.swap_remove(i);
+ } else {
+ debug!(candidate = ?candidates[i], "Retaining candidate #{}/{}", i, candidates.len());
+ i += 1;
+
+ // If there are *STILL* multiple candidates, give up
+ // and report ambiguity.
+ if i > 1 {
+ debug!("multiple matches, ambig");
+ return Ok(None);
+ }
+ }
+ }
+ }
+
+ // If there are *NO* candidates, then there are no impls --
+ // that we know of, anyway. Note that in the case where there
+ // are unbound type variables within the obligation, it might
+ // be the case that you could still satisfy the obligation
+ // from another crate by instantiating the type variables with
+ // a type from another crate that does have an impl. This case
+ // is checked for in `evaluate_stack` (and hence users
+ // who might care about this case, like coherence, should use
+ // that function).
+ if candidates.is_empty() {
+ // If there's an error type, 'downgrade' our result from
+ // `Err(Unimplemented)` to `Ok(None)`. This helps us avoid
+ // emitting additional spurious errors, since we're guaranteed
+ // to have emitted at least one.
+ if stack.obligation.predicate.references_error() {
+ debug!(?stack.obligation.predicate, "found error type in predicate, treating as ambiguous");
+ return Ok(None);
+ }
+ return Err(Unimplemented);
+ }
+
+ // Just one candidate left.
+ self.filter_reservation_impls(candidates.pop().unwrap().candidate, stack.obligation)
+ }
+
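The dedup loop above is a pairwise dominance prune over the surviving candidates. A standalone sketch of the same winnowing step, with `should_drop(victim, other)` standing in for `candidate_should_be_dropped_in_favor_of`:

// Standalone sketch: keep a candidate only if no other remaining candidate
// dominates it; bail out as ambiguous once two incomparable candidates survive.
fn winnow<T>(mut candidates: Vec<T>, should_drop: impl Fn(&T, &T) -> bool) -> Option<T> {
    let mut i = 0;
    while i < candidates.len() {
        let is_dup = (0..candidates.len())
            .filter(|&j| j != i)
            .any(|j| should_drop(&candidates[i], &candidates[j]));
        if is_dup {
            // Order is irrelevant, so swap_remove avoids shifting the tail.
            candidates.swap_remove(i);
        } else {
            i += 1;
            if i > 1 {
                // Two candidates, neither of which beats the other: ambiguity.
                return None;
            }
        }
    }
    candidates.pop()
}

// E.g. winnow(vec![3, 1, 2], |victim, other| victim < other) keeps only 3;
// winnow(vec![1, 1], |_, _| false) returns None (ambiguous).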
///////////////////////////////////////////////////////////////////////////
// EVALUATION
//
@@ -438,10 +623,10 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> {
ensure_sufficient_stack(|| {
let bound_predicate = obligation.predicate.kind();
match bound_predicate.skip_binder() {
- ty::PredicateKind::Trait(t) => {
+ ty::PredicateKind::Clause(ty::Clause::Trait(t)) => {
let t = bound_predicate.rebind(t);
debug_assert!(!t.has_escaping_bound_vars());
- let obligation = obligation.with(t);
+ let obligation = obligation.with(self.tcx(), t);
self.evaluate_trait_predicate_recursively(previous_stack, obligation)
}
@@ -565,7 +750,7 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> {
}
}
- ty::PredicateKind::TypeOutlives(pred) => {
+ ty::PredicateKind::Clause(ty::Clause::TypeOutlives(pred)) => {
// A global type with no late-bound regions can only
// contain the "'static" lifetime (any other lifetime
// would either be late-bound or local), so it is guaranteed
@@ -577,7 +762,7 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> {
}
}
- ty::PredicateKind::RegionOutlives(..) => {
+ ty::PredicateKind::Clause(ty::Clause::RegionOutlives(..)) => {
// We do not consider region relationships when evaluating trait matches.
Ok(EvaluatedToOkModuloRegions)
}
@@ -590,9 +775,9 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> {
}
}
- ty::PredicateKind::Projection(data) => {
+ ty::PredicateKind::Clause(ty::Clause::Projection(data)) => {
let data = bound_predicate.rebind(data);
- let project_obligation = obligation.with(data);
+ let project_obligation = obligation.with(self.tcx(), data);
match project::poly_project_and_unify_type(self, &project_obligation) {
ProjectAndUnifyResult::Holds(mut subobligations) => {
'compute_res: {
@@ -676,21 +861,62 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> {
}
ty::PredicateKind::ConstEquate(c1, c2) => {
+ let tcx = self.tcx();
assert!(
- self.tcx().features().generic_const_exprs,
+ tcx.features().generic_const_exprs,
"`ConstEquate` without a feature gate: {c1:?} {c2:?}",
);
- debug!(?c1, ?c2, "evaluate_predicate_recursively: equating consts");
- // FIXME: we probably should only try to unify abstract constants
- // if the constants depend on generic parameters.
- //
- // Let's just see where this breaks :shrug:
- if let (ty::ConstKind::Unevaluated(a), ty::ConstKind::Unevaluated(b)) =
- (c1.kind(), c2.kind())
{
- if self.infcx.try_unify_abstract_consts(a, b, obligation.param_env) {
- return Ok(EvaluatedToOk);
+ let c1 = tcx.expand_abstract_consts(c1);
+ let c2 = tcx.expand_abstract_consts(c2);
+ debug!(
+                        "evaluate_predicate_recursively: equating consts:\nc1= {:?}\nc2= {:?}",
+ c1, c2
+ );
+
+ use rustc_hir::def::DefKind;
+ use ty::ConstKind::Unevaluated;
+ match (c1.kind(), c2.kind()) {
+ (Unevaluated(a), Unevaluated(b))
+ if a.def.did == b.def.did
+ && tcx.def_kind(a.def.did) == DefKind::AssocConst =>
+ {
+ if let Ok(new_obligations) = self
+ .infcx
+ .at(&obligation.cause, obligation.param_env)
+ .trace(c1, c2)
+ .eq(a.substs, b.substs)
+ {
+ let mut obligations = new_obligations.obligations;
+ self.add_depth(
+ obligations.iter_mut(),
+ obligation.recursion_depth,
+ );
+ return self.evaluate_predicates_recursively(
+ previous_stack,
+ obligations.into_iter(),
+ );
+ }
+ }
+ (_, Unevaluated(_)) | (Unevaluated(_), _) => (),
+ (_, _) => {
+ if let Ok(new_obligations) = self
+ .infcx
+ .at(&obligation.cause, obligation.param_env)
+ .eq(c1, c2)
+ {
+ let mut obligations = new_obligations.obligations;
+ self.add_depth(
+ obligations.iter_mut(),
+ obligation.recursion_depth,
+ );
+ return self.evaluate_predicates_recursively(
+ previous_stack,
+ obligations.into_iter(),
+ );
+ }
+ }
}
}
@@ -712,23 +938,17 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> {
match (evaluate(c1), evaluate(c2)) {
(Ok(c1), Ok(c2)) => {
- match self
- .infcx()
- .at(&obligation.cause, obligation.param_env)
- .eq(c1, c2)
+ match self.infcx.at(&obligation.cause, obligation.param_env).eq(c1, c2)
{
- Ok(_) => Ok(EvaluatedToOk),
+ Ok(inf_ok) => self.evaluate_predicates_recursively(
+ previous_stack,
+ inf_ok.into_obligations(),
+ ),
Err(_) => Ok(EvaluatedToErr),
}
}
(Err(ErrorHandled::Reported(_)), _)
| (_, Err(ErrorHandled::Reported(_))) => Ok(EvaluatedToErr),
- (Err(ErrorHandled::Linted), _) | (_, Err(ErrorHandled::Linted)) => {
- span_bug!(
- obligation.cause.span(),
- "ConstEquate: const_eval_resolve returned an unexpected error"
- )
- }
(Err(ErrorHandled::TooGeneric), _) | (_, Err(ErrorHandled::TooGeneric)) => {
if c1.has_non_region_infer() || c2.has_non_region_infer() {
Ok(EvaluatedToAmbig)
@@ -742,6 +962,7 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> {
ty::PredicateKind::TypeWellFormedFromEnv(..) => {
bug!("TypeWellFormedFromEnv is only used for chalk")
}
+ ty::PredicateKind::Ambiguous => Ok(EvaluatedToAmbig),
}
})
}
@@ -752,7 +973,7 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> {
previous_stack: TraitObligationStackList<'o, 'tcx>,
mut obligation: TraitObligation<'tcx>,
) -> Result<EvaluationResult, OverflowError> {
- if !self.intercrate
+ if !self.is_intercrate()
&& obligation.is_global()
&& obligation.param_env.caller_bounds().iter().all(|bound| bound.needs_subst())
{
@@ -931,7 +1152,6 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> {
match self.candidate_from_obligation(stack) {
Ok(Some(c)) => self.evaluate_candidate(stack, &c),
- Err(SelectionError::Ambiguous(_)) => Ok(EvaluatedToAmbig),
Ok(None) => Ok(EvaluatedToAmbig),
Err(Overflow(OverflowError::Canonical)) => Err(OverflowError::Canonical),
Err(ErrorReporting) => Err(OverflowError::ErrorReporting),
@@ -956,7 +1176,9 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> {
fn coinductive_predicate(&self, predicate: ty::Predicate<'tcx>) -> bool {
let result = match predicate.kind().skip_binder() {
- ty::PredicateKind::Trait(ref data) => self.tcx().trait_is_auto(data.def_id()),
+ ty::PredicateKind::Clause(ty::Clause::Trait(ref data)) => {
+ self.tcx().trait_is_coinductive(data.def_id())
+ }
ty::PredicateKind::WellFormed(_) => true,
_ => false,
};
@@ -1016,7 +1238,7 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> {
// mode, so don't do any caching. In particular, we might
// re-use the same `InferCtxt` with both an intercrate
// and non-intercrate `SelectionContext`
- if self.intercrate {
+ if self.is_intercrate() {
return None;
}
@@ -1046,7 +1268,7 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> {
// mode, so don't do any caching. In particular, we might
// re-use the same `InferCtxt` with both an intercrate
// and non-intercrate `SelectionContext`
- if self.intercrate {
+ if self.is_intercrate() {
return;
}
@@ -1084,20 +1306,21 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> {
it.for_each(|o| o.recursion_depth = cmp::max(min_depth, o.recursion_depth) + 1);
}
- fn check_recursion_depth<T: Display + TypeFoldable<'tcx>>(
+ fn check_recursion_depth<T>(
&self,
depth: usize,
error_obligation: &Obligation<'tcx, T>,
- ) -> Result<(), OverflowError> {
+ ) -> Result<(), OverflowError>
+ where
+ T: ToPredicate<'tcx> + Clone,
+ {
if !self.infcx.tcx.recursion_limit().value_within_limit(depth) {
match self.query_mode {
TraitQueryMode::Standard => {
- if self.infcx.is_tainted_by_errors() {
- return Err(OverflowError::Error(
- ErrorGuaranteed::unchecked_claim_error_was_emitted(),
- ));
+ if let Some(e) = self.infcx.tainted_by_errors() {
+ return Err(OverflowError::Error(e));
}
- self.infcx.err_ctxt().report_overflow_error(error_obligation, true);
+ self.infcx.err_ctxt().report_overflow_obligation(error_obligation, true);
}
TraitQueryMode::Canonical => {
return Err(OverflowError::Canonical);
@@ -1112,11 +1335,14 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> {
/// The weird return type of this function allows it to be used with the `try` (`?`)
/// operator within certain functions.
#[inline(always)]
- fn check_recursion_limit<T: Display + TypeFoldable<'tcx>, V: Display + TypeFoldable<'tcx>>(
+ fn check_recursion_limit<T: Display + TypeFoldable<'tcx>, V>(
&self,
obligation: &Obligation<'tcx, T>,
error_obligation: &Obligation<'tcx, V>,
- ) -> Result<(), OverflowError> {
+ ) -> Result<(), OverflowError>
+ where
+ V: ToPredicate<'tcx> + Clone,
+ {
self.check_recursion_depth(obligation.recursion_depth, error_obligation)
}
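A standalone sketch of the recursion-limit policy encoded above, with simplified types (a `String` stands in for the already-emitted `ErrorGuaranteed` that rustc stores on the `InferCtxt`): standard mode turns overflow into a hard diagnostic unless the infcx is already tainted by errors, while canonical mode just propagates the overflow to the caller.

#[derive(Debug)]
enum OverflowError {
    Canonical,
    Error(String),
}

enum QueryMode {
    Standard,
    Canonical,
}

fn check_recursion_depth(
    depth: usize,
    limit: usize,
    mode: QueryMode,
    tainted_by_errors: Option<String>,
) -> Result<(), OverflowError> {
    if depth > limit {
        return match mode {
            // Standard selection: report a hard overflow error, unless an
            // error has already been emitted, in which case reuse it.
            QueryMode::Standard => match tainted_by_errors {
                Some(e) => Err(OverflowError::Error(e)),
                None => panic!("overflow evaluating the requirement"), // rustc emits a fatal diagnostic here
            },
            // Canonical (query) mode: let the caller decide what to do.
            QueryMode::Canonical => Err(OverflowError::Canonical),
        };
    }
    Ok(())
}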
@@ -1132,12 +1358,13 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> {
/// filter_impls filters constant trait obligations and candidates that have a positive impl
/// for a negative goal and a negative impl for a positive goal
- #[instrument(level = "debug", skip(self))]
+ #[instrument(level = "debug", skip(self, candidates))]
fn filter_impls(
&mut self,
candidates: Vec<SelectionCandidate<'tcx>>,
obligation: &TraitObligation<'tcx>,
) -> Vec<SelectionCandidate<'tcx>> {
+ trace!("{candidates:#?}");
let tcx = self.tcx();
let mut result = Vec::with_capacity(candidates.len());
@@ -1153,9 +1380,10 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> {
ProjectionCandidate(_, ty::BoundConstness::ConstIfConst) => {}
// auto trait impl
AutoImplCandidate => {}
- // generator, this will raise error in other places
+            // generator / future, this will raise an error in other places
// or ignore error with const_async_blocks feature
GeneratorCandidate => {}
+ FutureCandidate => {}
// FnDef where the function is const
FnPointerCandidate { is_const: true } => {}
ConstDestructCandidate(_) => {}
@@ -1177,6 +1405,7 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> {
}
}
+ trace!("{result:#?}");
result
}
@@ -1215,14 +1444,14 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> {
}
fn is_knowable<'o>(&mut self, stack: &TraitObligationStack<'o, 'tcx>) -> Result<(), Conflict> {
- debug!("is_knowable(intercrate={:?})", self.intercrate);
+ debug!("is_knowable(intercrate={:?})", self.is_intercrate());
- if !self.intercrate || stack.obligation.polarity() == ty::ImplPolarity::Negative {
+ if !self.is_intercrate() || stack.obligation.polarity() == ty::ImplPolarity::Negative {
return Ok(());
}
let obligation = &stack.obligation;
- let predicate = self.infcx().resolve_vars_if_possible(obligation.predicate);
+ let predicate = self.infcx.resolve_vars_if_possible(obligation.predicate);
// Okay to skip binder because of the nature of the
// trait-ref-is-knowable check, which does not care about
@@ -1248,7 +1477,7 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> {
// the master cache. Since coherence executes pretty quickly,
// it's not worth going to more trouble to increase the
// hit-rate, I don't think.
- if self.intercrate {
+ if self.is_intercrate() {
return false;
}
@@ -1265,7 +1494,7 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> {
// mode, so don't do any caching. In particular, we might
// re-use the same `InferCtxt` with both an intercrate
// and non-intercrate `SelectionContext`
- if self.intercrate {
+ if self.is_intercrate() {
return None;
}
let tcx = self.tcx();
@@ -1304,7 +1533,7 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> {
// mode, so don't do any caching. In particular, we might
// re-use the same `InferCtxt` with both an intercrate
// and non-intercrate `SelectionContext`
- if self.intercrate {
+ if self.is_intercrate() {
return false;
}
match result {
@@ -1359,9 +1588,9 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> {
&mut self,
obligation: &TraitObligation<'tcx>,
) -> smallvec::SmallVec<[(usize, ty::BoundConstness); 2]> {
- let poly_trait_predicate = self.infcx().resolve_vars_if_possible(obligation.predicate);
+ let poly_trait_predicate = self.infcx.resolve_vars_if_possible(obligation.predicate);
let placeholder_trait_predicate =
- self.infcx().replace_bound_vars_with_placeholders(poly_trait_predicate);
+ self.infcx.replace_bound_vars_with_placeholders(poly_trait_predicate);
debug!(?placeholder_trait_predicate);
let tcx = self.infcx.tcx;
@@ -1389,7 +1618,9 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> {
.enumerate()
.filter_map(|(idx, bound)| {
let bound_predicate = bound.kind();
- if let ty::PredicateKind::Trait(pred) = bound_predicate.skip_binder() {
+ if let ty::PredicateKind::Clause(ty::Clause::Trait(pred)) =
+ bound_predicate.skip_binder()
+ {
let bound = bound_predicate.rebind(pred.trait_ref);
if self.infcx.probe(|_| {
match self.match_normalize_trait_ref(
@@ -1581,20 +1812,8 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> {
(TransmutabilityCandidate, _) | (_, TransmutabilityCandidate) => false,
// (*)
- (
- BuiltinCandidate { has_nested: false }
- | DiscriminantKindCandidate
- | PointeeCandidate
- | ConstDestructCandidate(_),
- _,
- ) => true,
- (
- _,
- BuiltinCandidate { has_nested: false }
- | DiscriminantKindCandidate
- | PointeeCandidate
- | ConstDestructCandidate(_),
- ) => false,
+ (BuiltinCandidate { has_nested: false } | ConstDestructCandidate(_), _) => true,
+ (_, BuiltinCandidate { has_nested: false } | ConstDestructCandidate(_)) => false,
(ParamCandidate(other), ParamCandidate(victim)) => {
let same_except_bound_vars = other.skip_binder().trait_ref
@@ -1633,6 +1852,7 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> {
ImplCandidate(..)
| ClosureCandidate
| GeneratorCandidate
+ | FutureCandidate
| FnPointerCandidate { .. }
| BuiltinObjectCandidate
| BuiltinUnsizeCandidate
@@ -1651,6 +1871,7 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> {
ImplCandidate(_)
| ClosureCandidate
| GeneratorCandidate
+ | FutureCandidate
| FnPointerCandidate { .. }
| BuiltinObjectCandidate
| BuiltinUnsizeCandidate
@@ -1681,6 +1902,7 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> {
ImplCandidate(..)
| ClosureCandidate
| GeneratorCandidate
+ | FutureCandidate
| FnPointerCandidate { .. }
| BuiltinObjectCandidate
| BuiltinUnsizeCandidate
@@ -1693,6 +1915,7 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> {
ImplCandidate(..)
| ClosureCandidate
| GeneratorCandidate
+ | FutureCandidate
| FnPointerCandidate { .. }
| BuiltinObjectCandidate
| BuiltinUnsizeCandidate
@@ -1774,6 +1997,7 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> {
ImplCandidate(_)
| ClosureCandidate
| GeneratorCandidate
+ | FutureCandidate
| FnPointerCandidate { .. }
| BuiltinObjectCandidate
| BuiltinUnsizeCandidate
@@ -1783,6 +2007,7 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> {
ImplCandidate(_)
| ClosureCandidate
| GeneratorCandidate
+ | FutureCandidate
| FnPointerCandidate { .. }
| BuiltinObjectCandidate
| BuiltinUnsizeCandidate
@@ -1973,6 +2198,7 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> {
/// Bar<i32> where struct Bar<T> { x: T, y: u32 } -> [i32, u32]
/// Zed<i32> where enum Zed { A(T), B(u32) } -> [i32, u32]
/// ```
+ #[instrument(level = "debug", skip(self), ret)]
fn constituent_types_for_ty(
&self,
t: ty::Binder<'tcx, Ty<'tcx>>,
@@ -2089,8 +2315,7 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> {
cause.clone(),
trait_def_id,
recursion_depth,
- normalized_ty,
- &[],
+ [normalized_ty],
);
obligations.push(placeholder_obligation);
obligations
@@ -2117,6 +2342,13 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> {
match self.match_impl(impl_def_id, impl_trait_ref, obligation) {
Ok(substs) => substs,
Err(()) => {
+ // FIXME: A rematch may fail when a candidate cache hit occurs
+            // on the freshened form of the trait predicate, but the match
+ // fails for some reason that is not captured in the freshened
+ // cache key. For example, equating an impl trait ref against
+            // the placeholder trait ref may fail due to the Generalizer relation
+ // raising a CyclicalTy error due to a sub_root_var relation
+ // for a variable being generalized...
self.infcx.tcx.sess.delay_span_bug(
obligation.cause.span,
&format!(
@@ -2145,7 +2377,7 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> {
obligation: &TraitObligation<'tcx>,
) -> Result<Normalized<'tcx, SubstsRef<'tcx>>, ()> {
let placeholder_obligation =
- self.infcx().replace_bound_vars_with_placeholders(obligation.predicate);
+ self.infcx.replace_bound_vars_with_placeholders(obligation.predicate);
let placeholder_obligation_trait_ref = placeholder_obligation.trait_ref;
let impl_substs = self.infcx.fresh_substs_for_item(obligation.cause.span, impl_def_id);
@@ -2181,7 +2413,7 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> {
.map_err(|e| debug!("match_impl: failed eq_trait_refs due to `{e}`"))?;
nested_obligations.extend(obligations);
- if !self.intercrate
+ if !self.is_intercrate()
&& self.tcx().impl_polarity(impl_def_id) == ty::ImplPolarity::Reservation
{
debug!("reservation impls only apply in intercrate mode");
@@ -2277,43 +2509,25 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> {
debug!(?closure_sig);
- // (1) Feels icky to skip the binder here, but OTOH we know
- // that the self-type is an unboxed closure type and hence is
+ // NOTE: The self-type is an unboxed closure type and hence is
// in fact unparameterized (or at least does not reference any
- // regions bound in the obligation). Still probably some
- // refactoring could make this nicer.
+ // regions bound in the obligation).
+ let self_ty = obligation
+ .predicate
+ .self_ty()
+ .no_bound_vars()
+ .expect("unboxed closure type should not capture bound vars from the predicate");
+
closure_trait_ref_and_return_type(
self.tcx(),
obligation.predicate.def_id(),
- obligation.predicate.skip_binder().self_ty(), // (1)
+ self_ty,
closure_sig,
util::TupleArgumentsFlag::No,
)
.map_bound(|(trait_ref, _)| trait_ref)
}
- fn generator_trait_ref_unnormalized(
- &mut self,
- obligation: &TraitObligation<'tcx>,
- substs: SubstsRef<'tcx>,
- ) -> ty::PolyTraitRef<'tcx> {
- let gen_sig = substs.as_generator().poly_sig();
-
- // (1) Feels icky to skip the binder here, but OTOH we know
- // that the self-type is an generator type and hence is
- // in fact unparameterized (or at least does not reference any
- // regions bound in the obligation). Still probably some
- // refactoring could make this nicer.
-
- super::util::generator_trait_ref_and_outputs(
- self.tcx(),
- obligation.predicate.def_id(),
- obligation.predicate.skip_binder().self_ty(), // (1)
- gen_sig,
- )
- .map_bound(|(trait_ref, ..)| trait_ref)
- }
-
/// Returns the obligations that are implied by instantiating an
/// impl or trait. The obligations are substituted and fully
/// normalized. This is used when confirming an impl or default
diff --git a/compiler/rustc_trait_selection/src/traits/specialize/mod.rs b/compiler/rustc_trait_selection/src/traits/specialize/mod.rs
index c89165858..a251a508b 100644
--- a/compiler/rustc_trait_selection/src/traits/specialize/mod.rs
+++ b/compiler/rustc_trait_selection/src/traits/specialize/mod.rs
@@ -15,11 +15,13 @@ use specialization_graph::GraphExt;
use crate::errors::NegativePositiveConflict;
use crate::infer::{InferCtxt, InferOk, TyCtxtInferExt};
use crate::traits::select::IntercrateAmbiguityCause;
-use crate::traits::{self, coherence, FutureCompatOverlapErrorKind, ObligationCause};
-use rustc_data_structures::fx::{FxHashSet, FxIndexSet};
-use rustc_errors::{struct_span_err, DiagnosticBuilder, EmissionGuarantee};
+use crate::traits::{
+ self, coherence, FutureCompatOverlapErrorKind, ObligationCause, ObligationCtxt,
+};
+use rustc_data_structures::fx::FxIndexSet;
+use rustc_errors::{error_code, DelayDm, Diagnostic};
use rustc_hir::def_id::{DefId, LocalDefId};
-use rustc_middle::ty::{self, ImplSubject, TyCtxt};
+use rustc_middle::ty::{self, ImplSubject, Ty, TyCtxt};
use rustc_middle::ty::{InternalSubsts, SubstsRef};
use rustc_session::lint::builtin::COHERENCE_LEAK_CHECK;
use rustc_session::lint::builtin::ORDER_DEPENDENT_TRAIT_OBJECTS;
@@ -30,10 +32,10 @@ use super::SelectionContext;
/// Information pertinent to an overlapping impl error.
#[derive(Debug)]
-pub struct OverlapError {
+pub struct OverlapError<'tcx> {
pub with_impl: DefId,
- pub trait_desc: String,
- pub self_desc: Option<String>,
+ pub trait_ref: ty::TraitRef<'tcx>,
+ pub self_ty: Option<Ty<'tcx>>,
pub intercrate_ambiguity_causes: FxIndexSet<IntercrateAmbiguityCause>,
pub involves_placeholder: bool,
}
@@ -200,39 +202,35 @@ fn fulfill_implication<'tcx>(
return Err(());
};
+ // Needs to be `in_snapshot` because this function is used to rebase
+ // substitutions, which may happen inside of a select within a probe.
+ let ocx = ObligationCtxt::new_in_snapshot(infcx);
// attempt to prove all of the predicates for impl2 given those for impl1
// (which are packed up in penv)
+ ocx.register_obligations(obligations.chain(more_obligations));
- infcx.save_and_restore_in_snapshot_flag(|infcx| {
- let errors = traits::fully_solve_obligations(&infcx, obligations.chain(more_obligations));
- match &errors[..] {
- [] => {
- debug!(
- "fulfill_implication: an impl for {:?} specializes {:?}",
- source_trait, target_trait
- );
+ let errors = ocx.select_all_or_error();
+ if !errors.is_empty() {
+ // no dice!
+ debug!(
+ "fulfill_implication: for impls on {:?} and {:?}, \
+ could not fulfill: {:?} given {:?}",
+ source_trait,
+ target_trait,
+ errors,
+ param_env.caller_bounds()
+ );
+ return Err(());
+ }
- // Now resolve the *substitution* we built for the target earlier, replacing
- // the inference variables inside with whatever we got from fulfillment.
- Ok(infcx.resolve_vars_if_possible(target_substs))
- }
- errors => {
- // no dice!
- debug!(
- "fulfill_implication: for impls on {:?} and {:?}, \
- could not fulfill: {:?} given {:?}",
- source_trait,
- target_trait,
- errors,
- param_env.caller_bounds()
- );
- Err(())
- }
- }
- })
+ debug!("fulfill_implication: an impl for {:?} specializes {:?}", source_trait, target_trait);
+
+ // Now resolve the *substitution* we built for the target earlier, replacing
+ // the inference variables inside with whatever we got from fulfillment.
+ Ok(infcx.resolve_vars_if_possible(target_substs))
}
-// Query provider for `specialization_graph_of`.
+/// Query provider for `specialization_graph_of`.
pub(super) fn specialization_graph_provider(
tcx: TyCtxt<'_>,
trait_id: DefId,
@@ -277,9 +275,9 @@ pub(super) fn specialization_graph_provider(
// it negatively impacts perf.
#[cold]
#[inline(never)]
-fn report_overlap_conflict(
- tcx: TyCtxt<'_>,
- overlap: OverlapError,
+fn report_overlap_conflict<'tcx>(
+ tcx: TyCtxt<'tcx>,
+ overlap: OverlapError<'tcx>,
impl_def_id: LocalDefId,
used_to_be_allowed: Option<FutureCompatOverlapErrorKind>,
sg: &mut specialization_graph::Graph,
@@ -315,9 +313,9 @@ fn report_overlap_conflict(
}
}
-fn report_negative_positive_conflict(
- tcx: TyCtxt<'_>,
- overlap: &OverlapError,
+fn report_negative_positive_conflict<'tcx>(
+ tcx: TyCtxt<'tcx>,
+ overlap: &OverlapError<'tcx>,
local_impl_def_id: LocalDefId,
negative_impl_def_id: DefId,
positive_impl_def_id: DefId,
@@ -325,17 +323,17 @@ fn report_negative_positive_conflict(
) {
let mut err = tcx.sess.create_err(NegativePositiveConflict {
impl_span: tcx.def_span(local_impl_def_id),
- trait_desc: &overlap.trait_desc,
- self_desc: &overlap.self_desc,
+ trait_desc: overlap.trait_ref,
+ self_ty: overlap.self_ty,
negative_impl_span: tcx.span_of_impl(negative_impl_def_id),
positive_impl_span: tcx.span_of_impl(positive_impl_def_id),
});
sg.has_errored = Some(err.emit());
}
-fn report_conflicting_impls(
- tcx: TyCtxt<'_>,
- overlap: OverlapError,
+fn report_conflicting_impls<'tcx>(
+ tcx: TyCtxt<'tcx>,
+ overlap: OverlapError<'tcx>,
impl_def_id: LocalDefId,
used_to_be_allowed: Option<FutureCompatOverlapErrorKind>,
sg: &mut specialization_graph::Graph,
@@ -345,12 +343,12 @@ fn report_conflicting_impls(
// Work to be done after we've built the DiagnosticBuilder. We have to define it
// now because the struct_lint methods don't return back the DiagnosticBuilder
// that's passed in.
- fn decorate<'a, 'b, G: EmissionGuarantee>(
- tcx: TyCtxt<'_>,
- overlap: OverlapError,
+ fn decorate<'tcx>(
+ tcx: TyCtxt<'tcx>,
+ overlap: &OverlapError<'tcx>,
impl_span: Span,
- err: &'b mut DiagnosticBuilder<'a, G>,
- ) -> &'b mut DiagnosticBuilder<'a, G> {
+ err: &mut Diagnostic,
+ ) {
match tcx.span_of_impl(overlap.with_impl) {
Ok(span) => {
err.span_label(span, "first implementation here");
@@ -359,7 +357,7 @@ fn report_conflicting_impls(
impl_span,
format!(
"conflicting implementation{}",
- overlap.self_desc.map_or_else(String::new, |ty| format!(" for `{}`", ty))
+ overlap.self_ty.map_or_else(String::new, |ty| format!(" for `{}`", ty))
),
);
}
@@ -381,26 +379,28 @@ fn report_conflicting_impls(
if overlap.involves_placeholder {
coherence::add_placeholder_note(err);
}
- err
}
- let msg = format!(
- "conflicting implementations of trait `{}`{}{}",
- overlap.trait_desc,
- overlap.self_desc.as_deref().map_or_else(String::new, |ty| format!(" for type `{ty}`")),
- match used_to_be_allowed {
- Some(FutureCompatOverlapErrorKind::Issue33140) => ": (E0119)",
- _ => "",
- }
- );
+ let msg = DelayDm(|| {
+ format!(
+ "conflicting implementations of trait `{}`{}{}",
+ overlap.trait_ref.print_only_trait_path(),
+ overlap.self_ty.map_or_else(String::new, |ty| format!(" for type `{ty}`")),
+ match used_to_be_allowed {
+ Some(FutureCompatOverlapErrorKind::Issue33140) => ": (E0119)",
+ _ => "",
+ }
+ )
+ });
match used_to_be_allowed {
None => {
let reported = if overlap.with_impl.is_local()
|| tcx.orphan_check_impl(impl_def_id).is_ok()
{
- let mut err = struct_span_err!(tcx.sess, impl_span, E0119, "{msg}",);
- decorate(tcx, overlap, impl_span, &mut err);
+ let mut err = tcx.sess.struct_span_err(impl_span, msg);
+ err.code(error_code!(E0119));
+ decorate(tcx, &overlap, impl_span, &mut err);
Some(err.emit())
} else {
Some(tcx.sess.delay_span_bug(impl_span, "impl should have failed the orphan check"))
@@ -417,7 +417,10 @@ fn report_conflicting_impls(
tcx.hir().local_def_id_to_hir_id(impl_def_id),
impl_span,
msg,
- |err| decorate(tcx, overlap, impl_span, err),
+ |err| {
+ decorate(tcx, &overlap, impl_span, err);
+ err
+ },
);
}
};
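The `DelayDm(|| ...)` wrapper above defers message formatting (which pretty-prints the trait ref and self type) until a diagnostic is actually emitted. A standalone sketch of that pattern using a plain closure (toy names, not the rustc API):

// Toy names, not the rustc API: the closure is only invoked if the
// diagnostic actually fires, so the (potentially expensive) formatting of
// trait refs and types is skipped for allowed lints.
struct DelayedMsg<F: FnOnce() -> String>(F);

fn emit_lint<F: FnOnce() -> String>(lint_fires: bool, msg: DelayedMsg<F>) {
    if lint_fires {
        eprintln!("warning: {}", (msg.0)());
    }
}

fn example() {
    let trait_name = "PartialEq";
    let self_ty = Some("MyType");
    emit_lint(
        false, // lint allowed at this node: the closure below never runs
        DelayedMsg(|| {
            format!(
                "conflicting implementations of trait `{}`{}",
                trait_name,
                self_ty.map_or_else(String::new, |ty| format!(" for type `{ty}`")),
            )
        }),
    );
}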
@@ -435,7 +438,7 @@ pub(crate) fn to_pretty_impl_header(tcx: TyCtxt<'_>, impl_def_id: DefId) -> Opti
// FIXME: Currently only handles ?Sized.
// Needs to support ?Move and ?DynSized when they are implemented.
- let mut types_without_default_bounds = FxHashSet::default();
+ let mut types_without_default_bounds = FxIndexSet::default();
let sized_trait = tcx.lang_items().sized_trait();
if !substs.is_empty() {
@@ -473,7 +476,9 @@ pub(crate) fn to_pretty_impl_header(tcx: TyCtxt<'_>, impl_def_id: DefId) -> Opti
trait_pred
});
- p = tcx.mk_predicate(new_trait_pred.map_bound(ty::PredicateKind::Trait))
+ p = tcx.mk_predicate(
+ new_trait_pred.map_bound(|p| ty::PredicateKind::Clause(ty::Clause::Trait(p))),
+ )
}
}
pretty_predicates.push(p.to_string());
diff --git a/compiler/rustc_trait_selection/src/traits/specialize/specialization_graph.rs b/compiler/rustc_trait_selection/src/traits/specialize/specialization_graph.rs
index 63f89a33e..4546c9533 100644
--- a/compiler/rustc_trait_selection/src/traits/specialize/specialization_graph.rs
+++ b/compiler/rustc_trait_selection/src/traits/specialize/specialization_graph.rs
@@ -3,7 +3,6 @@ use super::OverlapError;
use crate::traits;
use rustc_hir::def_id::DefId;
use rustc_middle::ty::fast_reject::{self, SimplifiedType, TreatParams};
-use rustc_middle::ty::print::with_no_trimmed_paths;
use rustc_middle::ty::{self, TyCtxt, TypeVisitable};
pub use rustc_middle::traits::specialization_graph::*;
@@ -15,15 +14,15 @@ pub enum FutureCompatOverlapErrorKind {
}
#[derive(Debug)]
-pub struct FutureCompatOverlapError {
- pub error: OverlapError,
+pub struct FutureCompatOverlapError<'tcx> {
+ pub error: OverlapError<'tcx>,
pub kind: FutureCompatOverlapErrorKind,
}
/// The result of attempting to insert an impl into a group of children.
-enum Inserted {
+enum Inserted<'tcx> {
/// The impl was inserted as a new child in this group of children.
- BecameNewSibling(Option<FutureCompatOverlapError>),
+ BecameNewSibling(Option<FutureCompatOverlapError<'tcx>>),
/// The impl should replace existing impls [X1, ..], because the impl specializes X1, X2, etc.
ReplaceChildren(Vec<DefId>),
@@ -42,12 +41,12 @@ trait ChildrenExt<'tcx> {
impl_def_id: DefId,
simplified_self: Option<SimplifiedType>,
overlap_mode: OverlapMode,
- ) -> Result<Inserted, OverlapError>;
+ ) -> Result<Inserted<'tcx>, OverlapError<'tcx>>;
}
-impl ChildrenExt<'_> for Children {
+impl<'tcx> ChildrenExt<'tcx> for Children {
/// Insert an impl into this set of children without comparing to any existing impls.
- fn insert_blindly(&mut self, tcx: TyCtxt<'_>, impl_def_id: DefId) {
+ fn insert_blindly(&mut self, tcx: TyCtxt<'tcx>, impl_def_id: DefId) {
let trait_ref = tcx.impl_trait_ref(impl_def_id).unwrap();
if let Some(st) = fast_reject::simplify_type(tcx, trait_ref.self_ty(), TreatParams::AsInfer)
{
@@ -62,7 +61,7 @@ impl ChildrenExt<'_> for Children {
/// Removes an impl from this set of children. Used when replacing
/// an impl with a parent. The impl must be present in the list of
/// children already.
- fn remove_existing(&mut self, tcx: TyCtxt<'_>, impl_def_id: DefId) {
+ fn remove_existing(&mut self, tcx: TyCtxt<'tcx>, impl_def_id: DefId) {
let trait_ref = tcx.impl_trait_ref(impl_def_id).unwrap();
let vec: &mut Vec<DefId>;
if let Some(st) = fast_reject::simplify_type(tcx, trait_ref.self_ty(), TreatParams::AsInfer)
@@ -82,11 +81,11 @@ impl ChildrenExt<'_> for Children {
/// specialization relationships.
fn insert(
&mut self,
- tcx: TyCtxt<'_>,
+ tcx: TyCtxt<'tcx>,
impl_def_id: DefId,
simplified_self: Option<SimplifiedType>,
overlap_mode: OverlapMode,
- ) -> Result<Inserted, OverlapError> {
+ ) -> Result<Inserted<'tcx>, OverlapError<'tcx>> {
let mut last_lint = None;
let mut replace_children = Vec::new();
@@ -103,30 +102,23 @@ impl ChildrenExt<'_> for Children {
impl_def_id, simplified_self, possible_sibling,
);
- let create_overlap_error = |overlap: traits::coherence::OverlapResult<'_>| {
+ let create_overlap_error = |overlap: traits::coherence::OverlapResult<'tcx>| {
let trait_ref = overlap.impl_header.trait_ref.unwrap();
let self_ty = trait_ref.self_ty();
- // FIXME: should postpone string formatting until we decide to actually emit.
- with_no_trimmed_paths!({
- OverlapError {
- with_impl: possible_sibling,
- trait_desc: trait_ref.print_only_trait_path().to_string(),
- // Only report the `Self` type if it has at least
- // some outer concrete shell; otherwise, it's
- // not adding much information.
- self_desc: if self_ty.has_concrete_skeleton() {
- Some(self_ty.to_string())
- } else {
- None
- },
- intercrate_ambiguity_causes: overlap.intercrate_ambiguity_causes,
- involves_placeholder: overlap.involves_placeholder,
- }
- })
+ OverlapError {
+ with_impl: possible_sibling,
+ trait_ref,
+ // Only report the `Self` type if it has at least
+ // some outer concrete shell; otherwise, it's
+ // not adding much information.
+ self_ty: if self_ty.has_concrete_skeleton() { Some(self_ty) } else { None },
+ intercrate_ambiguity_causes: overlap.intercrate_ambiguity_causes,
+ involves_placeholder: overlap.involves_placeholder,
+ }
};
- let report_overlap_error = |overlap: traits::coherence::OverlapResult<'_>,
+ let report_overlap_error = |overlap: traits::coherence::OverlapResult<'tcx>,
last_lint: &mut _| {
// Found overlap, but no specialization; error out or report future-compat warning.
@@ -255,31 +247,31 @@ where
}
}
-pub trait GraphExt {
+pub trait GraphExt<'tcx> {
/// Insert a local impl into the specialization graph. If an existing impl
/// conflicts with it (has overlap, but neither specializes the other),
/// information about the area of overlap is returned in the `Err`.
fn insert(
&mut self,
- tcx: TyCtxt<'_>,
+ tcx: TyCtxt<'tcx>,
impl_def_id: DefId,
overlap_mode: OverlapMode,
- ) -> Result<Option<FutureCompatOverlapError>, OverlapError>;
+ ) -> Result<Option<FutureCompatOverlapError<'tcx>>, OverlapError<'tcx>>;
/// Insert cached metadata mapping from a child impl back to its parent.
- fn record_impl_from_cstore(&mut self, tcx: TyCtxt<'_>, parent: DefId, child: DefId);
+ fn record_impl_from_cstore(&mut self, tcx: TyCtxt<'tcx>, parent: DefId, child: DefId);
}
-impl GraphExt for Graph {
+impl<'tcx> GraphExt<'tcx> for Graph {
/// Insert a local impl into the specialization graph. If an existing impl
/// conflicts with it (has overlap, but neither specializes the other),
/// information about the area of overlap is returned in the `Err`.
fn insert(
&mut self,
- tcx: TyCtxt<'_>,
+ tcx: TyCtxt<'tcx>,
impl_def_id: DefId,
overlap_mode: OverlapMode,
- ) -> Result<Option<FutureCompatOverlapError>, OverlapError> {
+ ) -> Result<Option<FutureCompatOverlapError<'tcx>>, OverlapError<'tcx>> {
assert!(impl_def_id.is_local());
let trait_ref = tcx.impl_trait_ref(impl_def_id).unwrap();
@@ -376,7 +368,7 @@ impl GraphExt for Graph {
}
/// Insert cached metadata mapping from a child impl back to its parent.
- fn record_impl_from_cstore(&mut self, tcx: TyCtxt<'_>, parent: DefId, child: DefId) {
+ fn record_impl_from_cstore(&mut self, tcx: TyCtxt<'tcx>, parent: DefId, child: DefId) {
if self.parent.insert(child, parent).is_some() {
bug!(
"When recording an impl from the crate store, information about its parent \
diff --git a/compiler/rustc_trait_selection/src/traits/structural_match.rs b/compiler/rustc_trait_selection/src/traits/structural_match.rs
index 932dbbb81..4dc08e0f9 100644
--- a/compiler/rustc_trait_selection/src/traits/structural_match.rs
+++ b/compiler/rustc_trait_selection/src/traits/structural_match.rs
@@ -1,11 +1,5 @@
-use crate::infer::{InferCtxt, TyCtxtInferExt};
-use crate::traits::ObligationCause;
-use crate::traits::{TraitEngine, TraitEngineExt};
-
use rustc_data_structures::fx::FxHashSet;
use rustc_hir as hir;
-use rustc_hir::lang_items::LangItem;
-use rustc_middle::ty::query::Providers;
use rustc_middle::ty::{self, Ty, TyCtxt, TypeSuperVisitable, TypeVisitable, TypeVisitor};
use rustc_span::Span;
use std::ops::ControlFlow;
@@ -60,53 +54,6 @@ pub fn search_for_adt_const_param_violation<'tcx>(
.break_value()
}
-/// This method returns true if and only if `adt_ty` itself has been marked as
-/// eligible for structural-match: namely, if it implements both
-/// `StructuralPartialEq` and `StructuralEq` (which are respectively injected by
-/// `#[derive(PartialEq)]` and `#[derive(Eq)]`).
-///
-/// Note that this does *not* recursively check if the substructure of `adt_ty`
-/// implements the traits.
-fn type_marked_structural<'tcx>(
- infcx: &InferCtxt<'tcx>,
- adt_ty: Ty<'tcx>,
- cause: ObligationCause<'tcx>,
-) -> bool {
- let mut fulfillment_cx = <dyn TraitEngine<'tcx>>::new(infcx.tcx);
- // require `#[derive(PartialEq)]`
- let structural_peq_def_id =
- infcx.tcx.require_lang_item(LangItem::StructuralPeq, Some(cause.span));
- fulfillment_cx.register_bound(
- infcx,
- ty::ParamEnv::empty(),
- adt_ty,
- structural_peq_def_id,
- cause.clone(),
- );
- // for now, require `#[derive(Eq)]`. (Doing so is a hack to work around
- // the type `for<'a> fn(&'a ())` failing to implement `Eq` itself.)
- let structural_teq_def_id =
- infcx.tcx.require_lang_item(LangItem::StructuralTeq, Some(cause.span));
- fulfillment_cx.register_bound(
- infcx,
- ty::ParamEnv::empty(),
- adt_ty,
- structural_teq_def_id,
- cause,
- );
-
- // We deliberately skip *reporting* fulfillment errors (via
- // `report_fulfillment_errors`), for two reasons:
- //
- // 1. The error messages would mention `std::marker::StructuralPartialEq`
- // (a trait which is solely meant as an implementation detail
- // for now), and
- //
- // 2. We are sometimes doing future-incompatibility lints for
- // now, so we do not want unconditional errors here.
- fulfillment_cx.select_all_or_error(infcx).is_empty()
-}
-
/// This implements the traversal over the structure of a given type to try to
/// find instances of ADTs (specifically structs or enums) that do not implement
/// the structural-match traits (`StructuralPartialEq` and `StructuralEq`).
@@ -262,11 +209,3 @@ impl<'tcx> TypeVisitor<'tcx> for Search<'tcx> {
})
}
}
-
-pub fn provide(providers: &mut Providers) {
- providers.has_structural_eq_impls = |tcx, ty| {
- let infcx = tcx.infer_ctxt().build();
- let cause = ObligationCause::dummy();
- type_marked_structural(&infcx, ty, cause)
- };
-}
diff --git a/compiler/rustc_trait_selection/src/traits/util.rs b/compiler/rustc_trait_selection/src/traits/util.rs
index ed47d2f83..f3ca6a6c7 100644
--- a/compiler/rustc_trait_selection/src/traits/util.rs
+++ b/compiler/rustc_trait_selection/src/traits/util.rs
@@ -8,7 +8,9 @@ use rustc_hir::def_id::DefId;
use rustc_middle::ty::{self, ImplSubject, ToPredicate, Ty, TyCtxt, TypeVisitable};
use rustc_middle::ty::{GenericArg, SubstsRef};
-use super::{Normalized, Obligation, ObligationCause, PredicateObligation, SelectionContext};
+use super::NormalizeExt;
+use super::{Obligation, ObligationCause, PredicateObligation, SelectionContext};
+use rustc_infer::infer::InferOk;
pub use rustc_infer::traits::{self, util::*};
///////////////////////////////////////////////////////////////////////////
@@ -200,13 +202,13 @@ pub fn impl_subject_and_oblig<'a, 'tcx>(
) -> (ImplSubject<'tcx>, impl Iterator<Item = PredicateObligation<'tcx>>) {
let subject = selcx.tcx().bound_impl_subject(impl_def_id);
let subject = subject.subst(selcx.tcx(), impl_substs);
- let Normalized { value: subject, obligations: normalization_obligations1 } =
- super::normalize(selcx, param_env, ObligationCause::dummy(), subject);
+ let InferOk { value: subject, obligations: normalization_obligations1 } =
+ selcx.infcx.at(&ObligationCause::dummy(), param_env).normalize(subject);
let predicates = selcx.tcx().predicates_of(impl_def_id);
let predicates = predicates.instantiate(selcx.tcx(), impl_substs);
- let Normalized { value: predicates, obligations: normalization_obligations2 } =
- super::normalize(selcx, param_env, ObligationCause::dummy(), predicates);
+ let InferOk { value: predicates, obligations: normalization_obligations2 } =
+ selcx.infcx.at(&ObligationCause::dummy(), param_env).normalize(predicates);
let impl_obligations =
super::predicates_for_generics(|_, _| ObligationCause::dummy(), param_env, predicates);
@@ -238,11 +240,9 @@ pub fn predicate_for_trait_def<'tcx>(
cause: ObligationCause<'tcx>,
trait_def_id: DefId,
recursion_depth: usize,
- self_ty: Ty<'tcx>,
- params: &[GenericArg<'tcx>],
+ params: impl IntoIterator<Item = impl Into<GenericArg<'tcx>>>,
) -> PredicateObligation<'tcx> {
- let trait_ref =
- ty::TraitRef { def_id: trait_def_id, substs: tcx.mk_substs_trait(self_ty, params) };
+ let trait_ref = tcx.mk_trait_ref(trait_def_id, params);
predicate_for_trait_ref(tcx, cause, param_env, trait_ref, recursion_depth)
}
@@ -261,16 +261,6 @@ pub fn upcast_choices<'tcx>(
supertraits(tcx, source_trait_ref).filter(|r| r.def_id() == target_trait_def_id).collect()
}
-/// Given a trait `trait_ref`, returns the number of vtable entries
-/// that come from `trait_ref`, excluding its supertraits. Used in
-/// computing the vtable base for an upcast trait of a trait object.
-pub fn count_own_vtable_entries<'tcx>(
- tcx: TyCtxt<'tcx>,
- trait_ref: ty::PolyTraitRef<'tcx>,
-) -> usize {
- tcx.own_existential_vtable_entries(trait_ref.def_id()).len()
-}
-
/// Given an upcast trait object described by `object`, returns the
/// index of the method `method_def_id` (which should be part of
/// `object.upcast_trait_ref`) within the vtable for `object`.
@@ -300,15 +290,12 @@ pub fn closure_trait_ref_and_return_type<'tcx>(
sig: ty::PolyFnSig<'tcx>,
tuple_arguments: TupleArgumentsFlag,
) -> ty::Binder<'tcx, (ty::TraitRef<'tcx>, Ty<'tcx>)> {
+ assert!(!self_ty.has_escaping_bound_vars());
let arguments_tuple = match tuple_arguments {
TupleArgumentsFlag::No => sig.skip_binder().inputs()[0],
TupleArgumentsFlag::Yes => tcx.intern_tup(sig.skip_binder().inputs()),
};
- debug_assert!(!self_ty.has_escaping_bound_vars());
- let trait_ref = ty::TraitRef {
- def_id: fn_trait_def_id,
- substs: tcx.mk_substs_trait(self_ty, &[arguments_tuple.into()]),
- };
+ let trait_ref = tcx.mk_trait_ref(fn_trait_def_id, [self_ty, arguments_tuple]);
sig.map_bound(|sig| (trait_ref, sig.output()))
}
@@ -318,14 +305,22 @@ pub fn generator_trait_ref_and_outputs<'tcx>(
self_ty: Ty<'tcx>,
sig: ty::PolyGenSig<'tcx>,
) -> ty::Binder<'tcx, (ty::TraitRef<'tcx>, Ty<'tcx>, Ty<'tcx>)> {
- debug_assert!(!self_ty.has_escaping_bound_vars());
- let trait_ref = ty::TraitRef {
- def_id: fn_trait_def_id,
- substs: tcx.mk_substs_trait(self_ty, &[sig.skip_binder().resume_ty.into()]),
- };
+ assert!(!self_ty.has_escaping_bound_vars());
+ let trait_ref = tcx.mk_trait_ref(fn_trait_def_id, [self_ty, sig.skip_binder().resume_ty]);
sig.map_bound(|sig| (trait_ref, sig.yield_ty, sig.return_ty))
}
+pub fn future_trait_ref_and_outputs<'tcx>(
+ tcx: TyCtxt<'tcx>,
+ fn_trait_def_id: DefId,
+ self_ty: Ty<'tcx>,
+ sig: ty::PolyGenSig<'tcx>,
+) -> ty::Binder<'tcx, (ty::TraitRef<'tcx>, Ty<'tcx>)> {
+ assert!(!self_ty.has_escaping_bound_vars());
+ let trait_ref = tcx.mk_trait_ref(fn_trait_def_id, [self_ty]);
+ sig.map_bound(|sig| (trait_ref, sig.return_ty))
+}
+
pub fn impl_item_is_final(tcx: TyCtxt<'_>, assoc_item: &ty::AssocItem) -> bool {
assoc_item.defaultness(tcx).is_final()
&& tcx.impl_defaultness(assoc_item.container_id(tcx)).is_final()
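The `closure_trait_ref_and_return_type` change above still builds `Fn*` trait references over a tuple of argument types (via `TupleArgumentsFlag`), now through `tcx.mk_trait_ref`. A hedged surface-Rust sketch of the shape being modeled:

```rust
// Calling a closure with arguments (a, b) is modeled as an `Fn*` bound whose
// single type parameter is the tuple of argument types, roughly
// `F: Fn<(u32, u32), Output = u32>` for the bound written below.
fn call_twice<F>(f: F) -> u32
where
    F: Fn(u32, u32) -> u32,
{
    f(1, 2) + f(3, 4)
}

fn main() {
    assert_eq!(call_twice(|a, b| a + b), 10);
}
```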
diff --git a/compiler/rustc_trait_selection/src/traits/vtable.rs b/compiler/rustc_trait_selection/src/traits/vtable.rs
new file mode 100644
index 000000000..41ce6cdf7
--- /dev/null
+++ b/compiler/rustc_trait_selection/src/traits/vtable.rs
@@ -0,0 +1,386 @@
+use crate::errors::DumpVTableEntries;
+use crate::traits::{impossible_predicates, is_vtable_safe_method};
+use rustc_hir::def_id::DefId;
+use rustc_hir::lang_items::LangItem;
+use rustc_infer::traits::util::PredicateSet;
+use rustc_infer::traits::ImplSource;
+use rustc_middle::ty::visit::TypeVisitable;
+use rustc_middle::ty::InternalSubsts;
+use rustc_middle::ty::{self, GenericParamDefKind, ToPredicate, Ty, TyCtxt, VtblEntry};
+use rustc_span::{sym, Span};
+use smallvec::SmallVec;
+
+use std::fmt::Debug;
+use std::ops::ControlFlow;
+
+#[derive(Clone, Debug)]
+pub(super) enum VtblSegment<'tcx> {
+ MetadataDSA,
+ TraitOwnEntries { trait_ref: ty::PolyTraitRef<'tcx>, emit_vptr: bool },
+}
+
+/// Prepare the segments for a vtable
+pub(super) fn prepare_vtable_segments<'tcx, T>(
+ tcx: TyCtxt<'tcx>,
+ trait_ref: ty::PolyTraitRef<'tcx>,
+ mut segment_visitor: impl FnMut(VtblSegment<'tcx>) -> ControlFlow<T>,
+) -> Option<T> {
+ // The following constraints hold for the final arrangement.
+ // 1. The whole virtual table of the first direct super trait is included as
+ // the prefix. If this trait doesn't have any super traits, then this step
+ // consists of the DSA metadata.
+ // 2. Then come the proper pointer metadata (vptr) and all own methods for all
+ // other super traits except those already included as part of the first
+ // direct super trait's virtual table.
+ // 3. Finally, the own methods of this trait.
+
+ // This has the advantage that trait upcasting to the first direct super trait on each level
+ // is zero cost, and upcasting to any other trait only requires replacing the pointer with one
+ // extra level of indirection, while not using too much extra memory.
+
+ // For a single inheritance relationship like this,
+ // D --> C --> B --> A
+ // The resulting vtable will consist of these segments:
+ // DSA, A, B, C, D
+
+ // For a multiple inheritance relationship like this,
+ // D --> C --> A
+ //         \-> B
+ // The resulting vtable will consist of these segments:
+ // DSA, A, B, B-vptr, C, D
+
+ // For a diamond inheritance relationship like this,
+ // D --> B --> A
+ //   \-> C -/
+ // The resulting vtable will consist of these segments:
+ // DSA, A, B, C, C-vptr, D
+
+ // For a more complex inheritance relationship like this:
+ // O --> G --> C --> A
+ //   \     \     \-> B
+ //   |     |-> F --> D
+ //   |           \-> E
+ //   |-> N --> J --> H
+ //         \     \-> I
+ //         |-> M --> K
+ //               \-> L
+ // The resulting vtable will consist of these segments:
+ // DSA, A, B, B-vptr, C, D, D-vptr, E, E-vptr, F, F-vptr, G,
+ // H, H-vptr, I, I-vptr, J, J-vptr, K, K-vptr, L, L-vptr, M, M-vptr,
+ // N, N-vptr, O
+
+ // Emit the DSA segment first.
+ if let ControlFlow::Break(v) = (segment_visitor)(VtblSegment::MetadataDSA) {
+ return Some(v);
+ }
+
+ let mut emit_vptr_on_new_entry = false;
+ let mut visited = PredicateSet::new(tcx);
+ let predicate = trait_ref.without_const().to_predicate(tcx);
+ let mut stack: SmallVec<[(ty::PolyTraitRef<'tcx>, _, _); 5]> =
+ smallvec![(trait_ref, emit_vptr_on_new_entry, None)];
+ visited.insert(predicate);
+
+ // The main traversal loop:
+ // basically we want to cut the inheritance directed graph into a few non-overlapping slices of nodes
+ // such that each node is emitted after all its descendants have been emitted.
+ // So we convert the directed graph into a tree by skipping all previously visited nodes using a visited set.
+ // This is done on the fly.
+ // Each loop run emits a slice - it starts by finding a "childless" unvisited node, backtracking upwards, and it
+ // stops after it finds a node that has a next-sibling node.
+ // This next-sibling node will be used as the starting point of the next slice.
+
+ // Example:
+ // For a diamond inheritance relationship like this,
+ // D#1 --> B#0 --> A#0
+ //   \-> C#1 -/
+
+ // Starting point 0 stack [D]
+ // Loop run #0: Stack after diving in is [D B A], A is "childless"
+ // after this point, all newly visited nodes won't have a vtable that equals a prefix of this one.
+ // Loop run #0: Emitting the slice [B A] (in reverse order), B has a next-sibling node, so this slice stops here.
+ // Loop run #0: Stack after exiting out is [D C], C is the next starting point.
+ // Loop run #1: Stack after diving in is [D C], C is "childless", since its child A is skipped (already emitted).
+ // Loop run #1: Emitting the slice [D C] (in reverse order). No one has a next-sibling node.
+ // Loop run #1: Stack after exiting out is []. Now the function exits.
+
+ loop {
+ // dive deeper into the stack, recording the path
+ 'diving_in: loop {
+ if let Some((inner_most_trait_ref, _, _)) = stack.last() {
+ let inner_most_trait_ref = *inner_most_trait_ref;
+ let mut direct_super_traits_iter = tcx
+ .super_predicates_of(inner_most_trait_ref.def_id())
+ .predicates
+ .into_iter()
+ .filter_map(move |(pred, _)| {
+ pred.subst_supertrait(tcx, &inner_most_trait_ref).to_opt_poly_trait_pred()
+ });
+
+ 'diving_in_skip_visited_traits: loop {
+ if let Some(next_super_trait) = direct_super_traits_iter.next() {
+ if visited.insert(next_super_trait.to_predicate(tcx)) {
+ // We're throwing away potential constness of super traits here.
+ // FIXME: handle ~const super traits
+ let next_super_trait = next_super_trait.map_bound(|t| t.trait_ref);
+ stack.push((
+ next_super_trait,
+ emit_vptr_on_new_entry,
+ Some(direct_super_traits_iter),
+ ));
+ break 'diving_in_skip_visited_traits;
+ } else {
+ continue 'diving_in_skip_visited_traits;
+ }
+ } else {
+ break 'diving_in;
+ }
+ }
+ }
+ }
+
+ // Other than the left-most path, a vptr should be emitted for each trait.
+ emit_vptr_on_new_entry = true;
+
+ // Emit the innermost item, move to the next sibling and stop there if possible; otherwise jump to the outer level.
+ 'exiting_out: loop {
+ if let Some((inner_most_trait_ref, emit_vptr, siblings_opt)) = stack.last_mut() {
+ if let ControlFlow::Break(v) = (segment_visitor)(VtblSegment::TraitOwnEntries {
+ trait_ref: *inner_most_trait_ref,
+ emit_vptr: *emit_vptr,
+ }) {
+ return Some(v);
+ }
+
+ 'exiting_out_skip_visited_traits: loop {
+ if let Some(siblings) = siblings_opt {
+ if let Some(next_inner_most_trait_ref) = siblings.next() {
+ if visited.insert(next_inner_most_trait_ref.to_predicate(tcx)) {
+ // We're throwing away potential constness of super traits here.
+ // FIXME: handle ~const super traits
+ let next_inner_most_trait_ref =
+ next_inner_most_trait_ref.map_bound(|t| t.trait_ref);
+ *inner_most_trait_ref = next_inner_most_trait_ref;
+ *emit_vptr = emit_vptr_on_new_entry;
+ break 'exiting_out;
+ } else {
+ continue 'exiting_out_skip_visited_traits;
+ }
+ }
+ }
+ stack.pop();
+ continue 'exiting_out;
+ }
+ }
+ // all done
+ return None;
+ }
+ }
+}
+
+fn dump_vtable_entries<'tcx>(
+ tcx: TyCtxt<'tcx>,
+ sp: Span,
+ trait_ref: ty::PolyTraitRef<'tcx>,
+ entries: &[VtblEntry<'tcx>],
+) {
+ tcx.sess.emit_err(DumpVTableEntries {
+ span: sp,
+ trait_ref,
+ entries: format!("{:#?}", entries),
+ });
+}
+
+fn own_existential_vtable_entries<'tcx>(tcx: TyCtxt<'tcx>, trait_def_id: DefId) -> &'tcx [DefId] {
+ let trait_methods = tcx
+ .associated_items(trait_def_id)
+ .in_definition_order()
+ .filter(|item| item.kind == ty::AssocKind::Fn);
+ // Now list each method's DefId (within its trait).
+ let own_entries = trait_methods.filter_map(move |trait_method| {
+ debug!("own_existential_vtable_entry: trait_method={:?}", trait_method);
+ let def_id = trait_method.def_id;
+
+ // Some methods cannot be called on an object; skip those.
+ if !is_vtable_safe_method(tcx, trait_def_id, &trait_method) {
+ debug!("own_existential_vtable_entry: not vtable safe");
+ return None;
+ }
+
+ Some(def_id)
+ });
+
+ tcx.arena.alloc_from_iter(own_entries.into_iter())
+}
+
+/// Given a trait `trait_ref`, iterates the vtable entries
+/// that come from `trait_ref`, including its supertraits.
+fn vtable_entries<'tcx>(
+ tcx: TyCtxt<'tcx>,
+ trait_ref: ty::PolyTraitRef<'tcx>,
+) -> &'tcx [VtblEntry<'tcx>] {
+ debug!("vtable_entries({:?})", trait_ref);
+
+ let mut entries = vec![];
+
+ let vtable_segment_callback = |segment| -> ControlFlow<()> {
+ match segment {
+ VtblSegment::MetadataDSA => {
+ entries.extend(TyCtxt::COMMON_VTABLE_ENTRIES);
+ }
+ VtblSegment::TraitOwnEntries { trait_ref, emit_vptr } => {
+ let existential_trait_ref = trait_ref
+ .map_bound(|trait_ref| ty::ExistentialTraitRef::erase_self_ty(tcx, trait_ref));
+
+ // Lookup the shape of vtable for the trait.
+ let own_existential_entries =
+ tcx.own_existential_vtable_entries(existential_trait_ref.def_id());
+
+ let own_entries = own_existential_entries.iter().copied().map(|def_id| {
+ debug!("vtable_entries: trait_method={:?}", def_id);
+
+ // The method may have some early-bound lifetimes; add regions for those.
+ let substs = trait_ref.map_bound(|trait_ref| {
+ InternalSubsts::for_item(tcx, def_id, |param, _| match param.kind {
+ GenericParamDefKind::Lifetime => tcx.lifetimes.re_erased.into(),
+ GenericParamDefKind::Type { .. }
+ | GenericParamDefKind::Const { .. } => {
+ trait_ref.substs[param.index as usize]
+ }
+ })
+ });
+
+ // The trait type may have higher-ranked lifetimes in it;
+ // erase them if they appear, so that we get the type
+ // at some particular call site.
+ let substs = tcx
+ .normalize_erasing_late_bound_regions(ty::ParamEnv::reveal_all(), substs);
+
+ // It's possible that the method relies on where-clauses that
+ // do not hold for this particular set of type parameters.
+ // Note that this method could then never be called, so we
+ // do not want to try and codegen it, in that case (see #23435).
+ let predicates = tcx.predicates_of(def_id).instantiate_own(tcx, substs);
+ if impossible_predicates(tcx, predicates.predicates) {
+ debug!("vtable_entries: predicates do not hold");
+ return VtblEntry::Vacant;
+ }
+
+ let instance = ty::Instance::resolve_for_vtable(
+ tcx,
+ ty::ParamEnv::reveal_all(),
+ def_id,
+ substs,
+ )
+ .expect("resolution failed during building vtable representation");
+ VtblEntry::Method(instance)
+ });
+
+ entries.extend(own_entries);
+
+ if emit_vptr {
+ entries.push(VtblEntry::TraitVPtr(trait_ref));
+ }
+ }
+ }
+
+ ControlFlow::Continue(())
+ };
+
+ let _ = prepare_vtable_segments(tcx, trait_ref, vtable_segment_callback);
+
+ if tcx.has_attr(trait_ref.def_id(), sym::rustc_dump_vtable) {
+ let sp = tcx.def_span(trait_ref.def_id());
+ dump_vtable_entries(tcx, sp, trait_ref, &entries);
+ }
+
+ tcx.arena.alloc_from_iter(entries.into_iter())
+}
+
+/// Find slot base for trait methods within vtable entries of another trait
+pub(super) fn vtable_trait_first_method_offset<'tcx>(
+ tcx: TyCtxt<'tcx>,
+ key: (
+ ty::PolyTraitRef<'tcx>, // trait_to_be_found
+ ty::PolyTraitRef<'tcx>, // trait_owning_vtable
+ ),
+) -> usize {
+ let (trait_to_be_found, trait_owning_vtable) = key;
+
+ // #90177
+ let trait_to_be_found_erased = tcx.erase_regions(trait_to_be_found);
+
+ let vtable_segment_callback = {
+ let mut vtable_base = 0;
+
+ move |segment| {
+ match segment {
+ VtblSegment::MetadataDSA => {
+ vtable_base += TyCtxt::COMMON_VTABLE_ENTRIES.len();
+ }
+ VtblSegment::TraitOwnEntries { trait_ref, emit_vptr } => {
+ if tcx.erase_regions(trait_ref) == trait_to_be_found_erased {
+ return ControlFlow::Break(vtable_base);
+ }
+ vtable_base += count_own_vtable_entries(tcx, trait_ref);
+ if emit_vptr {
+ vtable_base += 1;
+ }
+ }
+ }
+ ControlFlow::Continue(())
+ }
+ };
+
+ if let Some(vtable_base) =
+ prepare_vtable_segments(tcx, trait_owning_vtable, vtable_segment_callback)
+ {
+ vtable_base
+ } else {
+ bug!("Failed to find info for expected trait in vtable");
+ }
+}
+
+/// Find slot offset for trait vptr within vtable entries of another trait
+pub(crate) fn vtable_trait_upcasting_coercion_new_vptr_slot<'tcx>(
+ tcx: TyCtxt<'tcx>,
+ key: (
+ Ty<'tcx>, // trait object type whose trait owns the vtable
+ Ty<'tcx>, // trait object for supertrait
+ ),
+) -> Option<usize> {
+ let (source, target) = key;
+ assert!(matches!(&source.kind(), &ty::Dynamic(..)) && !source.needs_infer());
+ assert!(matches!(&target.kind(), &ty::Dynamic(..)) && !target.needs_infer());
+
+ // this has been type-checked before, so diagnostics are not really needed.
+ let unsize_trait_did = tcx.require_lang_item(LangItem::Unsize, None);
+
+ let trait_ref = tcx.mk_trait_ref(unsize_trait_did, [source, target]);
+
+ match tcx.codegen_select_candidate((ty::ParamEnv::reveal_all(), ty::Binder::dummy(trait_ref))) {
+ Ok(ImplSource::TraitUpcasting(implsrc_traitcasting)) => {
+ implsrc_traitcasting.vtable_vptr_slot
+ }
+ otherwise => bug!("expected TraitUpcasting candidate, got {otherwise:?}"),
+ }
+}
+
+/// Given a trait `trait_ref`, returns the number of vtable entries
+/// that come from `trait_ref`, excluding its supertraits. Used in
+/// computing the vtable base for an upcast trait of a trait object.
+pub(crate) fn count_own_vtable_entries<'tcx>(
+ tcx: TyCtxt<'tcx>,
+ trait_ref: ty::PolyTraitRef<'tcx>,
+) -> usize {
+ tcx.own_existential_vtable_entries(trait_ref.def_id()).len()
+}
+
+pub(super) fn provide(providers: &mut ty::query::Providers) {
+ *providers = ty::query::Providers {
+ own_existential_vtable_entries,
+ vtable_entries,
+ vtable_trait_upcasting_coercion_new_vptr_slot,
+ ..*providers
+ };
+}
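The layout rules documented in `prepare_vtable_segments` can be observed from surface Rust. The sketch below is an illustration under the assumption that `#[rustc_dump_vtable]` (the attribute `vtable_entries` checks for via `sym::rustc_dump_vtable`) is usable on a nightly compiler behind `#![feature(rustc_attrs)]`; it mirrors the "multiple inheritance" example from the comments above, where the expected segments are `DSA, A, B, B-vptr, C, D`:

```rust
#![feature(rustc_attrs)] // assumed nightly-only gate for the debug attribute below

trait A { fn a(&self) {} }
trait B { fn b(&self) {} }
trait C: A + B { fn c(&self) {} } // A is the first direct supertrait, so it forms the prefix
#[rustc_dump_vtable]              // asks the compiler to report the computed vtable entries
trait D: C { fn d(&self) {} }

struct S;
impl A for S {}
impl B for S {}
impl C for S {}
impl D for S {}

fn main() {
    // Building the trait object should force the `vtable_entries` query,
    // whose dump should show the segments in the order: DSA, A, B, B-vptr, C, D.
    let _obj: &dyn D = &S;
}
```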
diff --git a/compiler/rustc_trait_selection/src/traits/wf.rs b/compiler/rustc_trait_selection/src/traits/wf.rs
index fc0a9f690..681fb753f 100644
--- a/compiler/rustc_trait_selection/src/traits/wf.rs
+++ b/compiler/rustc_trait_selection/src/traits/wf.rs
@@ -4,7 +4,7 @@ use rustc_hir as hir;
use rustc_hir::def_id::DefId;
use rustc_hir::lang_items::LangItem;
use rustc_middle::ty::subst::{GenericArg, GenericArgKind, SubstsRef};
-use rustc_middle::ty::{self, ToPredicate, Ty, TyCtxt, TypeVisitable};
+use rustc_middle::ty::{self, Ty, TyCtxt, TypeVisitable};
use rustc_span::Span;
use std::iter;
@@ -121,14 +121,14 @@ pub fn predicate_obligations<'tcx>(
// It's ok to skip the binder here because wf code is prepared for it
match predicate.kind().skip_binder() {
- ty::PredicateKind::Trait(t) => {
+ ty::PredicateKind::Clause(ty::Clause::Trait(t)) => {
wf.compute_trait_pred(&t, Elaborate::None);
}
- ty::PredicateKind::RegionOutlives(..) => {}
- ty::PredicateKind::TypeOutlives(ty::OutlivesPredicate(ty, _reg)) => {
+ ty::PredicateKind::Clause(ty::Clause::RegionOutlives(..)) => {}
+ ty::PredicateKind::Clause(ty::Clause::TypeOutlives(ty::OutlivesPredicate(ty, _reg))) => {
wf.compute(ty.into());
}
- ty::PredicateKind::Projection(t) => {
+ ty::PredicateKind::Clause(ty::Clause::Projection(t)) => {
wf.compute_projection(t.projection_ty);
wf.compute(match t.term.unpack() {
ty::TermKind::Ty(ty) => ty.into(),
@@ -155,6 +155,7 @@ pub fn predicate_obligations<'tcx>(
wf.compute(c1.into());
wf.compute(c2.into());
}
+ ty::PredicateKind::Ambiguous => {}
ty::PredicateKind::TypeWellFormedFromEnv(..) => {
bug!("TypeWellFormedFromEnv is only used for Chalk")
}
@@ -227,7 +228,7 @@ fn extend_cause_with_original_assoc_item_obligation<'tcx>(
// It is fine to skip the binder as we don't care about regions here.
match pred.kind().skip_binder() {
- ty::PredicateKind::Projection(proj) => {
+ ty::PredicateKind::Clause(ty::Clause::Projection(proj)) => {
// The obligation comes not from the current `impl` nor the `trait` being implemented,
// but rather from a "second order" obligation, where an associated type has a
// projection coming from another associated type. See
@@ -244,7 +245,7 @@ fn extend_cause_with_original_assoc_item_obligation<'tcx>(
cause.span = impl_item_span;
}
}
- ty::PredicateKind::Trait(pred) => {
+ ty::PredicateKind::Clause(ty::Clause::Trait(pred)) => {
// An associated item obligation born out of the `trait` failed to be met. An example
// can be seen in `ui/associated-types/point-at-type-on-obligation-failure-2.rs`.
debug!("extended_cause_with_original_assoc_item_obligation trait proj {:?}", pred);
@@ -324,7 +325,7 @@ impl<'tcx> WfPredicates<'tcx> {
extend_cause_with_original_assoc_item_obligation(
tcx, trait_ref, item, &mut cause, predicate,
);
- traits::Obligation::with_depth(cause, depth, param_env, predicate)
+ traits::Obligation::with_depth(tcx, cause, depth, param_env, predicate)
};
if let Elaborate::All = elaborate {
@@ -356,10 +357,11 @@ impl<'tcx> WfPredicates<'tcx> {
}
}
traits::Obligation::with_depth(
+ tcx,
cause,
depth,
param_env,
- ty::Binder::dummy(ty::PredicateKind::WellFormed(arg)).to_predicate(tcx),
+ ty::Binder::dummy(ty::PredicateKind::WellFormed(arg)),
)
}),
);
@@ -407,10 +409,11 @@ impl<'tcx> WfPredicates<'tcx> {
.filter(|arg| !arg.has_escaping_bound_vars())
.map(|arg| {
traits::Obligation::with_depth(
+ tcx,
cause.clone(),
depth,
param_env,
- ty::Binder::dummy(ty::PredicateKind::WellFormed(arg)).to_predicate(tcx),
+ ty::Binder::dummy(ty::PredicateKind::WellFormed(arg)),
)
}),
);
@@ -419,15 +422,13 @@ impl<'tcx> WfPredicates<'tcx> {
fn require_sized(&mut self, subty: Ty<'tcx>, cause: traits::ObligationCauseCode<'tcx>) {
if !subty.has_escaping_bound_vars() {
let cause = self.cause(cause);
- let trait_ref = ty::TraitRef {
- def_id: self.tcx.require_lang_item(LangItem::Sized, None),
- substs: self.tcx.mk_substs_trait(subty, &[]),
- };
+ let trait_ref = self.tcx.at(cause.span).mk_trait_ref(LangItem::Sized, [subty]);
self.out.push(traits::Obligation::with_depth(
+ self.tcx,
cause,
self.recursion_depth,
self.param_env,
- ty::Binder::dummy(trait_ref).without_const().to_predicate(self.tcx),
+ ty::Binder::dummy(trait_ref).without_const(),
));
}
}
@@ -454,10 +455,10 @@ impl<'tcx> WfPredicates<'tcx> {
self.out.extend(obligations);
let predicate =
- ty::Binder::dummy(ty::PredicateKind::ConstEvaluatable(ct))
- .to_predicate(self.tcx());
+ ty::Binder::dummy(ty::PredicateKind::ConstEvaluatable(ct));
let cause = self.cause(traits::WellFormed(None));
self.out.push(traits::Obligation::with_depth(
+ self.tcx(),
cause,
self.recursion_depth,
self.param_env,
@@ -468,13 +469,33 @@ impl<'tcx> WfPredicates<'tcx> {
let cause = self.cause(traits::WellFormed(None));
self.out.push(traits::Obligation::with_depth(
+ self.tcx(),
cause,
self.recursion_depth,
self.param_env,
- ty::Binder::dummy(ty::PredicateKind::WellFormed(ct.into()))
- .to_predicate(self.tcx()),
+ ty::Binder::dummy(ty::PredicateKind::WellFormed(ct.into())),
));
}
+ ty::ConstKind::Expr(_) => {
+ // FIXME(generic_const_exprs): this doesn't verify that given `Expr(N + 1)` the
+ // trait bound `typeof(N): Add<typeof(1)>` holds. This is currently unnecessary
+ // as `ConstKind::Expr` is only produced via normalization of `ConstKind::Unevaluated`
+ // which means that the `DefId` would have been typeck'd elsewhere. However in
+ // the future we may allow directly lowering to `ConstKind::Expr` in which case
+ // we would not be proving bounds we should.
+
+ let predicate =
+ ty::Binder::dummy(ty::PredicateKind::ConstEvaluatable(ct));
+ let cause = self.cause(traits::WellFormed(None));
+ self.out.push(traits::Obligation::with_depth(
+ self.tcx(),
+ cause,
+ self.recursion_depth,
+ self.param_env,
+ predicate,
+ ));
+ }
+
ty::ConstKind::Error(_)
| ty::ConstKind::Param(_)
| ty::ConstKind::Bound(..)
@@ -556,13 +577,13 @@ impl<'tcx> WfPredicates<'tcx> {
if !r.has_escaping_bound_vars() && !rty.has_escaping_bound_vars() {
let cause = self.cause(traits::ReferenceOutlivesReferent(ty));
self.out.push(traits::Obligation::with_depth(
+ self.tcx(),
cause,
depth,
param_env,
- ty::Binder::dummy(ty::PredicateKind::TypeOutlives(
+ ty::Binder::dummy(ty::PredicateKind::Clause(ty::Clause::TypeOutlives(
ty::OutlivesPredicate(rty, r),
- ))
- .to_predicate(self.tcx()),
+ ))),
));
}
}
@@ -631,7 +652,7 @@ impl<'tcx> WfPredicates<'tcx> {
// All of the requirements on type parameters
// have already been checked for `impl Trait` in
// return position. We do need to check type-alias-impl-trait though.
- if ty::is_impl_trait_defn(self.tcx, did).is_none() {
+ if self.tcx.is_type_alias_impl_trait(did) {
let obligations = self.nominal_obligations(did, substs);
self.out.extend(obligations);
}
@@ -656,11 +677,11 @@ impl<'tcx> WfPredicates<'tcx> {
let tcx = self.tcx();
self.out.extend(component_traits.map(|did| {
traits::Obligation::with_depth(
+ tcx,
cause.clone(),
depth,
param_env,
- ty::Binder::dummy(ty::PredicateKind::ObjectSafe(did))
- .to_predicate(tcx),
+ ty::Binder::dummy(ty::PredicateKind::ObjectSafe(did)),
)
}));
}
@@ -681,11 +702,11 @@ impl<'tcx> WfPredicates<'tcx> {
ty::Infer(_) => {
let cause = self.cause(traits::WellFormed(None));
self.out.push(traits::Obligation::with_depth(
+ self.tcx(),
cause,
self.recursion_depth,
param_env,
- ty::Binder::dummy(ty::PredicateKind::WellFormed(ty.into()))
- .to_predicate(self.tcx()),
+ ty::Binder::dummy(ty::PredicateKind::WellFormed(ty.into())),
));
}
}
@@ -724,7 +745,13 @@ impl<'tcx> WfPredicates<'tcx> {
if remap_constness {
pred = pred.without_const(self.tcx);
}
- traits::Obligation::with_depth(cause, self.recursion_depth, self.param_env, pred)
+ traits::Obligation::with_depth(
+ self.tcx,
+ cause,
+ self.recursion_depth,
+ self.param_env,
+ pred,
+ )
})
.filter(|pred| !pred.has_escaping_bound_vars())
.collect()
@@ -749,7 +776,7 @@ impl<'tcx> WfPredicates<'tcx> {
fn from_object_ty(
&mut self,
ty: Ty<'tcx>,
- data: &'tcx ty::List<ty::Binder<'tcx, ty::ExistentialPredicate<'tcx>>>,
+ data: &'tcx ty::List<ty::PolyExistentialPredicate<'tcx>>,
region: ty::Region<'tcx>,
) {
// Imagine a type like this:
@@ -794,10 +821,11 @@ impl<'tcx> WfPredicates<'tcx> {
let outlives =
ty::Binder::dummy(ty::OutlivesPredicate(explicit_bound, implicit_bound));
self.out.push(traits::Obligation::with_depth(
+ self.tcx,
cause,
self.recursion_depth,
self.param_env,
- outlives.to_predicate(self.tcx),
+ outlives,
));
}
}
@@ -812,7 +840,7 @@ impl<'tcx> WfPredicates<'tcx> {
/// `infer::required_region_bounds`, see that for more information.
pub fn object_region_bounds<'tcx>(
tcx: TyCtxt<'tcx>,
- existential_predicates: &'tcx ty::List<ty::Binder<'tcx, ty::ExistentialPredicate<'tcx>>>,
+ existential_predicates: &'tcx ty::List<ty::PolyExistentialPredicate<'tcx>>,
) -> Vec<ty::Region<'tcx>> {
// Since we don't actually *know* the self type for an object,
// this "open(err)" serves as a kind of dummy standin -- basically
@@ -858,18 +886,22 @@ pub(crate) fn required_region_bounds<'tcx>(
.filter_map(|obligation| {
debug!(?obligation);
match obligation.predicate.kind().skip_binder() {
- ty::PredicateKind::Projection(..)
- | ty::PredicateKind::Trait(..)
+ ty::PredicateKind::Clause(ty::Clause::Projection(..))
+ | ty::PredicateKind::Clause(ty::Clause::Trait(..))
| ty::PredicateKind::Subtype(..)
| ty::PredicateKind::Coerce(..)
| ty::PredicateKind::WellFormed(..)
| ty::PredicateKind::ObjectSafe(..)
| ty::PredicateKind::ClosureKind(..)
- | ty::PredicateKind::RegionOutlives(..)
+ | ty::PredicateKind::Clause(ty::Clause::RegionOutlives(..))
| ty::PredicateKind::ConstEvaluatable(..)
| ty::PredicateKind::ConstEquate(..)
+ | ty::PredicateKind::Ambiguous
| ty::PredicateKind::TypeWellFormedFromEnv(..) => None,
- ty::PredicateKind::TypeOutlives(ty::OutlivesPredicate(ref t, ref r)) => {
+ ty::PredicateKind::Clause(ty::Clause::TypeOutlives(ty::OutlivesPredicate(
+ ref t,
+ ref r,
+ ))) => {
// Search for a bound of the form `erased_self_ty
// : 'a`, but be wary of something like `for<'a>
// erased_self_ty : 'a` (we interpret a
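The `require_sized` change earlier in this file keeps emitting implicit `Sized` obligations, now built via `tcx.at(span).mk_trait_ref(LangItem::Sized, [subty])`. A hedged sketch of the user-visible requirement those obligations encode:

```rust
// Each by-value position must be `Sized`; only a struct's last field may be
// dynamically sized.
struct SizedField<T> {
    value: T, // fine: the implicit `T: Sized` bound satisfies the obligation
}

struct BoxedSlice {
    value: Box<[u8]>, // fine: `Box<[u8]>` is itself `Sized`
}

// struct NotAllowed {
//     data: [u8], // an unsized type may only appear as the *last* field...
//     len: u32,   // ...so this layout is rejected
// }

fn main() {}
```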
diff --git a/compiler/rustc_traits/Cargo.toml b/compiler/rustc_traits/Cargo.toml
index 951554c77..a432498ab 100644
--- a/compiler/rustc_traits/Cargo.toml
+++ b/compiler/rustc_traits/Cargo.toml
@@ -12,9 +12,10 @@ rustc_hir = { path = "../rustc_hir" }
rustc_index = { path = "../rustc_index" }
rustc_ast = { path = "../rustc_ast" }
rustc_span = { path = "../rustc_span" }
-chalk-ir = "0.80.0"
-chalk-engine = "0.80.0"
-chalk-solve = "0.80.0"
+rustc_target = { path = "../rustc_target" }
+chalk-ir = "0.87.0"
+chalk-engine = "0.87.0"
+chalk-solve = "0.87.0"
smallvec = { version = "1.8.1", features = ["union", "may_dangle"] }
rustc_infer = { path = "../rustc_infer" }
rustc_trait_selection = { path = "../rustc_trait_selection" }
diff --git a/compiler/rustc_traits/src/chalk/db.rs b/compiler/rustc_traits/src/chalk/db.rs
index 0de28b826..344c8b93c 100644
--- a/compiler/rustc_traits/src/chalk/db.rs
+++ b/compiler/rustc_traits/src/chalk/db.rs
@@ -9,9 +9,9 @@
use rustc_middle::traits::ChalkRustInterner as RustInterner;
use rustc_middle::ty::{self, AssocKind, EarlyBinder, Ty, TyCtxt, TypeFoldable, TypeSuperFoldable};
use rustc_middle::ty::{InternalSubsts, SubstsRef};
+use rustc_target::abi::{Integer, IntegerType};
use rustc_ast::ast;
-use rustc_attr as attr;
use rustc_hir::def_id::DefId;
@@ -142,6 +142,8 @@ impl<'tcx> chalk_solve::RustIrDatabase<RustInterner<'tcx>> for RustIrDatabase<'t
Some(CoerceUnsized)
} else if lang_items.dispatch_from_dyn_trait() == Some(def_id) {
Some(DispatchFromDyn)
+ } else if lang_items.tuple_trait() == Some(def_id) {
+ Some(Tuple)
} else {
None
};
@@ -216,21 +218,21 @@ impl<'tcx> chalk_solve::RustIrDatabase<RustInterner<'tcx>> for RustIrDatabase<'t
c: adt_def.repr().c(),
packed: adt_def.repr().packed(),
int: adt_def.repr().int.map(|i| match i {
- attr::IntType::SignedInt(ty) => match ty {
- ast::IntTy::Isize => int(chalk_ir::IntTy::Isize),
- ast::IntTy::I8 => int(chalk_ir::IntTy::I8),
- ast::IntTy::I16 => int(chalk_ir::IntTy::I16),
- ast::IntTy::I32 => int(chalk_ir::IntTy::I32),
- ast::IntTy::I64 => int(chalk_ir::IntTy::I64),
- ast::IntTy::I128 => int(chalk_ir::IntTy::I128),
+ IntegerType::Pointer(true) => int(chalk_ir::IntTy::Isize),
+ IntegerType::Pointer(false) => uint(chalk_ir::UintTy::Usize),
+ IntegerType::Fixed(i, true) => match i {
+ Integer::I8 => int(chalk_ir::IntTy::I8),
+ Integer::I16 => int(chalk_ir::IntTy::I16),
+ Integer::I32 => int(chalk_ir::IntTy::I32),
+ Integer::I64 => int(chalk_ir::IntTy::I64),
+ Integer::I128 => int(chalk_ir::IntTy::I128),
},
- attr::IntType::UnsignedInt(ty) => match ty {
- ast::UintTy::Usize => uint(chalk_ir::UintTy::Usize),
- ast::UintTy::U8 => uint(chalk_ir::UintTy::U8),
- ast::UintTy::U16 => uint(chalk_ir::UintTy::U16),
- ast::UintTy::U32 => uint(chalk_ir::UintTy::U32),
- ast::UintTy::U64 => uint(chalk_ir::UintTy::U64),
- ast::UintTy::U128 => uint(chalk_ir::UintTy::U128),
+ IntegerType::Fixed(i, false) => match i {
+ Integer::I8 => uint(chalk_ir::UintTy::U8),
+ Integer::I16 => uint(chalk_ir::UintTy::U16),
+ Integer::I32 => uint(chalk_ir::UintTy::U32),
+ Integer::I64 => uint(chalk_ir::UintTy::U64),
+ Integer::I128 => uint(chalk_ir::UintTy::U128),
},
}),
})
@@ -570,6 +572,7 @@ impl<'tcx> chalk_solve::RustIrDatabase<RustInterner<'tcx>> for RustIrDatabase<'t
CoerceUnsized => lang_items.coerce_unsized_trait(),
DiscriminantKind => lang_items.discriminant_kind_trait(),
DispatchFromDyn => lang_items.dispatch_from_dyn_trait(),
+ Tuple => lang_items.tuple_trait(),
};
def_id.map(chalk_ir::TraitId)
}
@@ -728,16 +731,16 @@ fn bound_vars_for_item<'tcx>(tcx: TyCtxt<'tcx>, def_id: DefId) -> SubstsRef<'tcx
ty::GenericParamDefKind::Lifetime => {
let br = ty::BoundRegion {
var: ty::BoundVar::from_usize(substs.len()),
- kind: ty::BrAnon(substs.len() as u32),
+ kind: ty::BrAnon(substs.len() as u32, None),
};
tcx.mk_region(ty::ReLateBound(ty::INNERMOST, br)).into()
}
ty::GenericParamDefKind::Const { .. } => tcx
- .mk_const(ty::ConstS {
- kind: ty::ConstKind::Bound(ty::INNERMOST, ty::BoundVar::from(param.index)),
- ty: tcx.type_of(param.def_id),
- })
+ .mk_const(
+ ty::ConstKind::Bound(ty::INNERMOST, ty::BoundVar::from(param.index)),
+ tcx.type_of(param.def_id),
+ )
.into(),
})
}
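The repr-int lowering above now consumes `rustc_target::abi::IntegerType` instead of the old `rustc_attr` enums. For orientation, a sketch of the source-level `#[repr(...)]` attributes each variant corresponds to (signedness is the boolean carried by the variant, matching the match arms above):

```rust
#[repr(isize)]
enum PtrSigned { A = -1 }    // IntegerType::Pointer(true)  -> chalk_ir::IntTy::Isize

#[repr(usize)]
enum PtrUnsigned { A = 1 }   // IntegerType::Pointer(false) -> chalk_ir::UintTy::Usize

#[repr(i16)]
enum FixedSigned { A = -1 }  // IntegerType::Fixed(Integer::I16, true)  -> chalk_ir::IntTy::I16

#[repr(u64)]
enum FixedUnsigned { A = 1 } // IntegerType::Fixed(Integer::I64, false) -> chalk_ir::UintTy::U64

fn main() {}
```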
diff --git a/compiler/rustc_traits/src/chalk/lowering.rs b/compiler/rustc_traits/src/chalk/lowering.rs
index 45d5ea93d..c4ab86e9e 100644
--- a/compiler/rustc_traits/src/chalk/lowering.rs
+++ b/compiler/rustc_traits/src/chalk/lowering.rs
@@ -89,24 +89,32 @@ impl<'tcx> LowerInto<'tcx, chalk_ir::InEnvironment<chalk_ir::Goal<RustInterner<'
ty::PredicateKind::TypeWellFormedFromEnv(ty) => {
chalk_ir::DomainGoal::FromEnv(chalk_ir::FromEnv::Ty(ty.lower_into(interner)))
}
- ty::PredicateKind::Trait(predicate) => chalk_ir::DomainGoal::FromEnv(
- chalk_ir::FromEnv::Trait(predicate.trait_ref.lower_into(interner)),
- ),
- ty::PredicateKind::RegionOutlives(predicate) => chalk_ir::DomainGoal::Holds(
- chalk_ir::WhereClause::LifetimeOutlives(chalk_ir::LifetimeOutlives {
- a: predicate.0.lower_into(interner),
- b: predicate.1.lower_into(interner),
- }),
- ),
- ty::PredicateKind::TypeOutlives(predicate) => chalk_ir::DomainGoal::Holds(
- chalk_ir::WhereClause::TypeOutlives(chalk_ir::TypeOutlives {
- ty: predicate.0.lower_into(interner),
- lifetime: predicate.1.lower_into(interner),
- }),
- ),
- ty::PredicateKind::Projection(predicate) => chalk_ir::DomainGoal::Holds(
- chalk_ir::WhereClause::AliasEq(predicate.lower_into(interner)),
- ),
+ ty::PredicateKind::Clause(ty::Clause::Trait(predicate)) => {
+ chalk_ir::DomainGoal::FromEnv(chalk_ir::FromEnv::Trait(
+ predicate.trait_ref.lower_into(interner),
+ ))
+ }
+ ty::PredicateKind::Clause(ty::Clause::RegionOutlives(predicate)) => {
+ chalk_ir::DomainGoal::Holds(chalk_ir::WhereClause::LifetimeOutlives(
+ chalk_ir::LifetimeOutlives {
+ a: predicate.0.lower_into(interner),
+ b: predicate.1.lower_into(interner),
+ },
+ ))
+ }
+ ty::PredicateKind::Clause(ty::Clause::TypeOutlives(predicate)) => {
+ chalk_ir::DomainGoal::Holds(chalk_ir::WhereClause::TypeOutlives(
+ chalk_ir::TypeOutlives {
+ ty: predicate.0.lower_into(interner),
+ lifetime: predicate.1.lower_into(interner),
+ },
+ ))
+ }
+ ty::PredicateKind::Clause(ty::Clause::Projection(predicate)) => {
+ chalk_ir::DomainGoal::Holds(chalk_ir::WhereClause::AliasEq(
+ predicate.lower_into(interner),
+ ))
+ }
ty::PredicateKind::WellFormed(arg) => match arg.unpack() {
ty::GenericArgKind::Type(ty) => chalk_ir::DomainGoal::WellFormed(
chalk_ir::WellFormed::Ty(ty.lower_into(interner)),
@@ -121,6 +129,7 @@ impl<'tcx> LowerInto<'tcx, chalk_ir::InEnvironment<chalk_ir::Goal<RustInterner<'
| ty::PredicateKind::Subtype(..)
| ty::PredicateKind::Coerce(..)
| ty::PredicateKind::ConstEvaluatable(..)
+ | ty::PredicateKind::Ambiguous
| ty::PredicateKind::ConstEquate(..) => bug!("unexpected predicate {}", predicate),
};
let value = chalk_ir::ProgramClauseImplication {
@@ -148,12 +157,12 @@ impl<'tcx> LowerInto<'tcx, chalk_ir::GoalData<RustInterner<'tcx>>> for ty::Predi
collect_bound_vars(interner, interner.tcx, self.kind());
let value = match predicate {
- ty::PredicateKind::Trait(predicate) => {
+ ty::PredicateKind::Clause(ty::Clause::Trait(predicate)) => {
chalk_ir::GoalData::DomainGoal(chalk_ir::DomainGoal::Holds(
chalk_ir::WhereClause::Implemented(predicate.trait_ref.lower_into(interner)),
))
}
- ty::PredicateKind::RegionOutlives(predicate) => {
+ ty::PredicateKind::Clause(ty::Clause::RegionOutlives(predicate)) => {
chalk_ir::GoalData::DomainGoal(chalk_ir::DomainGoal::Holds(
chalk_ir::WhereClause::LifetimeOutlives(chalk_ir::LifetimeOutlives {
a: predicate.0.lower_into(interner),
@@ -161,7 +170,7 @@ impl<'tcx> LowerInto<'tcx, chalk_ir::GoalData<RustInterner<'tcx>>> for ty::Predi
}),
))
}
- ty::PredicateKind::TypeOutlives(predicate) => {
+ ty::PredicateKind::Clause(ty::Clause::TypeOutlives(predicate)) => {
chalk_ir::GoalData::DomainGoal(chalk_ir::DomainGoal::Holds(
chalk_ir::WhereClause::TypeOutlives(chalk_ir::TypeOutlives {
ty: predicate.0.lower_into(interner),
@@ -169,7 +178,7 @@ impl<'tcx> LowerInto<'tcx, chalk_ir::GoalData<RustInterner<'tcx>>> for ty::Predi
}),
))
}
- ty::PredicateKind::Projection(predicate) => {
+ ty::PredicateKind::Clause(ty::Clause::Projection(predicate)) => {
chalk_ir::GoalData::DomainGoal(chalk_ir::DomainGoal::Holds(
chalk_ir::WhereClause::AliasEq(predicate.lower_into(interner)),
))
@@ -212,6 +221,7 @@ impl<'tcx> LowerInto<'tcx, chalk_ir::GoalData<RustInterner<'tcx>>> for ty::Predi
ty::PredicateKind::ClosureKind(..)
| ty::PredicateKind::Coerce(..)
| ty::PredicateKind::ConstEvaluatable(..)
+ | ty::PredicateKind::Ambiguous
| ty::PredicateKind::ConstEquate(..) => {
chalk_ir::GoalData::All(chalk_ir::Goals::empty(interner))
}
@@ -411,7 +421,11 @@ impl<'tcx> LowerInto<'tcx, Ty<'tcx>> for &chalk_ir::Ty<RustInterner<'tcx>> {
TyKind::Closure(closure, substitution) => {
ty::Closure(closure.0, substitution.lower_into(interner))
}
- TyKind::Generator(..) => unimplemented!(),
+ TyKind::Generator(generator, substitution) => ty::Generator(
+ generator.0,
+ substitution.lower_into(interner),
+ ast::Movability::Static,
+ ),
TyKind::GeneratorWitness(..) => unimplemented!(),
TyKind::Never => ty::Never,
TyKind::Tuple(_len, substitution) => {
@@ -498,18 +512,15 @@ impl<'tcx> LowerInto<'tcx, Region<'tcx>> for &chalk_ir::Lifetime<RustInterner<'t
ty::DebruijnIndex::from_u32(var.debruijn.depth()),
ty::BoundRegion {
var: ty::BoundVar::from_usize(var.index),
- kind: ty::BrAnon(var.index as u32),
+ kind: ty::BrAnon(var.index as u32, None),
},
),
chalk_ir::LifetimeData::InferenceVar(_var) => unimplemented!(),
chalk_ir::LifetimeData::Placeholder(p) => ty::RePlaceholder(ty::Placeholder {
universe: ty::UniverseIndex::from_usize(p.ui.counter),
- name: ty::BoundRegionKind::BrAnon(p.idx as u32),
+ name: ty::BoundRegionKind::BrAnon(p.idx as u32, None),
}),
chalk_ir::LifetimeData::Static => return interner.tcx.lifetimes.re_static,
- chalk_ir::LifetimeData::Empty(_) => {
- bug!("Chalk should not have been passed an empty lifetime.")
- }
chalk_ir::LifetimeData::Erased => return interner.tcx.lifetimes.re_erased,
chalk_ir::LifetimeData::Phantom(void, _) => match *void {},
};
@@ -546,7 +557,7 @@ impl<'tcx> LowerInto<'tcx, ty::Const<'tcx>> for &chalk_ir::Const<RustInterner<'t
chalk_ir::ConstValue::Placeholder(_p) => unimplemented!(),
chalk_ir::ConstValue::Concrete(c) => ty::ConstKind::Value(c.interned),
};
- interner.tcx.mk_const(ty::ConstS { ty, kind })
+ interner.tcx.mk_const(kind, ty)
}
}
@@ -602,22 +613,22 @@ impl<'tcx> LowerInto<'tcx, Option<chalk_ir::QuantifiedWhereClause<RustInterner<'
let (predicate, binders, _named_regions) =
collect_bound_vars(interner, interner.tcx, self.kind());
let value = match predicate {
- ty::PredicateKind::Trait(predicate) => {
+ ty::PredicateKind::Clause(ty::Clause::Trait(predicate)) => {
Some(chalk_ir::WhereClause::Implemented(predicate.trait_ref.lower_into(interner)))
}
- ty::PredicateKind::RegionOutlives(predicate) => {
+ ty::PredicateKind::Clause(ty::Clause::RegionOutlives(predicate)) => {
Some(chalk_ir::WhereClause::LifetimeOutlives(chalk_ir::LifetimeOutlives {
a: predicate.0.lower_into(interner),
b: predicate.1.lower_into(interner),
}))
}
- ty::PredicateKind::TypeOutlives(predicate) => {
+ ty::PredicateKind::Clause(ty::Clause::TypeOutlives(predicate)) => {
Some(chalk_ir::WhereClause::TypeOutlives(chalk_ir::TypeOutlives {
ty: predicate.0.lower_into(interner),
lifetime: predicate.1.lower_into(interner),
}))
}
- ty::PredicateKind::Projection(predicate) => {
+ ty::PredicateKind::Clause(ty::Clause::Projection(predicate)) => {
Some(chalk_ir::WhereClause::AliasEq(predicate.lower_into(interner)))
}
ty::PredicateKind::WellFormed(_ty) => None,
@@ -628,6 +639,7 @@ impl<'tcx> LowerInto<'tcx, Option<chalk_ir::QuantifiedWhereClause<RustInterner<'
| ty::PredicateKind::Coerce(..)
| ty::PredicateKind::ConstEvaluatable(..)
| ty::PredicateKind::ConstEquate(..)
+ | ty::PredicateKind::Ambiguous
| ty::PredicateKind::TypeWellFormedFromEnv(..) => {
bug!("unexpected predicate {}", &self)
}
@@ -637,7 +649,7 @@ impl<'tcx> LowerInto<'tcx, Option<chalk_ir::QuantifiedWhereClause<RustInterner<'
}
impl<'tcx> LowerInto<'tcx, chalk_ir::Binders<chalk_ir::QuantifiedWhereClauses<RustInterner<'tcx>>>>
- for &'tcx ty::List<ty::Binder<'tcx, ty::ExistentialPredicate<'tcx>>>
+ for &'tcx ty::List<ty::PolyExistentialPredicate<'tcx>>
{
fn lower_into(
self,
@@ -692,7 +704,7 @@ impl<'tcx> LowerInto<'tcx, chalk_ir::Binders<chalk_ir::QuantifiedWhereClauses<Ru
trait_id: chalk_ir::TraitId(def_id),
substitution: interner
.tcx
- .mk_substs_trait(self_ty, &[])
+ .mk_substs_trait(self_ty, [])
.lower_into(interner),
}),
),
@@ -737,26 +749,31 @@ impl<'tcx> LowerInto<'tcx, Option<chalk_solve::rust_ir::QuantifiedInlineBound<Ru
let (predicate, binders, _named_regions) =
collect_bound_vars(interner, interner.tcx, self.kind());
match predicate {
- ty::PredicateKind::Trait(predicate) => Some(chalk_ir::Binders::new(
- binders,
- chalk_solve::rust_ir::InlineBound::TraitBound(
- predicate.trait_ref.lower_into(interner),
- ),
- )),
- ty::PredicateKind::Projection(predicate) => Some(chalk_ir::Binders::new(
- binders,
- chalk_solve::rust_ir::InlineBound::AliasEqBound(predicate.lower_into(interner)),
- )),
- ty::PredicateKind::TypeOutlives(_predicate) => None,
+ ty::PredicateKind::Clause(ty::Clause::Trait(predicate)) => {
+ Some(chalk_ir::Binders::new(
+ binders,
+ chalk_solve::rust_ir::InlineBound::TraitBound(
+ predicate.trait_ref.lower_into(interner),
+ ),
+ ))
+ }
+ ty::PredicateKind::Clause(ty::Clause::Projection(predicate)) => {
+ Some(chalk_ir::Binders::new(
+ binders,
+ chalk_solve::rust_ir::InlineBound::AliasEqBound(predicate.lower_into(interner)),
+ ))
+ }
+ ty::PredicateKind::Clause(ty::Clause::TypeOutlives(_predicate)) => None,
ty::PredicateKind::WellFormed(_ty) => None,
- ty::PredicateKind::RegionOutlives(..)
+ ty::PredicateKind::Clause(ty::Clause::RegionOutlives(..))
| ty::PredicateKind::ObjectSafe(..)
| ty::PredicateKind::ClosureKind(..)
| ty::PredicateKind::Subtype(..)
| ty::PredicateKind::Coerce(..)
| ty::PredicateKind::ConstEvaluatable(..)
| ty::PredicateKind::ConstEquate(..)
+ | ty::PredicateKind::Ambiguous
| ty::PredicateKind::TypeWellFormedFromEnv(..) => {
bug!("unexpected predicate {}", &self)
}
@@ -933,7 +950,7 @@ impl<'tcx> TypeVisitor<'tcx> for BoundVarsCollector<'tcx> {
}
}
- ty::BoundRegionKind::BrAnon(var) => match self.parameters.entry(var) {
+ ty::BoundRegionKind::BrAnon(var, _) => match self.parameters.entry(var) {
Entry::Vacant(entry) => {
entry.insert(chalk_ir::VariableKind::Lifetime);
}
@@ -991,13 +1008,13 @@ impl<'a, 'tcx> TypeFolder<'tcx> for NamedBoundVarSubstitutor<'a, 'tcx> {
ty::ReLateBound(index, br) if index == self.binder_index => match br.kind {
ty::BrNamed(def_id, _name) => match self.named_parameters.get(&def_id) {
Some(idx) => {
- let new_br = ty::BoundRegion { var: br.var, kind: ty::BrAnon(*idx) };
+ let new_br = ty::BoundRegion { var: br.var, kind: ty::BrAnon(*idx, None) };
return self.tcx.mk_region(ty::ReLateBound(index, new_br));
}
None => panic!("Missing `BrNamed`."),
},
ty::BrEnv => unimplemented!(),
- ty::BrAnon(_) => {}
+ ty::BrAnon(..) => {}
},
_ => (),
};
@@ -1072,14 +1089,16 @@ impl<'tcx> TypeFolder<'tcx> for ParamsSubstitutor<'tcx> {
Some(idx) => {
let br = ty::BoundRegion {
var: ty::BoundVar::from_u32(*idx),
- kind: ty::BrAnon(*idx),
+ kind: ty::BrAnon(*idx, None),
};
self.tcx.mk_region(ty::ReLateBound(self.binder_index, br))
}
None => {
let idx = self.named_regions.len() as u32;
- let br =
- ty::BoundRegion { var: ty::BoundVar::from_u32(idx), kind: ty::BrAnon(idx) };
+ let br = ty::BoundRegion {
+ var: ty::BoundVar::from_u32(idx),
+ kind: ty::BrAnon(idx, None),
+ };
self.named_regions.insert(_re.def_id, idx);
self.tcx.mk_region(ty::ReLateBound(self.binder_index, br))
}
@@ -1156,7 +1175,7 @@ impl<'tcx> TypeVisitor<'tcx> for PlaceholdersCollector {
fn visit_region(&mut self, r: Region<'tcx>) -> ControlFlow<Self::BreakTy> {
match *r {
ty::RePlaceholder(p) if p.universe == self.universe_index => {
- if let ty::BoundRegionKind::BrAnon(anon) = p.name {
+ if let ty::BoundRegionKind::BrAnon(anon, _) = p.name {
self.next_anon_region_placeholder = self.next_anon_region_placeholder.max(anon);
}
}
diff --git a/compiler/rustc_trait_selection/src/traits/codegen.rs b/compiler/rustc_traits/src/codegen.rs
index 8a62bf015..f8f74b732 100644
--- a/compiler/rustc_trait_selection/src/traits/codegen.rs
+++ b/compiler/rustc_traits/src/codegen.rs
@@ -3,15 +3,15 @@
// seems likely that they should eventually be merged into more
// general routines.
-use crate::infer::{DefiningAnchor, TyCtxtInferExt};
-use crate::traits::error_reporting::TypeErrCtxtExt;
-use crate::traits::{
- ImplSource, Obligation, ObligationCause, SelectionContext, TraitEngine, TraitEngineExt,
- Unimplemented,
-};
+use rustc_infer::infer::{DefiningAnchor, TyCtxtInferExt};
use rustc_infer::traits::FulfillmentErrorCode;
use rustc_middle::traits::CodegenObligationError;
use rustc_middle::ty::{self, TyCtxt};
+use rustc_trait_selection::traits::error_reporting::TypeErrCtxtExt;
+use rustc_trait_selection::traits::{
+ ImplSource, Obligation, ObligationCause, SelectionContext, TraitEngine, TraitEngineExt,
+ Unimplemented,
+};
/// Attempts to resolve an obligation to an `ImplSource`. The result is
/// a shallow `ImplSource` resolution, meaning that we do not
@@ -39,8 +39,7 @@ pub fn codegen_select_candidate<'tcx>(
let mut selcx = SelectionContext::new(&infcx);
let obligation_cause = ObligationCause::dummy();
- let obligation =
- Obligation::new(obligation_cause, param_env, trait_ref.to_poly_trait_predicate());
+ let obligation = Obligation::new(tcx, obligation_cause, param_env, trait_ref);
let selection = match selcx.select(&obligation) {
Ok(Some(selection)) => selection,
@@ -71,7 +70,7 @@ pub fn codegen_select_candidate<'tcx>(
// `rustc_ty_utils::resolve_associated_item` doesn't return `None` post-monomorphization.
for err in errors {
if let FulfillmentErrorCode::CodeCycle(cycle) = err.code {
- infcx.err_ctxt().report_overflow_error_cycle(&cycle);
+ infcx.err_ctxt().report_overflow_obligation_cycle(&cycle);
}
}
return Err(CodegenObligationError::FulfillmentError);
diff --git a/compiler/rustc_traits/src/dropck_outlives.rs b/compiler/rustc_traits/src/dropck_outlives.rs
index d5a8ca5ea..66ab742f1 100644
--- a/compiler/rustc_traits/src/dropck_outlives.rs
+++ b/compiler/rustc_traits/src/dropck_outlives.rs
@@ -2,20 +2,18 @@ use rustc_data_structures::fx::FxHashSet;
use rustc_hir::def_id::DefId;
use rustc_infer::infer::canonical::{Canonical, QueryResponse};
use rustc_infer::infer::TyCtxtInferExt;
-use rustc_infer::traits::TraitEngineExt as _;
use rustc_middle::ty::query::Providers;
use rustc_middle::ty::InternalSubsts;
use rustc_middle::ty::{self, EarlyBinder, ParamEnvAnd, Ty, TyCtxt};
use rustc_span::source_map::{Span, DUMMY_SP};
+use rustc_trait_selection::infer::InferCtxtBuilderExt;
use rustc_trait_selection::traits::query::dropck_outlives::trivial_dropck_outlives;
use rustc_trait_selection::traits::query::dropck_outlives::{
DropckConstraint, DropckOutlivesResult,
};
-use rustc_trait_selection::traits::query::normalize::AtExt;
+use rustc_trait_selection::traits::query::normalize::QueryNormalizeExt;
use rustc_trait_selection::traits::query::{CanonicalTyGoal, NoSolution};
-use rustc_trait_selection::traits::{
- Normalized, ObligationCause, TraitEngine, TraitEngineExt as _,
-};
+use rustc_trait_selection::traits::{Normalized, ObligationCause};
pub(crate) fn provide(p: &mut Providers) {
*p = Providers { dropck_outlives, adt_dtorck_constraint, ..*p };
@@ -27,120 +25,109 @@ fn dropck_outlives<'tcx>(
) -> Result<&'tcx Canonical<'tcx, QueryResponse<'tcx, DropckOutlivesResult<'tcx>>>, NoSolution> {
debug!("dropck_outlives(goal={:#?})", canonical_goal);
- let (ref infcx, goal, canonical_inference_vars) =
- tcx.infer_ctxt().build_with_canonical(DUMMY_SP, &canonical_goal);
- let tcx = infcx.tcx;
- let ParamEnvAnd { param_env, value: for_ty } = goal;
-
- let mut result = DropckOutlivesResult { kinds: vec![], overflows: vec![] };
-
- // A stack of types left to process. Each round, we pop
- // something from the stack and invoke
- // `dtorck_constraint_for_ty`. This may produce new types that
- // have to be pushed on the stack. This continues until we have explored
- // all the reachable types from the type `for_ty`.
- //
- // Example: Imagine that we have the following code:
- //
- // ```rust
- // struct A {
- // value: B,
- // children: Vec<A>,
- // }
- //
- // struct B {
- // value: u32
- // }
- //
- // fn f() {
- // let a: A = ...;
- // ..
- // } // here, `a` is dropped
- // ```
- //
- // at the point where `a` is dropped, we need to figure out
- // which types inside of `a` contain region data that may be
- // accessed by any destructors in `a`. We begin by pushing `A`
- // onto the stack, as that is the type of `a`. We will then
- // invoke `dtorck_constraint_for_ty` which will expand `A`
- // into the types of its fields `(B, Vec<A>)`. These will get
- // pushed onto the stack. Eventually, expanding `Vec<A>` will
- // lead to us trying to push `A` a second time -- to prevent
- // infinite recursion, we notice that `A` was already pushed
- // once and stop.
- let mut ty_stack = vec![(for_ty, 0)];
-
- // Set used to detect infinite recursion.
- let mut ty_set = FxHashSet::default();
-
- let mut fulfill_cx = <dyn TraitEngine<'_>>::new(infcx.tcx);
-
- let cause = ObligationCause::dummy();
- let mut constraints = DropckConstraint::empty();
- while let Some((ty, depth)) = ty_stack.pop() {
- debug!(
- "{} kinds, {} overflows, {} ty_stack",
- result.kinds.len(),
- result.overflows.len(),
- ty_stack.len()
- );
- dtorck_constraint_for_ty(tcx, DUMMY_SP, for_ty, depth, ty, &mut constraints)?;
-
- // "outlives" represent types/regions that may be touched
- // by a destructor.
- result.kinds.append(&mut constraints.outlives);
- result.overflows.append(&mut constraints.overflows);
-
- // If we have even one overflow, we should stop trying to evaluate further --
- // chances are, the subsequent overflows for this evaluation won't provide useful
- // information and will just decrease the speed at which we can emit these errors
- // (since we'll be printing for just that much longer for the often enormous types
- // that result here).
- if !result.overflows.is_empty() {
- break;
- }
+ tcx.infer_ctxt().enter_canonical_trait_query(&canonical_goal, |ocx, goal| {
+ let tcx = ocx.infcx.tcx;
+ let ParamEnvAnd { param_env, value: for_ty } = goal;
+
+ let mut result = DropckOutlivesResult { kinds: vec![], overflows: vec![] };
+
+ // A stack of types left to process. Each round, we pop
+ // something from the stack and invoke
+ // `dtorck_constraint_for_ty`. This may produce new types that
+ // have to be pushed on the stack. This continues until we have explored
+ // all the reachable types from the type `for_ty`.
+ //
+ // Example: Imagine that we have the following code:
+ //
+ // ```rust
+ // struct A {
+ // value: B,
+ // children: Vec<A>,
+ // }
+ //
+ // struct B {
+ // value: u32
+ // }
+ //
+ // fn f() {
+ // let a: A = ...;
+ // ..
+ // } // here, `a` is dropped
+ // ```
+ //
+ // at the point where `a` is dropped, we need to figure out
+ // which types inside of `a` contain region data that may be
+ // accessed by any destructors in `a`. We begin by pushing `A`
+ // onto the stack, as that is the type of `a`. We will then
+ // invoke `dtorck_constraint_for_ty` which will expand `A`
+ // into the types of its fields `(B, Vec<A>)`. These will get
+ // pushed onto the stack. Eventually, expanding `Vec<A>` will
+ // lead to us trying to push `A` a second time -- to prevent
+ // infinite recursion, we notice that `A` was already pushed
+ // once and stop.
+ let mut ty_stack = vec![(for_ty, 0)];
+
+ // Set used to detect infinite recursion.
+ let mut ty_set = FxHashSet::default();
+
+ let cause = ObligationCause::dummy();
+ let mut constraints = DropckConstraint::empty();
+ while let Some((ty, depth)) = ty_stack.pop() {
+ debug!(
+ "{} kinds, {} overflows, {} ty_stack",
+ result.kinds.len(),
+ result.overflows.len(),
+ ty_stack.len()
+ );
+ dtorck_constraint_for_ty(tcx, DUMMY_SP, for_ty, depth, ty, &mut constraints)?;
+
+ // "outlives" represent types/regions that may be touched
+ // by a destructor.
+ result.kinds.append(&mut constraints.outlives);
+ result.overflows.append(&mut constraints.overflows);
+
+ // If we have even one overflow, we should stop trying to evaluate further --
+ // chances are, the subsequent overflows for this evaluation won't provide useful
+ // information and will just decrease the speed at which we can emit these errors
+ // (since we'll be printing for just that much longer for the often enormous types
+ // that result here).
+ if !result.overflows.is_empty() {
+ break;
+ }
- // dtorck types are "types that will get dropped but which
- // do not themselves define a destructor", more or less. We have
- // to push them onto the stack to be expanded.
- for ty in constraints.dtorck_types.drain(..) {
- match infcx.at(&cause, param_env).normalize(ty) {
- Ok(Normalized { value: ty, obligations }) => {
- fulfill_cx.register_predicate_obligations(infcx, obligations);
-
- debug!("dropck_outlives: ty from dtorck_types = {:?}", ty);
-
- match ty.kind() {
- // All parameters live for the duration of the
- // function.
- ty::Param(..) => {}
-
- // A projection that we couldn't resolve - it
- // might have a destructor.
- ty::Projection(..) | ty::Opaque(..) => {
- result.kinds.push(ty.into());
- }
+ // dtorck types are "types that will get dropped but which
+ // do not themselves define a destructor", more or less. We have
+ // to push them onto the stack to be expanded.
+ for ty in constraints.dtorck_types.drain(..) {
+ let Normalized { value: ty, obligations } =
+ ocx.infcx.at(&cause, param_env).query_normalize(ty)?;
+ ocx.register_obligations(obligations);
+
+ debug!("dropck_outlives: ty from dtorck_types = {:?}", ty);
+
+ match ty.kind() {
+ // All parameters live for the duration of the
+ // function.
+ ty::Param(..) => {}
+
+ // A projection that we couldn't resolve - it
+ // might have a destructor.
+ ty::Projection(..) | ty::Opaque(..) => {
+ result.kinds.push(ty.into());
+ }
- _ => {
- if ty_set.insert(ty) {
- ty_stack.push((ty, depth + 1));
- }
+ _ => {
+ if ty_set.insert(ty) {
+ ty_stack.push((ty, depth + 1));
}
}
}
-
- // We don't actually expect to fail to normalize.
- // That implies a WF error somewhere else.
- Err(NoSolution) => {
- return Err(NoSolution);
- }
}
}
- }
-
- debug!("dropck_outlives: result = {:#?}", result);
- infcx.make_canonicalized_query_response(canonical_inference_vars, result, &mut *fulfill_cx)
+ debug!("dropck_outlives: result = {:#?}", result);
+ Ok(result)
+ })
}
/// Returns a set of constraints that needs to be satisfied in
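The rewritten `dropck_outlives` above still answers the same question: which regions and types reachable from a dropped value may be observed by a destructor. A hedged surface-Rust sketch of the constraint it enforces:

```rust
// `Logger`'s destructor reads borrowed data, so dropck requires the borrowed
// region to outlive the point where the value is dropped.
struct Logger<'a> {
    msg: &'a str,
}

impl<'a> Drop for Logger<'a> {
    fn drop(&mut self) {
        println!("dropping: {}", self.msg);
    }
}

fn main() {
    let text = String::from("hello");
    let logger = Logger { msg: &text };
    println!("created: {}", logger.msg);
    // Dropped in reverse declaration order: `logger` before `text`, so the
    // outlives constraint holds. If `logger` were (via deferred initialization)
    // declared before `text`, it would be dropped after `text` and the borrow
    // checker would reject the program using exactly these constraints.
}
```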
diff --git a/compiler/rustc_traits/src/evaluate_obligation.rs b/compiler/rustc_traits/src/evaluate_obligation.rs
index 493d5de08..e94c8efe6 100644
--- a/compiler/rustc_traits/src/evaluate_obligation.rs
+++ b/compiler/rustc_traits/src/evaluate_obligation.rs
@@ -26,7 +26,7 @@ fn evaluate_obligation<'tcx>(
let ParamEnvAnd { param_env, value: predicate } = goal;
let mut selcx = SelectionContext::with_query_mode(&infcx, TraitQueryMode::Canonical);
- let obligation = Obligation::new(ObligationCause::dummy(), param_env, predicate);
+ let obligation = Obligation::new(tcx, ObligationCause::dummy(), param_env, predicate);
selcx.evaluate_root_obligation(&obligation)
}
diff --git a/compiler/rustc_traits/src/implied_outlives_bounds.rs b/compiler/rustc_traits/src/implied_outlives_bounds.rs
index 7d36b9558..010233d77 100644
--- a/compiler/rustc_traits/src/implied_outlives_bounds.rs
+++ b/compiler/rustc_traits/src/implied_outlives_bounds.rs
@@ -5,16 +5,15 @@
use rustc_hir as hir;
use rustc_infer::infer::canonical::{self, Canonical};
use rustc_infer::infer::outlives::components::{push_outlives_components, Component};
-use rustc_infer::infer::{InferCtxt, TyCtxtInferExt};
+use rustc_infer::infer::TyCtxtInferExt;
use rustc_infer::traits::query::OutlivesBound;
-use rustc_infer::traits::TraitEngineExt as _;
use rustc_middle::ty::query::Providers;
use rustc_middle::ty::{self, Ty, TyCtxt, TypeVisitable};
use rustc_span::source_map::DUMMY_SP;
use rustc_trait_selection::infer::InferCtxtBuilderExt;
use rustc_trait_selection::traits::query::{CanonicalTyGoal, Fallible, NoSolution};
use rustc_trait_selection::traits::wf;
-use rustc_trait_selection::traits::{TraitEngine, TraitEngineExt};
+use rustc_trait_selection::traits::ObligationCtxt;
use smallvec::{smallvec, SmallVec};
pub(crate) fn provide(p: &mut Providers) {
@@ -28,18 +27,18 @@ fn implied_outlives_bounds<'tcx>(
&'tcx Canonical<'tcx, canonical::QueryResponse<'tcx, Vec<OutlivesBound<'tcx>>>>,
NoSolution,
> {
- tcx.infer_ctxt().enter_canonical_trait_query(&goal, |infcx, _fulfill_cx, key| {
+ tcx.infer_ctxt().enter_canonical_trait_query(&goal, |ocx, key| {
let (param_env, ty) = key.into_parts();
- compute_implied_outlives_bounds(&infcx, param_env, ty)
+ compute_implied_outlives_bounds(ocx, param_env, ty)
})
}
fn compute_implied_outlives_bounds<'tcx>(
- infcx: &InferCtxt<'tcx>,
+ ocx: &ObligationCtxt<'_, 'tcx>,
param_env: ty::ParamEnv<'tcx>,
ty: Ty<'tcx>,
) -> Fallible<Vec<OutlivesBound<'tcx>>> {
- let tcx = infcx.tcx;
+ let tcx = ocx.infcx.tcx;
// Sometimes when we ask what it takes for T: WF, we get back that
// U: WF is required; in that case, we push U onto this stack and
@@ -52,8 +51,6 @@ fn compute_implied_outlives_bounds<'tcx>(
let mut outlives_bounds: Vec<ty::OutlivesPredicate<ty::GenericArg<'tcx>, ty::Region<'tcx>>> =
vec![];
- let mut fulfill_cx = <dyn TraitEngine<'tcx>>::new(tcx);
-
while let Some(arg) = wf_args.pop() {
if !checked_wf_args.insert(arg) {
continue;
@@ -70,15 +67,15 @@ fn compute_implied_outlives_bounds<'tcx>(
// FIXME(@lcnr): It's not really "always fine", having fewer implied
// bounds can be backward incompatible, e.g. #101951 was caused by
// us not dealing with inference vars in `TypeOutlives` predicates.
- let obligations = wf::obligations(infcx, param_env, hir::CRATE_HIR_ID, 0, arg, DUMMY_SP)
- .unwrap_or_default();
+ let obligations =
+ wf::obligations(ocx.infcx, param_env, hir::CRATE_HIR_ID, 0, arg, DUMMY_SP)
+ .unwrap_or_default();
// While these predicates should all be implied by other parts of
// the program, they are still relevant as they may constrain
// inference variables, which is necessary to add the correct
// implied bounds in some cases, mostly when dealing with projections.
- fulfill_cx.register_predicate_obligations(
- infcx,
+ ocx.register_obligations(
obligations.iter().filter(|o| o.predicate.has_non_region_infer()).cloned(),
);
@@ -89,35 +86,37 @@ fn compute_implied_outlives_bounds<'tcx>(
match obligation.predicate.kind().no_bound_vars() {
None => None,
Some(pred) => match pred {
- ty::PredicateKind::Trait(..)
+ ty::PredicateKind::Clause(ty::Clause::Trait(..))
| ty::PredicateKind::Subtype(..)
| ty::PredicateKind::Coerce(..)
- | ty::PredicateKind::Projection(..)
+ | ty::PredicateKind::Clause(ty::Clause::Projection(..))
| ty::PredicateKind::ClosureKind(..)
| ty::PredicateKind::ObjectSafe(..)
| ty::PredicateKind::ConstEvaluatable(..)
| ty::PredicateKind::ConstEquate(..)
+ | ty::PredicateKind::Ambiguous
| ty::PredicateKind::TypeWellFormedFromEnv(..) => None,
ty::PredicateKind::WellFormed(arg) => {
wf_args.push(arg);
None
}
- ty::PredicateKind::RegionOutlives(ty::OutlivesPredicate(r_a, r_b)) => {
- Some(ty::OutlivesPredicate(r_a.into(), r_b))
- }
+ ty::PredicateKind::Clause(ty::Clause::RegionOutlives(
+ ty::OutlivesPredicate(r_a, r_b),
+ )) => Some(ty::OutlivesPredicate(r_a.into(), r_b)),
- ty::PredicateKind::TypeOutlives(ty::OutlivesPredicate(ty_a, r_b)) => {
- Some(ty::OutlivesPredicate(ty_a.into(), r_b))
- }
+ ty::PredicateKind::Clause(ty::Clause::TypeOutlives(ty::OutlivesPredicate(
+ ty_a,
+ r_b,
+ ))) => Some(ty::OutlivesPredicate(ty_a.into(), r_b)),
},
}
}));
}
- // Ensure that those obligations that we had to solve
- // get solved *here*.
- match fulfill_cx.select_all_or_error(infcx).as_slice() {
+ // This call to `select_all_or_error` is necessary to constrain inference variables, which we
+ // use further down when computing the implied bounds.
+ match ocx.select_all_or_error().as_slice() {
[] => (),
_ => return Err(NoSolution),
}
@@ -129,7 +128,7 @@ fn compute_implied_outlives_bounds<'tcx>(
.flat_map(|ty::OutlivesPredicate(a, r_b)| match a.unpack() {
ty::GenericArgKind::Lifetime(r_a) => vec![OutlivesBound::RegionSubRegion(r_b, r_a)],
ty::GenericArgKind::Type(ty_a) => {
- let ty_a = infcx.resolve_vars_if_possible(ty_a);
+ let ty_a = ocx.infcx.resolve_vars_if_possible(ty_a);
let mut components = smallvec![];
push_outlives_components(tcx, ty_a, &mut components);
implied_bounds_from_components(r_b, components)
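
The loop above is a worklist: each argument popped from `wf_args` contributes its well-formedness obligations, nested `WellFormed(arg)` requirements go back onto the stack, outlives requirements are collected, and `checked_wf_args` prevents reprocessing. A self-contained sketch of that pattern, using hypothetical stand-ins for the real types:

    use std::collections::HashSet;

    // Hypothetical stand-ins for generic args and their wf requirements.
    #[derive(Clone, PartialEq, Eq, Hash)]
    struct Arg(&'static str);

    enum Requirement {
        WellFormed(Arg),
        Outlives(Arg, &'static str),
    }

    // Stand-in for `wf::obligations`: what must hold for `arg` to be well-formed.
    fn requirements_of(arg: &Arg) -> Vec<Requirement> {
        match arg.0 {
            "&'a str" => vec![
                Requirement::WellFormed(Arg("str")),
                Requirement::Outlives(Arg("str"), "'a"),
            ],
            _ => vec![],
        }
    }

    fn implied_bounds(root: Arg) -> Vec<(Arg, &'static str)> {
        let mut checked = HashSet::new();
        let mut wf_args = vec![root];
        let mut bounds = Vec::new();
        while let Some(arg) = wf_args.pop() {
            if !checked.insert(arg.clone()) {
                continue; // already handled this argument
            }
            for req in requirements_of(&arg) {
                match req {
                    Requirement::WellFormed(a) => wf_args.push(a),
                    Requirement::Outlives(a, r) => bounds.push((a, r)),
                }
            }
        }
        bounds
    }

    fn main() {
        assert_eq!(implied_bounds(Arg("&'a str")).len(), 1);
    }
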
diff --git a/compiler/rustc_traits/src/lib.rs b/compiler/rustc_traits/src/lib.rs
index 0da28737f..9aa26667e 100644
--- a/compiler/rustc_traits/src/lib.rs
+++ b/compiler/rustc_traits/src/lib.rs
@@ -3,6 +3,7 @@
#![deny(rustc::untranslatable_diagnostic)]
#![deny(rustc::diagnostic_outside_of_impl)]
+#![feature(let_chains)]
#![recursion_limit = "256"]
#[macro_use]
@@ -11,6 +12,7 @@ extern crate tracing;
extern crate rustc_middle;
mod chalk;
+mod codegen;
mod dropck_outlives;
mod evaluate_obligation;
mod implied_outlives_bounds;
@@ -30,4 +32,5 @@ pub fn provide(p: &mut Providers) {
normalize_projection_ty::provide(p);
normalize_erasing_regions::provide(p);
type_op::provide(p);
+ p.codegen_select_candidate = codegen::codegen_select_candidate;
}
diff --git a/compiler/rustc_traits/src/normalize_erasing_regions.rs b/compiler/rustc_traits/src/normalize_erasing_regions.rs
index 2da64d73d..44fd8bfb3 100644
--- a/compiler/rustc_traits/src/normalize_erasing_regions.rs
+++ b/compiler/rustc_traits/src/normalize_erasing_regions.rs
@@ -2,7 +2,7 @@ use rustc_infer::infer::TyCtxtInferExt;
use rustc_middle::traits::query::NoSolution;
use rustc_middle::ty::query::Providers;
use rustc_middle::ty::{self, ParamEnvAnd, TyCtxt, TypeFoldable};
-use rustc_trait_selection::traits::query::normalize::AtExt;
+use rustc_trait_selection::traits::query::normalize::QueryNormalizeExt;
use rustc_trait_selection::traits::{Normalized, ObligationCause};
use std::sync::atomic::Ordering;
@@ -29,12 +29,14 @@ fn try_normalize_after_erasing_regions<'tcx, T: TypeFoldable<'tcx> + PartialEq +
let ParamEnvAnd { param_env, value } = goal;
let infcx = tcx.infer_ctxt().build();
let cause = ObligationCause::dummy();
- match infcx.at(&cause, param_env).normalize(value) {
+ match infcx.at(&cause, param_env).query_normalize(value) {
Ok(Normalized { value: normalized_value, obligations: normalized_obligations }) => {
// We don't care about the `obligations`; they are
// always only region relations, and we are about to
// erase those anyway:
- debug_assert_eq!(
+ // This has been seen to fail in the wild, so make it a non-debug assertion to better
+ // catch those cases.
+ assert_eq!(
normalized_obligations.iter().find(|p| not_outlives_predicate(p.predicate)),
None,
);
@@ -54,9 +56,10 @@ fn try_normalize_after_erasing_regions<'tcx, T: TypeFoldable<'tcx> + PartialEq +
fn not_outlives_predicate<'tcx>(p: ty::Predicate<'tcx>) -> bool {
match p.kind().skip_binder() {
- ty::PredicateKind::RegionOutlives(..) | ty::PredicateKind::TypeOutlives(..) => false,
- ty::PredicateKind::Trait(..)
- | ty::PredicateKind::Projection(..)
+ ty::PredicateKind::Clause(ty::Clause::RegionOutlives(..))
+ | ty::PredicateKind::Clause(ty::Clause::TypeOutlives(..)) => false,
+ ty::PredicateKind::Clause(ty::Clause::Trait(..))
+ | ty::PredicateKind::Clause(ty::Clause::Projection(..))
| ty::PredicateKind::WellFormed(..)
| ty::PredicateKind::ObjectSafe(..)
| ty::PredicateKind::ClosureKind(..)
@@ -64,6 +67,7 @@ fn not_outlives_predicate<'tcx>(p: ty::Predicate<'tcx>) -> bool {
| ty::PredicateKind::Coerce(..)
| ty::PredicateKind::ConstEvaluatable(..)
| ty::PredicateKind::ConstEquate(..)
+ | ty::PredicateKind::Ambiguous
| ty::PredicateKind::TypeWellFormedFromEnv(..) => true,
}
}
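
The predicate-matching churn here and in `implied_outlives_bounds.rs` comes from a `ty::PredicateKind` restructuring: the variants that can appear in where-clauses (`Trait`, `Projection`, `RegionOutlives`, `TypeOutlives`) now sit under a nested `ty::Clause`, and the new `Ambiguous` variant has to be covered by every exhaustive match. A heavily simplified, hypothetical mirror of what that does to a classifier like `not_outlives_predicate`:

    enum Clause { Trait, Projection, RegionOutlives, TypeOutlives }

    enum PredicateKind {
        Clause(Clause),
        WellFormed,
        ObjectSafe,
        Ambiguous, // new variant: every exhaustive match gains an arm
    }

    fn not_outlives(p: &PredicateKind) -> bool {
        match p {
            PredicateKind::Clause(Clause::RegionOutlives | Clause::TypeOutlives) => false,
            PredicateKind::Clause(Clause::Trait | Clause::Projection)
            | PredicateKind::WellFormed
            | PredicateKind::ObjectSafe
            | PredicateKind::Ambiguous => true,
        }
    }

    fn main() {
        assert!(!not_outlives(&PredicateKind::Clause(Clause::TypeOutlives)));
        assert!(not_outlives(&PredicateKind::Ambiguous));
    }
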
diff --git a/compiler/rustc_traits/src/normalize_projection_ty.rs b/compiler/rustc_traits/src/normalize_projection_ty.rs
index 98bb42c9a..e805eb428 100644
--- a/compiler/rustc_traits/src/normalize_projection_ty.rs
+++ b/compiler/rustc_traits/src/normalize_projection_ty.rs
@@ -1,6 +1,5 @@
use rustc_infer::infer::canonical::{Canonical, QueryResponse};
use rustc_infer::infer::TyCtxtInferExt;
-use rustc_infer::traits::TraitEngineExt as _;
use rustc_middle::ty::query::Providers;
use rustc_middle::ty::{ParamEnvAnd, TyCtxt};
use rustc_trait_selection::infer::InferCtxtBuilderExt;
@@ -23,8 +22,8 @@ fn normalize_projection_ty<'tcx>(
tcx.sess.perf_stats.normalize_projection_ty.fetch_add(1, Ordering::Relaxed);
tcx.infer_ctxt().enter_canonical_trait_query(
&goal,
- |infcx, fulfill_cx, ParamEnvAnd { param_env, value: goal }| {
- let selcx = &mut SelectionContext::new(infcx);
+ |ocx, ParamEnvAnd { param_env, value: goal }| {
+ let selcx = &mut SelectionContext::new(ocx.infcx);
let cause = ObligationCause::dummy();
let mut obligations = vec![];
let answer = traits::normalize_projection_type(
@@ -35,7 +34,7 @@ fn normalize_projection_ty<'tcx>(
0,
&mut obligations,
);
- fulfill_cx.register_predicate_obligations(infcx, obligations);
+ ocx.register_obligations(obligations);
// FIXME(associated_const_equality): All users of normalize_projection_ty expected
// a type, but there is the possibility it could've been a const now. Maybe change
// it to a Term later?
diff --git a/compiler/rustc_traits/src/type_op.rs b/compiler/rustc_traits/src/type_op.rs
index bca7458ed..7f964afde 100644
--- a/compiler/rustc_traits/src/type_op.rs
+++ b/compiler/rustc_traits/src/type_op.rs
@@ -1,26 +1,21 @@
use rustc_hir as hir;
-use rustc_hir::def_id::DefId;
-use rustc_infer::infer::at::ToTrace;
use rustc_infer::infer::canonical::{Canonical, QueryResponse};
-use rustc_infer::infer::{DefiningAnchor, InferCtxt, TyCtxtInferExt};
-use rustc_infer::traits::{ObligationCauseCode, TraitEngineExt as _};
+use rustc_infer::infer::{DefiningAnchor, TyCtxtInferExt};
+use rustc_infer::traits::ObligationCauseCode;
use rustc_middle::ty::query::Providers;
-use rustc_middle::ty::{
- self, EarlyBinder, FnSig, Lift, PolyFnSig, Ty, TyCtxt, TypeFoldable, Variance,
-};
-use rustc_middle::ty::{GenericArg, UserSelfTy, UserSubsts};
-use rustc_middle::ty::{ParamEnv, ParamEnvAnd, Predicate, ToPredicate};
+use rustc_middle::ty::{self, FnSig, Lift, PolyFnSig, Ty, TyCtxt, TypeFoldable};
+use rustc_middle::ty::{ParamEnvAnd, Predicate, ToPredicate};
+use rustc_middle::ty::{UserSelfTy, UserSubsts};
use rustc_span::{Span, DUMMY_SP};
use rustc_trait_selection::infer::InferCtxtBuilderExt;
-use rustc_trait_selection::infer::InferCtxtExt;
-use rustc_trait_selection::traits::query::normalize::AtExt;
+use rustc_trait_selection::traits::query::normalize::QueryNormalizeExt;
use rustc_trait_selection::traits::query::type_op::ascribe_user_type::AscribeUserType;
use rustc_trait_selection::traits::query::type_op::eq::Eq;
use rustc_trait_selection::traits::query::type_op::normalize::Normalize;
use rustc_trait_selection::traits::query::type_op::prove_predicate::ProvePredicate;
use rustc_trait_selection::traits::query::type_op::subtype::Subtype;
use rustc_trait_selection::traits::query::{Fallible, NoSolution};
-use rustc_trait_selection::traits::{Normalized, Obligation, ObligationCause, TraitEngine};
+use rustc_trait_selection::traits::{Normalized, Obligation, ObligationCause, ObligationCtxt};
use std::fmt;
use std::iter::zip;
@@ -42,17 +37,16 @@ fn type_op_ascribe_user_type<'tcx>(
tcx: TyCtxt<'tcx>,
canonicalized: Canonical<'tcx, ParamEnvAnd<'tcx, AscribeUserType<'tcx>>>,
) -> Result<&'tcx Canonical<'tcx, QueryResponse<'tcx, ()>>, NoSolution> {
- tcx.infer_ctxt().enter_canonical_trait_query(&canonicalized, |infcx, fulfill_cx, key| {
- type_op_ascribe_user_type_with_span(infcx, fulfill_cx, key, None)
+ tcx.infer_ctxt().enter_canonical_trait_query(&canonicalized, |ocx, key| {
+ type_op_ascribe_user_type_with_span(ocx, key, None)
})
}
/// The core of the `type_op_ascribe_user_type` query: for diagnostics purposes in NLL HRTB errors,
/// this query can be re-run to better track the span of the obligation cause, and improve the error
/// message. Do not call directly unless you're in that very specific context.
-pub fn type_op_ascribe_user_type_with_span<'a, 'tcx: 'a>(
- infcx: &'a InferCtxt<'tcx>,
- fulfill_cx: &'a mut dyn TraitEngine<'tcx>,
+pub fn type_op_ascribe_user_type_with_span<'tcx>(
+ ocx: &ObligationCtxt<'_, 'tcx>,
key: ParamEnvAnd<'tcx, AscribeUserType<'tcx>>,
span: Option<Span>,
) -> Result<(), NoSolution> {
@@ -61,157 +55,81 @@ pub fn type_op_ascribe_user_type_with_span<'a, 'tcx: 'a>(
"type_op_ascribe_user_type: mir_ty={:?} def_id={:?} user_substs={:?}",
mir_ty, def_id, user_substs
);
+ let span = span.unwrap_or(DUMMY_SP);
- let mut cx = AscribeUserTypeCx { infcx, param_env, span: span.unwrap_or(DUMMY_SP), fulfill_cx };
- cx.relate_mir_and_user_ty(mir_ty, def_id, user_substs)?;
- Ok(())
-}
+ let UserSubsts { user_self_ty, substs } = user_substs;
+ let tcx = ocx.infcx.tcx;
+ let cause = ObligationCause::dummy_with_span(span);
-struct AscribeUserTypeCx<'me, 'tcx> {
- infcx: &'me InferCtxt<'tcx>,
- param_env: ParamEnv<'tcx>,
- span: Span,
- fulfill_cx: &'me mut dyn TraitEngine<'tcx>,
-}
+ let ty = tcx.bound_type_of(def_id).subst(tcx, substs);
+ let ty = ocx.normalize(&cause, param_env, ty);
+ debug!("relate_type_and_user_type: ty of def-id is {:?}", ty);
-impl<'me, 'tcx> AscribeUserTypeCx<'me, 'tcx> {
- fn normalize<T>(&mut self, value: T) -> T
- where
- T: TypeFoldable<'tcx>,
- {
- self.normalize_with_cause(value, ObligationCause::misc(self.span, hir::CRATE_HIR_ID))
- }
+ ocx.eq(&cause, param_env, mir_ty, ty)?;
- fn normalize_with_cause<T>(&mut self, value: T, cause: ObligationCause<'tcx>) -> T
- where
- T: TypeFoldable<'tcx>,
- {
- self.infcx
- .partially_normalize_associated_types_in(cause, self.param_env, value)
- .into_value_registering_obligations(self.infcx, self.fulfill_cx)
- }
+ // Prove the predicates coming along with `def_id`.
+ //
+ // Also, normalize the `instantiated_predicates`
+ // because otherwise we wind up with duplicate "type
+ // outlives" error messages.
+ let instantiated_predicates = tcx.predicates_of(def_id).instantiate(tcx, substs);
- fn relate<T>(&mut self, a: T, variance: Variance, b: T) -> Result<(), NoSolution>
- where
- T: ToTrace<'tcx>,
+ debug!(?instantiated_predicates);
+ for (instantiated_predicate, predicate_span) in
+ zip(instantiated_predicates.predicates, instantiated_predicates.spans)
{
- self.infcx
- .at(&ObligationCause::dummy_with_span(self.span), self.param_env)
- .relate(a, variance, b)?
- .into_value_registering_obligations(self.infcx, self.fulfill_cx);
- Ok(())
- }
-
- fn prove_predicate(&mut self, predicate: Predicate<'tcx>, cause: ObligationCause<'tcx>) {
- self.fulfill_cx.register_predicate_obligation(
- self.infcx,
- Obligation::new(cause, self.param_env, predicate),
+ let span = if span == DUMMY_SP { predicate_span } else { span };
+ let cause = ObligationCause::new(
+ span,
+ hir::CRATE_HIR_ID,
+ ObligationCauseCode::AscribeUserTypeProvePredicate(predicate_span),
);
- }
+ let instantiated_predicate =
+ ocx.normalize(&cause.clone(), param_env, instantiated_predicate);
- fn tcx(&self) -> TyCtxt<'tcx> {
- self.infcx.tcx
+ ocx.register_obligation(Obligation::new(tcx, cause, param_env, instantiated_predicate));
}
- fn subst<T>(&self, value: T, substs: &[GenericArg<'tcx>]) -> T
- where
- T: TypeFoldable<'tcx>,
- {
- EarlyBinder(value).subst(self.tcx(), substs)
- }
+ if let Some(UserSelfTy { impl_def_id, self_ty }) = user_self_ty {
+ let impl_self_ty = tcx.bound_type_of(impl_def_id).subst(tcx, substs);
+ let impl_self_ty = ocx.normalize(&cause, param_env, impl_self_ty);
- #[instrument(level = "debug", skip(self))]
- fn relate_mir_and_user_ty(
- &mut self,
- mir_ty: Ty<'tcx>,
- def_id: DefId,
- user_substs: UserSubsts<'tcx>,
- ) -> Result<(), NoSolution> {
- let UserSubsts { user_self_ty, substs } = user_substs;
- let tcx = self.tcx();
+ ocx.eq(&cause, param_env, self_ty, impl_self_ty)?;
- let ty = tcx.type_of(def_id);
- let ty = self.subst(ty, substs);
- let ty = self.normalize(ty);
- debug!("relate_type_and_user_type: ty of def-id is {:?}", ty);
-
- self.relate(mir_ty, Variance::Invariant, ty)?;
-
- // Prove the predicates coming along with `def_id`.
- //
- // Also, normalize the `instantiated_predicates`
- // because otherwise we wind up with duplicate "type
- // outlives" error messages.
- let instantiated_predicates =
- self.tcx().predicates_of(def_id).instantiate(self.tcx(), substs);
-
- let cause = ObligationCause::dummy_with_span(self.span);
-
- debug!(?instantiated_predicates);
- for (instantiated_predicate, predicate_span) in
- zip(instantiated_predicates.predicates, instantiated_predicates.spans)
- {
- let span = if self.span == DUMMY_SP { predicate_span } else { self.span };
- let cause = ObligationCause::new(
- span,
- hir::CRATE_HIR_ID,
- ObligationCauseCode::AscribeUserTypeProvePredicate(predicate_span),
- );
- let instantiated_predicate =
- self.normalize_with_cause(instantiated_predicate, cause.clone());
- self.prove_predicate(instantiated_predicate, cause);
- }
-
- if let Some(UserSelfTy { impl_def_id, self_ty }) = user_self_ty {
- let impl_self_ty = self.tcx().type_of(impl_def_id);
- let impl_self_ty = self.subst(impl_self_ty, &substs);
- let impl_self_ty = self.normalize(impl_self_ty);
-
- self.relate(self_ty, Variance::Invariant, impl_self_ty)?;
-
- self.prove_predicate(
- ty::Binder::dummy(ty::PredicateKind::WellFormed(impl_self_ty.into()))
- .to_predicate(self.tcx()),
- cause.clone(),
- );
- }
-
- // In addition to proving the predicates, we have to
- // prove that `ty` is well-formed -- this is because
- // the WF of `ty` is predicated on the substs being
- // well-formed, and we haven't proven *that*. We don't
- // want to prove the WF of types from `substs` directly because they
- // haven't been normalized.
- //
- // FIXME(nmatsakis): Well, perhaps we should normalize
- // them? This would only be relevant if some input
- // type were ill-formed but did not appear in `ty`,
- // which...could happen with normalization...
- self.prove_predicate(
- ty::Binder::dummy(ty::PredicateKind::WellFormed(ty.into())).to_predicate(self.tcx()),
- cause,
- );
- Ok(())
+ let predicate: Predicate<'tcx> =
+ ty::Binder::dummy(ty::PredicateKind::WellFormed(impl_self_ty.into())).to_predicate(tcx);
+ ocx.register_obligation(Obligation::new(tcx, cause.clone(), param_env, predicate));
}
+
+ // In addition to proving the predicates, we have to
+ // prove that `ty` is well-formed -- this is because
+ // the WF of `ty` is predicated on the substs being
+ // well-formed, and we haven't proven *that*. We don't
+ // want to prove the WF of types from `substs` directly because they
+ // haven't been normalized.
+ //
+ // FIXME(nmatsakis): Well, perhaps we should normalize
+ // them? This would only be relevant if some input
+ // type were ill-formed but did not appear in `ty`,
+ // which...could happen with normalization...
+ let predicate: Predicate<'tcx> =
+ ty::Binder::dummy(ty::PredicateKind::WellFormed(ty.into())).to_predicate(tcx);
+ ocx.register_obligation(Obligation::new(tcx, cause, param_env, predicate));
+ Ok(())
}
fn type_op_eq<'tcx>(
tcx: TyCtxt<'tcx>,
canonicalized: Canonical<'tcx, ParamEnvAnd<'tcx, Eq<'tcx>>>,
) -> Result<&'tcx Canonical<'tcx, QueryResponse<'tcx, ()>>, NoSolution> {
- tcx.infer_ctxt().enter_canonical_trait_query(&canonicalized, |infcx, fulfill_cx, key| {
+ tcx.infer_ctxt().enter_canonical_trait_query(&canonicalized, |ocx, key| {
let (param_env, Eq { a, b }) = key.into_parts();
- infcx
- .at(&ObligationCause::dummy(), param_env)
- .eq(a, b)?
- .into_value_registering_obligations(infcx, fulfill_cx);
- Ok(())
+ Ok(ocx.eq(&ObligationCause::dummy(), param_env, a, b)?)
})
}
fn type_op_normalize<'tcx, T>(
- infcx: &InferCtxt<'tcx>,
- fulfill_cx: &mut dyn TraitEngine<'tcx>,
+ ocx: &ObligationCtxt<'_, 'tcx>,
key: ParamEnvAnd<'tcx, Normalize<T>>,
) -> Fallible<T>
where
@@ -219,8 +137,8 @@ where
{
let (param_env, Normalize { value }) = key.into_parts();
let Normalized { value, obligations } =
- infcx.at(&ObligationCause::dummy(), param_env).normalize(value)?;
- fulfill_cx.register_predicate_obligations(infcx, obligations);
+ ocx.infcx.at(&ObligationCause::dummy(), param_env).query_normalize(value)?;
+ ocx.register_obligations(obligations);
Ok(value)
}
@@ -256,13 +174,9 @@ fn type_op_subtype<'tcx>(
tcx: TyCtxt<'tcx>,
canonicalized: Canonical<'tcx, ParamEnvAnd<'tcx, Subtype<'tcx>>>,
) -> Result<&'tcx Canonical<'tcx, QueryResponse<'tcx, ()>>, NoSolution> {
- tcx.infer_ctxt().enter_canonical_trait_query(&canonicalized, |infcx, fulfill_cx, key| {
+ tcx.infer_ctxt().enter_canonical_trait_query(&canonicalized, |ocx, key| {
let (param_env, Subtype { sub, sup }) = key.into_parts();
- infcx
- .at(&ObligationCause::dummy(), param_env)
- .sup(sup, sub)?
- .into_value_registering_obligations(infcx, fulfill_cx);
- Ok(())
+ Ok(ocx.sup(&ObligationCause::dummy(), param_env, sup, sub)?)
})
}
@@ -274,8 +188,8 @@ fn type_op_prove_predicate<'tcx>(
// impl-trait/issue-99642.rs
tcx.infer_ctxt().with_opaque_type_inference(DefiningAnchor::Bubble).enter_canonical_trait_query(
&canonicalized,
- |infcx, fulfill_cx, key| {
- type_op_prove_predicate_with_cause(infcx, fulfill_cx, key, ObligationCause::dummy());
+ |ocx, key| {
+ type_op_prove_predicate_with_cause(ocx, key, ObligationCause::dummy());
Ok(())
},
)
@@ -284,12 +198,11 @@ fn type_op_prove_predicate<'tcx>(
/// The core of the `type_op_prove_predicate` query: for diagnostics purposes in NLL HRTB errors,
/// this query can be re-run to better track the span of the obligation cause, and improve the error
/// message. Do not call directly unless you're in that very specific context.
-pub fn type_op_prove_predicate_with_cause<'a, 'tcx: 'a>(
- infcx: &'a InferCtxt<'tcx>,
- fulfill_cx: &'a mut dyn TraitEngine<'tcx>,
+pub fn type_op_prove_predicate_with_cause<'tcx>(
+ ocx: &ObligationCtxt<'_, 'tcx>,
key: ParamEnvAnd<'tcx, ProvePredicate<'tcx>>,
cause: ObligationCause<'tcx>,
) {
let (param_env, ProvePredicate { predicate }) = key.into_parts();
- fulfill_cx.register_predicate_obligation(infcx, Obligation::new(cause, param_env, predicate));
+ ocx.register_obligation(Obligation::new(ocx.infcx.tcx, cause, param_env, predicate));
}
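
The `type_op.rs` rewrite dissolves `AscribeUserTypeCx`, whose fields were only borrowed context, and spells out the same steps as direct calls on the shared context (`ocx.normalize`, `ocx.eq`, `ocx.register_obligation`). Since those methods take `&self`, the context presumably tracks its pending obligations behind interior mutability; a toy, hypothetical version of that shape:

    use std::cell::RefCell;

    // Toy stand-in: a shared context that accepts work through `&self`.
    struct Ctxt {
        pending: RefCell<Vec<String>>,
    }

    impl Ctxt {
        fn normalize(&self, ty: &str) -> String {
            // Stand-in for associated-type normalization.
            format!("norm({ty})")
        }
        fn register_obligation(&self, what: String) {
            self.pending.borrow_mut().push(what);
        }
    }

    // A free function replacing a builder struct that only borrowed its inputs.
    fn ascribe_user_type(cx: &Ctxt, mir_ty: &str, declared_ty: &str) {
        let declared_ty = cx.normalize(declared_ty);
        cx.register_obligation(format!("{mir_ty} == {declared_ty}"));
        cx.register_obligation(format!("WellFormed({declared_ty})"));
    }

    fn main() {
        let cx = Ctxt { pending: RefCell::new(Vec::new()) };
        ascribe_user_type(&cx, "i32", "<Vec<i32> as IntoIterator>::Item");
        assert_eq!(cx.pending.borrow().len(), 2);
    }
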
diff --git a/compiler/rustc_transmute/src/layout/tree.rs b/compiler/rustc_transmute/src/layout/tree.rs
index acd4fa63d..30e20ba6f 100644
--- a/compiler/rustc_transmute/src/layout/tree.rs
+++ b/compiler/rustc_transmute/src/layout/tree.rs
@@ -284,7 +284,8 @@ pub(crate) mod rustc {
}
ty::Array(ty, len) => {
- let len = len.try_eval_usize(tcx, ParamEnv::reveal_all()).unwrap();
+ let len =
+ len.try_eval_usize(tcx, ParamEnv::reveal_all()).ok_or(Err::Unspecified)?;
let elt = Tree::from_ty(*ty, tcx)?;
Ok(std::iter::repeat(elt)
.take(len as usize)
@@ -435,8 +436,8 @@ pub(crate) mod rustc {
// finally: padding
let padding_span = trace_span!("adding trailing padding").entered();
- let padding_needed = layout_summary.total_size - variant_layout.size();
- if padding_needed > 0 {
+ if layout_summary.total_size > variant_layout.size() {
+ let padding_needed = layout_summary.total_size - variant_layout.size();
tree = tree.then(Self::padding(padding_needed));
};
drop(padding_span);
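
Two hardening fixes in `Tree::from_ty` above: an array length that cannot be evaluated now surfaces as `Err::Unspecified` instead of panicking in `unwrap()`, and trailing padding is only computed when `total_size` actually exceeds the variant size, so the `Size` subtraction can no longer underflow. A minimal sketch of both guards, with plain integers standing in for `Size` and a hypothetical error type standing in for the crate's:

    #[derive(Debug, PartialEq)]
    enum LayoutErr {
        Unspecified, // stand-in for the crate's `Err::Unspecified`
    }

    fn array_len(evaluated: Option<u64>) -> Result<u64, LayoutErr> {
        // Before: `evaluated.unwrap()`; now the failure reaches the caller.
        evaluated.ok_or(LayoutErr::Unspecified)
    }

    fn trailing_padding(total_size: u64, variant_size: u64) -> u64 {
        // Only subtract when the result is known to be non-negative.
        if total_size > variant_size { total_size - variant_size } else { 0 }
    }

    fn main() {
        assert_eq!(array_len(None), Err(LayoutErr::Unspecified));
        assert_eq!(array_len(Some(3)), Ok(3));
        assert_eq!(trailing_padding(8, 8), 0);
        assert_eq!(trailing_padding(12, 8), 4);
    }
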
diff --git a/compiler/rustc_transmute/src/lib.rs b/compiler/rustc_transmute/src/lib.rs
index f7cc94e53..384d03106 100644
--- a/compiler/rustc_transmute/src/lib.rs
+++ b/compiler/rustc_transmute/src/lib.rs
@@ -122,7 +122,7 @@ mod rustc {
let c = c.eval(tcx, param_env);
- if let Some(err) = c.error_reported() {
+ if let Err(err) = c.error_reported() {
return Some(Self {
alignment: true,
lifetimes: true,
diff --git a/compiler/rustc_ty_utils/Cargo.toml b/compiler/rustc_ty_utils/Cargo.toml
index 5e4ba4730..52fbd3ae0 100644
--- a/compiler/rustc_ty_utils/Cargo.toml
+++ b/compiler/rustc_ty_utils/Cargo.toml
@@ -4,8 +4,6 @@ version = "0.0.0"
edition = "2021"
[dependencies]
-rand = "0.8.4"
-rand_xoshiro = "0.6.0"
tracing = "0.1"
rustc_middle = { path = "../rustc_middle" }
rustc_data_structures = { path = "../rustc_data_structures" }
diff --git a/compiler/rustc_ty_utils/src/consts.rs b/compiler/rustc_ty_utils/src/consts.rs
index e057bb668..f8ff31f97 100644
--- a/compiler/rustc_ty_utils/src/consts.rs
+++ b/compiler/rustc_ty_utils/src/consts.rs
@@ -1,10 +1,11 @@
use rustc_errors::ErrorGuaranteed;
use rustc_hir::def::DefKind;
use rustc_hir::def_id::LocalDefId;
-use rustc_index::vec::IndexVec;
use rustc_middle::mir::interpret::{LitToConstError, LitToConstInput};
-use rustc_middle::ty::abstract_const::{CastKind, Node, NodeId};
-use rustc_middle::ty::{self, TyCtxt, TypeVisitable};
+use rustc_middle::thir::visit;
+use rustc_middle::thir::visit::Visitor;
+use rustc_middle::ty::abstract_const::CastKind;
+use rustc_middle::ty::{self, Expr, TyCtxt, TypeVisitable};
use rustc_middle::{mir, thir};
use rustc_span::Span;
use rustc_target::abi::VariantIdx;
@@ -31,10 +32,8 @@ pub(crate) fn destructure_const<'tcx>(
let (fields, variant) = match const_.ty().kind() {
ty::Array(inner_ty, _) | ty::Slice(inner_ty) => {
// construct the consts for the elements of the array/slice
- let field_consts = branches
- .iter()
- .map(|b| tcx.mk_const(ty::ConstS { kind: ty::ConstKind::Value(*b), ty: *inner_ty }))
- .collect::<Vec<_>>();
+ let field_consts =
+ branches.iter().map(|b| tcx.mk_const(*b, *inner_ty)).collect::<Vec<_>>();
debug!(?field_consts);
(field_consts, None)
@@ -52,10 +51,7 @@ pub(crate) fn destructure_const<'tcx>(
for (field, field_valtree) in iter::zip(fields, branches) {
let field_ty = field.ty(tcx, substs);
- let field_const = tcx.mk_const(ty::ConstS {
- kind: ty::ConstKind::Value(*field_valtree),
- ty: field_ty,
- });
+ let field_const = tcx.mk_const(*field_valtree, field_ty);
field_consts.push(field_const);
}
debug!(?field_consts);
@@ -64,12 +60,7 @@ pub(crate) fn destructure_const<'tcx>(
}
ty::Tuple(elem_tys) => {
let fields = iter::zip(*elem_tys, branches)
- .map(|(elem_ty, elem_valtree)| {
- tcx.mk_const(ty::ConstS {
- kind: ty::ConstKind::Value(*elem_valtree),
- ty: elem_ty,
- })
- })
+ .map(|(elem_ty, elem_valtree)| tcx.mk_const(*elem_valtree, elem_ty))
.collect::<Vec<_>>();
(fields, None)
@@ -82,328 +73,278 @@ pub(crate) fn destructure_const<'tcx>(
ty::DestructuredConst { variant, fields }
}
-pub struct AbstractConstBuilder<'a, 'tcx> {
- tcx: TyCtxt<'tcx>,
- body_id: thir::ExprId,
- body: &'a thir::Thir<'tcx>,
- /// The current WIP node tree.
- nodes: IndexVec<NodeId, Node<'tcx>>,
-}
-
-impl<'a, 'tcx> AbstractConstBuilder<'a, 'tcx> {
- fn root_span(&self) -> Span {
- self.body.exprs[self.body_id].span
- }
-
- fn error(&mut self, sub: GenericConstantTooComplexSub) -> Result<!, ErrorGuaranteed> {
- let reported = self.tcx.sess.emit_err(GenericConstantTooComplex {
- span: self.root_span(),
- maybe_supported: None,
- sub,
- });
-
- Err(reported)
+/// We do not allow all binary operations in abstract consts, so filter disallowed ones.
+fn check_binop(op: mir::BinOp) -> bool {
+ use mir::BinOp::*;
+ match op {
+ Add | Sub | Mul | Div | Rem | BitXor | BitAnd | BitOr | Shl | Shr | Eq | Lt | Le | Ne
+ | Ge | Gt => true,
+ Offset => false,
}
+}
- fn maybe_supported_error(
- &mut self,
- sub: GenericConstantTooComplexSub,
- ) -> Result<!, ErrorGuaranteed> {
- let reported = self.tcx.sess.emit_err(GenericConstantTooComplex {
- span: self.root_span(),
- maybe_supported: Some(()),
- sub,
- });
-
- Err(reported)
+/// While we currently allow all unary operations, we still want to explicitly guard against
+/// future changes here.
+fn check_unop(op: mir::UnOp) -> bool {
+ use mir::UnOp::*;
+ match op {
+ Not | Neg => true,
}
+}
- #[instrument(skip(tcx, body, body_id), level = "debug")]
- pub fn new(
- tcx: TyCtxt<'tcx>,
- (body, body_id): (&'a thir::Thir<'tcx>, thir::ExprId),
- ) -> Result<Option<AbstractConstBuilder<'a, 'tcx>>, ErrorGuaranteed> {
- let builder = AbstractConstBuilder { tcx, body_id, body, nodes: IndexVec::new() };
-
- struct IsThirPolymorphic<'a, 'tcx> {
- is_poly: bool,
- thir: &'a thir::Thir<'tcx>,
+fn recurse_build<'tcx>(
+ tcx: TyCtxt<'tcx>,
+ body: &thir::Thir<'tcx>,
+ node: thir::ExprId,
+ root_span: Span,
+) -> Result<ty::Const<'tcx>, ErrorGuaranteed> {
+ use thir::ExprKind;
+ let node = &body.exprs[node];
+
+ let maybe_supported_error = |a| maybe_supported_error(tcx, a, root_span);
+ let error = |a| error(tcx, a, root_span);
+
+ Ok(match &node.kind {
+ // I don't know if handling of these 3 is correct
+ &ExprKind::Scope { value, .. } => recurse_build(tcx, body, value, root_span)?,
+ &ExprKind::PlaceTypeAscription { source, .. }
+ | &ExprKind::ValueTypeAscription { source, .. } => {
+ recurse_build(tcx, body, source, root_span)?
}
-
- use crate::rustc_middle::thir::visit::Visitor;
- use thir::visit;
-
- impl<'a, 'tcx> IsThirPolymorphic<'a, 'tcx> {
- fn expr_is_poly(&mut self, expr: &thir::Expr<'tcx>) -> bool {
- if expr.ty.has_non_region_param() {
- return true;
+ &ExprKind::Literal { lit, neg } => {
+ let sp = node.span;
+ match tcx.at(sp).lit_to_const(LitToConstInput { lit: &lit.node, ty: node.ty, neg }) {
+ Ok(c) => c,
+ Err(LitToConstError::Reported(guar)) => {
+ tcx.const_error_with_guaranteed(node.ty, guar)
}
-
- match expr.kind {
- thir::ExprKind::NamedConst { substs, .. } => substs.has_non_region_param(),
- thir::ExprKind::ConstParam { .. } => true,
- thir::ExprKind::Repeat { value, count } => {
- self.visit_expr(&self.thir()[value]);
- count.has_non_region_param()
- }
- _ => false,
+ Err(LitToConstError::TypeError) => {
+ bug!("encountered type error in lit_to_const")
}
}
+ }
+ &ExprKind::NonHirLiteral { lit, user_ty: _ } => {
+ let val = ty::ValTree::from_scalar_int(lit);
+ tcx.mk_const(val, node.ty)
+ }
+ &ExprKind::ZstLiteral { user_ty: _ } => {
+ let val = ty::ValTree::zst();
+ tcx.mk_const(val, node.ty)
+ }
+ &ExprKind::NamedConst { def_id, substs, user_ty: _ } => {
+ let uneval = ty::UnevaluatedConst::new(ty::WithOptConstParam::unknown(def_id), substs);
+ tcx.mk_const(uneval, node.ty)
+ }
+ ExprKind::ConstParam { param, .. } => tcx.mk_const(*param, node.ty),
- fn pat_is_poly(&mut self, pat: &thir::Pat<'tcx>) -> bool {
- if pat.ty.has_non_region_param() {
- return true;
- }
+ ExprKind::Call { fun, args, .. } => {
+ let fun = recurse_build(tcx, body, *fun, root_span)?;
- match pat.kind {
- thir::PatKind::Constant { value } => value.has_non_region_param(),
- thir::PatKind::Range(box thir::PatRange { lo, hi, .. }) => {
- lo.has_non_region_param() || hi.has_non_region_param()
- }
- _ => false,
- }
+ let mut new_args = Vec::<ty::Const<'tcx>>::with_capacity(args.len());
+ for &id in args.iter() {
+ new_args.push(recurse_build(tcx, body, id, root_span)?);
}
+ let new_args = tcx.mk_const_list(new_args.iter());
+ tcx.mk_const(Expr::FunctionCall(fun, new_args), node.ty)
}
-
- impl<'a, 'tcx> visit::Visitor<'a, 'tcx> for IsThirPolymorphic<'a, 'tcx> {
- fn thir(&self) -> &'a thir::Thir<'tcx> {
- &self.thir
- }
-
- #[instrument(skip(self), level = "debug")]
- fn visit_expr(&mut self, expr: &thir::Expr<'tcx>) {
- self.is_poly |= self.expr_is_poly(expr);
- if !self.is_poly {
- visit::walk_expr(self, expr)
- }
+ &ExprKind::Binary { op, lhs, rhs } if check_binop(op) => {
+ let lhs = recurse_build(tcx, body, lhs, root_span)?;
+ let rhs = recurse_build(tcx, body, rhs, root_span)?;
+ tcx.mk_const(Expr::Binop(op, lhs, rhs), node.ty)
+ }
+ &ExprKind::Unary { op, arg } if check_unop(op) => {
+ let arg = recurse_build(tcx, body, arg, root_span)?;
+ tcx.mk_const(Expr::UnOp(op, arg), node.ty)
+ }
+ // This is necessary so that the following compiles:
+ //
+ // ```
+ // fn foo<const N: usize>(a: [(); N + 1]) {
+ // bar::<{ N + 1 }>();
+ // }
+ // ```
+ ExprKind::Block { block } => {
+ if let thir::Block { stmts: box [], expr: Some(e), .. } = &body.blocks[*block] {
+ recurse_build(tcx, body, *e, root_span)?
+ } else {
+ maybe_supported_error(GenericConstantTooComplexSub::BlockNotSupported(node.span))?
}
-
- #[instrument(skip(self), level = "debug")]
- fn visit_pat(&mut self, pat: &thir::Pat<'tcx>) {
- self.is_poly |= self.pat_is_poly(pat);
- if !self.is_poly {
- visit::walk_pat(self, pat);
- }
+ }
+ // `ExprKind::Use` happens when a `hir::ExprKind::Cast` is a
+ // "coercion cast" i.e. using a coercion or is a no-op.
+ // This is important so that `N as usize as usize` doesn't unify with `N as usize`. (untested)
+ &ExprKind::Use { source } => {
+ let arg = recurse_build(tcx, body, source, root_span)?;
+ tcx.mk_const(Expr::Cast(CastKind::Use, arg, node.ty), node.ty)
+ }
+ &ExprKind::Cast { source } => {
+ let arg = recurse_build(tcx, body, source, root_span)?;
+ tcx.mk_const(Expr::Cast(CastKind::As, arg, node.ty), node.ty)
+ }
+ ExprKind::Borrow { arg, .. } => {
+ let arg_node = &body.exprs[*arg];
+
+ // Skip reborrows for now until we allow Deref/Borrow/AddressOf
+ // expressions.
+ // FIXME(generic_const_exprs): Verify/explain why this is sound
+ if let ExprKind::Deref { arg } = arg_node.kind {
+ recurse_build(tcx, body, arg, root_span)?
+ } else {
+ maybe_supported_error(GenericConstantTooComplexSub::BorrowNotSupported(node.span))?
}
}
-
- let mut is_poly_vis = IsThirPolymorphic { is_poly: false, thir: body };
- visit::walk_expr(&mut is_poly_vis, &body[body_id]);
- debug!("AbstractConstBuilder: is_poly={}", is_poly_vis.is_poly);
- if !is_poly_vis.is_poly {
- return Ok(None);
+ // FIXME(generic_const_exprs): We may want to support these.
+ ExprKind::AddressOf { .. } | ExprKind::Deref { .. } => maybe_supported_error(
+ GenericConstantTooComplexSub::AddressAndDerefNotSupported(node.span),
+ )?,
+ ExprKind::Repeat { .. } | ExprKind::Array { .. } => {
+ maybe_supported_error(GenericConstantTooComplexSub::ArrayNotSupported(node.span))?
}
+ ExprKind::NeverToAny { .. } => {
+ maybe_supported_error(GenericConstantTooComplexSub::NeverToAnyNotSupported(node.span))?
+ }
+ ExprKind::Tuple { .. } => {
+ maybe_supported_error(GenericConstantTooComplexSub::TupleNotSupported(node.span))?
+ }
+ ExprKind::Index { .. } => {
+ maybe_supported_error(GenericConstantTooComplexSub::IndexNotSupported(node.span))?
+ }
+ ExprKind::Field { .. } => {
+ maybe_supported_error(GenericConstantTooComplexSub::FieldNotSupported(node.span))?
+ }
+ ExprKind::ConstBlock { .. } => {
+ maybe_supported_error(GenericConstantTooComplexSub::ConstBlockNotSupported(node.span))?
+ }
+ ExprKind::Adt(_) => {
+ maybe_supported_error(GenericConstantTooComplexSub::AdtNotSupported(node.span))?
+ }
+ // don't know if this is correct
+ ExprKind::Pointer { .. } => {
+ error(GenericConstantTooComplexSub::PointerNotSupported(node.span))?
+ }
+ ExprKind::Yield { .. } => {
+ error(GenericConstantTooComplexSub::YieldNotSupported(node.span))?
+ }
+ ExprKind::Continue { .. } | ExprKind::Break { .. } | ExprKind::Loop { .. } => {
+ error(GenericConstantTooComplexSub::LoopNotSupported(node.span))?
+ }
+ ExprKind::Box { .. } => error(GenericConstantTooComplexSub::BoxNotSupported(node.span))?,
- Ok(Some(builder))
- }
-
- /// We do not allow all binary operations in abstract consts, so filter disallowed ones.
- fn check_binop(op: mir::BinOp) -> bool {
- use mir::BinOp::*;
- match op {
- Add | Sub | Mul | Div | Rem | BitXor | BitAnd | BitOr | Shl | Shr | Eq | Lt | Le
- | Ne | Ge | Gt => true,
- Offset => false,
+ ExprKind::Unary { .. } => unreachable!(),
+ // we handle valid unary/binary ops above
+ ExprKind::Binary { .. } => {
+ error(GenericConstantTooComplexSub::BinaryNotSupported(node.span))?
+ }
+ ExprKind::LogicalOp { .. } => {
+ error(GenericConstantTooComplexSub::LogicalOpNotSupported(node.span))?
+ }
+ ExprKind::Assign { .. } | ExprKind::AssignOp { .. } => {
+ error(GenericConstantTooComplexSub::AssignNotSupported(node.span))?
+ }
+ ExprKind::Closure { .. } | ExprKind::Return { .. } => {
+ error(GenericConstantTooComplexSub::ClosureAndReturnNotSupported(node.span))?
+ }
+ // let expressions imply control flow
+ ExprKind::Match { .. } | ExprKind::If { .. } | ExprKind::Let { .. } => {
+ error(GenericConstantTooComplexSub::ControlFlowNotSupported(node.span))?
+ }
+ ExprKind::InlineAsm { .. } => {
+ error(GenericConstantTooComplexSub::InlineAsmNotSupported(node.span))?
}
- }
- /// While we currently allow all unary operations, we still want to explicitly guard against
- /// future changes here.
- fn check_unop(op: mir::UnOp) -> bool {
- use mir::UnOp::*;
- match op {
- Not | Neg => true,
+ // we don't permit let stmts, so `VarRef` and `UpvarRef` can't happen
+ ExprKind::VarRef { .. }
+ | ExprKind::UpvarRef { .. }
+ | ExprKind::StaticRef { .. }
+ | ExprKind::ThreadLocalRef(_) => {
+ error(GenericConstantTooComplexSub::OperationNotSupported(node.span))?
}
- }
+ })
+}
- /// Builds the abstract const by walking the thir and bailing out when
- /// encountering an unsupported operation.
- pub fn build(mut self) -> Result<&'tcx [Node<'tcx>], ErrorGuaranteed> {
- debug!("AbstractConstBuilder::build: body={:?}", &*self.body);
- self.recurse_build(self.body_id)?;
+struct IsThirPolymorphic<'a, 'tcx> {
+ is_poly: bool,
+ thir: &'a thir::Thir<'tcx>,
+}
- Ok(self.tcx.arena.alloc_from_iter(self.nodes.into_iter()))
- }
+fn error<'tcx>(
+ tcx: TyCtxt<'tcx>,
+ sub: GenericConstantTooComplexSub,
+ root_span: Span,
+) -> Result<!, ErrorGuaranteed> {
+ let reported = tcx.sess.emit_err(GenericConstantTooComplex {
+ span: root_span,
+ maybe_supported: None,
+ sub,
+ });
+
+ Err(reported)
+}
- fn recurse_build(&mut self, node: thir::ExprId) -> Result<NodeId, ErrorGuaranteed> {
- use thir::ExprKind;
- let node = &self.body.exprs[node];
- Ok(match &node.kind {
- // I dont know if handling of these 3 is correct
- &ExprKind::Scope { value, .. } => self.recurse_build(value)?,
- &ExprKind::PlaceTypeAscription { source, .. }
- | &ExprKind::ValueTypeAscription { source, .. } => self.recurse_build(source)?,
- &ExprKind::Literal { lit, neg } => {
- let sp = node.span;
- let constant = match self.tcx.at(sp).lit_to_const(LitToConstInput {
- lit: &lit.node,
- ty: node.ty,
- neg,
- }) {
- Ok(c) => c,
- Err(LitToConstError::Reported) => self.tcx.const_error(node.ty),
- Err(LitToConstError::TypeError) => {
- bug!("encountered type error in lit_to_const")
- }
- };
-
- self.nodes.push(Node::Leaf(constant))
- }
- &ExprKind::NonHirLiteral { lit, user_ty: _ } => {
- let val = ty::ValTree::from_scalar_int(lit);
- self.nodes.push(Node::Leaf(ty::Const::from_value(self.tcx, val, node.ty)))
- }
- &ExprKind::ZstLiteral { user_ty: _ } => {
- let val = ty::ValTree::zst();
- self.nodes.push(Node::Leaf(ty::Const::from_value(self.tcx, val, node.ty)))
- }
- &ExprKind::NamedConst { def_id, substs, user_ty: _ } => {
- let uneval =
- ty::UnevaluatedConst::new(ty::WithOptConstParam::unknown(def_id), substs);
+fn maybe_supported_error<'tcx>(
+ tcx: TyCtxt<'tcx>,
+ sub: GenericConstantTooComplexSub,
+ root_span: Span,
+) -> Result<!, ErrorGuaranteed> {
+ let reported = tcx.sess.emit_err(GenericConstantTooComplex {
+ span: root_span,
+ maybe_supported: Some(()),
+ sub,
+ });
+
+ Err(reported)
+}
- let constant = self
- .tcx
- .mk_const(ty::ConstS { kind: ty::ConstKind::Unevaluated(uneval), ty: node.ty });
+impl<'a, 'tcx> IsThirPolymorphic<'a, 'tcx> {
+ fn expr_is_poly(&mut self, expr: &thir::Expr<'tcx>) -> bool {
+ if expr.ty.has_non_region_param() {
+ return true;
+ }
- self.nodes.push(Node::Leaf(constant))
+ match expr.kind {
+ thir::ExprKind::NamedConst { substs, .. } => substs.has_non_region_param(),
+ thir::ExprKind::ConstParam { .. } => true,
+ thir::ExprKind::Repeat { value, count } => {
+ self.visit_expr(&self.thir()[value]);
+ count.has_non_region_param()
}
+ _ => false,
+ }
+ }
+ fn pat_is_poly(&mut self, pat: &thir::Pat<'tcx>) -> bool {
+ if pat.ty.has_non_region_param() {
+ return true;
+ }
- ExprKind::ConstParam { param, .. } => {
- let const_param = self
- .tcx
- .mk_const(ty::ConstS { kind: ty::ConstKind::Param(*param), ty: node.ty });
- self.nodes.push(Node::Leaf(const_param))
+ match pat.kind {
+ thir::PatKind::Constant { value } => value.has_non_region_param(),
+ thir::PatKind::Range(box thir::PatRange { lo, hi, .. }) => {
+ lo.has_non_region_param() || hi.has_non_region_param()
}
+ _ => false,
+ }
+ }
+}
- ExprKind::Call { fun, args, .. } => {
- let fun = self.recurse_build(*fun)?;
-
- let mut new_args = Vec::<NodeId>::with_capacity(args.len());
- for &id in args.iter() {
- new_args.push(self.recurse_build(id)?);
- }
- let new_args = self.tcx.arena.alloc_slice(&new_args);
- self.nodes.push(Node::FunctionCall(fun, new_args))
- }
- &ExprKind::Binary { op, lhs, rhs } if Self::check_binop(op) => {
- let lhs = self.recurse_build(lhs)?;
- let rhs = self.recurse_build(rhs)?;
- self.nodes.push(Node::Binop(op, lhs, rhs))
- }
- &ExprKind::Unary { op, arg } if Self::check_unop(op) => {
- let arg = self.recurse_build(arg)?;
- self.nodes.push(Node::UnaryOp(op, arg))
- }
- // This is necessary so that the following compiles:
- //
- // ```
- // fn foo<const N: usize>(a: [(); N + 1]) {
- // bar::<{ N + 1 }>();
- // }
- // ```
- ExprKind::Block { block } => {
- if let thir::Block { stmts: box [], expr: Some(e), .. } = &self.body.blocks[*block]
- {
- self.recurse_build(*e)?
- } else {
- self.maybe_supported_error(GenericConstantTooComplexSub::BlockNotSupported(
- node.span,
- ))?
- }
- }
- // `ExprKind::Use` happens when a `hir::ExprKind::Cast` is a
- // "coercion cast" i.e. using a coercion or is a no-op.
- // This is important so that `N as usize as usize` doesnt unify with `N as usize`. (untested)
- &ExprKind::Use { source } => {
- let arg = self.recurse_build(source)?;
- self.nodes.push(Node::Cast(CastKind::Use, arg, node.ty))
- }
- &ExprKind::Cast { source } => {
- let arg = self.recurse_build(source)?;
- self.nodes.push(Node::Cast(CastKind::As, arg, node.ty))
- }
- ExprKind::Borrow { arg, .. } => {
- let arg_node = &self.body.exprs[*arg];
-
- // Skip reborrows for now until we allow Deref/Borrow/AddressOf
- // expressions.
- // FIXME(generic_const_exprs): Verify/explain why this is sound
- if let ExprKind::Deref { arg } = arg_node.kind {
- self.recurse_build(arg)?
- } else {
- self.maybe_supported_error(GenericConstantTooComplexSub::BorrowNotSupported(
- node.span,
- ))?
- }
- }
- // FIXME(generic_const_exprs): We may want to support these.
- ExprKind::AddressOf { .. } | ExprKind::Deref { .. } => self.maybe_supported_error(
- GenericConstantTooComplexSub::AddressAndDerefNotSupported(node.span),
- )?,
- ExprKind::Repeat { .. } | ExprKind::Array { .. } => self.maybe_supported_error(
- GenericConstantTooComplexSub::ArrayNotSupported(node.span),
- )?,
- ExprKind::NeverToAny { .. } => self.maybe_supported_error(
- GenericConstantTooComplexSub::NeverToAnyNotSupported(node.span),
- )?,
- ExprKind::Tuple { .. } => self.maybe_supported_error(
- GenericConstantTooComplexSub::TupleNotSupported(node.span),
- )?,
- ExprKind::Index { .. } => self.maybe_supported_error(
- GenericConstantTooComplexSub::IndexNotSupported(node.span),
- )?,
- ExprKind::Field { .. } => self.maybe_supported_error(
- GenericConstantTooComplexSub::FieldNotSupported(node.span),
- )?,
- ExprKind::ConstBlock { .. } => self.maybe_supported_error(
- GenericConstantTooComplexSub::ConstBlockNotSupported(node.span),
- )?,
- ExprKind::Adt(_) => self
- .maybe_supported_error(GenericConstantTooComplexSub::AdtNotSupported(node.span))?,
- // dont know if this is correct
- ExprKind::Pointer { .. } => {
- self.error(GenericConstantTooComplexSub::PointerNotSupported(node.span))?
- }
- ExprKind::Yield { .. } => {
- self.error(GenericConstantTooComplexSub::YieldNotSupported(node.span))?
- }
- ExprKind::Continue { .. } | ExprKind::Break { .. } | ExprKind::Loop { .. } => {
- self.error(GenericConstantTooComplexSub::LoopNotSupported(node.span))?
- }
- ExprKind::Box { .. } => {
- self.error(GenericConstantTooComplexSub::BoxNotSupported(node.span))?
- }
+impl<'a, 'tcx> visit::Visitor<'a, 'tcx> for IsThirPolymorphic<'a, 'tcx> {
+ fn thir(&self) -> &'a thir::Thir<'tcx> {
+ &self.thir
+ }
- ExprKind::Unary { .. } => unreachable!(),
- // we handle valid unary/binary ops above
- ExprKind::Binary { .. } => {
- self.error(GenericConstantTooComplexSub::BinaryNotSupported(node.span))?
- }
- ExprKind::LogicalOp { .. } => {
- self.error(GenericConstantTooComplexSub::LogicalOpNotSupported(node.span))?
- }
- ExprKind::Assign { .. } | ExprKind::AssignOp { .. } => {
- self.error(GenericConstantTooComplexSub::AssignNotSupported(node.span))?
- }
- ExprKind::Closure { .. } | ExprKind::Return { .. } => {
- self.error(GenericConstantTooComplexSub::ClosureAndReturnNotSupported(node.span))?
- }
- // let expressions imply control flow
- ExprKind::Match { .. } | ExprKind::If { .. } | ExprKind::Let { .. } => {
- self.error(GenericConstantTooComplexSub::ControlFlowNotSupported(node.span))?
- }
- ExprKind::InlineAsm { .. } => {
- self.error(GenericConstantTooComplexSub::InlineAsmNotSupported(node.span))?
- }
+ #[instrument(skip(self), level = "debug")]
+ fn visit_expr(&mut self, expr: &thir::Expr<'tcx>) {
+ self.is_poly |= self.expr_is_poly(expr);
+ if !self.is_poly {
+ visit::walk_expr(self, expr)
+ }
+ }
- // we dont permit let stmts so `VarRef` and `UpvarRef` cant happen
- ExprKind::VarRef { .. }
- | ExprKind::UpvarRef { .. }
- | ExprKind::StaticRef { .. }
- | ExprKind::ThreadLocalRef(_) => {
- self.error(GenericConstantTooComplexSub::OperationNotSupported(node.span))?
- }
- })
+ #[instrument(skip(self), level = "debug")]
+ fn visit_pat(&mut self, pat: &thir::Pat<'tcx>) {
+ self.is_poly |= self.pat_is_poly(pat);
+ if !self.is_poly {
+ visit::walk_pat(self, pat);
+ }
}
}
@@ -411,7 +352,7 @@ impl<'a, 'tcx> AbstractConstBuilder<'a, 'tcx> {
pub fn thir_abstract_const<'tcx>(
tcx: TyCtxt<'tcx>,
def: ty::WithOptConstParam<LocalDefId>,
-) -> Result<Option<&'tcx [Node<'tcx>]>, ErrorGuaranteed> {
+) -> Result<Option<ty::Const<'tcx>>, ErrorGuaranteed> {
if tcx.features().generic_const_exprs {
match tcx.def_kind(def.did) {
// FIXME(generic_const_exprs): We currently only do this for anonymous constants,
@@ -424,10 +365,17 @@ pub fn thir_abstract_const<'tcx>(
}
let body = tcx.thir_body(def)?;
+ let (body, body_id) = (&*body.0.borrow(), body.1);
+
+ let mut is_poly_vis = IsThirPolymorphic { is_poly: false, thir: body };
+ visit::walk_expr(&mut is_poly_vis, &body[body_id]);
+ if !is_poly_vis.is_poly {
+ return Ok(None);
+ }
+
+ let root_span = body.exprs[body_id].span;
- AbstractConstBuilder::new(tcx, (&*body.0.borrow(), body.1))?
- .map(AbstractConstBuilder::build)
- .transpose()
+ Some(recurse_build(tcx, body, body_id, root_span)).transpose()
} else {
Ok(None)
}
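
Net effect of the `consts.rs` rewrite: abstract consts are no longer built as an arena of `Node`s addressed by `NodeId`. `recurse_build` is now a free function that walks the THIR once and directly produces a `ty::Const` whose `Expr` nodes own their operands, and the `IsThirPolymorphic` walk runs up front in `thir_abstract_const`, so non-polymorphic bodies return `Ok(None)` before any building happens. A self-contained analogue of the index-arena-to-nested-value change, with hypothetical toy types:

    // Toy types: THIR nodes are index-addressed, the result owns its operands.
    #[derive(Debug, PartialEq)]
    enum Expr {
        Leaf(u64),
        Binop(Box<Expr>, Box<Expr>),
    }

    enum Thir {
        Lit(u64),
        Add(usize, usize), // operands referenced by index into the body
    }

    fn recurse_build(body: &[Thir], node: usize) -> Expr {
        match body[node] {
            Thir::Lit(n) => Expr::Leaf(n),
            Thir::Add(l, r) => Expr::Binop(
                Box::new(recurse_build(body, l)),
                Box::new(recurse_build(body, r)),
            ),
        }
    }

    fn main() {
        // Roughly `N + 1`, with `N` spelled as the literal 0 in this toy version.
        let body = [Thir::Lit(0), Thir::Lit(1), Thir::Add(0, 1)];
        assert_eq!(
            recurse_build(&body, 2),
            Expr::Binop(Box::new(Expr::Leaf(0)), Box::new(Expr::Leaf(1))),
        );
    }
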
diff --git a/compiler/rustc_ty_utils/src/instance.rs b/compiler/rustc_ty_utils/src/instance.rs
index 6436713b3..c6f2b16ca 100644
--- a/compiler/rustc_ty_utils/src/instance.rs
+++ b/compiler/rustc_ty_utils/src/instance.rs
@@ -202,8 +202,14 @@ fn resolve_associated_item<'tcx>(
)),
substs: generator_data.substs,
}),
+ traits::ImplSource::Future(future_data) => Some(Instance {
+ def: ty::InstanceDef::Item(ty::WithOptConstParam::unknown(
+ future_data.generator_def_id,
+ )),
+ substs: future_data.substs,
+ }),
traits::ImplSource::Closure(closure_data) => {
- let trait_closure_kind = tcx.fn_trait_kind_from_lang_item(trait_id).unwrap();
+ let trait_closure_kind = tcx.fn_trait_kind_from_def_id(trait_id).unwrap();
Instance::resolve_closure(
tcx,
closure_data.closure_def_id,
@@ -264,8 +270,6 @@ fn resolve_associated_item<'tcx>(
traits::ImplSource::AutoImpl(..)
| traits::ImplSource::Param(..)
| traits::ImplSource::TraitAlias(..)
- | traits::ImplSource::DiscriminantKind(..)
- | traits::ImplSource::Pointee(..)
| traits::ImplSource::TraitUpcasting(_)
| traits::ImplSource::ConstDestruct(_) => None,
})
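
`resolve_associated_item` gains an arm for the new `ImplSource::Future`, pointing the call at `future_data.generator_def_id` the same way the existing `Generator` arm does (an `async fn` body is lowered to a generator), and the closure arm now looks the trait kind up by `DefId` instead of by lang item. A tiny, hypothetical sketch of what adding such an `ImplSource` variant means for the resolution match:

    // Hypothetical, heavily simplified: which concrete item backs a trait call?
    enum ImplSource {
        UserImpl(&'static str),
        Generator(&'static str),
        Future(&'static str), // new variant for async fn bodies
    }

    fn resolve(source: ImplSource) -> Option<&'static str> {
        match source {
            ImplSource::UserImpl(item) => Some(item),
            // `Future::poll`, like the generator traits, resolves to the
            // generator's own definition.
            ImplSource::Generator(generator_def_id)
            | ImplSource::Future(generator_def_id) => Some(generator_def_id),
        }
    }

    fn main() {
        assert_eq!(resolve(ImplSource::Future("my_async_fn::generator")), Some("my_async_fn::generator"));
    }
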
diff --git a/compiler/rustc_ty_utils/src/layout.rs b/compiler/rustc_ty_utils/src/layout.rs
index 52ba0eee9..fbc055b5d 100644
--- a/compiler/rustc_ty_utils/src/layout.rs
+++ b/compiler/rustc_ty_utils/src/layout.rs
@@ -13,13 +13,8 @@ use rustc_span::symbol::Symbol;
use rustc_span::DUMMY_SP;
use rustc_target::abi::*;
-use std::cmp::{self, Ordering};
+use std::fmt::Debug;
use std::iter;
-use std::num::NonZeroUsize;
-use std::ops::Bound;
-
-use rand::{seq::SliceRandom, SeedableRng};
-use rand_xoshiro::Xoshiro128StarStar;
use crate::layout_sanity_check::sanity_check_layout;
@@ -66,16 +61,6 @@ fn layout_of<'tcx>(
Ok(layout)
}
-#[derive(Copy, Clone, Debug)]
-enum StructKind {
- /// A tuple, closure, or univariant which cannot be coerced to unsized.
- AlwaysSized,
- /// A univariant, the last field of which may be coerced to unsized.
- MaybeUnsized,
- /// A univariant, but with a prefix of an arbitrary size & alignment (e.g., enum tag).
- Prefixed(Size, Align),
-}
-
// Invert a bijective mapping, i.e. `invert(map)[y] = x` if `map[x] = y`.
// This is used to go between `memory_index` (source field order to memory order)
// and `inverse_memory_index` (memory order to source field order).
@@ -89,40 +74,13 @@ fn invert_mapping(map: &[u32]) -> Vec<u32> {
inverse
}
-fn scalar_pair<'tcx>(cx: &LayoutCx<'tcx, TyCtxt<'tcx>>, a: Scalar, b: Scalar) -> LayoutS<'tcx> {
- let dl = cx.data_layout();
- let b_align = b.align(dl);
- let align = a.align(dl).max(b_align).max(dl.aggregate_align);
- let b_offset = a.size(dl).align_to(b_align.abi);
- let size = (b_offset + b.size(dl)).align_to(align.abi);
-
- // HACK(nox): We iter on `b` and then `a` because `max_by_key`
- // returns the last maximum.
- let largest_niche = Niche::from_scalar(dl, b_offset, b)
- .into_iter()
- .chain(Niche::from_scalar(dl, Size::ZERO, a))
- .max_by_key(|niche| niche.available(dl));
-
- LayoutS {
- variants: Variants::Single { index: VariantIdx::new(0) },
- fields: FieldsShape::Arbitrary {
- offsets: vec![Size::ZERO, b_offset],
- memory_index: vec![0, 1],
- },
- abi: Abi::ScalarPair(a, b),
- largest_niche,
- align,
- size,
- }
-}
-
fn univariant_uninterned<'tcx>(
cx: &LayoutCx<'tcx, TyCtxt<'tcx>>,
ty: Ty<'tcx>,
fields: &[TyAndLayout<'_>],
repr: &ReprOptions,
kind: StructKind,
-) -> Result<LayoutS<'tcx>, LayoutError<'tcx>> {
+) -> Result<LayoutS<VariantIdx>, LayoutError<'tcx>> {
let dl = cx.data_layout();
let pack = repr.pack;
if pack.is_some() && repr.align.is_some() {
@@ -130,208 +88,7 @@ fn univariant_uninterned<'tcx>(
return Err(LayoutError::Unknown(ty));
}
- let mut align = if pack.is_some() { dl.i8_align } else { dl.aggregate_align };
-
- let mut inverse_memory_index: Vec<u32> = (0..fields.len() as u32).collect();
-
- let optimize = !repr.inhibit_struct_field_reordering_opt();
- if optimize {
- let end = if let StructKind::MaybeUnsized = kind { fields.len() - 1 } else { fields.len() };
- let optimizing = &mut inverse_memory_index[..end];
- let field_align = |f: &TyAndLayout<'_>| {
- if let Some(pack) = pack { f.align.abi.min(pack) } else { f.align.abi }
- };
-
- // If `-Z randomize-layout` was enabled for the type definition we can shuffle
- // the field ordering to try and catch some code making assumptions about layouts
- // we don't guarantee
- if repr.can_randomize_type_layout() {
- // `ReprOptions.layout_seed` is a deterministic seed that we can use to
- // randomize field ordering with
- let mut rng = Xoshiro128StarStar::seed_from_u64(repr.field_shuffle_seed);
-
- // Shuffle the ordering of the fields
- optimizing.shuffle(&mut rng);
-
- // Otherwise we just leave things alone and actually optimize the type's fields
- } else {
- match kind {
- StructKind::AlwaysSized | StructKind::MaybeUnsized => {
- optimizing.sort_by_key(|&x| {
- // Place ZSTs first to avoid "interesting offsets",
- // especially with only one or two non-ZST fields.
- let f = &fields[x as usize];
- (!f.is_zst(), cmp::Reverse(field_align(f)))
- });
- }
-
- StructKind::Prefixed(..) => {
- // Sort in ascending alignment so that the layout stays optimal
- // regardless of the prefix
- optimizing.sort_by_key(|&x| field_align(&fields[x as usize]));
- }
- }
-
- // FIXME(Kixiron): We can always shuffle fields within a given alignment class
- // regardless of the status of `-Z randomize-layout`
- }
- }
-
- // inverse_memory_index holds field indices by increasing memory offset.
- // That is, if field 5 has offset 0, the first element of inverse_memory_index is 5.
- // We now write field offsets to the corresponding offset slot;
- // field 5 with offset 0 puts 0 in offsets[5].
- // At the bottom of this function, we invert `inverse_memory_index` to
- // produce `memory_index` (see `invert_mapping`).
-
- let mut sized = true;
- let mut offsets = vec![Size::ZERO; fields.len()];
- let mut offset = Size::ZERO;
- let mut largest_niche = None;
- let mut largest_niche_available = 0;
-
- if let StructKind::Prefixed(prefix_size, prefix_align) = kind {
- let prefix_align =
- if let Some(pack) = pack { prefix_align.min(pack) } else { prefix_align };
- align = align.max(AbiAndPrefAlign::new(prefix_align));
- offset = prefix_size.align_to(prefix_align);
- }
-
- for &i in &inverse_memory_index {
- let field = fields[i as usize];
- if !sized {
- cx.tcx.sess.delay_span_bug(
- DUMMY_SP,
- &format!(
- "univariant: field #{} of `{}` comes after unsized field",
- offsets.len(),
- ty
- ),
- );
- }
-
- if field.is_unsized() {
- sized = false;
- }
-
- // Invariant: offset < dl.obj_size_bound() <= 1<<61
- let field_align = if let Some(pack) = pack {
- field.align.min(AbiAndPrefAlign::new(pack))
- } else {
- field.align
- };
- offset = offset.align_to(field_align.abi);
- align = align.max(field_align);
-
- debug!("univariant offset: {:?} field: {:#?}", offset, field);
- offsets[i as usize] = offset;
-
- if let Some(mut niche) = field.largest_niche {
- let available = niche.available(dl);
- if available > largest_niche_available {
- largest_niche_available = available;
- niche.offset += offset;
- largest_niche = Some(niche);
- }
- }
-
- offset = offset.checked_add(field.size, dl).ok_or(LayoutError::SizeOverflow(ty))?;
- }
-
- if let Some(repr_align) = repr.align {
- align = align.max(AbiAndPrefAlign::new(repr_align));
- }
-
- debug!("univariant min_size: {:?}", offset);
- let min_size = offset;
-
- // As stated above, inverse_memory_index holds field indices by increasing offset.
- // This makes it an already-sorted view of the offsets vec.
- // To invert it, consider:
- // If field 5 has offset 0, offsets[0] is 5, and memory_index[5] should be 0.
- // Field 5 would be the first element, so memory_index is i:
- // Note: if we didn't optimize, it's already right.
-
- let memory_index =
- if optimize { invert_mapping(&inverse_memory_index) } else { inverse_memory_index };
-
- let size = min_size.align_to(align.abi);
- let mut abi = Abi::Aggregate { sized };
-
- // Unpack newtype ABIs and find scalar pairs.
- if sized && size.bytes() > 0 {
- // All other fields must be ZSTs.
- let mut non_zst_fields = fields.iter().enumerate().filter(|&(_, f)| !f.is_zst());
-
- match (non_zst_fields.next(), non_zst_fields.next(), non_zst_fields.next()) {
- // We have exactly one non-ZST field.
- (Some((i, field)), None, None) => {
- // Field fills the struct and it has a scalar or scalar pair ABI.
- if offsets[i].bytes() == 0 && align.abi == field.align.abi && size == field.size {
- match field.abi {
- // For plain scalars, or vectors of them, we can't unpack
- // newtypes for `#[repr(C)]`, as that affects C ABIs.
- Abi::Scalar(_) | Abi::Vector { .. } if optimize => {
- abi = field.abi;
- }
- // But scalar pairs are Rust-specific and get
- // treated as aggregates by C ABIs anyway.
- Abi::ScalarPair(..) => {
- abi = field.abi;
- }
- _ => {}
- }
- }
- }
-
- // Two non-ZST fields, and they're both scalars.
- (Some((i, a)), Some((j, b)), None) => {
- match (a.abi, b.abi) {
- (Abi::Scalar(a), Abi::Scalar(b)) => {
- // Order by the memory placement, not source order.
- let ((i, a), (j, b)) = if offsets[i] < offsets[j] {
- ((i, a), (j, b))
- } else {
- ((j, b), (i, a))
- };
- let pair = scalar_pair(cx, a, b);
- let pair_offsets = match pair.fields {
- FieldsShape::Arbitrary { ref offsets, ref memory_index } => {
- assert_eq!(memory_index, &[0, 1]);
- offsets
- }
- _ => bug!(),
- };
- if offsets[i] == pair_offsets[0]
- && offsets[j] == pair_offsets[1]
- && align == pair.align
- && size == pair.size
- {
- // We can use `ScalarPair` only when it matches our
- // already computed layout (including `#[repr(C)]`).
- abi = pair.abi;
- }
- }
- _ => {}
- }
- }
-
- _ => {}
- }
- }
-
- if fields.iter().any(|f| f.abi.is_uninhabited()) {
- abi = Abi::Uninhabited;
- }
-
- Ok(LayoutS {
- variants: Variants::Single { index: VariantIdx::new(0) },
- fields: FieldsShape::Arbitrary { offsets, memory_index },
- abi,
- largest_niche,
- align,
- size,
- })
+ cx.univariant(dl, fields, repr, kind).ok_or(LayoutError::SizeOverflow(ty))
}
fn layout_of_uncached<'tcx>(
@@ -382,14 +139,7 @@ fn layout_of_uncached<'tcx>(
}
// The never type.
- ty::Never => tcx.intern_layout(LayoutS {
- variants: Variants::Single { index: VariantIdx::new(0) },
- fields: FieldsShape::Primitive,
- abi: Abi::Uninhabited,
- largest_niche: None,
- align: dl.i8_align,
- size: Size::ZERO,
- }),
+ ty::Never => tcx.intern_layout(cx.layout_of_never_type()),
// Potentially-wide pointers.
ty::Ref(_, pointee, _) | ty::RawPtr(ty::TypeAndMut { ty: pointee, .. }) => {
@@ -418,7 +168,7 @@ fn layout_of_uncached<'tcx>(
};
// Effectively a (ptr, meta) tuple.
- tcx.intern_layout(scalar_pair(cx, data_ptr, metadata))
+ tcx.intern_layout(cx.scalar_pair(data_ptr, metadata))
}
ty::Dynamic(_, _, ty::DynStar) => {
@@ -426,7 +176,7 @@ fn layout_of_uncached<'tcx>(
data.valid_range_mut().start = 0;
let mut vtable = scalar_unit(Pointer);
vtable.valid_range_mut().start = 1;
- tcx.intern_layout(scalar_pair(cx, data, vtable))
+ tcx.intern_layout(cx.scalar_pair(data, vtable))
}
// Arrays and slices.
@@ -442,8 +192,7 @@ fn layout_of_uncached<'tcx>(
let element = cx.layout_of(element)?;
let size = element.size.checked_mul(count, dl).ok_or(LayoutError::SizeOverflow(ty))?;
- let abi = if count != 0 && tcx.conservative_is_privately_uninhabited(param_env.and(ty))
- {
+ let abi = if count != 0 && ty.is_privately_uninhabited(tcx, param_env) {
Abi::Uninhabited
} else {
Abi::Aggregate { sized: true }
@@ -576,8 +325,8 @@ fn layout_of_uncached<'tcx>(
// Extract the number of elements from the layout of the array field:
let FieldsShape::Array { count, .. } = cx.layout_of(f0_ty)?.layout.fields() else {
- return Err(LayoutError::Unknown(ty));
- };
+ return Err(LayoutError::Unknown(ty));
+ };
(*e_ty, *count, true)
} else {
@@ -602,14 +351,14 @@ fn layout_of_uncached<'tcx>(
// Compute the ABI of the element type:
let e_ly = cx.layout_of(e_ty)?;
let Abi::Scalar(e_abi) = e_ly.abi else {
- // This error isn't caught in typeck, e.g., if
- // the element type of the vector is generic.
- tcx.sess.fatal(&format!(
- "monomorphising SIMD type `{}` with a non-primitive-scalar \
- (integer/float/pointer) element type `{}`",
- ty, e_ty
- ))
- };
+ // This error isn't caught in typeck, e.g., if
+ // the element type of the vector is generic.
+ tcx.sess.fatal(&format!(
+ "monomorphising SIMD type `{}` with a non-primitive-scalar \
+ (integer/float/pointer) element type `{}`",
+ ty, e_ty
+ ))
+ };
// Compute the size and alignment of the vector:
let size = e_ly.size.checked_mul(e_len, dl).ok_or(LayoutError::SizeOverflow(ty))?;
@@ -656,681 +405,41 @@ fn layout_of_uncached<'tcx>(
return Err(LayoutError::Unknown(ty));
}
- let mut align =
- if def.repr().pack.is_some() { dl.i8_align } else { dl.aggregate_align };
-
- if let Some(repr_align) = def.repr().align {
- align = align.max(AbiAndPrefAlign::new(repr_align));
- }
-
- let optimize = !def.repr().inhibit_union_abi_opt();
- let mut size = Size::ZERO;
- let mut abi = Abi::Aggregate { sized: true };
- let index = VariantIdx::new(0);
- for field in &variants[index] {
- assert!(!field.is_unsized());
- align = align.max(field.align);
-
- // If all non-ZST fields have the same ABI, forward this ABI
- if optimize && !field.is_zst() {
- // Discard valid range information and allow undef
- let field_abi = match field.abi {
- Abi::Scalar(x) => Abi::Scalar(x.to_union()),
- Abi::ScalarPair(x, y) => Abi::ScalarPair(x.to_union(), y.to_union()),
- Abi::Vector { element: x, count } => {
- Abi::Vector { element: x.to_union(), count }
- }
- Abi::Uninhabited | Abi::Aggregate { .. } => {
- Abi::Aggregate { sized: true }
- }
- };
-
- if size == Size::ZERO {
- // first non ZST: initialize 'abi'
- abi = field_abi;
- } else if abi != field_abi {
- // different fields have different ABI: reset to Aggregate
- abi = Abi::Aggregate { sized: true };
- }
- }
-
- size = cmp::max(size, field.size);
- }
-
- if let Some(pack) = def.repr().pack {
- align = align.min(AbiAndPrefAlign::new(pack));
- }
-
- return Ok(tcx.intern_layout(LayoutS {
- variants: Variants::Single { index },
- fields: FieldsShape::Union(
- NonZeroUsize::new(variants[index].len()).ok_or(LayoutError::Unknown(ty))?,
- ),
- abi,
- largest_niche: None,
- align,
- size: size.align_to(align.abi),
- }));
- }
-
- // A variant is absent if it's uninhabited and only has ZST fields.
- // Present uninhabited variants only require space for their fields,
- // but *not* an encoding of the discriminant (e.g., a tag value).
- // See issue #49298 for more details on the need to leave space
- // for non-ZST uninhabited data (mostly partial initialization).
- let absent = |fields: &[TyAndLayout<'_>]| {
- let uninhabited = fields.iter().any(|f| f.abi.is_uninhabited());
- let is_zst = fields.iter().all(|f| f.is_zst());
- uninhabited && is_zst
- };
- let (present_first, present_second) = {
- let mut present_variants = variants
- .iter_enumerated()
- .filter_map(|(i, v)| if absent(v) { None } else { Some(i) });
- (present_variants.next(), present_variants.next())
- };
- let present_first = match present_first {
- Some(present_first) => present_first,
- // Uninhabited because it has no variants, or only absent ones.
- None if def.is_enum() => {
- return Ok(tcx.layout_of(param_env.and(tcx.types.never))?.layout);
- }
- // If it's a struct, still compute a layout so that we can still compute the
- // field offsets.
- None => VariantIdx::new(0),
- };
-
- let is_struct = !def.is_enum() ||
- // Only one variant is present.
- (present_second.is_none() &&
- // Representation optimizations are allowed.
- !def.repr().inhibit_enum_layout_opt());
- if is_struct {
- // Struct, or univariant enum equivalent to a struct.
- // (Typechecking will reject discriminant-sizing attrs.)
-
- let v = present_first;
- let kind = if def.is_enum() || variants[v].is_empty() {
- StructKind::AlwaysSized
- } else {
- let param_env = tcx.param_env(def.did());
- let last_field = def.variant(v).fields.last().unwrap();
- let always_sized = tcx.type_of(last_field.did).is_sized(tcx, param_env);
- if !always_sized { StructKind::MaybeUnsized } else { StructKind::AlwaysSized }
- };
-
- let mut st = univariant_uninterned(cx, ty, &variants[v], &def.repr(), kind)?;
- st.variants = Variants::Single { index: v };
-
- if def.is_unsafe_cell() {
- let hide_niches = |scalar: &mut _| match scalar {
- Scalar::Initialized { value, valid_range } => {
- *valid_range = WrappingRange::full(value.size(dl))
- }
- // Already doesn't have any niches
- Scalar::Union { .. } => {}
- };
- match &mut st.abi {
- Abi::Uninhabited => {}
- Abi::Scalar(scalar) => hide_niches(scalar),
- Abi::ScalarPair(a, b) => {
- hide_niches(a);
- hide_niches(b);
- }
- Abi::Vector { element, count: _ } => hide_niches(element),
- Abi::Aggregate { sized: _ } => {}
- }
- st.largest_niche = None;
- return Ok(tcx.intern_layout(st));
- }
-
- let (start, end) = cx.tcx.layout_scalar_valid_range(def.did());
- match st.abi {
- Abi::Scalar(ref mut scalar) | Abi::ScalarPair(ref mut scalar, _) => {
- // the asserts ensure that we are not using the
- // `#[rustc_layout_scalar_valid_range(n)]`
- // attribute to widen the range of anything as that would probably
- // result in UB somewhere
- // FIXME(eddyb) the asserts are probably not needed,
- // as larger validity ranges would result in missed
- // optimizations, *not* wrongly assuming the inner
- // value is valid. e.g. unions enlarge validity ranges,
- // because the values may be uninitialized.
- if let Bound::Included(start) = start {
- // FIXME(eddyb) this might be incorrect - it doesn't
- // account for wrap-around (end < start) ranges.
- let valid_range = scalar.valid_range_mut();
- assert!(valid_range.start <= start);
- valid_range.start = start;
- }
- if let Bound::Included(end) = end {
- // FIXME(eddyb) this might be incorrect - it doesn't
- // account for wrap-around (end < start) ranges.
- let valid_range = scalar.valid_range_mut();
- assert!(valid_range.end >= end);
- valid_range.end = end;
- }
-
- // Update `largest_niche` if we have introduced a larger niche.
- let niche = Niche::from_scalar(dl, Size::ZERO, *scalar);
- if let Some(niche) = niche {
- match st.largest_niche {
- Some(largest_niche) => {
- // Replace the existing niche even if they're equal,
- // because this one is at a lower offset.
- if largest_niche.available(dl) <= niche.available(dl) {
- st.largest_niche = Some(niche);
- }
- }
- None => st.largest_niche = Some(niche),
- }
- }
- }
- _ => assert!(
- start == Bound::Unbounded && end == Bound::Unbounded,
- "nonscalar layout for layout_scalar_valid_range type {:?}: {:#?}",
- def,
- st,
- ),
- }
-
- return Ok(tcx.intern_layout(st));
- }
-
- // At this point, we have handled all unions and
- // structs. (We have also handled univariant enums
- // that allow representation optimization.)
- assert!(def.is_enum());
-
- // Until we've decided whether to use the tagged or
- // niche filling LayoutS, we don't want to intern the
- // variant layouts, so we can't store them in the
- // overall LayoutS. Store the overall LayoutS
- // and the variant LayoutSs here until then.
- struct TmpLayout<'tcx> {
- layout: LayoutS<'tcx>,
- variants: IndexVec<VariantIdx, LayoutS<'tcx>>,
+ return Ok(tcx.intern_layout(
+ cx.layout_of_union(&def.repr(), &variants).ok_or(LayoutError::Unknown(ty))?,
+ ));
}
- let calculate_niche_filling_layout =
- || -> Result<Option<TmpLayout<'tcx>>, LayoutError<'tcx>> {
- // The current code for niche-filling relies on variant indices
- // instead of actual discriminants, so enums with
- // explicit discriminants (RFC #2363) would misbehave.
- if def.repr().inhibit_enum_layout_opt()
+ tcx.intern_layout(
+ cx.layout_of_struct_or_enum(
+ &def.repr(),
+ &variants,
+ def.is_enum(),
+ def.is_unsafe_cell(),
+ tcx.layout_scalar_valid_range(def.did()),
+ |min, max| Integer::repr_discr(tcx, ty, &def.repr(), min, max),
+ def.is_enum()
+ .then(|| def.discriminants(tcx).map(|(v, d)| (v, d.val as i128)))
+ .into_iter()
+ .flatten(),
+ def.repr().inhibit_enum_layout_opt()
|| def
.variants()
.iter_enumerated()
- .any(|(i, v)| v.discr != ty::VariantDiscr::Relative(i.as_u32()))
- {
- return Ok(None);
- }
-
- if variants.len() < 2 {
- return Ok(None);
- }
-
- let mut align = dl.aggregate_align;
- let mut variant_layouts = variants
- .iter_enumerated()
- .map(|(j, v)| {
- let mut st = univariant_uninterned(
- cx,
- ty,
- v,
- &def.repr(),
- StructKind::AlwaysSized,
- )?;
- st.variants = Variants::Single { index: j };
-
- align = align.max(st.align);
-
- Ok(st)
- })
- .collect::<Result<IndexVec<VariantIdx, _>, _>>()?;
-
- let largest_variant_index = match variant_layouts
- .iter_enumerated()
- .max_by_key(|(_i, layout)| layout.size.bytes())
- .map(|(i, _layout)| i)
- {
- None => return Ok(None),
- Some(i) => i,
- };
-
- let all_indices = VariantIdx::new(0)..=VariantIdx::new(variants.len() - 1);
- let needs_disc = |index: VariantIdx| {
- index != largest_variant_index && !absent(&variants[index])
- };
- let niche_variants = all_indices.clone().find(|v| needs_disc(*v)).unwrap()
- ..=all_indices.rev().find(|v| needs_disc(*v)).unwrap();
-
- let count = niche_variants.size_hint().1.unwrap() as u128;
-
- // Find the field with the largest niche
- let (field_index, niche, (niche_start, niche_scalar)) = match variants
- [largest_variant_index]
- .iter()
- .enumerate()
- .filter_map(|(j, field)| Some((j, field.largest_niche?)))
- .max_by_key(|(_, niche)| niche.available(dl))
- .and_then(|(j, niche)| Some((j, niche, niche.reserve(cx, count)?)))
+ .any(|(i, v)| v.discr != ty::VariantDiscr::Relative(i.as_u32())),
{
- None => return Ok(None),
- Some(x) => x,
- };
-
- let niche_offset = niche.offset
- + variant_layouts[largest_variant_index].fields.offset(field_index);
- let niche_size = niche.value.size(dl);
- let size = variant_layouts[largest_variant_index].size.align_to(align.abi);
-
- let all_variants_fit =
- variant_layouts.iter_enumerated_mut().all(|(i, layout)| {
- if i == largest_variant_index {
- return true;
- }
-
- layout.largest_niche = None;
-
- if layout.size <= niche_offset {
- // This variant will fit before the niche.
- return true;
- }
-
- // Determine if it'll fit after the niche.
- let this_align = layout.align.abi;
- let this_offset = (niche_offset + niche_size).align_to(this_align);
-
- if this_offset + layout.size > size {
- return false;
- }
-
- // It'll fit, but we need to make some adjustments.
- match layout.fields {
- FieldsShape::Arbitrary { ref mut offsets, .. } => {
- for (j, offset) in offsets.iter_mut().enumerate() {
- if !variants[i][j].is_zst() {
- *offset += this_offset;
- }
- }
- }
- _ => {
- panic!("Layout of fields should be Arbitrary for variants")
- }
- }
-
- // It can't be a Scalar or ScalarPair because the offset isn't 0.
- if !layout.abi.is_uninhabited() {
- layout.abi = Abi::Aggregate { sized: true };
- }
- layout.size += this_offset;
-
- true
- });
-
- if !all_variants_fit {
- return Ok(None);
- }
-
- let largest_niche = Niche::from_scalar(dl, niche_offset, niche_scalar);
-
- let others_zst = variant_layouts
- .iter_enumerated()
- .all(|(i, layout)| i == largest_variant_index || layout.size == Size::ZERO);
- let same_size = size == variant_layouts[largest_variant_index].size;
- let same_align = align == variant_layouts[largest_variant_index].align;
-
- let abi = if variant_layouts.iter().all(|v| v.abi.is_uninhabited()) {
- Abi::Uninhabited
- } else if same_size && same_align && others_zst {
- match variant_layouts[largest_variant_index].abi {
- // When the total alignment and size match, we can use the
- // same ABI as the scalar variant with the reserved niche.
- Abi::Scalar(_) => Abi::Scalar(niche_scalar),
- Abi::ScalarPair(first, second) => {
- // Only the niche is guaranteed to be initialised,
- // so use union layouts for the other primitive.
- if niche_offset == Size::ZERO {
- Abi::ScalarPair(niche_scalar, second.to_union())
- } else {
- Abi::ScalarPair(first.to_union(), niche_scalar)
- }
- }
- _ => Abi::Aggregate { sized: true },
- }
- } else {
- Abi::Aggregate { sized: true }
- };
-
- let layout = LayoutS {
- variants: Variants::Multiple {
- tag: niche_scalar,
- tag_encoding: TagEncoding::Niche {
- untagged_variant: largest_variant_index,
- niche_variants,
- niche_start,
- },
- tag_field: 0,
- variants: IndexVec::new(),
- },
- fields: FieldsShape::Arbitrary {
- offsets: vec![niche_offset],
- memory_index: vec![0],
- },
- abi,
- largest_niche,
- size,
- align,
- };
-
- Ok(Some(TmpLayout { layout, variants: variant_layouts }))
- };
-
- let niche_filling_layout = calculate_niche_filling_layout()?;
-
- let (mut min, mut max) = (i128::MAX, i128::MIN);
- let discr_type = def.repr().discr_type();
- let bits = Integer::from_attr(cx, discr_type).size().bits();
- for (i, discr) in def.discriminants(tcx) {
- if variants[i].iter().any(|f| f.abi.is_uninhabited()) {
- continue;
- }
- let mut x = discr.val as i128;
- if discr_type.is_signed() {
- // sign extend the raw representation to be an i128
- x = (x << (128 - bits)) >> (128 - bits);
- }
- if x < min {
- min = x;
- }
- if x > max {
- max = x;
- }
- }
- // We might have no inhabited variants, so pretend there's at least one.
- if (min, max) == (i128::MAX, i128::MIN) {
- min = 0;
- max = 0;
- }
- assert!(min <= max, "discriminant range is {}...{}", min, max);
- let (min_ity, signed) = Integer::repr_discr(tcx, ty, &def.repr(), min, max);
-
- let mut align = dl.aggregate_align;
- let mut size = Size::ZERO;
-
- // We're interested in the smallest alignment, so start large.
- let mut start_align = Align::from_bytes(256).unwrap();
- assert_eq!(Integer::for_align(dl, start_align), None);
-
- // repr(C) on an enum tells us to make a (tag, union) layout,
- // so we need to grow the prefix alignment to be at least
- // the alignment of the union. (This value is used both for
-            // determining the alignment of the overall enum, and for
- // determining the alignment of the payload after the tag.)
- let mut prefix_align = min_ity.align(dl).abi;
- if def.repr().c() {
- for fields in &variants {
- for field in fields {
- prefix_align = prefix_align.max(field.align.abi);
- }
- }
- }
-
- // Create the set of structs that represent each variant.
- let mut layout_variants = variants
- .iter_enumerated()
- .map(|(i, field_layouts)| {
- let mut st = univariant_uninterned(
- cx,
- ty,
- &field_layouts,
- &def.repr(),
- StructKind::Prefixed(min_ity.size(), prefix_align),
- )?;
- st.variants = Variants::Single { index: i };
- // Find the first field we can't move later
- // to make room for a larger discriminant.
- for field in st.fields.index_by_increasing_offset().map(|j| field_layouts[j]) {
- if !field.is_zst() || field.align.abi.bytes() != 1 {
- start_align = start_align.min(field.align.abi);
- break;
- }
- }
- size = cmp::max(size, st.size);
- align = align.max(st.align);
- Ok(st)
- })
- .collect::<Result<IndexVec<VariantIdx, _>, _>>()?;
-
- // Align the maximum variant size to the largest alignment.
- size = size.align_to(align.abi);
-
- if size.bytes() >= dl.obj_size_bound() {
- return Err(LayoutError::SizeOverflow(ty));
- }
-
- let typeck_ity = Integer::from_attr(dl, def.repr().discr_type());
- if typeck_ity < min_ity {
- // It is a bug if Layout decided on a greater discriminant size than typeck for
- // some reason at this point (based on values discriminant can take on). Mostly
- // because this discriminant will be loaded, and then stored into variable of
- // type calculated by typeck. Consider such case (a bug): typeck decided on
- // byte-sized discriminant, but layout thinks we need a 16-bit to store all
- // discriminant values. That would be a bug, because then, in codegen, in order
- // to store this 16-bit discriminant into 8-bit sized temporary some of the
- // space necessary to represent would have to be discarded (or layout is wrong
- // on thinking it needs 16 bits)
- bug!(
- "layout decided on a larger discriminant type ({:?}) than typeck ({:?})",
- min_ity,
- typeck_ity
- );
- // However, it is fine to make discr type however large (as an optimisation)
- // after this point – we’ll just truncate the value we load in codegen.
- }
-
- // Check to see if we should use a different type for the
- // discriminant. We can safely use a type with the same size
- // as the alignment of the first field of each variant.
- // We increase the size of the discriminant to avoid LLVM copying
- // padding when it doesn't need to. This normally causes unaligned
- // load/stores and excessive memcpy/memset operations. By using a
- // bigger integer size, LLVM can be sure about its contents and
- // won't be so conservative.
-
- // Use the initial field alignment
- let mut ity = if def.repr().c() || def.repr().int.is_some() {
- min_ity
- } else {
- Integer::for_align(dl, start_align).unwrap_or(min_ity)
- };
-
- // If the alignment is not larger than the chosen discriminant size,
- // don't use the alignment as the final size.
- if ity <= min_ity {
- ity = min_ity;
- } else {
- // Patch up the variants' first few fields.
- let old_ity_size = min_ity.size();
- let new_ity_size = ity.size();
- for variant in &mut layout_variants {
- match variant.fields {
- FieldsShape::Arbitrary { ref mut offsets, .. } => {
- for i in offsets {
- if *i <= old_ity_size {
- assert_eq!(*i, old_ity_size);
- *i = new_ity_size;
+ let param_env = tcx.param_env(def.did());
+ def.is_struct()
+ && match def.variants().iter().next().and_then(|x| x.fields.last()) {
+ Some(last_field) => {
+ tcx.type_of(last_field.did).is_sized(tcx, param_env)
}
+ None => false,
}
- // We might be making the struct larger.
- if variant.size <= old_ity_size {
- variant.size = new_ity_size;
- }
- }
- _ => bug!(),
- }
- }
- }
-
- let tag_mask = ity.size().unsigned_int_max();
- let tag = Scalar::Initialized {
- value: Int(ity, signed),
- valid_range: WrappingRange {
- start: (min as u128 & tag_mask),
- end: (max as u128 & tag_mask),
- },
- };
- let mut abi = Abi::Aggregate { sized: true };
-
- if layout_variants.iter().all(|v| v.abi.is_uninhabited()) {
- abi = Abi::Uninhabited;
- } else if tag.size(dl) == size {
- // Make sure we only use scalar layout when the enum is entirely its
- // own tag (i.e. it has no padding nor any non-ZST variant fields).
- abi = Abi::Scalar(tag);
- } else {
- // Try to use a ScalarPair for all tagged enums.
- let mut common_prim = None;
- let mut common_prim_initialized_in_all_variants = true;
- for (field_layouts, layout_variant) in iter::zip(&variants, &layout_variants) {
- let FieldsShape::Arbitrary { ref offsets, .. } = layout_variant.fields else {
- bug!();
- };
- let mut fields = iter::zip(field_layouts, offsets).filter(|p| !p.0.is_zst());
- let (field, offset) = match (fields.next(), fields.next()) {
- (None, None) => {
- common_prim_initialized_in_all_variants = false;
- continue;
- }
- (Some(pair), None) => pair,
- _ => {
- common_prim = None;
- break;
- }
- };
- let prim = match field.abi {
- Abi::Scalar(scalar) => {
- common_prim_initialized_in_all_variants &=
- matches!(scalar, Scalar::Initialized { .. });
- scalar.primitive()
- }
- _ => {
- common_prim = None;
- break;
- }
- };
- if let Some(pair) = common_prim {
- // This is pretty conservative. We could go fancier
- // by conflating things like i32 and u32, or even
- // realising that (u8, u8) could just cohabit with
- // u16 or even u32.
- if pair != (prim, offset) {
- common_prim = None;
- break;
- }
- } else {
- common_prim = Some((prim, offset));
- }
- }
- if let Some((prim, offset)) = common_prim {
- let prim_scalar = if common_prim_initialized_in_all_variants {
- scalar_unit(prim)
- } else {
- // Common prim might be uninit.
- Scalar::Union { value: prim }
- };
- let pair = scalar_pair(cx, tag, prim_scalar);
- let pair_offsets = match pair.fields {
- FieldsShape::Arbitrary { ref offsets, ref memory_index } => {
- assert_eq!(memory_index, &[0, 1]);
- offsets
- }
- _ => bug!(),
- };
- if pair_offsets[0] == Size::ZERO
- && pair_offsets[1] == *offset
- && align == pair.align
- && size == pair.size
- {
- // We can use `ScalarPair` only when it matches our
- // already computed layout (including `#[repr(C)]`).
- abi = pair.abi;
- }
- }
- }
-
- // If we pick a "clever" (by-value) ABI, we might have to adjust the ABI of the
- // variants to ensure they are consistent. This is because a downcast is
- // semantically a NOP, and thus should not affect layout.
- if matches!(abi, Abi::Scalar(..) | Abi::ScalarPair(..)) {
- for variant in &mut layout_variants {
- // We only do this for variants with fields; the others are not accessed anyway.
- // Also do not overwrite any already existing "clever" ABIs.
- if variant.fields.count() > 0 && matches!(variant.abi, Abi::Aggregate { .. }) {
- variant.abi = abi;
- // Also need to bump up the size and alignment, so that the entire value fits in here.
- variant.size = cmp::max(variant.size, size);
- variant.align.abi = cmp::max(variant.align.abi, align.abi);
- }
- }
- }
-
- let largest_niche = Niche::from_scalar(dl, Size::ZERO, tag);
-
- let tagged_layout = LayoutS {
- variants: Variants::Multiple {
- tag,
- tag_encoding: TagEncoding::Direct,
- tag_field: 0,
- variants: IndexVec::new(),
- },
- fields: FieldsShape::Arbitrary { offsets: vec![Size::ZERO], memory_index: vec![0] },
- largest_niche,
- abi,
- align,
- size,
- };
-
- let tagged_layout = TmpLayout { layout: tagged_layout, variants: layout_variants };
-
- let mut best_layout = match (tagged_layout, niche_filling_layout) {
- (tl, Some(nl)) => {
- // Pick the smaller layout; otherwise,
- // pick the layout with the larger niche; otherwise,
- // pick tagged as it has simpler codegen.
- use Ordering::*;
- let niche_size = |tmp_l: &TmpLayout<'_>| {
- tmp_l.layout.largest_niche.map_or(0, |n| n.available(dl))
- };
- match (
- tl.layout.size.cmp(&nl.layout.size),
- niche_size(&tl).cmp(&niche_size(&nl)),
- ) {
- (Greater, _) => nl,
- (Equal, Less) => nl,
- _ => tl,
- }
- }
- (tl, None) => tl,
- };
-
- // Now we can intern the variant layouts and store them in the enum layout.
- best_layout.layout.variants = match best_layout.layout.variants {
- Variants::Multiple { tag, tag_encoding, tag_field, .. } => Variants::Multiple {
- tag,
- tag_encoding,
- tag_field,
- variants: best_layout
- .variants
- .into_iter()
- .map(|layout| tcx.intern_layout(layout))
- .collect(),
- },
- _ => bug!(),
- };
-
- tcx.intern_layout(best_layout.layout)
+ },
+ )
+ .ok_or(LayoutError::SizeOverflow(ty))?,
+ )
}
// Types with no meaningful known layout.
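The tagged-enum path removed above sign-extended each raw discriminant to `i128` before taking the `min..=max` range, and later masked that range back into the chosen tag's unsigned `WrappingRange`; the new call site simply hands the raw `d.val as i128` values to `cx.layout_of_struct_or_enum`. A minimal standalone sketch of that arithmetic (plain Rust, not the compiler's `Scalar`/`WrappingRange` types):

    // Sign-extend an n-bit raw discriminant to i128, as the removed loop did
    // with `(x << (128 - bits)) >> (128 - bits)`.
    fn sign_extend(raw: u128, bits: u32) -> i128 {
        ((raw as i128) << (128 - bits)) >> (128 - bits)
    }

    // Mask the signed min/max back into the unsigned representation of a tag
    // with `tag_bits` bits, yielding a (possibly wrapping) valid range.
    fn tag_valid_range(min: i128, max: i128, tag_bits: u32) -> (u128, u128) {
        let tag_mask = u128::MAX >> (128 - tag_bits);
        (min as u128 & tag_mask, max as u128 & tag_mask)
    }

    fn main() {
        // An i8 discriminant of -1 is stored as 0xFF.
        assert_eq!(sign_extend(0xFF, 8), -1);
        // Discriminants -1..=1 over an 8-bit tag give the wrapping range 255..=1.
        assert_eq!(tag_valid_range(-1, 1, 8), (255, 1));
    }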
@@ -1488,8 +597,8 @@ fn generator_layout<'tcx>(
let subst_field = |ty: Ty<'tcx>| EarlyBinder(ty).subst(tcx, substs);
let Some(info) = tcx.generator_layout(def_id) else {
- return Err(LayoutError::Unknown(ty));
- };
+ return Err(LayoutError::Unknown(ty));
+ };
let (ineligible_locals, assignments) = generator_saved_local_eligibility(&info);
// Build a prefix layout, including "promoting" all ineligible
@@ -1592,8 +701,8 @@ fn generator_layout<'tcx>(
variant.variants = Variants::Single { index };
let FieldsShape::Arbitrary { offsets, memory_index } = variant.fields else {
- bug!();
- };
+ bug!();
+ };
// Now, stitch the promoted and variant-only fields back together in
// the order they are mentioned by our GeneratorLayout.
@@ -1640,13 +749,13 @@ fn generator_layout<'tcx>(
size = size.max(variant.size);
align = align.max(variant.align);
- Ok(tcx.intern_layout(variant))
+ Ok(variant)
})
.collect::<Result<IndexVec<VariantIdx, _>, _>>()?;
size = size.align_to(align.abi);
- let abi = if prefix.abi.is_uninhabited() || variants.iter().all(|v| v.abi().is_uninhabited()) {
+ let abi = if prefix.abi.is_uninhabited() || variants.iter().all(|v| v.abi.is_uninhabited()) {
Abi::Uninhabited
} else {
Abi::Aggregate { sized: true }
diff --git a/compiler/rustc_ty_utils/src/layout_sanity_check.rs b/compiler/rustc_ty_utils/src/layout_sanity_check.rs
index 100926ad4..a5311dbd1 100644
--- a/compiler/rustc_ty_utils/src/layout_sanity_check.rs
+++ b/compiler/rustc_ty_utils/src/layout_sanity_check.rs
@@ -12,7 +12,7 @@ pub(super) fn sanity_check_layout<'tcx>(
layout: &TyAndLayout<'tcx>,
) {
// Type-level uninhabitedness should always imply ABI uninhabitedness.
- if cx.tcx.conservative_is_privately_uninhabited(cx.param_env.and(layout.ty)) {
+ if layout.ty.is_privately_uninhabited(cx.tcx, cx.param_env) {
assert!(layout.abi.is_uninhabited());
}
@@ -20,283 +20,293 @@ pub(super) fn sanity_check_layout<'tcx>(
bug!("size is not a multiple of align, in the following layout:\n{layout:#?}");
}
- if cfg!(debug_assertions) {
- /// Yields non-ZST fields of the type
- fn non_zst_fields<'tcx, 'a>(
- cx: &'a LayoutCx<'tcx, TyCtxt<'tcx>>,
- layout: &'a TyAndLayout<'tcx>,
- ) -> impl Iterator<Item = (Size, TyAndLayout<'tcx>)> + 'a {
- (0..layout.layout.fields().count()).filter_map(|i| {
- let field = layout.field(cx, i);
- // Also checking `align == 1` here leads to test failures in
- // `layout/zero-sized-array-union.rs`, where a type has a zero-size field with
- // alignment 4 that still gets ignored during layout computation (which is okay
- // since other fields already force alignment 4).
- let zst = field.is_zst();
- (!zst).then(|| (layout.fields.offset(i), field))
- })
- }
+ if !cfg!(debug_assertions) {
+ // Stop here, the rest is kind of expensive.
+ return;
+ }
- fn skip_newtypes<'tcx>(
- cx: &LayoutCx<'tcx, TyCtxt<'tcx>>,
- layout: &TyAndLayout<'tcx>,
- ) -> TyAndLayout<'tcx> {
- if matches!(layout.layout.variants(), Variants::Multiple { .. }) {
- // Definitely not a newtype of anything.
- return *layout;
- }
- let mut fields = non_zst_fields(cx, layout);
- let Some(first) = fields.next() else {
- // No fields here, so this could be a primitive or enum -- either way it's not a newtype around a thing
- return *layout
- };
- if fields.next().is_none() {
- let (offset, first) = first;
- if offset == Size::ZERO && first.layout.size() == layout.size {
- // This is a newtype, so keep recursing.
- // FIXME(RalfJung): I don't think it would be correct to do any checks for
- // alignment here, so we don't. Is that correct?
- return skip_newtypes(cx, &first);
- }
+ /// Yields non-ZST fields of the type
+ fn non_zst_fields<'tcx, 'a>(
+ cx: &'a LayoutCx<'tcx, TyCtxt<'tcx>>,
+ layout: &'a TyAndLayout<'tcx>,
+ ) -> impl Iterator<Item = (Size, TyAndLayout<'tcx>)> + 'a {
+ (0..layout.layout.fields().count()).filter_map(|i| {
+ let field = layout.field(cx, i);
+ // Also checking `align == 1` here leads to test failures in
+ // `layout/zero-sized-array-union.rs`, where a type has a zero-size field with
+ // alignment 4 that still gets ignored during layout computation (which is okay
+ // since other fields already force alignment 4).
+ let zst = field.is_zst();
+ (!zst).then(|| (layout.fields.offset(i), field))
+ })
+ }
+
+ fn skip_newtypes<'tcx>(
+ cx: &LayoutCx<'tcx, TyCtxt<'tcx>>,
+ layout: &TyAndLayout<'tcx>,
+ ) -> TyAndLayout<'tcx> {
+ if matches!(layout.layout.variants(), Variants::Multiple { .. }) {
+ // Definitely not a newtype of anything.
+ return *layout;
+ }
+ let mut fields = non_zst_fields(cx, layout);
+ let Some(first) = fields.next() else {
+ // No fields here, so this could be a primitive or enum -- either way it's not a newtype around a thing
+ return *layout
+ };
+ if fields.next().is_none() {
+ let (offset, first) = first;
+ if offset == Size::ZERO && first.layout.size() == layout.size {
+ // This is a newtype, so keep recursing.
+ // FIXME(RalfJung): I don't think it would be correct to do any checks for
+ // alignment here, so we don't. Is that correct?
+ return skip_newtypes(cx, &first);
}
- // No more newtypes here.
- *layout
}
+ // No more newtypes here.
+ *layout
+ }
- fn check_layout_abi<'tcx>(cx: &LayoutCx<'tcx, TyCtxt<'tcx>>, layout: &TyAndLayout<'tcx>) {
- match layout.layout.abi() {
- Abi::Scalar(scalar) => {
- // No padding in scalars.
- let size = scalar.size(cx);
- let align = scalar.align(cx).abi;
- assert_eq!(
- layout.layout.size(),
- size,
- "size mismatch between ABI and layout in {layout:#?}"
- );
- assert_eq!(
- layout.layout.align().abi,
- align,
- "alignment mismatch between ABI and layout in {layout:#?}"
- );
- // Check that this matches the underlying field.
- let inner = skip_newtypes(cx, layout);
- assert!(
- matches!(inner.layout.abi(), Abi::Scalar(_)),
- "`Scalar` type {} is newtype around non-`Scalar` type {}",
- layout.ty,
- inner.ty
- );
- match inner.layout.fields() {
- FieldsShape::Primitive => {
- // Fine.
- }
- FieldsShape::Union(..) => {
- // FIXME: I guess we could also check something here? Like, look at all fields?
- return;
- }
- FieldsShape::Arbitrary { .. } => {
- // Should be an enum, the only field is the discriminant.
- assert!(
- inner.ty.is_enum(),
- "`Scalar` layout for non-primitive non-enum type {}",
- inner.ty
- );
- assert_eq!(
- inner.layout.fields().count(),
- 1,
- "`Scalar` layout for multiple-field type in {inner:#?}",
- );
- let offset = inner.layout.fields().offset(0);
- let field = inner.field(cx, 0);
- // The field should be at the right offset, and match the `scalar` layout.
- assert_eq!(
- offset,
- Size::ZERO,
- "`Scalar` field at non-0 offset in {inner:#?}",
- );
- assert_eq!(
- field.size, size,
- "`Scalar` field with bad size in {inner:#?}",
- );
- assert_eq!(
- field.align.abi, align,
- "`Scalar` field with bad align in {inner:#?}",
- );
- assert!(
- matches!(field.abi, Abi::Scalar(_)),
- "`Scalar` field with bad ABI in {inner:#?}",
- );
- }
- _ => {
- panic!("`Scalar` layout for non-primitive non-enum type {}", inner.ty);
- }
+ fn check_layout_abi<'tcx>(cx: &LayoutCx<'tcx, TyCtxt<'tcx>>, layout: &TyAndLayout<'tcx>) {
+ match layout.layout.abi() {
+ Abi::Scalar(scalar) => {
+ // No padding in scalars.
+ let size = scalar.size(cx);
+ let align = scalar.align(cx).abi;
+ assert_eq!(
+ layout.layout.size(),
+ size,
+ "size mismatch between ABI and layout in {layout:#?}"
+ );
+ assert_eq!(
+ layout.layout.align().abi,
+ align,
+ "alignment mismatch between ABI and layout in {layout:#?}"
+ );
+ // Check that this matches the underlying field.
+ let inner = skip_newtypes(cx, layout);
+ assert!(
+ matches!(inner.layout.abi(), Abi::Scalar(_)),
+ "`Scalar` type {} is newtype around non-`Scalar` type {}",
+ layout.ty,
+ inner.ty
+ );
+ match inner.layout.fields() {
+ FieldsShape::Primitive => {
+ // Fine.
}
- }
- Abi::ScalarPair(scalar1, scalar2) => {
- // Sanity-check scalar pairs. These are a bit more flexible and support
- // padding, but we can at least ensure both fields actually fit into the layout
- // and the alignment requirement has not been weakened.
- let size1 = scalar1.size(cx);
- let align1 = scalar1.align(cx).abi;
- let size2 = scalar2.size(cx);
- let align2 = scalar2.align(cx).abi;
- assert!(
- layout.layout.align().abi >= cmp::max(align1, align2),
- "alignment mismatch between ABI and layout in {layout:#?}",
- );
- let field2_offset = size1.align_to(align2);
- assert!(
- layout.layout.size() >= field2_offset + size2,
- "size mismatch between ABI and layout in {layout:#?}"
- );
- // Check that the underlying pair of fields matches.
- let inner = skip_newtypes(cx, layout);
- assert!(
- matches!(inner.layout.abi(), Abi::ScalarPair(..)),
- "`ScalarPair` type {} is newtype around non-`ScalarPair` type {}",
- layout.ty,
- inner.ty
- );
- if matches!(inner.layout.variants(), Variants::Multiple { .. }) {
- // FIXME: ScalarPair for enums is enormously complicated and it is very hard
- // to check anything about them.
+ FieldsShape::Union(..) => {
+ // FIXME: I guess we could also check something here? Like, look at all fields?
return;
}
- match inner.layout.fields() {
- FieldsShape::Arbitrary { .. } => {
- // Checked below.
- }
- FieldsShape::Union(..) => {
- // FIXME: I guess we could also check something here? Like, look at all fields?
- return;
- }
- _ => {
- panic!("`ScalarPair` layout with unexpected field shape in {inner:#?}");
- }
+ FieldsShape::Arbitrary { .. } => {
+ // Should be an enum, the only field is the discriminant.
+ assert!(
+ inner.ty.is_enum(),
+ "`Scalar` layout for non-primitive non-enum type {}",
+ inner.ty
+ );
+ assert_eq!(
+ inner.layout.fields().count(),
+ 1,
+ "`Scalar` layout for multiple-field type in {inner:#?}",
+ );
+ let offset = inner.layout.fields().offset(0);
+ let field = inner.field(cx, 0);
+ // The field should be at the right offset, and match the `scalar` layout.
+ assert_eq!(
+ offset,
+ Size::ZERO,
+ "`Scalar` field at non-0 offset in {inner:#?}",
+ );
+ assert_eq!(field.size, size, "`Scalar` field with bad size in {inner:#?}",);
+ assert_eq!(
+ field.align.abi, align,
+ "`Scalar` field with bad align in {inner:#?}",
+ );
+ assert!(
+ matches!(field.abi, Abi::Scalar(_)),
+ "`Scalar` field with bad ABI in {inner:#?}",
+ );
+ }
+ _ => {
+ panic!("`Scalar` layout for non-primitive non-enum type {}", inner.ty);
}
- let mut fields = non_zst_fields(cx, &inner);
- let (offset1, field1) = fields.next().unwrap_or_else(|| {
- panic!("`ScalarPair` layout for type with not even one non-ZST field: {inner:#?}")
- });
- let (offset2, field2) = fields.next().unwrap_or_else(|| {
- panic!("`ScalarPair` layout for type with less than two non-ZST fields: {inner:#?}")
- });
- assert!(
- fields.next().is_none(),
- "`ScalarPair` layout for type with at least three non-ZST fields: {inner:#?}"
- );
- // The fields might be in opposite order.
- let (offset1, field1, offset2, field2) = if offset1 <= offset2 {
- (offset1, field1, offset2, field2)
- } else {
- (offset2, field2, offset1, field1)
- };
- // The fields should be at the right offset, and match the `scalar` layout.
- assert_eq!(
- offset1,
- Size::ZERO,
- "`ScalarPair` first field at non-0 offset in {inner:#?}",
- );
- assert_eq!(
- field1.size, size1,
- "`ScalarPair` first field with bad size in {inner:#?}",
- );
- assert_eq!(
- field1.align.abi, align1,
- "`ScalarPair` first field with bad align in {inner:#?}",
- );
- assert!(
- matches!(field1.abi, Abi::Scalar(_)),
- "`ScalarPair` first field with bad ABI in {inner:#?}",
- );
- assert_eq!(
- offset2, field2_offset,
- "`ScalarPair` second field at bad offset in {inner:#?}",
- );
- assert_eq!(
- field2.size, size2,
- "`ScalarPair` second field with bad size in {inner:#?}",
- );
- assert_eq!(
- field2.align.abi, align2,
- "`ScalarPair` second field with bad align in {inner:#?}",
- );
- assert!(
- matches!(field2.abi, Abi::Scalar(_)),
- "`ScalarPair` second field with bad ABI in {inner:#?}",
- );
}
- Abi::Vector { count, element } => {
- // No padding in vectors. Alignment can be strengthened, though.
- assert!(
- layout.layout.align().abi >= element.align(cx).abi,
- "alignment mismatch between ABI and layout in {layout:#?}"
- );
- let size = element.size(cx) * count;
- assert_eq!(
- layout.layout.size(),
- size.align_to(cx.data_layout().vector_align(size).abi),
- "size mismatch between ABI and layout in {layout:#?}"
- );
+ }
+ Abi::ScalarPair(scalar1, scalar2) => {
+ // Sanity-check scalar pairs. Computing the expected size and alignment is a bit of work.
+ let size1 = scalar1.size(cx);
+ let align1 = scalar1.align(cx).abi;
+ let size2 = scalar2.size(cx);
+ let align2 = scalar2.align(cx).abi;
+ let align = cmp::max(align1, align2);
+ let field2_offset = size1.align_to(align2);
+ let size = (field2_offset + size2).align_to(align);
+ assert_eq!(
+ layout.layout.size(),
+ size,
+ "size mismatch between ABI and layout in {layout:#?}"
+ );
+ assert_eq!(
+ layout.layout.align().abi,
+ align,
+ "alignment mismatch between ABI and layout in {layout:#?}",
+ );
+ // Check that the underlying pair of fields matches.
+ let inner = skip_newtypes(cx, layout);
+ assert!(
+ matches!(inner.layout.abi(), Abi::ScalarPair(..)),
+ "`ScalarPair` type {} is newtype around non-`ScalarPair` type {}",
+ layout.ty,
+ inner.ty
+ );
+ if matches!(inner.layout.variants(), Variants::Multiple { .. }) {
+ // FIXME: ScalarPair for enums is enormously complicated and it is very hard
+ // to check anything about them.
+ return;
+ }
+ match inner.layout.fields() {
+ FieldsShape::Arbitrary { .. } => {
+ // Checked below.
+ }
+ FieldsShape::Union(..) => {
+ // FIXME: I guess we could also check something here? Like, look at all fields?
+ return;
+ }
+ _ => {
+ panic!("`ScalarPair` layout with unexpected field shape in {inner:#?}");
+ }
}
- Abi::Uninhabited | Abi::Aggregate { .. } => {} // Nothing to check.
+ let mut fields = non_zst_fields(cx, &inner);
+ let (offset1, field1) = fields.next().unwrap_or_else(|| {
+ panic!(
+ "`ScalarPair` layout for type with not even one non-ZST field: {inner:#?}"
+ )
+ });
+ let (offset2, field2) = fields.next().unwrap_or_else(|| {
+ panic!(
+ "`ScalarPair` layout for type with less than two non-ZST fields: {inner:#?}"
+ )
+ });
+ assert!(
+ fields.next().is_none(),
+ "`ScalarPair` layout for type with at least three non-ZST fields: {inner:#?}"
+ );
+ // The fields might be in opposite order.
+ let (offset1, field1, offset2, field2) = if offset1 <= offset2 {
+ (offset1, field1, offset2, field2)
+ } else {
+ (offset2, field2, offset1, field1)
+ };
+ // The fields should be at the right offset, and match the `scalar` layout.
+ assert_eq!(
+ offset1,
+ Size::ZERO,
+ "`ScalarPair` first field at non-0 offset in {inner:#?}",
+ );
+ assert_eq!(
+ field1.size, size1,
+ "`ScalarPair` first field with bad size in {inner:#?}",
+ );
+ assert_eq!(
+ field1.align.abi, align1,
+ "`ScalarPair` first field with bad align in {inner:#?}",
+ );
+ assert!(
+ matches!(field1.abi, Abi::Scalar(_)),
+ "`ScalarPair` first field with bad ABI in {inner:#?}",
+ );
+ assert_eq!(
+ offset2, field2_offset,
+ "`ScalarPair` second field at bad offset in {inner:#?}",
+ );
+ assert_eq!(
+ field2.size, size2,
+ "`ScalarPair` second field with bad size in {inner:#?}",
+ );
+ assert_eq!(
+ field2.align.abi, align2,
+ "`ScalarPair` second field with bad align in {inner:#?}",
+ );
+ assert!(
+ matches!(field2.abi, Abi::Scalar(_)),
+ "`ScalarPair` second field with bad ABI in {inner:#?}",
+ );
}
+ Abi::Vector { count, element } => {
+ // No padding in vectors, except possibly for trailing padding to make the size a multiple of align.
+ let size = element.size(cx) * count;
+ let align = cx.data_layout().vector_align(size).abi;
+ let size = size.align_to(align); // needed e.g. for vectors of size 3
+ assert!(align >= element.align(cx).abi); // just sanity-checking `vector_align`.
+ assert_eq!(
+ layout.layout.size(),
+ size,
+ "size mismatch between ABI and layout in {layout:#?}"
+ );
+ assert_eq!(
+ layout.layout.align().abi,
+ align,
+ "alignment mismatch between ABI and layout in {layout:#?}"
+ );
+ // FIXME: Do some kind of check of the inner type, like for Scalar and ScalarPair.
+ }
+ Abi::Uninhabited | Abi::Aggregate { .. } => {} // Nothing to check.
}
+ }
- check_layout_abi(cx, layout);
+ check_layout_abi(cx, layout);
- if let Variants::Multiple { variants, .. } = &layout.variants {
- for variant in variants.iter() {
- // No nested "multiple".
- assert!(matches!(variant.variants(), Variants::Single { .. }));
- // Variants should have the same or a smaller size as the full thing,
- // and same for alignment.
- if variant.size() > layout.size {
- bug!(
- "Type with size {} bytes has variant with size {} bytes: {layout:#?}",
- layout.size.bytes(),
- variant.size().bytes(),
- )
- }
- if variant.align().abi > layout.align.abi {
- bug!(
- "Type with alignment {} bytes has variant with alignment {} bytes: {layout:#?}",
- layout.align.abi.bytes(),
- variant.align().abi.bytes(),
- )
- }
- // Skip empty variants.
- if variant.size() == Size::ZERO
- || variant.fields().count() == 0
- || variant.abi().is_uninhabited()
- {
- // These are never actually accessed anyway, so we can skip the coherence check
- // for them. They also fail that check, since they have
-                // `Aggregate`/`Uninhabited` ABI even when the main type is
- // `Scalar`/`ScalarPair`. (Note that sometimes, variants with fields have size
- // 0, and sometimes, variants without fields have non-0 size.)
- continue;
- }
- // The top-level ABI and the ABI of the variants should be coherent.
- let scalar_coherent = |s1: Scalar, s2: Scalar| {
- s1.size(cx) == s2.size(cx) && s1.align(cx) == s2.align(cx)
- };
- let abi_coherent = match (layout.abi, variant.abi()) {
- (Abi::Scalar(s1), Abi::Scalar(s2)) => scalar_coherent(s1, s2),
- (Abi::ScalarPair(a1, b1), Abi::ScalarPair(a2, b2)) => {
- scalar_coherent(a1, a2) && scalar_coherent(b1, b2)
- }
- (Abi::Uninhabited, _) => true,
- (Abi::Aggregate { .. }, _) => true,
- _ => false,
- };
- if !abi_coherent {
- bug!(
- "Variant ABI is incompatible with top-level ABI:\nvariant={:#?}\nTop-level: {layout:#?}",
- variant
- );
+ if let Variants::Multiple { variants, .. } = &layout.variants {
+ for variant in variants.iter() {
+ // No nested "multiple".
+ assert!(matches!(variant.variants, Variants::Single { .. }));
+ // Variants should have the same or a smaller size as the full thing,
+ // and same for alignment.
+ if variant.size > layout.size {
+ bug!(
+ "Type with size {} bytes has variant with size {} bytes: {layout:#?}",
+ layout.size.bytes(),
+ variant.size.bytes(),
+ )
+ }
+ if variant.align.abi > layout.align.abi {
+ bug!(
+ "Type with alignment {} bytes has variant with alignment {} bytes: {layout:#?}",
+ layout.align.abi.bytes(),
+ variant.align.abi.bytes(),
+ )
+ }
+ // Skip empty variants.
+ if variant.size == Size::ZERO
+ || variant.fields.count() == 0
+ || variant.abi.is_uninhabited()
+ {
+ // These are never actually accessed anyway, so we can skip the coherence check
+ // for them. They also fail that check, since they have
+            // `Aggregate`/`Uninhabited` ABI even when the main type is
+ // `Scalar`/`ScalarPair`. (Note that sometimes, variants with fields have size
+ // 0, and sometimes, variants without fields have non-0 size.)
+ continue;
+ }
+ // The top-level ABI and the ABI of the variants should be coherent.
+ let scalar_coherent =
+ |s1: Scalar, s2: Scalar| s1.size(cx) == s2.size(cx) && s1.align(cx) == s2.align(cx);
+ let abi_coherent = match (layout.abi, variant.abi) {
+ (Abi::Scalar(s1), Abi::Scalar(s2)) => scalar_coherent(s1, s2),
+ (Abi::ScalarPair(a1, b1), Abi::ScalarPair(a2, b2)) => {
+ scalar_coherent(a1, a2) && scalar_coherent(b1, b2)
}
+ (Abi::Uninhabited, _) => true,
+ (Abi::Aggregate { .. }, _) => true,
+ _ => false,
+ };
+ if !abi_coherent {
+ bug!(
+ "Variant ABI is incompatible with top-level ABI:\nvariant={:#?}\nTop-level: {layout:#?}",
+ variant
+ );
}
}
}
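The `ScalarPair` branch of the sanity check above now recomputes the exact expected size and alignment, `(size1.align_to(align2) + size2).align_to(max(align1, align2))`, rather than only asserting lower bounds. A small standalone sketch of that formula with plain byte counts (not the compiler's `Size`/`Align` types):

    // Round `offset` up to the next multiple of `align` (align is a power of two).
    fn align_to(offset: u64, align: u64) -> u64 {
        (offset + align - 1) & !(align - 1)
    }

    // Expected size/alignment of a ScalarPair, mirroring the check above:
    // field 2 starts at size1 rounded up to align2, and the total is rounded
    // up to the pair's alignment, the max of the two field alignments.
    fn scalar_pair_size_align(size1: u64, align1: u64, size2: u64, align2: u64) -> (u64, u64) {
        let align = align1.max(align2);
        let field2_offset = align_to(size1, align2);
        (align_to(field2_offset + size2, align), align)
    }

    fn main() {
        // e.g. a pair of a 1-byte scalar and a 4-byte scalar (think (u8, u32)):
        // second field at offset 4, total size 8, alignment 4.
        assert_eq!(scalar_pair_size_align(1, 1, 4, 4), (8, 4));
    }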
diff --git a/compiler/rustc_ty_utils/src/lib.rs b/compiler/rustc_ty_utils/src/lib.rs
index cce5a79dd..7ad5cbc01 100644
--- a/compiler/rustc_ty_utils/src/lib.rs
+++ b/compiler/rustc_ty_utils/src/lib.rs
@@ -29,6 +29,7 @@ mod layout;
mod layout_sanity_check;
mod needs_drop;
pub mod representability;
+mod structural_match;
mod ty;
pub fn provide(providers: &mut Providers) {
@@ -42,4 +43,5 @@ pub fn provide(providers: &mut Providers) {
representability::provide(providers);
ty::provide(providers);
instance::provide(providers);
+ structural_match::provide(providers);
}
diff --git a/compiler/rustc_ty_utils/src/structural_match.rs b/compiler/rustc_ty_utils/src/structural_match.rs
new file mode 100644
index 000000000..a55bb7e7e
--- /dev/null
+++ b/compiler/rustc_ty_utils/src/structural_match.rs
@@ -0,0 +1,44 @@
+use rustc_hir::lang_items::LangItem;
+use rustc_middle::ty::query::Providers;
+use rustc_middle::ty::{self, Ty, TyCtxt};
+
+use rustc_infer::infer::TyCtxtInferExt;
+use rustc_trait_selection::traits::{ObligationCause, ObligationCtxt};
+
+/// This method returns true if and only if `adt_ty` itself has been marked as
+/// eligible for structural-match: namely, if it implements both
+/// `StructuralPartialEq` and `StructuralEq` (which are respectively injected by
+/// `#[derive(PartialEq)]` and `#[derive(Eq)]`).
+///
+/// Note that this does *not* recursively check if the substructure of `adt_ty`
+/// implements the traits.
+fn has_structural_eq_impls<'tcx>(tcx: TyCtxt<'tcx>, adt_ty: Ty<'tcx>) -> bool {
+ let ref infcx = tcx.infer_ctxt().build();
+ let cause = ObligationCause::dummy();
+
+ let ocx = ObligationCtxt::new(infcx);
+ // require `#[derive(PartialEq)]`
+ let structural_peq_def_id =
+ infcx.tcx.require_lang_item(LangItem::StructuralPeq, Some(cause.span));
+ ocx.register_bound(cause.clone(), ty::ParamEnv::empty(), adt_ty, structural_peq_def_id);
+ // for now, require `#[derive(Eq)]`. (Doing so is a hack to work around
+ // the type `for<'a> fn(&'a ())` failing to implement `Eq` itself.)
+ let structural_teq_def_id =
+ infcx.tcx.require_lang_item(LangItem::StructuralTeq, Some(cause.span));
+ ocx.register_bound(cause, ty::ParamEnv::empty(), adt_ty, structural_teq_def_id);
+
+ // We deliberately skip *reporting* fulfillment errors (via
+ // `report_fulfillment_errors`), for two reasons:
+ //
+ // 1. The error messages would mention `std::marker::StructuralPartialEq`
+ // (a trait which is solely meant as an implementation detail
+ // for now), and
+ //
+ // 2. We are sometimes doing future-incompatibility lints for
+ // now, so we do not want unconditional errors here.
+ ocx.select_all_or_error().is_empty()
+}
+
+pub fn provide(providers: &mut Providers) {
+ providers.has_structural_eq_impls = has_structural_eq_impls;
+}
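As the doc comment in the new file says, `StructuralPartialEq` and `StructuralEq` are injected by `#[derive(PartialEq)]` and `#[derive(Eq)]`; in user code, the property the query checks for looks roughly like this (an illustrative snippet, not part of this change):

    // The derives below inject the `StructuralPartialEq` and `StructuralEq`
    // impls that `has_structural_eq_impls` registers bounds for.
    #[derive(PartialEq, Eq)]
    struct Point {
        x: i32,
        y: i32,
    }

    const ORIGIN: Point = Point { x: 0, y: 0 };

    fn is_origin(p: Point) -> bool {
        // Using the constant as a pattern requires the structural-match property.
        matches!(p, ORIGIN)
    }

    fn main() {
        assert!(is_origin(Point { x: 0, y: 0 }));
    }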
diff --git a/compiler/rustc_ty_utils/src/ty.rs b/compiler/rustc_ty_utils/src/ty.rs
index 3eebb4ace..5fc9bcac1 100644
--- a/compiler/rustc_ty_utils/src/ty.rs
+++ b/compiler/rustc_ty_utils/src/ty.rs
@@ -49,12 +49,9 @@ fn sized_constraint_for_ty<'tcx>(
// it on the impl.
let Some(sized_trait) = tcx.lang_items().sized_trait() else { return vec![ty] };
- let sized_predicate = ty::Binder::dummy(ty::TraitRef {
- def_id: sized_trait,
- substs: tcx.mk_substs_trait(ty, &[]),
- })
- .without_const()
- .to_predicate(tcx);
+ let sized_predicate = ty::Binder::dummy(tcx.mk_trait_ref(sized_trait, [ty]))
+ .without_const()
+ .to_predicate(tcx);
let predicates = tcx.predicates_of(adtdef.did()).predicates;
if predicates.iter().any(|(p, _)| *p == sized_predicate) { vec![] } else { vec![ty] }
}
@@ -108,12 +105,7 @@ fn adt_sized_constraint(tcx: TyCtxt<'_>, def_id: DefId) -> &[Ty<'_>] {
/// See `ParamEnv` struct definition for details.
fn param_env(tcx: TyCtxt<'_>, def_id: DefId) -> ty::ParamEnv<'_> {
- // The param_env of an impl Trait type is its defining function's param_env
- if let Some(parent) = ty::is_impl_trait_defn(tcx, def_id) {
- return param_env(tcx, parent.to_def_id());
- }
// Compute the bounds on Self and the type parameters.
-
let ty::InstantiatedPredicates { mut predicates, .. } =
tcx.predicates_of(def_id).instantiate_identity(tcx);
@@ -413,63 +405,7 @@ fn issue33140_self_ty(tcx: TyCtxt<'_>, def_id: DefId) -> Option<Ty<'_>> {
/// Check if a function is async.
fn asyncness(tcx: TyCtxt<'_>, def_id: DefId) -> hir::IsAsync {
let node = tcx.hir().get_by_def_id(def_id.expect_local());
- if let Some(fn_kind) = node.fn_kind() { fn_kind.asyncness() } else { hir::IsAsync::NotAsync }
-}
-
-/// Don't call this directly: use ``tcx.conservative_is_privately_uninhabited`` instead.
-pub fn conservative_is_privately_uninhabited_raw<'tcx>(
- tcx: TyCtxt<'tcx>,
- param_env_and: ty::ParamEnvAnd<'tcx, Ty<'tcx>>,
-) -> bool {
- let (param_env, ty) = param_env_and.into_parts();
- match ty.kind() {
- ty::Never => {
- debug!("ty::Never =>");
- true
- }
- ty::Adt(def, _) if def.is_union() => {
- debug!("ty::Adt(def, _) if def.is_union() =>");
- // For now, `union`s are never considered uninhabited.
- false
- }
- ty::Adt(def, substs) => {
- debug!("ty::Adt(def, _) if def.is_not_union() =>");
- // Any ADT is uninhabited if either:
- // (a) It has no variants (i.e. an empty `enum`);
- // (b) Each of its variants (a single one in the case of a `struct`) has at least
- // one uninhabited field.
- def.variants().iter().all(|var| {
- var.fields.iter().any(|field| {
- let ty = tcx.bound_type_of(field.did).subst(tcx, substs);
- tcx.conservative_is_privately_uninhabited(param_env.and(ty))
- })
- })
- }
- ty::Tuple(fields) => {
- debug!("ty::Tuple(..) =>");
- fields.iter().any(|ty| tcx.conservative_is_privately_uninhabited(param_env.and(ty)))
- }
- ty::Array(ty, len) => {
- debug!("ty::Array(ty, len) =>");
- match len.try_eval_usize(tcx, param_env) {
- Some(0) | None => false,
- // If the array is definitely non-empty, it's uninhabited if
- // the type of its elements is uninhabited.
- Some(1..) => tcx.conservative_is_privately_uninhabited(param_env.and(*ty)),
- }
- }
- ty::Ref(..) => {
- debug!("ty::Ref(..) =>");
-            // References to uninitialised memory are valid for any type, including
- // uninhabited types, in unsafe code, so we treat all references as
- // inhabited.
- false
- }
- _ => {
- debug!("_ =>");
- false
- }
- }
+ node.fn_sig().map_or(hir::IsAsync::NotAsync, |sig| sig.header.asyncness)
}
pub fn provide(providers: &mut ty::query::Providers) {
@@ -481,7 +417,6 @@ pub fn provide(providers: &mut ty::query::Providers) {
instance_def_size_estimate,
issue33140_self_ty,
impl_defaultness,
- conservative_is_privately_uninhabited: conservative_is_privately_uninhabited_raw,
..*providers
};
}
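The removed `conservative_is_privately_uninhabited_raw` (callers now use `ty.is_privately_uninhabited`) spelled out what the check treats as uninhabited: the never type, ADTs in which every variant has an uninhabited field (including empty enums), tuples with an uninhabited element, and definitely non-empty arrays of an uninhabited element, while references and unions always count as inhabited. In user-level terms (an illustrative snippet, not compiler code):

    #[allow(dead_code)]
    enum Empty {}

    #[allow(dead_code)]
    struct Wrapper {
        inner: Empty, // an uninhabited field makes `Wrapper` uninhabited too
    }

    fn main() {
        // No value of either type can exist; both are also zero-sized.
        assert_eq!(std::mem::size_of::<Empty>(), 0);
        assert_eq!(std::mem::size_of::<Wrapper>(), 0);
        // `&Empty` still counts as inhabited, matching the `ty::Ref(..) => false`
        // arm in the removed code (unsafe code may create references to
        // uninitialised memory).
        assert!(std::mem::size_of::<&Empty>() > 0);
    }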
diff --git a/compiler/rustc_type_ir/src/lib.rs b/compiler/rustc_type_ir/src/lib.rs
index 7fbe78aa5..e3f7a1bd0 100644
--- a/compiler/rustc_type_ir/src/lib.rs
+++ b/compiler/rustc_type_ir/src/lib.rs
@@ -19,9 +19,11 @@ use std::mem::discriminant;
pub mod codec;
pub mod sty;
+pub mod ty_info;
pub use codec::*;
pub use sty::*;
+pub use ty_info::*;
/// Needed so we can use #[derive(HashStable_Generic)]
pub trait HashStableContext {}
@@ -45,7 +47,7 @@ pub trait Interner {
type BoundTy: Clone + Debug + Hash + PartialEq + Eq + PartialOrd + Ord;
type PlaceholderType: Clone + Debug + Hash + PartialEq + Eq + PartialOrd + Ord;
type InferTy: Clone + Debug + Hash + PartialEq + Eq + PartialOrd + Ord;
- type DelaySpanBugEmitted: Clone + Debug + Hash + PartialEq + Eq + PartialOrd + Ord;
+ type ErrorGuaranteed: Clone + Debug + Hash + PartialEq + Eq + PartialOrd + Ord;
type PredicateKind: Clone + Debug + Hash + PartialEq + Eq;
type AllocId: Clone + Debug + Hash + PartialEq + Eq + PartialOrd + Ord;
@@ -60,10 +62,10 @@ pub trait InternAs<T: ?Sized, R> {
type Output;
fn intern_with<F>(self, f: F) -> Self::Output
where
- F: FnOnce(&T) -> R;
+ F: FnOnce(&[T]) -> R;
}
-impl<I, T, R, E> InternAs<[T], R> for I
+impl<I, T, R, E> InternAs<T, R> for I
where
E: InternIteratorElement<T, R>,
I: Iterator<Item = E>,
diff --git a/compiler/rustc_type_ir/src/sty.rs b/compiler/rustc_type_ir/src/sty.rs
index a4fb1480f..3ed616d70 100644
--- a/compiler/rustc_type_ir/src/sty.rs
+++ b/compiler/rustc_type_ir/src/sty.rs
@@ -217,7 +217,7 @@ pub enum TyKind<I: Interner> {
/// A placeholder for a type which could not be computed; this is
/// propagated to avoid useless error messages.
- Error(I::DelaySpanBugEmitted),
+ Error(I::ErrorGuaranteed),
}
impl<I: Interner> TyKind<I> {
@@ -301,61 +301,44 @@ impl<I: Interner> Clone for TyKind<I> {
impl<I: Interner> PartialEq for TyKind<I> {
#[inline]
fn eq(&self, other: &TyKind<I>) -> bool {
- let __self_vi = tykind_discriminant(self);
- let __arg_1_vi = tykind_discriminant(other);
- if __self_vi == __arg_1_vi {
- match (&*self, &*other) {
- (&Int(ref __self_0), &Int(ref __arg_1_0)) => __self_0 == __arg_1_0,
- (&Uint(ref __self_0), &Uint(ref __arg_1_0)) => __self_0 == __arg_1_0,
- (&Float(ref __self_0), &Float(ref __arg_1_0)) => __self_0 == __arg_1_0,
- (&Adt(ref __self_0, ref __self_1), &Adt(ref __arg_1_0, ref __arg_1_1)) => {
- __self_0 == __arg_1_0 && __self_1 == __arg_1_1
+ tykind_discriminant(self) == tykind_discriminant(other)
+ && match (self, other) {
+ (Int(a_i), Int(b_i)) => a_i == b_i,
+ (Uint(a_u), Uint(b_u)) => a_u == b_u,
+ (Float(a_f), Float(b_f)) => a_f == b_f,
+ (Adt(a_d, a_s), Adt(b_d, b_s)) => a_d == b_d && a_s == b_s,
+ (Foreign(a_d), Foreign(b_d)) => a_d == b_d,
+ (Array(a_t, a_c), Array(b_t, b_c)) => a_t == b_t && a_c == b_c,
+ (Slice(a_t), Slice(b_t)) => a_t == b_t,
+ (RawPtr(a_t), RawPtr(b_t)) => a_t == b_t,
+ (Ref(a_r, a_t, a_m), Ref(b_r, b_t, b_m)) => a_r == b_r && a_t == b_t && a_m == b_m,
+ (FnDef(a_d, a_s), FnDef(b_d, b_s)) => a_d == b_d && a_s == b_s,
+ (FnPtr(a_s), FnPtr(b_s)) => a_s == b_s,
+ (Dynamic(a_p, a_r, a_repr), Dynamic(b_p, b_r, b_repr)) => {
+ a_p == b_p && a_r == b_r && a_repr == b_repr
}
- (&Foreign(ref __self_0), &Foreign(ref __arg_1_0)) => __self_0 == __arg_1_0,
- (&Array(ref __self_0, ref __self_1), &Array(ref __arg_1_0, ref __arg_1_1)) => {
- __self_0 == __arg_1_0 && __self_1 == __arg_1_1
+ (Closure(a_d, a_s), Closure(b_d, b_s)) => a_d == b_d && a_s == b_s,
+ (Generator(a_d, a_s, a_m), Generator(b_d, b_s, b_m)) => {
+ a_d == b_d && a_s == b_s && a_m == b_m
}
- (&Slice(ref __self_0), &Slice(ref __arg_1_0)) => __self_0 == __arg_1_0,
- (&RawPtr(ref __self_0), &RawPtr(ref __arg_1_0)) => __self_0 == __arg_1_0,
- (
- &Ref(ref __self_0, ref __self_1, ref __self_2),
- &Ref(ref __arg_1_0, ref __arg_1_1, ref __arg_1_2),
- ) => __self_0 == __arg_1_0 && __self_1 == __arg_1_1 && __self_2 == __arg_1_2,
- (&FnDef(ref __self_0, ref __self_1), &FnDef(ref __arg_1_0, ref __arg_1_1)) => {
- __self_0 == __arg_1_0 && __self_1 == __arg_1_1
+ (GeneratorWitness(a_g), GeneratorWitness(b_g)) => a_g == b_g,
+ (Tuple(a_t), Tuple(b_t)) => a_t == b_t,
+ (Projection(a_p), Projection(b_p)) => a_p == b_p,
+ (Opaque(a_d, a_s), Opaque(b_d, b_s)) => a_d == b_d && a_s == b_s,
+ (Param(a_p), Param(b_p)) => a_p == b_p,
+ (Bound(a_d, a_b), Bound(b_d, b_b)) => a_d == b_d && a_b == b_b,
+ (Placeholder(a_p), Placeholder(b_p)) => a_p == b_p,
+ (Infer(a_t), Infer(b_t)) => a_t == b_t,
+ (Error(a_e), Error(b_e)) => a_e == b_e,
+ (Bool, Bool) | (Char, Char) | (Str, Str) | (Never, Never) => true,
+ _ => {
+ debug_assert!(
+ false,
+                        "This branch must be unreachable, maybe the match is missing an arm? self = {self:?}, other = {other:?}"
+ );
+ true
}
- (&FnPtr(ref __self_0), &FnPtr(ref __arg_1_0)) => __self_0 == __arg_1_0,
- (
- &Dynamic(ref __self_0, ref __self_1, ref self_repr),
- &Dynamic(ref __arg_1_0, ref __arg_1_1, ref arg_repr),
- ) => __self_0 == __arg_1_0 && __self_1 == __arg_1_1 && self_repr == arg_repr,
- (&Closure(ref __self_0, ref __self_1), &Closure(ref __arg_1_0, ref __arg_1_1)) => {
- __self_0 == __arg_1_0 && __self_1 == __arg_1_1
- }
- (
- &Generator(ref __self_0, ref __self_1, ref __self_2),
- &Generator(ref __arg_1_0, ref __arg_1_1, ref __arg_1_2),
- ) => __self_0 == __arg_1_0 && __self_1 == __arg_1_1 && __self_2 == __arg_1_2,
- (&GeneratorWitness(ref __self_0), &GeneratorWitness(ref __arg_1_0)) => {
- __self_0 == __arg_1_0
- }
- (&Tuple(ref __self_0), &Tuple(ref __arg_1_0)) => __self_0 == __arg_1_0,
- (&Projection(ref __self_0), &Projection(ref __arg_1_0)) => __self_0 == __arg_1_0,
- (&Opaque(ref __self_0, ref __self_1), &Opaque(ref __arg_1_0, ref __arg_1_1)) => {
- __self_0 == __arg_1_0 && __self_1 == __arg_1_1
- }
- (&Param(ref __self_0), &Param(ref __arg_1_0)) => __self_0 == __arg_1_0,
- (&Bound(ref __self_0, ref __self_1), &Bound(ref __arg_1_0, ref __arg_1_1)) => {
- __self_0 == __arg_1_0 && __self_1 == __arg_1_1
- }
- (&Placeholder(ref __self_0), &Placeholder(ref __arg_1_0)) => __self_0 == __arg_1_0,
- (&Infer(ref __self_0), &Infer(ref __arg_1_0)) => __self_0 == __arg_1_0,
- (&Error(ref __self_0), &Error(ref __arg_1_0)) => __self_0 == __arg_1_0,
- _ => true,
}
- } else {
- false
- }
}
}
@@ -366,7 +349,7 @@ impl<I: Interner> Eq for TyKind<I> {}
impl<I: Interner> PartialOrd for TyKind<I> {
#[inline]
fn partial_cmp(&self, other: &TyKind<I>) -> Option<Ordering> {
- Some(Ord::cmp(self, other))
+ Some(self.cmp(other))
}
}
@@ -374,213 +357,106 @@ impl<I: Interner> PartialOrd for TyKind<I> {
impl<I: Interner> Ord for TyKind<I> {
#[inline]
fn cmp(&self, other: &TyKind<I>) -> Ordering {
- let __self_vi = tykind_discriminant(self);
- let __arg_1_vi = tykind_discriminant(other);
- if __self_vi == __arg_1_vi {
- match (&*self, &*other) {
- (&Int(ref __self_0), &Int(ref __arg_1_0)) => Ord::cmp(__self_0, __arg_1_0),
- (&Uint(ref __self_0), &Uint(ref __arg_1_0)) => Ord::cmp(__self_0, __arg_1_0),
- (&Float(ref __self_0), &Float(ref __arg_1_0)) => Ord::cmp(__self_0, __arg_1_0),
- (&Adt(ref __self_0, ref __self_1), &Adt(ref __arg_1_0, ref __arg_1_1)) => {
- match Ord::cmp(__self_0, __arg_1_0) {
- Ordering::Equal => Ord::cmp(__self_1, __arg_1_1),
- cmp => cmp,
- }
- }
- (&Foreign(ref __self_0), &Foreign(ref __arg_1_0)) => Ord::cmp(__self_0, __arg_1_0),
- (&Array(ref __self_0, ref __self_1), &Array(ref __arg_1_0, ref __arg_1_1)) => {
- match Ord::cmp(__self_0, __arg_1_0) {
- Ordering::Equal => Ord::cmp(__self_1, __arg_1_1),
- cmp => cmp,
- }
- }
- (&Slice(ref __self_0), &Slice(ref __arg_1_0)) => Ord::cmp(__self_0, __arg_1_0),
- (&RawPtr(ref __self_0), &RawPtr(ref __arg_1_0)) => Ord::cmp(__self_0, __arg_1_0),
- (
- &Ref(ref __self_0, ref __self_1, ref __self_2),
- &Ref(ref __arg_1_0, ref __arg_1_1, ref __arg_1_2),
- ) => match Ord::cmp(__self_0, __arg_1_0) {
- Ordering::Equal => match Ord::cmp(__self_1, __arg_1_1) {
- Ordering::Equal => Ord::cmp(__self_2, __arg_1_2),
- cmp => cmp,
- },
- cmp => cmp,
- },
- (&FnDef(ref __self_0, ref __self_1), &FnDef(ref __arg_1_0, ref __arg_1_1)) => {
- match Ord::cmp(__self_0, __arg_1_0) {
- Ordering::Equal => Ord::cmp(__self_1, __arg_1_1),
- cmp => cmp,
- }
+ tykind_discriminant(self).cmp(&tykind_discriminant(other)).then_with(|| {
+ match (self, other) {
+ (Int(a_i), Int(b_i)) => a_i.cmp(b_i),
+ (Uint(a_u), Uint(b_u)) => a_u.cmp(b_u),
+ (Float(a_f), Float(b_f)) => a_f.cmp(b_f),
+ (Adt(a_d, a_s), Adt(b_d, b_s)) => a_d.cmp(b_d).then_with(|| a_s.cmp(b_s)),
+ (Foreign(a_d), Foreign(b_d)) => a_d.cmp(b_d),
+ (Array(a_t, a_c), Array(b_t, b_c)) => a_t.cmp(b_t).then_with(|| a_c.cmp(b_c)),
+ (Slice(a_t), Slice(b_t)) => a_t.cmp(b_t),
+ (RawPtr(a_t), RawPtr(b_t)) => a_t.cmp(b_t),
+ (Ref(a_r, a_t, a_m), Ref(b_r, b_t, b_m)) => {
+ a_r.cmp(b_r).then_with(|| a_t.cmp(b_t).then_with(|| a_m.cmp(b_m)))
}
- (&FnPtr(ref __self_0), &FnPtr(ref __arg_1_0)) => Ord::cmp(__self_0, __arg_1_0),
- (
- &Dynamic(ref __self_0, ref __self_1, ref self_repr),
- &Dynamic(ref __arg_1_0, ref __arg_1_1, ref arg_repr),
- ) => match Ord::cmp(__self_0, __arg_1_0) {
- Ordering::Equal => match Ord::cmp(__self_1, __arg_1_1) {
- Ordering::Equal => Ord::cmp(self_repr, arg_repr),
- cmp => cmp,
- },
- cmp => cmp,
- },
- (&Closure(ref __self_0, ref __self_1), &Closure(ref __arg_1_0, ref __arg_1_1)) => {
- match Ord::cmp(__self_0, __arg_1_0) {
- Ordering::Equal => Ord::cmp(__self_1, __arg_1_1),
- cmp => cmp,
- }
+ (FnDef(a_d, a_s), FnDef(b_d, b_s)) => a_d.cmp(b_d).then_with(|| a_s.cmp(b_s)),
+ (FnPtr(a_s), FnPtr(b_s)) => a_s.cmp(b_s),
+ (Dynamic(a_p, a_r, a_repr), Dynamic(b_p, b_r, b_repr)) => {
+ a_p.cmp(b_p).then_with(|| a_r.cmp(b_r).then_with(|| a_repr.cmp(b_repr)))
}
- (
- &Generator(ref __self_0, ref __self_1, ref __self_2),
- &Generator(ref __arg_1_0, ref __arg_1_1, ref __arg_1_2),
- ) => match Ord::cmp(__self_0, __arg_1_0) {
- Ordering::Equal => match Ord::cmp(__self_1, __arg_1_1) {
- Ordering::Equal => Ord::cmp(__self_2, __arg_1_2),
- cmp => cmp,
- },
- cmp => cmp,
- },
- (&GeneratorWitness(ref __self_0), &GeneratorWitness(ref __arg_1_0)) => {
- Ord::cmp(__self_0, __arg_1_0)
+ (Closure(a_p, a_s), Closure(b_p, b_s)) => a_p.cmp(b_p).then_with(|| a_s.cmp(b_s)),
+ (Generator(a_d, a_s, a_m), Generator(b_d, b_s, b_m)) => {
+ a_d.cmp(b_d).then_with(|| a_s.cmp(b_s).then_with(|| a_m.cmp(b_m)))
}
- (&Tuple(ref __self_0), &Tuple(ref __arg_1_0)) => Ord::cmp(__self_0, __arg_1_0),
- (&Projection(ref __self_0), &Projection(ref __arg_1_0)) => {
- Ord::cmp(__self_0, __arg_1_0)
+ (GeneratorWitness(a_g), GeneratorWitness(b_g)) => a_g.cmp(b_g),
+ (Tuple(a_t), Tuple(b_t)) => a_t.cmp(b_t),
+ (Projection(a_p), Projection(b_p)) => a_p.cmp(b_p),
+ (Opaque(a_d, a_s), Opaque(b_d, b_s)) => a_d.cmp(b_d).then_with(|| a_s.cmp(b_s)),
+ (Param(a_p), Param(b_p)) => a_p.cmp(b_p),
+ (Bound(a_d, a_b), Bound(b_d, b_b)) => a_d.cmp(b_d).then_with(|| a_b.cmp(b_b)),
+ (Placeholder(a_p), Placeholder(b_p)) => a_p.cmp(b_p),
+ (Infer(a_t), Infer(b_t)) => a_t.cmp(b_t),
+ (Error(a_e), Error(b_e)) => a_e.cmp(b_e),
+ (Bool, Bool) | (Char, Char) | (Str, Str) | (Never, Never) => Ordering::Equal,
+ _ => {
+                    debug_assert!(false, "This branch must be unreachable; maybe the match is missing an arm? self = {self:?}, other = {other:?}");
+ Ordering::Equal
}
- (&Opaque(ref __self_0, ref __self_1), &Opaque(ref __arg_1_0, ref __arg_1_1)) => {
- match Ord::cmp(__self_0, __arg_1_0) {
- Ordering::Equal => Ord::cmp(__self_1, __arg_1_1),
- cmp => cmp,
- }
- }
- (&Param(ref __self_0), &Param(ref __arg_1_0)) => Ord::cmp(__self_0, __arg_1_0),
- (&Bound(ref __self_0, ref __self_1), &Bound(ref __arg_1_0, ref __arg_1_1)) => {
- match Ord::cmp(__self_0, __arg_1_0) {
- Ordering::Equal => Ord::cmp(__self_1, __arg_1_1),
- cmp => cmp,
- }
- }
- (&Placeholder(ref __self_0), &Placeholder(ref __arg_1_0)) => {
- Ord::cmp(__self_0, __arg_1_0)
- }
- (&Infer(ref __self_0), &Infer(ref __arg_1_0)) => Ord::cmp(__self_0, __arg_1_0),
- (&Error(ref __self_0), &Error(ref __arg_1_0)) => Ord::cmp(__self_0, __arg_1_0),
- _ => Ordering::Equal,
}
- } else {
- Ord::cmp(&__self_vi, &__arg_1_vi)
- }
+ })
}
}
// This is manually implemented because a derive would require `I: Hash`
impl<I: Interner> hash::Hash for TyKind<I> {
fn hash<__H: hash::Hasher>(&self, state: &mut __H) -> () {
- match (&*self,) {
- (&Int(ref __self_0),) => {
- hash::Hash::hash(&tykind_discriminant(self), state);
- hash::Hash::hash(__self_0, state)
- }
- (&Uint(ref __self_0),) => {
- hash::Hash::hash(&tykind_discriminant(self), state);
- hash::Hash::hash(__self_0, state)
- }
- (&Float(ref __self_0),) => {
- hash::Hash::hash(&tykind_discriminant(self), state);
- hash::Hash::hash(__self_0, state)
- }
- (&Adt(ref __self_0, ref __self_1),) => {
- hash::Hash::hash(&tykind_discriminant(self), state);
- hash::Hash::hash(__self_0, state);
- hash::Hash::hash(__self_1, state)
- }
- (&Foreign(ref __self_0),) => {
- hash::Hash::hash(&tykind_discriminant(self), state);
- hash::Hash::hash(__self_0, state)
- }
- (&Array(ref __self_0, ref __self_1),) => {
- hash::Hash::hash(&tykind_discriminant(self), state);
- hash::Hash::hash(__self_0, state);
- hash::Hash::hash(__self_1, state)
- }
- (&Slice(ref __self_0),) => {
- hash::Hash::hash(&tykind_discriminant(self), state);
- hash::Hash::hash(__self_0, state)
- }
- (&RawPtr(ref __self_0),) => {
- hash::Hash::hash(&tykind_discriminant(self), state);
- hash::Hash::hash(__self_0, state)
- }
- (&Ref(ref __self_0, ref __self_1, ref __self_2),) => {
- hash::Hash::hash(&tykind_discriminant(self), state);
- hash::Hash::hash(__self_0, state);
- hash::Hash::hash(__self_1, state);
- hash::Hash::hash(__self_2, state)
- }
- (&FnDef(ref __self_0, ref __self_1),) => {
- hash::Hash::hash(&tykind_discriminant(self), state);
- hash::Hash::hash(__self_0, state);
- hash::Hash::hash(__self_1, state)
- }
- (&FnPtr(ref __self_0),) => {
- hash::Hash::hash(&tykind_discriminant(self), state);
- hash::Hash::hash(__self_0, state)
- }
- (&Dynamic(ref __self_0, ref __self_1, ref repr),) => {
- hash::Hash::hash(&tykind_discriminant(self), state);
- hash::Hash::hash(__self_0, state);
- hash::Hash::hash(__self_1, state);
- hash::Hash::hash(repr, state)
- }
- (&Closure(ref __self_0, ref __self_1),) => {
- hash::Hash::hash(&tykind_discriminant(self), state);
- hash::Hash::hash(__self_0, state);
- hash::Hash::hash(__self_1, state)
- }
- (&Generator(ref __self_0, ref __self_1, ref __self_2),) => {
- hash::Hash::hash(&tykind_discriminant(self), state);
- hash::Hash::hash(__self_0, state);
- hash::Hash::hash(__self_1, state);
- hash::Hash::hash(__self_2, state)
- }
- (&GeneratorWitness(ref __self_0),) => {
- hash::Hash::hash(&tykind_discriminant(self), state);
- hash::Hash::hash(__self_0, state)
- }
- (&Tuple(ref __self_0),) => {
- hash::Hash::hash(&tykind_discriminant(self), state);
- hash::Hash::hash(__self_0, state)
- }
- (&Projection(ref __self_0),) => {
- hash::Hash::hash(&tykind_discriminant(self), state);
- hash::Hash::hash(__self_0, state)
- }
- (&Opaque(ref __self_0, ref __self_1),) => {
- hash::Hash::hash(&tykind_discriminant(self), state);
- hash::Hash::hash(__self_0, state);
- hash::Hash::hash(__self_1, state)
- }
- (&Param(ref __self_0),) => {
- hash::Hash::hash(&tykind_discriminant(self), state);
- hash::Hash::hash(__self_0, state)
- }
- (&Bound(ref __self_0, ref __self_1),) => {
- hash::Hash::hash(&tykind_discriminant(self), state);
- hash::Hash::hash(__self_0, state);
- hash::Hash::hash(__self_1, state)
- }
- (&Placeholder(ref __self_0),) => {
- hash::Hash::hash(&tykind_discriminant(self), state);
- hash::Hash::hash(__self_0, state)
- }
- (&Infer(ref __self_0),) => {
- hash::Hash::hash(&tykind_discriminant(self), state);
- hash::Hash::hash(__self_0, state)
+ tykind_discriminant(self).hash(state);
+ match self {
+ Int(i) => i.hash(state),
+ Uint(u) => u.hash(state),
+ Float(f) => f.hash(state),
+ Adt(d, s) => {
+ d.hash(state);
+ s.hash(state)
+ }
+ Foreign(d) => d.hash(state),
+ Array(t, c) => {
+ t.hash(state);
+ c.hash(state)
}
- (&Error(ref __self_0),) => {
- hash::Hash::hash(&tykind_discriminant(self), state);
- hash::Hash::hash(__self_0, state)
+ Slice(t) => t.hash(state),
+ RawPtr(t) => t.hash(state),
+ Ref(r, t, m) => {
+ r.hash(state);
+ t.hash(state);
+ m.hash(state)
+ }
+ FnDef(d, s) => {
+ d.hash(state);
+ s.hash(state)
+ }
+ FnPtr(s) => s.hash(state),
+ Dynamic(p, r, repr) => {
+ p.hash(state);
+ r.hash(state);
+ repr.hash(state)
+ }
+ Closure(d, s) => {
+ d.hash(state);
+ s.hash(state)
+ }
+ Generator(d, s, m) => {
+ d.hash(state);
+ s.hash(state);
+ m.hash(state)
+ }
+ GeneratorWitness(g) => g.hash(state),
+ Tuple(t) => t.hash(state),
+ Projection(p) => p.hash(state),
+ Opaque(d, s) => {
+ d.hash(state);
+ s.hash(state)
+ }
+ Param(p) => p.hash(state),
+ Bound(d, b) => {
+ d.hash(state);
+ b.hash(state)
}
- _ => hash::Hash::hash(&tykind_discriminant(self), state),
+ Placeholder(p) => p.hash(state),
+ Infer(t) => t.hash(state),
+ Error(e) => e.hash(state),
+ Bool | Char | Str | Never => (),
}
}
}
@@ -588,37 +464,34 @@ impl<I: Interner> hash::Hash for TyKind<I> {
// This is manually implemented because a derive would require `I: Debug`
impl<I: Interner> fmt::Debug for TyKind<I> {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
- use std::fmt::*;
match self {
- Bool => Formatter::write_str(f, "Bool"),
- Char => Formatter::write_str(f, "Char"),
- Int(f0) => Formatter::debug_tuple_field1_finish(f, "Int", f0),
- Uint(f0) => Formatter::debug_tuple_field1_finish(f, "Uint", f0),
- Float(f0) => Formatter::debug_tuple_field1_finish(f, "Float", f0),
- Adt(f0, f1) => Formatter::debug_tuple_field2_finish(f, "Adt", f0, f1),
- Foreign(f0) => Formatter::debug_tuple_field1_finish(f, "Foreign", f0),
- Str => Formatter::write_str(f, "Str"),
- Array(f0, f1) => Formatter::debug_tuple_field2_finish(f, "Array", f0, f1),
- Slice(f0) => Formatter::debug_tuple_field1_finish(f, "Slice", f0),
- RawPtr(f0) => Formatter::debug_tuple_field1_finish(f, "RawPtr", f0),
- Ref(f0, f1, f2) => Formatter::debug_tuple_field3_finish(f, "Ref", f0, f1, f2),
- FnDef(f0, f1) => Formatter::debug_tuple_field2_finish(f, "FnDef", f0, f1),
- FnPtr(f0) => Formatter::debug_tuple_field1_finish(f, "FnPtr", f0),
- Dynamic(f0, f1, f2) => Formatter::debug_tuple_field3_finish(f, "Dynamic", f0, f1, f2),
- Closure(f0, f1) => Formatter::debug_tuple_field2_finish(f, "Closure", f0, f1),
- Generator(f0, f1, f2) => {
- Formatter::debug_tuple_field3_finish(f, "Generator", f0, f1, f2)
- }
- GeneratorWitness(f0) => Formatter::debug_tuple_field1_finish(f, "GeneratorWitness", f0),
- Never => Formatter::write_str(f, "Never"),
- Tuple(f0) => Formatter::debug_tuple_field1_finish(f, "Tuple", f0),
- Projection(f0) => Formatter::debug_tuple_field1_finish(f, "Projection", f0),
- Opaque(f0, f1) => Formatter::debug_tuple_field2_finish(f, "Opaque", f0, f1),
- Param(f0) => Formatter::debug_tuple_field1_finish(f, "Param", f0),
- Bound(f0, f1) => Formatter::debug_tuple_field2_finish(f, "Bound", f0, f1),
- Placeholder(f0) => Formatter::debug_tuple_field1_finish(f, "Placeholder", f0),
- Infer(f0) => Formatter::debug_tuple_field1_finish(f, "Infer", f0),
- TyKind::Error(f0) => Formatter::debug_tuple_field1_finish(f, "Error", f0),
+ Bool => f.write_str("Bool"),
+ Char => f.write_str("Char"),
+ Int(i) => f.debug_tuple_field1_finish("Int", i),
+ Uint(u) => f.debug_tuple_field1_finish("Uint", u),
+ Float(float) => f.debug_tuple_field1_finish("Float", float),
+ Adt(d, s) => f.debug_tuple_field2_finish("Adt", d, s),
+ Foreign(d) => f.debug_tuple_field1_finish("Foreign", d),
+ Str => f.write_str("Str"),
+ Array(t, c) => f.debug_tuple_field2_finish("Array", t, c),
+ Slice(t) => f.debug_tuple_field1_finish("Slice", t),
+ RawPtr(t) => f.debug_tuple_field1_finish("RawPtr", t),
+ Ref(r, t, m) => f.debug_tuple_field3_finish("Ref", r, t, m),
+ FnDef(d, s) => f.debug_tuple_field2_finish("FnDef", d, s),
+ FnPtr(s) => f.debug_tuple_field1_finish("FnPtr", s),
+ Dynamic(p, r, repr) => f.debug_tuple_field3_finish("Dynamic", p, r, repr),
+ Closure(d, s) => f.debug_tuple_field2_finish("Closure", d, s),
+ Generator(d, s, m) => f.debug_tuple_field3_finish("Generator", d, s, m),
+ GeneratorWitness(g) => f.debug_tuple_field1_finish("GeneratorWitness", g),
+ Never => f.write_str("Never"),
+ Tuple(t) => f.debug_tuple_field1_finish("Tuple", t),
+ Projection(p) => f.debug_tuple_field1_finish("Projection", p),
+ Opaque(d, s) => f.debug_tuple_field2_finish("Opaque", d, s),
+ Param(p) => f.debug_tuple_field1_finish("Param", p),
+ Bound(d, b) => f.debug_tuple_field2_finish("Bound", d, b),
+ Placeholder(p) => f.debug_tuple_field1_finish("Placeholder", p),
+ Infer(t) => f.debug_tuple_field1_finish("Infer", t),
+            Error(e) => f.debug_tuple_field1_finish("Error", e),
}
}
}
@@ -626,7 +499,7 @@ impl<I: Interner> fmt::Debug for TyKind<I> {
// This is manually implemented because a derive would require `I: Encodable`
impl<I: Interner, E: TyEncoder> Encodable<E> for TyKind<I>
where
- I::DelaySpanBugEmitted: Encodable<E>,
+ I::ErrorGuaranteed: Encodable<E>,
I::AdtDef: Encodable<E>,
I::SubstsRef: Encodable<E>,
I::DefId: Encodable<E>,
@@ -645,7 +518,6 @@ where
I::BoundTy: Encodable<E>,
I::PlaceholderType: Encodable<E>,
I::InferTy: Encodable<E>,
- I::DelaySpanBugEmitted: Encodable<E>,
I::PredicateKind: Encodable<E>,
I::AllocId: Encodable<E>,
{
@@ -744,7 +616,7 @@ where
// This is manually implemented because a derive would require `I: Decodable`
impl<I: Interner, D: TyDecoder<I = I>> Decodable<D> for TyKind<I>
where
- I::DelaySpanBugEmitted: Decodable<D>,
+ I::ErrorGuaranteed: Decodable<D>,
I::AdtDef: Decodable<D>,
I::SubstsRef: Decodable<D>,
I::DefId: Decodable<D>,
@@ -763,7 +635,6 @@ where
I::BoundTy: Decodable<D>,
I::PlaceholderType: Decodable<D>,
I::InferTy: Decodable<D>,
- I::DelaySpanBugEmitted: Decodable<D>,
I::PredicateKind: Decodable<D>,
I::AllocId: Decodable<D>,
{
@@ -829,7 +700,7 @@ where
I::ParamTy: HashStable<CTX>,
I::PlaceholderType: HashStable<CTX>,
I::InferTy: HashStable<CTX>,
- I::DelaySpanBugEmitted: HashStable<CTX>,
+ I::ErrorGuaranteed: HashStable<CTX>,
{
#[inline]
fn hash_stable(
@@ -1093,12 +964,12 @@ where
impl<I: Interner> Clone for RegionKind<I> {
fn clone(&self) -> Self {
match self {
- ReEarlyBound(a) => ReEarlyBound(a.clone()),
- ReLateBound(a, b) => ReLateBound(a.clone(), b.clone()),
- ReFree(a) => ReFree(a.clone()),
+ ReEarlyBound(r) => ReEarlyBound(r.clone()),
+ ReLateBound(d, r) => ReLateBound(d.clone(), r.clone()),
+ ReFree(r) => ReFree(r.clone()),
ReStatic => ReStatic,
- ReVar(a) => ReVar(a.clone()),
- RePlaceholder(a) => RePlaceholder(a.clone()),
+ ReVar(r) => ReVar(r.clone()),
+ RePlaceholder(r) => RePlaceholder(r.clone()),
ReErased => ReErased,
}
}
@@ -1108,29 +979,23 @@ impl<I: Interner> Clone for RegionKind<I> {
impl<I: Interner> PartialEq for RegionKind<I> {
#[inline]
fn eq(&self, other: &RegionKind<I>) -> bool {
- let __self_vi = regionkind_discriminant(self);
- let __arg_1_vi = regionkind_discriminant(other);
- if __self_vi == __arg_1_vi {
- match (&*self, &*other) {
- (&ReEarlyBound(ref __self_0), &ReEarlyBound(ref __arg_1_0)) => {
- __self_0 == __arg_1_0
+ regionkind_discriminant(self) == regionkind_discriminant(other)
+ && match (self, other) {
+ (ReEarlyBound(a_r), ReEarlyBound(b_r)) => a_r == b_r,
+ (ReLateBound(a_d, a_r), ReLateBound(b_d, b_r)) => a_d == b_d && a_r == b_r,
+ (ReFree(a_r), ReFree(b_r)) => a_r == b_r,
+ (ReStatic, ReStatic) => true,
+ (ReVar(a_r), ReVar(b_r)) => a_r == b_r,
+ (RePlaceholder(a_r), RePlaceholder(b_r)) => a_r == b_r,
+ (ReErased, ReErased) => true,
+ _ => {
+ debug_assert!(
+ false,
+                        "This branch must be unreachable; maybe the match is missing an arm? self = {self:?}, other = {other:?}"
+ );
+ true
}
- (
- &ReLateBound(ref __self_0, ref __self_1),
- &ReLateBound(ref __arg_1_0, ref __arg_1_1),
- ) => __self_0 == __arg_1_0 && __self_1 == __arg_1_1,
- (&ReFree(ref __self_0), &ReFree(ref __arg_1_0)) => __self_0 == __arg_1_0,
- (&ReStatic, &ReStatic) => true,
- (&ReVar(ref __self_0), &ReVar(ref __arg_1_0)) => __self_0 == __arg_1_0,
- (&RePlaceholder(ref __self_0), &RePlaceholder(ref __arg_1_0)) => {
- __self_0 == __arg_1_0
- }
- (&ReErased, &ReErased) => true,
- _ => true,
}
- } else {
- false
- }
}
}
@@ -1141,7 +1006,7 @@ impl<I: Interner> Eq for RegionKind<I> {}
impl<I: Interner> PartialOrd for RegionKind<I> {
#[inline]
fn partial_cmp(&self, other: &RegionKind<I>) -> Option<Ordering> {
- Some(Ord::cmp(self, other))
+ Some(self.cmp(other))
}
}
@@ -1149,66 +1014,41 @@ impl<I: Interner> PartialOrd for RegionKind<I> {
impl<I: Interner> Ord for RegionKind<I> {
#[inline]
fn cmp(&self, other: &RegionKind<I>) -> Ordering {
- let __self_vi = regionkind_discriminant(self);
- let __arg_1_vi = regionkind_discriminant(other);
- if __self_vi == __arg_1_vi {
- match (&*self, &*other) {
- (&ReEarlyBound(ref __self_0), &ReEarlyBound(ref __arg_1_0)) => {
- Ord::cmp(__self_0, __arg_1_0)
+ regionkind_discriminant(self).cmp(&regionkind_discriminant(other)).then_with(|| {
+ match (self, other) {
+ (ReEarlyBound(a_r), ReEarlyBound(b_r)) => a_r.cmp(b_r),
+ (ReLateBound(a_d, a_r), ReLateBound(b_d, b_r)) => {
+ a_d.cmp(b_d).then_with(|| a_r.cmp(b_r))
}
- (
- &ReLateBound(ref __self_0, ref __self_1),
- &ReLateBound(ref __arg_1_0, ref __arg_1_1),
- ) => match Ord::cmp(__self_0, __arg_1_0) {
- Ordering::Equal => Ord::cmp(__self_1, __arg_1_1),
- cmp => cmp,
- },
- (&ReFree(ref __self_0), &ReFree(ref __arg_1_0)) => Ord::cmp(__self_0, __arg_1_0),
- (&ReStatic, &ReStatic) => Ordering::Equal,
- (&ReVar(ref __self_0), &ReVar(ref __arg_1_0)) => Ord::cmp(__self_0, __arg_1_0),
- (&RePlaceholder(ref __self_0), &RePlaceholder(ref __arg_1_0)) => {
- Ord::cmp(__self_0, __arg_1_0)
+ (ReFree(a_r), ReFree(b_r)) => a_r.cmp(b_r),
+ (ReStatic, ReStatic) => Ordering::Equal,
+ (ReVar(a_r), ReVar(b_r)) => a_r.cmp(b_r),
+ (RePlaceholder(a_r), RePlaceholder(b_r)) => a_r.cmp(b_r),
+ (ReErased, ReErased) => Ordering::Equal,
+ _ => {
+                    debug_assert!(false, "This branch must be unreachable; maybe the match is missing an arm? self = {self:?}, other = {other:?}");
+ Ordering::Equal
}
- (&ReErased, &ReErased) => Ordering::Equal,
- _ => Ordering::Equal,
}
- } else {
- Ord::cmp(&__self_vi, &__arg_1_vi)
- }
+ })
}
}
// This is manually implemented because a derive would require `I: Hash`
impl<I: Interner> hash::Hash for RegionKind<I> {
- fn hash<__H: hash::Hasher>(&self, state: &mut __H) -> () {
- match (&*self,) {
- (&ReEarlyBound(ref __self_0),) => {
- hash::Hash::hash(&regionkind_discriminant(self), state);
- hash::Hash::hash(__self_0, state)
- }
- (&ReLateBound(ref __self_0, ref __self_1),) => {
- hash::Hash::hash(&regionkind_discriminant(self), state);
- hash::Hash::hash(__self_0, state);
- hash::Hash::hash(__self_1, state)
- }
- (&ReFree(ref __self_0),) => {
- hash::Hash::hash(&regionkind_discriminant(self), state);
- hash::Hash::hash(__self_0, state)
- }
- (&ReStatic,) => {
- hash::Hash::hash(&regionkind_discriminant(self), state);
- }
- (&ReVar(ref __self_0),) => {
- hash::Hash::hash(&regionkind_discriminant(self), state);
- hash::Hash::hash(__self_0, state)
- }
- (&RePlaceholder(ref __self_0),) => {
- hash::Hash::hash(&regionkind_discriminant(self), state);
- hash::Hash::hash(__self_0, state)
- }
- (&ReErased,) => {
- hash::Hash::hash(&regionkind_discriminant(self), state);
- }
+ fn hash<H: hash::Hasher>(&self, state: &mut H) -> () {
+ regionkind_discriminant(self).hash(state);
+ match self {
+ ReEarlyBound(r) => r.hash(state),
+ ReLateBound(d, r) => {
+ d.hash(state);
+ r.hash(state)
+ }
+ ReFree(r) => r.hash(state),
+ ReStatic => (),
+ ReVar(r) => r.hash(state),
+ RePlaceholder(r) => r.hash(state),
+ ReErased => (),
}
}
}
@@ -1217,21 +1057,21 @@ impl<I: Interner> hash::Hash for RegionKind<I> {
impl<I: Interner> fmt::Debug for RegionKind<I> {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
match self {
- ReEarlyBound(ref data) => write!(f, "ReEarlyBound({:?})", data),
+ ReEarlyBound(data) => write!(f, "ReEarlyBound({:?})", data),
- ReLateBound(binder_id, ref bound_region) => {
+ ReLateBound(binder_id, bound_region) => {
write!(f, "ReLateBound({:?}, {:?})", binder_id, bound_region)
}
- ReFree(ref fr) => fr.fmt(f),
+ ReFree(fr) => fr.fmt(f),
- ReStatic => write!(f, "ReStatic"),
+ ReStatic => f.write_str("ReStatic"),
- ReVar(ref vid) => vid.fmt(f),
+ ReVar(vid) => vid.fmt(f),
RePlaceholder(placeholder) => write!(f, "RePlaceholder({:?})", placeholder),
- ReErased => write!(f, "ReErased"),
+ ReErased => f.write_str("ReErased"),
}
}
}
@@ -1319,18 +1159,18 @@ where
ReErased | ReStatic => {
// No variant fields to hash for these ...
}
- ReLateBound(db, br) => {
- db.hash_stable(hcx, hasher);
- br.hash_stable(hcx, hasher);
+ ReLateBound(d, r) => {
+ d.hash_stable(hcx, hasher);
+ r.hash_stable(hcx, hasher);
}
- ReEarlyBound(eb) => {
- eb.hash_stable(hcx, hasher);
+ ReEarlyBound(r) => {
+ r.hash_stable(hcx, hasher);
}
- ReFree(ref free_region) => {
- free_region.hash_stable(hcx, hasher);
+ ReFree(r) => {
+ r.hash_stable(hcx, hasher);
}
- RePlaceholder(p) => {
- p.hash_stable(hcx, hasher);
+ RePlaceholder(r) => {
+ r.hash_stable(hcx, hasher);
}
ReVar(_) => {
panic!("region variables should not be hashed: {self:?}")
diff --git a/compiler/rustc_type_ir/src/ty_info.rs b/compiler/rustc_type_ir/src/ty_info.rs
new file mode 100644
index 000000000..4e5d42488
--- /dev/null
+++ b/compiler/rustc_type_ir/src/ty_info.rs
@@ -0,0 +1,122 @@
+use std::{
+ cmp::Ordering,
+ hash::{Hash, Hasher},
+ ops::Deref,
+};
+
+use rustc_data_structures::{
+ fingerprint::Fingerprint,
+ stable_hasher::{HashStable, StableHasher},
+};
+
+use crate::{DebruijnIndex, TypeFlags};
+
+/// A helper type that you can wrap around your own type in order to automatically
+/// cache the stable hash, type flags and De Bruijn index on creation,
+/// instead of recomputing that information every time it is needed.
+/// The stable hash is only cached in incremental mode. You can also opt out of
+/// caching by using `Fingerprint::ZERO` for the hash, in which case the hash is
+/// computed from scratch each time. This is useful for values that you intern but
+/// never (or cannot) use for stable hashing.
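+///
+/// An illustrative sketch of constructing a wrapper (`some_kind`, its flags and its
+/// binder are placeholders, not part of this patch):
+///
+/// ```ignore (illustrative)
+/// let cached = WithCachedTypeInfo {
+///     internee: some_kind.clone(),
+///     // `Fingerprint::ZERO` opts out of hash caching; the hash is then recomputed
+///     // from `internee` every time it is needed.
+///     stable_hash: Fingerprint::ZERO,
+///     flags: TypeFlags::empty(),
+///     outer_exclusive_binder: INNERMOST,
+/// };
+/// // `Deref` exposes the wrapped value, so the caching layer stays transparent.
+/// assert_eq!(*cached, some_kind);
+/// ```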
+#[derive(Copy, Clone)]
+pub struct WithCachedTypeInfo<T> {
+ pub internee: T,
+ pub stable_hash: Fingerprint,
+
+ /// This field provides fast access to information that is also contained
+ /// in `kind`.
+ ///
+ /// This field shouldn't be used directly and may be removed in the future.
+ /// Use `Ty::flags()` instead.
+ pub flags: TypeFlags,
+
+ /// This field provides fast access to information that is also contained
+ /// in `kind`.
+ ///
+ /// This is a kind of confusing thing: it stores the smallest
+ /// binder such that
+ ///
+ /// (a) the binder itself captures nothing but
+ /// (b) all the late-bound things within the type are captured
+ /// by some sub-binder.
+ ///
+ /// So, for a type without any late-bound things, like `u32`, this
+ /// will be *innermost*, because that is the innermost binder that
+ /// captures nothing. But for a type `&'D u32`, where `'D` is a
+ /// late-bound region with De Bruijn index `D`, this would be `D + 1`
+ /// -- the binder itself does not capture `D`, but `D` is captured
+ /// by an inner binder.
+ ///
+ /// We call this concept an "exclusive" binder `D` because all
+ /// De Bruijn indices within the type are contained within `0..D`
+ /// (exclusive).
+ pub outer_exclusive_binder: DebruijnIndex,
+}
+
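+// The comparison impls below delegate to the wrapped value: `stable_hash`, `flags` and
+// `outer_exclusive_binder` are all derived from `internee`, so comparing them as well
+// would be redundant.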
+impl<T: PartialEq> PartialEq for WithCachedTypeInfo<T> {
+ #[inline]
+ fn eq(&self, other: &Self) -> bool {
+ self.internee.eq(&other.internee)
+ }
+}
+
+impl<T: Eq> Eq for WithCachedTypeInfo<T> {}
+
+impl<T: Ord> PartialOrd for WithCachedTypeInfo<T> {
+ fn partial_cmp(&self, other: &WithCachedTypeInfo<T>) -> Option<Ordering> {
+ Some(self.internee.cmp(&other.internee))
+ }
+}
+
+impl<T: Ord> Ord for WithCachedTypeInfo<T> {
+ fn cmp(&self, other: &WithCachedTypeInfo<T>) -> Ordering {
+ self.internee.cmp(&other.internee)
+ }
+}
+
+impl<T> Deref for WithCachedTypeInfo<T> {
+ type Target = T;
+
+ #[inline]
+ fn deref(&self) -> &T {
+ &self.internee
+ }
+}
+
+impl<T: Hash> Hash for WithCachedTypeInfo<T> {
+ #[inline]
+ fn hash<H: Hasher>(&self, s: &mut H) {
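+        // If a stable hash was cached at creation time, feed that fingerprint to the
+        // hasher instead of hashing the wrapped value again; `Fingerprint::ZERO` marks
+        // "no cached hash", in which case we fall back to hashing `internee` directly.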
+ if self.stable_hash != Fingerprint::ZERO {
+ self.stable_hash.hash(s)
+ } else {
+ self.internee.hash(s)
+ }
+ }
+}
+
+impl<T: HashStable<CTX>, CTX> HashStable<CTX> for WithCachedTypeInfo<T> {
+ fn hash_stable(&self, hcx: &mut CTX, hasher: &mut StableHasher) {
+ if self.stable_hash == Fingerprint::ZERO || cfg!(debug_assertions) {
+            // Either no cached hash is available, which can only mean that incremental is
+            // disabled (we don't cache stable hashes in non-incremental mode, because they
+            // are used so rarely that the performance actually suffers), or this is a debug
+            // build and we recompute the hash anyway to cross-check the cached value below.
+
+ // We need to build the hash as if we cached it and then hash that hash, as
+ // otherwise the hashes will differ between cached and non-cached mode.
+ let stable_hash: Fingerprint = {
+ let mut hasher = StableHasher::new();
+ self.internee.hash_stable(hcx, &mut hasher);
+ hasher.finish()
+ };
+ if cfg!(debug_assertions) && self.stable_hash != Fingerprint::ZERO {
+ assert_eq!(
+ stable_hash, self.stable_hash,
+ "cached stable hash does not match freshly computed stable hash"
+ );
+ }
+ stable_hash.hash_stable(hcx, hasher);
+ } else {
+ self.stable_hash.hash_stable(hcx, hasher);
+ }
+ }
+}